// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package mips64

import (
	"math"

	"cmd/compile/internal/gc"
	"cmd/compile/internal/ssa"
	"cmd/internal/obj"
	"cmd/internal/obj/mips"
)

// ssaRegToReg maps an SSA-allocated register number (the slice index)
// to the corresponding obj/mips machine register constant. Registers
// the allocator never hands out (REGTMP/R23, kernel-reserved R26/R27,
// REGSB/R28, REGLINK/R31) are omitted from the table.
var ssaRegToReg = []int16{
	mips.REG_R0, // constant 0
	mips.REG_R1,
	mips.REG_R2,
	mips.REG_R3,
	mips.REG_R4,
	mips.REG_R5,
	mips.REG_R6,
	mips.REG_R7,
	mips.REG_R8,
	mips.REG_R9,
	mips.REG_R10,
	mips.REG_R11,
	mips.REG_R12,
	mips.REG_R13,
	mips.REG_R14,
	mips.REG_R15,
	mips.REG_R16,
	mips.REG_R17,
	mips.REG_R18,
	mips.REG_R19,
	mips.REG_R20,
	mips.REG_R21,
	mips.REG_R22,
	// R23 = REGTMP not used in regalloc
	mips.REG_R24,
	mips.REG_R25,
	// R26 reserved by kernel
	// R27 reserved by kernel
	// R28 = REGSB not used in regalloc
	mips.REGSP, // R29
	mips.REGG,  // R30
	// R31 = REGLINK not used in regalloc

	mips.REG_F0,
	mips.REG_F1,
	mips.REG_F2,
	mips.REG_F3,
	mips.REG_F4,
	mips.REG_F5,
	mips.REG_F6,
	mips.REG_F7,
	mips.REG_F8,
	mips.REG_F9,
	mips.REG_F10,
	mips.REG_F11,
	mips.REG_F12,
	mips.REG_F13,
	mips.REG_F14,
	mips.REG_F15,
	mips.REG_F16,
	mips.REG_F17,
	mips.REG_F18,
	mips.REG_F19,
	mips.REG_F20,
	mips.REG_F21,
	mips.REG_F22,
	mips.REG_F23,
	mips.REG_F24,
	mips.REG_F25,
	mips.REG_F26,
	mips.REG_F27,
	mips.REG_F28,
	mips.REG_F29,
	mips.REG_F30,
	mips.REG_F31,

	mips.REG_HI, // high bits of multiplication
	mips.REG_LO, // low bits of multiplication

	0, // SB isn't a real register. We fill an Addr.Reg field with 0 in this case.
}

// Smallest possible faulting page at address zero,
// see ../../../../runtime/mheap.go:/minPhysPageSize
const minZeroPage = 4096

// isFPreg returns whether r is an FP register
func isFPreg(r int16) bool {
	return mips.REG_F0 <= r && r <= mips.REG_F31
}

// isHILO returns whether r is HI or LO register
func isHILO(r int16) bool {
	return r == mips.REG_HI || r == mips.REG_LO
}

// loadByType returns the load instruction of the given type.
// The destination register r matters because a 4- or 8-byte value
// destined for an FP register must use the FP moves (MOVF/MOVD)
// regardless of whether the type is float or int.
func loadByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		// Integer loads: pick width from the type size and
		// sign- or zero-extend according to the type's signedness.
		switch t.Size() {
		case 1:
			if t.IsSigned() {
				return mips.AMOVB
			} else {
				return mips.AMOVBU
			}
		case 2:
			if t.IsSigned() {
				return mips.AMOVH
			} else {
				return mips.AMOVHU
			}
		case 4:
			if t.IsSigned() {
				return mips.AMOVW
			} else {
				return mips.AMOVWU
			}
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad load type")
}

// storeByType returns the store instruction of the given type.
func storeByType(t ssa.Type, r int16) obj.As {
	if isFPreg(r) {
		if t.Size() == 4 { // float32 or int32
			return mips.AMOVF
		} else { // float64 or int64
			return mips.AMOVD
		}
	} else {
		// Stores never extend, so signedness is irrelevant;
		// only the width matters.
		switch t.Size() {
		case 1:
			return mips.AMOVB
		case 2:
			return mips.AMOVH
		case 4:
			return mips.AMOVW
		case 8:
			return mips.AMOVV
		}
	}
	panic("bad store type")
}

// ssaGenValue emits the machine instructions (obj.Progs) for a single
// SSA value v. Values that carry no code (memory ops, args, phis) fall
// through with no emission. Several cases route data through REGTMP
// because the hardware cannot move directly between HI/LO, FP registers,
// and memory/immediates in a single instruction.
func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
	s.SetLineno(v.Line)
	switch v.Op {
	case ssa.OpInitMem:
		// memory arg needs no code
	case ssa.OpArg:
		// input args need no code
	case ssa.OpSP, ssa.OpSB, ssa.OpGetG:
		// nothing to do
	case ssa.OpCopy, ssa.OpMIPS64MOVVconvert, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := gc.SSARegNum(v.Args[0])
		y := gc.SSARegNum(v)
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := gc.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		if gc.SSARegNum(v) != gc.SSARegNum(v.Args[0]) {
			v.Fatalf("input[0] and output not in same register %s", v.LongString())
		}
		// nothing to do
	case ssa.OpLoadReg:
		// Load a spilled value from its automatic/parameter stack slot.
		if v.Type.IsFlags() {
			v.Unimplementedf("load flags not implemented: %v", v.LongString())
			return
		}
		r := gc.SSARegNum(v)
		p := gc.Prog(loadByType(v.Type, r))
		n, off := gc.AutoVar(v.Args[0])
		p.From.Type = obj.TYPE_MEM
		p.From.Node = n
		p.From.Sym = gc.Linksym(n.Sym)
		p.From.Offset = off
		if n.Class == gc.PPARAM || n.Class == gc.PPARAMOUT {
			p.From.Name = obj.NAME_PARAM
			p.From.Offset += n.Xoffset
		} else {
			p.From.Name = obj.NAME_AUTO
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot directly load, load to TMP and move
			p.To.Reg = mips.REGTMP
			p = gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpPhi:
		gc.CheckLoweredPhi(v)
	case ssa.OpStoreReg:
		// Spill a register value to its stack slot.
		if v.Type.IsFlags() {
			v.Unimplementedf("store flags not implemented: %v", v.LongString())
			return
		}
		r := gc.SSARegNum(v.Args[0])
		if isHILO(r) {
			// cannot directly store, move to TMP and store
			p := gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := gc.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		n, off := gc.AutoVar(v)
		p.To.Type = obj.TYPE_MEM
		p.To.Node = n
		p.To.Sym = gc.Linksym(n.Sym)
		p.To.Offset = off
		if n.Class == gc.PPARAM || n.Class == gc.PPARAMOUT {
			p.To.Name = obj.NAME_PARAM
			p.To.Offset += n.Xoffset
		} else {
			p.To.Name = obj.NAME_AUTO
		}
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		// Two-operand register ops: result = Args[0] op Args[1]
		// (From holds the second operand, Reg the first).
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[1])
		p.Reg = gc.SSARegNum(v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		// NOTE: operand order is swapped relative to the arithmetic
		// ops above — From is Args[0], Reg is Args[1].
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[0])
		p.Reg = gc.SSARegNum(v.Args[1])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		// Register-immediate ops: result = Args[0] op AuxInt.
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = gc.SSARegNum(v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[1])
		p.Reg = gc.SSARegNum(v.Args[0])
	case ssa.OpMIPS64MOVVconst:
		r := gc.SSARegNum(v)
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move into FP or special registers, use TMP as intermediate
			p.To.Reg = mips.REGTMP
			p = gc.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		// AuxInt carries the float64 bit pattern for both widths.
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		// FP compares set the FP condition flag; no destination register.
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[0])
		p.Reg = gc.SSARegNum(v.Args[1])
	case ssa.OpMIPS64MOVVaddr:
		p := gc.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP (R29)
		// when constant is large, tmp register (R23) may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *ssa.ExternSymbol:
			wantreg = "SB"
			gc.AddAux(&p.From, v)
		case *ssa.ArgSymbol, *ssa.AutoSymbol:
			wantreg = "SP"
			gc.AddAux(&p.From, v)
		case nil:
			// No sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Reg = mips.REGSP
			p.From.Offset = v.AuxInt
		}
		// Sanity check: the base register chosen by regalloc must match
		// the symbol kind determined above.
		if reg := gc.SSAReg(v.Args[0]); reg.Name() != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg.Name(), v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		// Load: Args[0] is the base pointer, aux/auxint the offset.
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = gc.SSARegNum(v.Args[0])
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		// Store: Args[0] is the base pointer, Args[1] the value.
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[1])
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = gc.SSARegNum(v.Args[0])
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		// Store zero: source is the hardwired zero register R0.
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = gc.SSARegNum(v.Args[0])
		gc.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		// Sign/zero extension. If the (copy-stripped) argument is a load
		// of exactly the matching width and signedness, the value is
		// already extended and a plain move (or nothing) suffices.
		a := v.Args[0]
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign-extended, don't extend again
				if gc.SSARegNum(v) == gc.SSARegNum(v.Args[0]) {
					return
				}
				p := gc.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = gc.SSARegNum(v.Args[0])
				p.To.Type = obj.TYPE_REG
				p.To.Reg = gc.SSARegNum(v)
				return
			default:
			}
		}
		// Otherwise emit the extension as a plain reg->reg op below.
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD:
		// Unary register ops: result = op Args[0].
		p := gc.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = gc.SSARegNum(v.Args[0])
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = gc.SSARegNum(v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = gc.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(gc.Pkglookup("duffzero", gc.Runtimepkg))
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// Emit a zeroing loop (element size chosen from the alignment
		// encoded in AuxInt):
		// SUBV	$8, R1
		// MOVV	R0, 8(R1)
		// ADDV	$8, R1
		// BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := gc.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := gc.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := gc.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = gc.SSARegNum(v.Args[1])
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		gc.Patch(p4, p2) // loop back to the store
	case ssa.OpMIPS64LoweredMove:
		// Emit a copy loop via REGTMP (element size from AuxInt alignment):
		// SUBV	$8, R1
		// MOVV	8(R1), Rtmp
		// MOVV	Rtmp, (R2)
		// ADDV	$8, R1
		// ADDV	$8, R2
		// BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := gc.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := gc.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := gc.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := gc.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := gc.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := gc.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = gc.SSARegNum(v.Args[2])
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		gc.Patch(p6, p2) // loop back to the load
	case ssa.OpMIPS64CALLstatic:
		if v.Aux.(*gc.Sym) == gc.Deferreturn.Sym {
			// Deferred calls will appear to be returning to
			// the CALL deferreturn(SB) that we are about to emit.
			// However, the stack trace code will show the line
			// of the instruction byte before the return PC.
			// To avoid that being an unrelated instruction,
			// insert an actual hardware NOP that will have the right line number.
			// This is different from obj.ANOP, which is a virtual no-op
			// that doesn't make it into the instruction stream.
			ginsnop()
		}
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(v.Aux.(*gc.Sym))
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLclosure:
		// Indirect call through the closure's code pointer in Args[0].
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Offset = 0
		p.To.Reg = gc.SSARegNum(v.Args[0])
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLdefer:
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(gc.Deferproc.Sym)
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLgo:
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(gc.Newproc.Sym)
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64CALLinter:
		// Indirect call through the function pointer in Args[0].
		p := gc.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Offset = 0
		p.To.Reg = gc.SSARegNum(v.Args[0])
		if gc.Maxarg < v.AuxInt {
			gc.Maxarg = v.AuxInt
		}
	case ssa.OpMIPS64LoweredNilCheck:
		// Optimization - if the subsequent block has a load or store
		// at the same address, we don't need to issue this instruction.
		mem := v.Args[1]
		for _, w := range v.Block.Succs[0].Block().Values {
			if w.Op == ssa.OpPhi {
				if w.Type.IsMemory() {
					mem = w
				}
				continue
			}
			if len(w.Args) == 0 || !w.Args[len(w.Args)-1].Type.IsMemory() {
				// w doesn't use a store - can't be a memory op.
				continue
			}
			if w.Args[len(w.Args)-1] != mem {
				v.Fatalf("wrong store after nilcheck v=%s w=%s", v, w)
			}
			switch w.Op {
			case ssa.OpMIPS64MOVBload, ssa.OpMIPS64MOVBUload, ssa.OpMIPS64MOVHload, ssa.OpMIPS64MOVHUload,
				ssa.OpMIPS64MOVWload, ssa.OpMIPS64MOVWUload, ssa.OpMIPS64MOVVload,
				ssa.OpMIPS64MOVFload, ssa.OpMIPS64MOVDload,
				ssa.OpMIPS64MOVBstore, ssa.OpMIPS64MOVHstore, ssa.OpMIPS64MOVWstore, ssa.OpMIPS64MOVVstore,
				ssa.OpMIPS64MOVFstore, ssa.OpMIPS64MOVDstore:
				// arg0 is ptr, auxint is offset
				if w.Args[0] == v.Args[0] && w.Aux == nil && w.AuxInt >= 0 && w.AuxInt < minZeroPage {
					if gc.Debug_checknil != 0 && int(v.Line) > 1 {
						gc.Warnl(v.Line, "removed nil check")
					}
					return
				}
			case ssa.OpMIPS64DUFFZERO, ssa.OpMIPS64LoweredZero:
				// arg0 is ptr
				if w.Args[0] == v.Args[0] {
					if gc.Debug_checknil != 0 && int(v.Line) > 1 {
						gc.Warnl(v.Line, "removed nil check")
					}
					return
				}
			case ssa.OpMIPS64LoweredMove:
				// arg0 is dst ptr, arg1 is src ptr
				if w.Args[0] == v.Args[0] || w.Args[1] == v.Args[0] {
					if gc.Debug_checknil != 0 && int(v.Line) > 1 {
						gc.Warnl(v.Line, "removed nil check")
					}
					return
				}
			default:
			}
			if w.Type.IsMemory() {
				if w.Op == ssa.OpVarDef || w.Op == ssa.OpVarKill || w.Op == ssa.OpVarLive {
					// these ops are OK
					mem = w
					continue
				}
				// We can't delay the nil check past the next store.
				break
			}
		}
		// Issue a load which will fault if arg is nil.
		p := gc.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = gc.SSARegNum(v.Args[0])
		gc.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if gc.Debug_checknil != 0 && v.Line > 1 { // v.Line==1 in generated wrappers
			gc.Warnl(v.Line, "generated nil check")
		}
	case ssa.OpVarDef:
		gc.Gvardef(v.Aux.(*gc.Node))
	case ssa.OpVarKill:
		gc.Gvarkill(v.Aux.(*gc.Node))
	case ssa.OpVarLive:
		gc.Gvarlive(v.Aux.(*gc.Node))
	case ssa.OpKeepAlive:
		if !v.Args[0].Type.IsPtrShaped() {
			v.Fatalf("keeping non-pointer alive %v", v.Args[0])
		}
		n, off := gc.AutoVar(v.Args[0])
		if n == nil {
			v.Fatalf("KeepLive with non-spilled value %s %s", v, v.Args[0])
		}
		if off != 0 {
			v.Fatalf("KeepLive with non-zero offset spill location %s:%d", n, off)
		}
		gc.Gvarlive(n)
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// Materialize the FP condition flag as a 0/1 value:
		// MOVV	$0, r
		// BFPF	2(PC)
		// MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := gc.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = gc.SSARegNum(v)
		p2 := gc.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := gc.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = gc.SSARegNum(v)
		p4 := gc.Prog(obj.ANOP) // not a machine instruction, for branch to land
		gc.Patch(p2, p4)
	case ssa.OpSelect0, ssa.OpSelect1:
		// nothing to do
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// Closure pointer is R22 (mips.REGCTXT).
		gc.CheckLoweredGetClosurePtr(v)
	default:
		v.Unimplementedf("genValue not implemented: %s", v.LongString())
	}
}

// blockJump maps a conditional block kind to its branch instruction
// (asm) and the inverted branch (invasm) used when the successor order
// is reversed relative to the layout.
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}

// ssaGenBlock emits the control-flow instructions that end SSA block b.
// next is the block laid out immediately after b; branches to next are
// elided where possible.
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
	s.SetLineno(b.Line)

	switch b.Kind {
	case ssa.BlockPlain, ssa.BlockCall, ssa.BlockCheck:
		if b.Succs[0].Block() != next {
			p := gc.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := gc.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := gc.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		gc.Prog(obj.AUNDEF) // tell plive.go that we never reach here
	case ssa.BlockRet:
		gc.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		p := gc.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = gc.Linksym(b.Aux.(*gc.Sym))
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// Taken successor falls through; branch on the inverted
			// condition to the other successor.
			p = gc.Prog(jmp.invasm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1].Block()})
		case b.Succs[1].Block():
			p = gc.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
		default:
			// Neither successor follows: conditional branch plus
			// an unconditional jump.
			p = gc.Prog(jmp.asm)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0].Block()})
			q := gc.Prog(obj.AJMP)
			q.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1].Block()})
		}
		if !b.Control.Type.IsFlags() {
			p.From.Type = obj.TYPE_REG
			p.From.Reg = gc.SSARegNum(b.Control)
		}
	default:
		b.Unimplementedf("branch not implemented: %s. Control: %s", b.LongString(), b.Control.LongString())
	}
}