// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package sparc64

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"log"
	"math"
)

// isUncondJump marks opcodes that always transfer control; a function
// containing one cannot be a leaf (see preprocess).
var isUncondJump = map[obj.As]bool{
	obj.ACALL:   true,
	obj.ADUFFZERO: true,
	obj.ADUFFCOPY: true,
	obj.AJMP:    true,
	obj.ARET:    true,
	AFBA:        true,
	AJMPL:       true,
	ARETRESTORE: true,
}

// isCondJump marks every conditional branch: integer condition-code
// branches (word and doubleword forms), register branches (BRcc),
// and floating-point branches.
var isCondJump = map[obj.As]bool{
	ABN:     true,
	ABNE:    true,
	ABE:     true,
	ABG:     true,
	ABLE:    true,
	ABGE:    true,
	ABL:     true,
	ABGU:    true,
	ABLEU:   true,
	ABCC:    true,
	ABCS:    true,
	ABPOS:   true,
	ABNEG:   true,
	ABVC:    true,
	ABVS:    true,
	ABNW:    true,
	ABNEW:   true,
	ABEW:    true,
	ABGW:    true,
	ABLEW:   true,
	ABGEW:   true,
	ABLW:    true,
	ABGUW:   true,
	ABLEUW:  true,
	ABCCW:   true,
	ABCSW:   true,
	ABPOSW:  true,
	ABNEGW:  true,
	ABVCW:   true,
	ABVSW:   true,
	ABND:    true,
	ABNED:   true,
	ABED:    true,
	ABGD:    true,
	ABLED:   true,
	ABGED:   true,
	ABLD:    true,
	ABGUD:   true,
	ABLEUD:  true,
	ABCCD:   true,
	ABCSD:   true,
	ABPOSD:  true,
	ABNEGD:  true,
	ABVCD:   true,
	ABVSD:   true,
	ABRZ:    true,
	ABRLEZ:  true,
	ABRLZ:   true,
	ABRNZ:   true,
	ABRGZ:   true,
	ABRGEZ:  true,
	AFBN:    true,
	AFBU:    true,
	AFBG:    true,
	AFBUG:   true,
	AFBL:    true,
	AFBUL:   true,
	AFBLG:   true,
	AFBNE:   true,
	AFBE:    true,
	AFBUE:   true,
	AFBGE:   true,
	AFBUGE:  true,
	AFBLE:   true,
	AFBULE:  true,
	AFBO:    true,
}

// isJump is the union of isUncondJump and isCondJump, built in init;
// used by the delay-slot scheduling pass in preprocess.
var isJump = make(map[obj.As]bool)

func init() {
	for k := range isUncondJump {
		isJump[k] = true
	}
	for k := range isCondJump {
		isJump[k] = true
	}
}

// The stacksplit code is the first thing emitted in the prologue, so must
// carefully limit its register usage to only those safe for use by the
// runtime (e.g. REG_RT1) when storing values in a register as it's
// essentially a leaf function executing in the caller's frame.
//
// In addition, since it must be executed before the initial stack setup,
// any arguments expected in registers (such as ILR) will instead be
// found in the output registers (OLR) since a 'save' instruction has not
// been executed yet.
func stacksplit(ctxt *obj.Link, p *obj.Prog, framesize int32) *obj.Prog {
	// Load the goroutine's stack guard:
	// MOV	g_stackguard(g), RT1
	p = obj.Appendp(ctxt, p)

	p.As = AMOVD
	p.From.Type = obj.TYPE_MEM
	p.From.Reg = REG_G
	p.From.Offset = 2 * int64(ctxt.Arch.PtrSize) // G.stackguard0
	if ctxt.Cursym.Cfunc {
		p.From.Offset = 3 * int64(ctxt.Arch.PtrSize) // G.stackguard1
	}
	p.To.Type = obj.TYPE_REG
	p.To.Reg = REG_RT1

	// q, when non-nil, is the extra branch (the StackPreempt check in
	// the huge-frame case) that must also be pointed at the morestack
	// call at the end of this function.
	q := (*obj.Prog)(nil)
	if framesize <= obj.StackSmall {
		// small stack: SP+StackBias <= stackguard
		//	ADD	Stackbias, RSP, RT2
		//	CMP	stackguard, RT2
		p = obj.Appendp(ctxt, p)
		p.As = AADD
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = StackBias
		p.Reg = REG_RSP
		p.To.Type = obj.TYPE_REG
		p.To.Reg = REG_RT2

		p = obj.Appendp(ctxt, p)
		p.As = ACMP
		p.From.Type = obj.TYPE_REG
		p.From.Reg = REG_RT1
		p.Reg = REG_RT2
	} else if framesize <= obj.StackBig {
		// large stack: SP-framesize <= stackguard-StackSmall
		//	ADD	$StackBias - $(framesize - StackSmall), RSP, RT2
		//	CMP	stackguard, RT2
		p = obj.Appendp(ctxt, p)
		p.As = AADD
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(StackBias - (framesize - obj.StackSmall))
		p.Reg = REG_RSP
		p.To.Type = obj.TYPE_REG
		p.To.Reg = REG_RT2

		p = obj.Appendp(ctxt, p)
		p.As = ACMP
		p.From.Type = obj.TYPE_REG
		p.From.Reg = REG_RT1
		p.Reg = REG_RT2
	} else {
		// Such a large stack we need to protect against wraparound
		// if SP is close to zero.
		//	SP-stackguard+StackGuard <= framesize + (StackGuard-StackSmall)
		// The +StackGuard on both sides is required to keep the left side positive:
		// SP is allowed to be slightly below stackguard. See stack.h.
		//	CMP	$StackPreempt, RT1
		//	BED	label_of_call_to_morestack
		//	ADD	$StackBias, RSP, RT2
		//	ADD	$StackGuard, RT2, RT2
		//	SUB	RT1, RT2
		//	MOV	$(framesize+(StackGuard-StackSmall)), RT1
		//	CMP	RT1, RT2
		p = obj.Appendp(ctxt, p)

		p.As = ACMP
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = obj.StackPreempt
		p.Reg = REG_RT1

		p = obj.Appendp(ctxt, p)
		q = p // branch target filled in below (points at morestack sequence)
		p.As = ABED
		p.To.Type = obj.TYPE_BRANCH

		p = obj.Appendp(ctxt, p)
		p.As = AADD
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = StackBias
		p.Reg = REG_RSP
		p.To.Type = obj.TYPE_REG
		p.To.Reg = REG_RT2

		p = obj.Appendp(ctxt, p)
		p.As = AADD
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = obj.StackGuard
		p.Reg = REG_RT2
		p.To.Type = obj.TYPE_REG
		p.To.Reg = REG_RT2

		p = obj.Appendp(ctxt, p)
		p.As = ASUB
		p.From.Type = obj.TYPE_REG
		p.From.Reg = REG_RT1
		p.To.Type = obj.TYPE_REG
		p.To.Reg = REG_RT2

		p = obj.Appendp(ctxt, p)
		p.As = AMOVD
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = int64(framesize) + (obj.StackGuard - obj.StackSmall)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = REG_RT1

		p = obj.Appendp(ctxt, p)
		p.As = ACMP
		p.From.Type = obj.TYPE_REG
		p.From.Reg = REG_RT1
		p.Reg = REG_RT2
	}

	// BLEU do-morestack; note the comparison here is explicitly unsigned,
	// so that in the event that g_stackguardX is set to StackPreempt or
	// StackFork, a call to morestack will be triggered.
	bleu := obj.Appendp(ctxt, p)
	bleu.As = ABLEUD
	bleu.To.Type = obj.TYPE_BRANCH

	// The morestack sequence is appended at the very end of the function
	// body; find the last Prog in the symbol.
	var last *obj.Prog
	for last = ctxt.Cursym.Text; last.Link != nil; last = last.Link {
	}

	// MOV	OLR, I0
	// The return address is still in OLR (no 'save' has run yet); stash
	// it where morestack expects it.
	movlr := obj.Appendp(ctxt, last)
	movlr.As = AMOVD
	movlr.From.Type = obj.TYPE_REG
	movlr.From.Reg = REG_OLR
	movlr.To.Type = obj.TYPE_REG
	movlr.To.Reg = REG_I0
	movlr.Spadj = -framesize

	// CALL	runtime.morestack(SB)
	call := obj.Appendp(ctxt, movlr)
	call.Lineno = ctxt.Cursym.Text.Lineno
	call.Mode = ctxt.Cursym.Text.Mode
	call.As = obj.ACALL
	call.To.Type = obj.TYPE_MEM
	call.To.Name = obj.NAME_EXTERN
	morestack := "runtime.morestack"
	switch {
	case ctxt.Cursym.Cfunc:
		morestack = "runtime.morestackc"
	case ctxt.Cursym.Text.From3.Offset&obj.NEEDCTXT == 0:
		morestack = "runtime.morestack_noctxt"
	}
	call.To.Sym = obj.Linklookup(ctxt, morestack, 0)

	// A CALL is used here instead of a JMP so that we have a full 32
	// bit-signed displacement that we can encode into the instruction.
	// Since this instruction is never actually executed, but instead used
	// by rewindmorestack(), this is safe (if it was actually executed, it
	// would overwrite %o7 which would destroy our original return
	// address).

	// CALL	start
	jmp := obj.Appendp(ctxt, call)
	jmp.As = obj.ACALL
	jmp.To.Type = obj.TYPE_BRANCH
	jmp.Pcond = ctxt.Cursym.Text.Link
	jmp.Spadj = +framesize

	// Point both the split-check branch and (if present) the preempt
	// check at the morestack sequence.
	bleu.Pcond = movlr
	if q != nil {
		q.Pcond = movlr
	}

	return bleu
}
286 func autoeditprog(ctxt *obj.Link, p *obj.Prog) *obj.Prog { 287 r := new(obj.Prog) 288 *r = *p 289 r.From = *autoeditaddr(ctxt, &r.From) 290 r.From3 = autoeditaddr(ctxt, r.From3) 291 r.To = *autoeditaddr(ctxt, &r.To) 292 return r 293 } 294 295 // Autoeditaddr returns a new obj.Addr, with off(SP), off(FP), $off(SP), 296 // and $off(FP) replaced with new(RSP). 297 func autoeditaddr(ctxt *obj.Link, a *obj.Addr) *obj.Addr { 298 if a == nil { 299 return nil 300 } 301 if a.Type != obj.TYPE_MEM && a.Type != obj.TYPE_ADDR { 302 return a 303 } 304 r := new(obj.Addr) 305 *r = *a 306 if r.Name == obj.NAME_PARAM { 307 r.Reg = REG_RSP 308 r.Name = obj.NAME_NONE 309 if ctxt.Cursym.Text.From3Offset()&obj.NOFRAME != 0 { 310 r.Offset += MinStackFrameSize + StackBias 311 return r 312 } 313 r.Offset += int64(ctxt.Cursym.Locals) + 2*MinStackFrameSize + StackBias 314 return r 315 } 316 if r.Name == obj.NAME_AUTO { 317 r.Reg = REG_RSP 318 r.Offset += int64(ctxt.Cursym.Locals) + MinStackFrameSize + StackBias 319 r.Name = obj.NAME_NONE 320 } 321 return r 322 } 323 324 // yfix rewrites references to Y registers (issued by compiler) 325 // to F and D registers. 
// yfix rewrites references to Y registers (issued by compiler)
// to F and D registers. Each Y register maps to an even-numbered
// F/D register: Yn -> F(2n) or D(2n).
func yfix(p *obj.Prog) {
	// Source operand: pick D or F based on the instruction's own width,
	// or on the declared source width for mixed-width instructions.
	if REG_Y0 <= p.From.Reg && p.From.Reg <= REG_Y15 {
		if isInstDouble[p.As] || isSrcDouble[p.As] {
			p.From.Reg = REG_D0 + (p.From.Reg-REG_Y0)*2
		} else if isInstFloat[p.As] || isSrcFloat[p.As] {
			p.From.Reg = REG_F0 + (p.From.Reg-REG_Y0)*2
		}
	}
	// Middle operand: only the instruction's width applies here.
	if REG_Y0 <= p.Reg && p.Reg <= REG_Y15 {
		if isInstDouble[p.As] {
			p.Reg = REG_D0 + (p.Reg-REG_Y0)*2
		} else {
			p.Reg = REG_F0 + (p.Reg-REG_Y0)*2
		}
	}
	if p.From3 != nil && REG_Y0 <= p.From3.Reg && p.From3.Reg <= REG_Y15 {
		if isInstDouble[p.As] {
			p.From3.Reg = REG_D0 + (p.From3.Reg-REG_Y0)*2
		} else {
			p.From3.Reg = REG_F0 + (p.From3.Reg-REG_Y0)*2
		}
	}
	// Destination operand: like the source, may have its own width.
	if REG_Y0 <= p.To.Reg && p.To.Reg <= REG_Y15 {
		if isInstDouble[p.As] || isDstDouble[p.As] {
			p.To.Reg = REG_D0 + (p.To.Reg-REG_Y0)*2
		} else if isInstFloat[p.As] || isDstFloat[p.As] {
			p.To.Reg = REG_F0 + (p.To.Reg-REG_Y0)*2
		}
	}
}

// biasfix rewrites references to the biased pseudo stack and frame
// pointers (BSP and BFP) into references to the real registers (RSP
// and RFP), folding in the SPARC64 stack bias.
func biasfix(p *obj.Prog) {
	// Only match 2-operand instructions.
	if p.From3 != nil || p.Reg != 0 {
		return
	}
	switch p.As {
	case AMOVD:
		switch aclass(p.Ctxt, &p.From) {
		case ClassReg, ClassZero:
			switch {
			// MOVD	R, BSP	-> ADD	-$STACK_BIAS, R, RSP
			case aclass(p.Ctxt, &p.To) == ClassReg|ClassBias:
				p.As = AADD
				p.Reg = p.From.Reg
				if p.From.Type == obj.TYPE_CONST {
					// ClassZero source: use the zero register.
					p.Reg = REG_ZR
				}
				p.From.Reg = 0
				p.From.Offset = -StackBias
				p.From.Type = obj.TYPE_CONST
				p.From.Class = aclass(p.Ctxt, &p.From)
				p.To.Reg -= 256 // must match a.out.go:/REG_BSP
				p.To.Class = aclass(p.Ctxt, &p.To)
			}

		case ClassReg | ClassBias:
			// MOVD	BSP, R	-> ADD	$STACK_BIAS, RSP, R
			if aclass(p.Ctxt, &p.To) == ClassReg {
				p.Reg = p.From.Reg - 256 // must match a.out.go:/REG_BSP
				p.As = AADD
				p.From.Reg = 0
				p.From.Offset = StackBias
				p.From.Type = obj.TYPE_CONST
				p.From.Class = aclass(p.Ctxt, &p.From)
			}

		// MOVD	$off(BSP), R	-> MOVD	$(off+STACK_BIAS)(RSP), R
		case ClassRegConst13 | ClassBias, ClassRegConst | ClassBias:
			p.From.Reg -= 256 // must match a.out.go:/REG_BSP
			p.From.Offset += StackBias
			p.From.Class = aclass(p.Ctxt, &p.From)
		}

	case AADD, ASUB:
		// ADD	$const, BSP	-> ADD	$const, RSP
		if isAddrCompatible(p.Ctxt, &p.From, ClassConst) && aclass(p.Ctxt, &p.To) == ClassReg|ClassBias {
			p.To.Reg -= 256 // must match a.out.go:/REG_BSP
			p.To.Class = aclass(p.Ctxt, &p.To)
		}
	}
	// Second pass: loads and stores through BSP, for all move widths.
	switch p.As {
	case AMOVD, AMOVW, AMOVUW, AMOVH, AMOVUH, AMOVB, AMOVUB,
		AFMOVD, AFMOVS:
		switch aclass(p.Ctxt, &p.From) {
		case ClassZero, ClassReg, ClassFReg, ClassDReg:
			switch {
			// MOVD	R, off(BSP)	-> MOVD	R, (off+STACK_BIAS)(RSP)
			case aclass(p.Ctxt, &p.To)&ClassBias != 0 && isAddrCompatible(p.Ctxt, &p.To, ClassIndir):
				p.To.Offset += StackBias
				p.To.Reg -= 256 // must match a.out.go:/REG_BSP
				p.To.Class = aclass(p.Ctxt, &p.To)
			}

		// MOVD	off(BSP), R	-> MOVD	(off+STACK_BIAS)(RSP), R
		case ClassIndir0 | ClassBias, ClassIndir13 | ClassBias, ClassIndir | ClassBias:
			p.From.Reg -= 256 // must match a.out.go:/REG_BSP
			p.From.Offset += StackBias
			p.From.Class = aclass(p.Ctxt, &p.From)
		}
	}
}

// progedit is the per-Prog rewriting hook run by the obj framework.
// It routes constant stores through a scratch register, converts
// 64-bit integer and floating-point constants into memory operands
// backed by generated literal symbols, and applies yfix and biasfix.
func progedit(ctxt *obj.Link, p *obj.Prog) {
	// Rewrite constant moves to memory to go through an intermediary
	// register
	switch p.As {
	case AMOVD:
		if (p.From.Type == obj.TYPE_CONST || p.From.Type == obj.TYPE_ADDR) && (p.To.Type == obj.TYPE_MEM) {
			// Split into: MOVD $const, TMP; MOVD TMP, mem.
			q := obj.Appendp(ctxt, p)
			q.As = p.As
			q.To = p.To
			q.From.Type = obj.TYPE_REG
			q.From.Reg = REG_TMP
			q.From.Offset = 0

			p.To = obj.Addr{}
			p.To.Type = obj.TYPE_REG
			p.To.Reg = REG_TMP
			p.To.Offset = 0
		}

	case AFMOVS:
		if (p.From.Type == obj.TYPE_FCONST || p.From.Type == obj.TYPE_ADDR) && (p.To.Type == obj.TYPE_MEM) {
			q := obj.Appendp(ctxt, p)
			q.As = p.As
			q.To = p.To
			q.From.Type = obj.TYPE_REG
			q.From.Reg = REG_FTMP
			q.From.Offset = 0

			p.To = obj.Addr{}
			p.To.Type = obj.TYPE_REG
			p.To.Reg = REG_FTMP
			p.To.Offset = 0
		}

	case AFMOVD:
		if (p.From.Type == obj.TYPE_FCONST || p.From.Type == obj.TYPE_ADDR) && (p.To.Type == obj.TYPE_MEM) {
			q := obj.Appendp(ctxt, p)
			q.As = p.As
			q.To = p.To
			q.From.Type = obj.TYPE_REG
			q.From.Reg = REG_DTMP
			q.From.Offset = 0

			p.To = obj.Addr{}
			p.To.Type = obj.TYPE_REG
			p.To.Reg = REG_DTMP
			p.To.Offset = 0
		}
	}

	// Rewrite 64-bit integer constants and float constants
	// to values stored in memory.
	switch p.As {
	case AMOVD:
		if aclass(p.Ctxt, &p.From) == ClassConst {
			literal := fmt.Sprintf("$i64.%016x", uint64(p.From.Offset))
			s := obj.Linklookup(ctxt, literal, 0)
			s.Size = 8
			p.From.Type = obj.TYPE_MEM
			p.From.Sym = s
			p.From.Name = obj.NAME_EXTERN
			p.From.Offset = 0
		}

	case AFMOVS:
		if p.From.Type == obj.TYPE_FCONST {
			f32 := float32(p.From.Val.(float64))
			i32 := math.Float32bits(f32)
			literal := fmt.Sprintf("$f32.%08x", uint32(i32))
			s := obj.Linklookup(ctxt, literal, 0)
			s.Size = 4
			p.From.Type = obj.TYPE_MEM
			p.From.Sym = s
			p.From.Name = obj.NAME_EXTERN
			p.From.Offset = 0
		}

	case AFMOVD:
		if p.From.Type == obj.TYPE_FCONST {
			i64 := math.Float64bits(p.From.Val.(float64))
			literal := fmt.Sprintf("$f64.%016x", uint64(i64))
			s := obj.Linklookup(ctxt, literal, 0)
			s.Size = 8
			p.From.Type = obj.TYPE_MEM
			p.From.Sym = s
			p.From.Name = obj.NAME_EXTERN
			p.From.Offset = 0
		}
	}

	// TODO(aram): remove this when compiler can use F and
	// D registers directly.
	yfix(p)

	biasfix(p)
}
// isNOFRAME reports whether the TEXT Prog p carries the NOFRAME flag.
func isNOFRAME(p *obj.Prog) bool {
	return p.From3Offset()&obj.NOFRAME != 0
}

// isREGWIN reports whether the TEXT Prog p carries the REGWIN flag
// (use SPARC register windows via SAVE/RESTORE instead of an explicit
// software frame).
func isREGWIN(p *obj.Prog) bool {
	return p.From3Offset()&obj.REGWIN != 0
}

// preprocess rewrites the instruction list of cursym into its final
// shape: it strips NOPs, detects leaf functions, synthesizes the
// prologue (stack-split check, frame setup, wrapper panic-argp fixup)
// and epilogue (frame teardown and return), records SP adjustments,
// and fills branch delay slots with RNOPs.
//
// TODO(aram):
func preprocess(ctxt *obj.Link, cursym *obj.LSym) {
	cursym.Text.Pc = 0
	cursym.Args = cursym.Text.To.Val.(int32)
	cursym.Locals = int32(cursym.Text.To.Offset)

	// Find leaf subroutines,
	// Strip NOPs.
	var q *obj.Prog
	var q1 *obj.Prog
	for p := cursym.Text; p != nil; p = p.Link {
		switch {
		case p.As == obj.ATEXT:
			// Assume leaf until a call/jump proves otherwise.
			p.Mark |= LEAF

		case p.As == obj.ARET:
			if cursym.Text.Mark&LEAF != 0 && p.To.Sym != nil { // RETJMP
				cursym.Text.From3.Offset |= obj.NOFRAME
			}
			break

		case p.As == obj.ANOP:
			// Unlink the NOP, transferring its mark bits.
			q1 = p.Link
			q.Link = q1 /* q is non-nop */
			q1.Mark |= p.Mark
			continue

		case isUncondJump[p.As]:
			cursym.Text.Mark &^= LEAF
			fallthrough

		case isCondJump[p.As]:
			// Retarget branches that point at NOPs about to be stripped.
			q1 = p.Pcond

			if q1 != nil {
				for q1.As == obj.ANOP {
					q1 = q1.Link
					p.Pcond = q1
				}
			}

			break
		}

		q = p
	}

	for p := cursym.Text; p != nil; p = p.Link {
		switch p.As {
		case obj.ATEXT:
			if cursym.Text.Mark&LEAF != 0 {
				cursym.Leaf = true
			}
		}
	}

	for p := cursym.Text; p != nil; p = p.Link {
		switch p.As {
		case obj.ATEXT:
			frameSize := cursym.Locals
			if frameSize < 0 {
				ctxt.Diag("%v: negative frame size %d", p, frameSize)
			}
			if frameSize%16 != 0 {
				ctxt.Diag("%v: unaligned frame size %d - must be 0 mod 16", p, frameSize)
			}
			if frameSize != 0 && isNOFRAME(p) {
				ctxt.Diag("%v: non-zero framesize for NOFRAME function", p)
			}

			if frameSize == 0 && cursym.Leaf {
				// promote leaves without automatics to NOFRAME.
				cursym.Text.From3.Offset |= obj.NOFRAME
			}

			// Without these NOPs, DTrace changes the execution of the
			// binary. This should never happen, but these NOPs seem to
			// fix it. Keep these NOPs in here until we understand the
			// DTrace behavior.
			p = obj.Appendp(ctxt, p)
			p.As = ARNOP
			p = obj.Appendp(ctxt, p)
			p.As = ARNOP

			if isNOFRAME(cursym.Text) {
				break
			}

			// split check must be done before reserving stack
			// space or changing register windows.
			if !(cursym.Text.From3.Offset&obj.NOSPLIT != 0) {
				p = stacksplit(ctxt, p, frameSize+MinStackFrameSize) // emit split check
			}

			if isREGWIN(cursym.Text) {
				// Hardware register-window prologue.
				// SAVE	-(frameSize+MinStackFrameSize), RSP, RSP
				p = obj.Appendp(ctxt, p)
				p.As = ASAVE
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = -int64(frameSize + MinStackFrameSize)
				p.Reg = REG_RSP
				p.To.Type = obj.TYPE_REG
				p.To.Reg = REG_RSP
				p.Spadj = frameSize + MinStackFrameSize

				// FLUSHW
				p = obj.Appendp(ctxt, p)
				p.As = AFLUSHW

				// MOVD	RFP, (112+bias)(RSP)
				p = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_REG
				p.From.Reg = REG_RFP
				p.To.Type = obj.TYPE_MEM
				p.To.Reg = REG_RSP
				p.To.Offset = int64(112 + StackBias)

				// MOVD	ILR, (120+bias)(RSP)
				p = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_REG
				p.From.Reg = REG_ILR
				p.To.Type = obj.TYPE_MEM
				p.To.Reg = REG_RSP
				p.To.Offset = int64(120 + StackBias)
			} else {
				// Software frame prologue: adjust RSP explicitly
				// instead of using a register window.
				// ADD	-(frameSize+MinStackFrameSize), RSP
				p = obj.Appendp(ctxt, p)
				p.As = AADD
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = -int64(frameSize + MinStackFrameSize)
				p.To.Type = obj.TYPE_REG
				p.To.Reg = REG_RSP
				p.Spadj = frameSize + MinStackFrameSize

				// SUB	-(frameSize+MinStackFrameSize), RSP, RFP
				p = obj.Appendp(ctxt, p)
				p.As = ASUB
				p.From.Type = obj.TYPE_CONST
				p.From.Offset = -int64(frameSize + MinStackFrameSize)
				p.Reg = REG_RSP
				p.To.Type = obj.TYPE_REG
				p.To.Reg = REG_RFP

				// MOVD	RFP, (112+bias)(RSP)
				p = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_REG
				p.From.Reg = REG_RFP
				p.To.Type = obj.TYPE_MEM
				p.To.Reg = REG_RSP
				p.To.Offset = int64(112 + StackBias)

				// MOVD	OLR, ILR
				p = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_REG
				p.From.Reg = REG_OLR
				p.To.Type = obj.TYPE_REG
				p.To.Reg = REG_ILR

				// MOVD	ILR, (120+bias)(RSP)
				p = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_REG
				p.From.Reg = REG_ILR
				p.To.Type = obj.TYPE_MEM
				p.To.Reg = REG_RSP
				p.To.Offset = int64(120 + StackBias)

				// MOVD	0, OLR
				p = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_REG
				p.From.Reg = REG_ZR
				p.To.Type = obj.TYPE_REG
				p.To.Reg = REG_OLR
			}

			if cursym.Text.From3.Offset&obj.WRAPPER != 0 {
				// if(g->panic != nil && g->panic->argp == FP) g->panic->argp = bottom-of-frame
				//
				//	MOVD	g_panic(g), L1
				//	CMP	ZR, L1
				//	BED	end
				//	MOVD	panic_argp(L1), L2
				//	ADD	$(STACK_BIAS+MinStackFrameSize), RFP, L3
				//	CMP	L2, L3
				//	BNED	end
				//	ADD	$(STACK_BIAS+MinStackFrameSize), RSP, L4
				//	MOVD	L4, panic_argp(L1)
				// end:
				//	RNOP
				//
				// The RNOP is needed to give the jumps somewhere to land.
				q = obj.Appendp(ctxt, p)
				q.As = AMOVD
				q.From.Type = obj.TYPE_MEM
				q.From.Reg = REG_G
				q.From.Offset = 4 * int64(ctxt.Arch.PtrSize) // G.panic
				q.To.Type = obj.TYPE_REG
				q.To.Reg = REG_L1

				q = obj.Appendp(ctxt, q)
				q.As = ACMP
				q.From.Type = obj.TYPE_REG
				q.From.Reg = REG_ZR
				q.Reg = REG_L1

				q = obj.Appendp(ctxt, q)
				q.As = ABED
				q.To.Type = obj.TYPE_BRANCH
				q1 := q

				q = obj.Appendp(ctxt, q)
				q.As = AMOVD
				q.From.Type = obj.TYPE_MEM
				q.From.Reg = REG_L1
				q.From.Offset = 0 // Panic.argp
				q.To.Type = obj.TYPE_REG
				q.To.Reg = REG_L2

				q = obj.Appendp(ctxt, q)
				q.As = AADD
				q.From.Type = obj.TYPE_CONST
				q.From.Offset = StackBias + MinStackFrameSize
				q.Reg = REG_RFP
				q.To.Type = obj.TYPE_REG
				q.To.Reg = REG_L3

				q = obj.Appendp(ctxt, q)
				q.As = ACMP
				q.From.Type = obj.TYPE_REG
				q.From.Reg = REG_L2
				q.Reg = REG_L3

				q = obj.Appendp(ctxt, q)
				q.As = ABNED
				q.To.Type = obj.TYPE_BRANCH
				q2 := q

				q = obj.Appendp(ctxt, q)
				q.As = AADD
				q.From.Type = obj.TYPE_CONST
				q.From.Offset = StackBias + MinStackFrameSize
				q.Reg = REG_RSP
				q.To.Type = obj.TYPE_REG
				q.To.Reg = REG_L4

				q = obj.Appendp(ctxt, q)
				q.As = AMOVD
				q.From.Type = obj.TYPE_REG
				q.From.Reg = REG_L4
				q.To.Type = obj.TYPE_MEM
				q.To.Reg = REG_L1
				q.To.Offset = 0 // Panic.argp

				q = obj.Appendp(ctxt, q)
				q.As = ARNOP
				q1.Pcond = q
				q2.Pcond = q
			}

		case obj.ARET:
			if isNOFRAME(cursym.Text) {
				if p.To.Sym != nil { // RETJMP
					p.As = obj.AJMP
				}
				break
			}

			if isREGWIN(cursym.Text) {
				// Rewrite the RET in place as the frame-pointer reload,
				// and append the combined return+restore after it.
				// MOVD	(112+bias)(RSP), RFP
				q = obj.Appendp(ctxt, p)
				p.As = AMOVD
				p.From.Type = obj.TYPE_MEM
				p.From.Reg = REG_RSP
				p.From.Offset = int64(112 + StackBias)
				p.To.Type = obj.TYPE_REG
				p.To.Reg = REG_RFP

				q.As = ARETRESTORE
				q.From.Type = obj.TYPE_NONE
				q.From.Offset = 0
				q.Reg = 0
				q.To.Type = obj.TYPE_NONE
				q.To.Reg = 0
				// The SP restore operation needs a Spadj of
				// -(cursym.Locals + MinStackFrameSize),
				// and the JMP operation needs a Spadj of
				// +(cursym.Locals + MinStackFrameSize).
				//
				// Since this operation does both, they cancel out
				// so we don't do any Spadj adjustment.
				//
				// The best solution would be to split RETRESTORE
				// into the constituent instructions, but that requires
				// more sophisticated delay-slot processing,
				// since the RESTORE has to be in the delay
				// slot of the branch.

				break
			}

			frameSize := cursym.Locals

			// Software-frame epilogue: the original RET Prog (q1) is
			// rewritten into the first restore instruction, and the
			// actual return jump is appended after it.
			// MOVD	RFP, TMP
			q1 = p
			p = obj.Appendp(ctxt, p)
			p.As = AJMPL
			p.From.Type = obj.TYPE_ADDR
			p.From.Offset = 8
			p.From.Reg = REG_OLR
			p.From.Index = 0
			p.Reg = 0
			p.To.Type = obj.TYPE_REG
			p.To.Reg = REG_ZR
			p.Spadj = frameSize + MinStackFrameSize
			q1.As = AMOVD
			q1.From.Type = obj.TYPE_REG
			q1.From.Reg = REG_RFP
			q1.To.Type = obj.TYPE_REG
			q1.To.Reg = REG_TMP

			// restore registers before resetting the stack
			// pointer; otherwise a spill will overwrite the saved
			// link register.

			// MOVD	(120+StackBias)(RSP), OLR
			q1 = obj.Appendp(ctxt, q1)
			q1.As = AMOVD
			q1.From.Type = obj.TYPE_MEM
			q1.From.Reg = REG_RSP
			q1.From.Offset = 120 + StackBias
			q1.To.Type = obj.TYPE_REG
			q1.To.Reg = REG_OLR

			// MOVD	(120+StackBias)(RFP), ILR
			q1 = obj.Appendp(ctxt, q1)
			q1.As = AMOVD
			q1.From.Type = obj.TYPE_MEM
			q1.From.Reg = REG_RFP
			q1.From.Offset = 120 + StackBias
			q1.To.Type = obj.TYPE_REG
			q1.To.Reg = REG_ILR

			// MOVD	(112+StackBias)(RFP), RFP
			q1 = obj.Appendp(ctxt, q1)
			q1.As = AMOVD
			q1.From.Type = obj.TYPE_MEM
			q1.From.Reg = REG_RFP
			q1.From.Offset = 112 + StackBias
			q1.To.Type = obj.TYPE_REG
			q1.To.Reg = REG_RFP

			// MOVD	TMP, RSP
			q1 = obj.Appendp(ctxt, q1)
			q1.As = AMOVD
			q1.From.Type = obj.TYPE_REG
			q1.From.Reg = REG_TMP
			q1.To.Type = obj.TYPE_REG
			q1.To.Reg = REG_RSP
			q1.Spadj = -(frameSize + MinStackFrameSize)

		case AADD, ASUB:
			// Track explicit SP adjustments written in assembly.
			if p.To.Type == obj.TYPE_REG && p.To.Reg == REG_BSP && p.From.Type == obj.TYPE_CONST {
				if p.As == AADD {
					p.Spadj = int32(-p.From.Offset)
				} else {
					p.Spadj = int32(+p.From.Offset)
				}
			}
		}
	}

	// Schedule delay-slots. Only RNOPs for now.
	for p := cursym.Text; p != nil; p = p.Link {
		if !isJump[p.As] || p.As == ARETRESTORE {
			continue
		}
		if p.Link != nil && p.Link.As == ARNOP {
			continue
		}
		p = obj.Appendp(ctxt, p)
		p.As = ARNOP
	}

	// For future use by oplook and friends.
	for p := cursym.Text; p != nil; p = p.Link {
		p.From.Class = aclass(ctxt, &p.From)
		if p.From3 != nil {
			p.From3.Class = aclass(ctxt, p.From3)
		}
		p.To.Class = aclass(ctxt, &p.To)
	}
}
912 for p := cursym.Text; p != nil; p = p.Link { 913 if !isJump[p.As] || p.As == ARETRESTORE { 914 continue 915 } 916 if p.Link != nil && p.Link.As == ARNOP { 917 continue 918 } 919 p = obj.Appendp(ctxt, p) 920 p.As = ARNOP 921 } 922 923 // For future use by oplook and friends. 924 for p := cursym.Text; p != nil; p = p.Link { 925 p.From.Class = aclass(ctxt, &p.From) 926 if p.From3 != nil { 927 p.From3.Class = aclass(ctxt, p.From3) 928 } 929 p.To.Class = aclass(ctxt, &p.To) 930 } 931 } 932 933 func relinv(a obj.As) obj.As { 934 switch a { 935 case obj.AJMP: 936 return ABN 937 case ABN: 938 return obj.AJMP 939 case ABE: 940 return ABNE 941 case ABNE: 942 return ABE 943 case ABG: 944 return ABLE 945 case ABLE: 946 return ABG 947 case ABGE: 948 return ABL 949 case ABL: 950 return ABGE 951 case ABGU: 952 return ABLEU 953 case ABLEU: 954 return ABGU 955 case ABCC: 956 return ABCS 957 case ABCS: 958 return ABCC 959 case ABPOS: 960 return ABNEG 961 case ABNEG: 962 return ABPOS 963 case ABVC: 964 return ABVS 965 case ABVS: 966 return ABVC 967 case ABNW: 968 return obj.AJMP 969 case ABEW: 970 return ABNEW 971 case ABNEW: 972 return ABEW 973 case ABGW: 974 return ABLEW 975 case ABLEW: 976 return ABGW 977 case ABGEW: 978 return ABLW 979 case ABLW: 980 return ABGEW 981 case ABGUW: 982 return ABLEUW 983 case ABLEUW: 984 return ABGUW 985 case ABCCW: 986 return ABCSW 987 case ABCSW: 988 return ABCCW 989 case ABPOSW: 990 return ABNEGW 991 case ABNEGW: 992 return ABPOSW 993 case ABVCW: 994 return ABVSW 995 case ABVSW: 996 return ABVCW 997 case ABND: 998 return obj.AJMP 999 case ABED: 1000 return ABNED 1001 case ABNED: 1002 return ABED 1003 case ABGD: 1004 return ABLED 1005 case ABLED: 1006 return ABGD 1007 case ABGED: 1008 return ABLD 1009 case ABLD: 1010 return ABGED 1011 case ABGUD: 1012 return ABLEUD 1013 case ABLEUD: 1014 return ABGUD 1015 case ABCCD: 1016 return ABCSD 1017 case ABCSD: 1018 return ABCCD 1019 case ABPOSD: 1020 return ABNEGD 1021 case ABNEGD: 1022 return ABPOSD 
1023 case ABVCD: 1024 return ABVSD 1025 case ABVSD: 1026 return ABVCD 1027 case AFBN: 1028 return AFBA 1029 case AFBA: 1030 return AFBN 1031 case AFBE: 1032 return AFBNE 1033 case AFBNE: 1034 return AFBE 1035 case AFBG: 1036 return AFBLE 1037 case AFBLE: 1038 return AFBG 1039 case AFBGE: 1040 return AFBL 1041 case AFBL: 1042 return AFBGE 1043 } 1044 1045 log.Fatalf("unknown relation: %s", obj.Aconv(a)) 1046 return 0 1047 } 1048 1049 var unaryDst = map[obj.As]bool{ 1050 obj.ACALL: true, 1051 obj.AJMP: true, 1052 AWORD: true, 1053 ADWORD: true, 1054 ABNW: true, 1055 ABNEW: true, 1056 ABEW: true, 1057 ABGW: true, 1058 ABLEW: true, 1059 ABGEW: true, 1060 ABLW: true, 1061 ABGUW: true, 1062 ABLEUW: true, 1063 ABCCW: true, 1064 ABCSW: true, 1065 ABPOSW: true, 1066 ABNEGW: true, 1067 ABVCW: true, 1068 ABVSW: true, 1069 ABND: true, 1070 ABNED: true, 1071 ABED: true, 1072 ABGD: true, 1073 ABLED: true, 1074 ABGED: true, 1075 ABLD: true, 1076 ABGUD: true, 1077 ABLEUD: true, 1078 ABCCD: true, 1079 ABCSD: true, 1080 ABPOSD: true, 1081 ABNEGD: true, 1082 ABVCD: true, 1083 ABVSD: true, 1084 } 1085 1086 var Linksparc64 = obj.LinkArch{ 1087 Arch: sys.ArchSPARC64, 1088 Preprocess: preprocess, 1089 Assemble: span, 1090 Follow: follow, 1091 Progedit: progedit, 1092 UnaryDst: unaryDst, 1093 }