github.com/miolini/go@v0.0.0-20160405192216-fca68c8cb408/src/cmd/compile/internal/ppc64/peep.go

// Derived from Inferno utils/6c/peep.c
// http://code.google.com/p/inferno-os/source/browse/utils/6c/peep.c
//
// Copyright © 1994-1999 Lucent Technologies Inc. All rights reserved.
// Portions Copyright © 1995-1997 C H Forsyth (forsyth@terzarima.net)
// Portions Copyright © 1997-1999 Vita Nuova Limited
// Portions Copyright © 2000-2007 Vita Nuova Holdings Limited (www.vitanuova.com)
// Portions Copyright © 2004,2006 Bruce Ellis
// Portions Copyright © 2005-2007 C H Forsyth (forsyth@terzarima.net)
// Revisions Copyright © 2000-2007 Lucent Technologies Inc. and others
// Portions Copyright © 2009 The Go Authors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package ppc64

import (
	"cmd/compile/internal/gc"
	"cmd/internal/obj"
	"cmd/internal/obj/ppc64"
	"fmt"
)

// gactive is the generation counter used by copy1 to mark flow-graph
// nodes it has already visited; copyprop bumps it before each attempt.
var gactive uint32

// peep runs the peephole optimizer over the instruction list starting
// at firstp, using the flow graph built by gc.Flowstart.
func peep(firstp *obj.Prog) {
	g := gc.Flowstart(firstp, nil)
	if g == nil {
		return
	}
	gactive = 0

	var p *obj.Prog
	var r *gc.Flow
	var t obj.As
loop1:
	if gc.Debug['P'] != 0 && gc.Debug['v'] != 0 {
		gc.Dumpit("loop1", g.Start, 0)
	}

	t = 0
	for r = g.Start; r != nil; r = r.Link {
		p = r.Prog

		// TODO(austin) Handle smaller moves. arm and amd64
		// distinguish between moves that *must* sign/zero
		// extend and moves that don't care so they can
		// eliminate moves that don't care without breaking
		// moves that do care. This might let us simplify or
		// remove the next peep loop, too.
		if p.As == ppc64.AMOVD || p.As == ppc64.AFMOVD {
			if regtyp(&p.To) {
				// Try to eliminate reg->reg moves
				if regtyp(&p.From) {
					if p.From.Type == p.To.Type {
						if copyprop(r) {
							excise(r)
							t++
						} else if subprop(r) && copyprop(r) {
							excise(r)
							t++
						}
					}
				}

				// Convert uses of $0 to uses of R0 and
				// propagate R0
				if regzer(&p.From) {
					if p.To.Type == obj.TYPE_REG {
						p.From.Type = obj.TYPE_REG
						p.From.Reg = ppc64.REGZERO
						if copyprop(r) {
							excise(r)
							t++
						} else if subprop(r) && copyprop(r) {
							excise(r)
							t++
						}
					}
				}
			}
		}
	}

	if t != 0 {
		goto loop1
	}

	/*
	 * look for MOVB x,R; MOVB R,R (for small MOVs not handled above)
	 */
	var p1 *obj.Prog
	var r1 *gc.Flow
	for r := g.Start; r != nil; r = r.Link {
		p = r.Prog
		switch p.As {
		default:
			continue

		case ppc64.AMOVH,
			ppc64.AMOVHZ,
			ppc64.AMOVB,
			ppc64.AMOVBZ,
			ppc64.AMOVW,
			ppc64.AMOVWZ:
			if p.To.Type != obj.TYPE_REG {
				continue
			}
		}

		r1 = r.Link
		if r1 == nil {
			continue
		}
		p1 = r1.Prog
		if p1.As != p.As {
			continue
		}
		if p1.From.Type != obj.TYPE_REG || p1.From.Reg != p.To.Reg {
			continue
		}
		if p1.To.Type != obj.TYPE_REG || p1.To.Reg != p.To.Reg {
			continue
		}
		excise(r1)
	}

	if gc.Debug['D'] > 1 {
		goto ret /* allow following code improvement to be suppressed */
	}

	/*
	 * look for OP x,y,R; CMP R, $0 -> OPCC x,y,R
	 * when OP can set condition codes correctly
	 */
	for r := g.Start; r != nil; r = r.Link {
		p = r.Prog
		switch p.As {
		case ppc64.ACMP,
			ppc64.ACMPW: /* always safe? */
			if !regzer(&p.To) {
				continue
			}
			r1 = r.S1
			if r1 == nil {
				continue
			}
			switch r1.Prog.As {
			default:
				continue

			/* the conditions can be complex and these are currently little used */
			case ppc64.ABCL,
				ppc64.ABC:
				continue

			case ppc64.ABEQ,
				ppc64.ABGE,
				ppc64.ABGT,
				ppc64.ABLE,
				ppc64.ABLT,
				ppc64.ABNE,
				ppc64.ABVC,
				ppc64.ABVS:
				break
			}

			r1 = r
			for {
				r1 = gc.Uniqp(r1)
				if r1 == nil || r1.Prog.As != obj.ANOP {
					break
				}
			}

			if r1 == nil {
				continue
			}
			p1 = r1.Prog
			if p1.To.Type != obj.TYPE_REG || p1.To.Reg != p.From.Reg {
				continue
			}
			switch p1.As {
			/* irregular instructions */
			case ppc64.ASUB,
				ppc64.AADD,
				ppc64.AXOR,
				ppc64.AOR:
				if p1.From.Type == obj.TYPE_CONST || p1.From.Type == obj.TYPE_ADDR {
					continue
				}
			}

			switch p1.As {
			default:
				continue

			case ppc64.AMOVW,
				ppc64.AMOVD:
				if p1.From.Type != obj.TYPE_REG {
					continue
				}
				continue

			case ppc64.AANDCC,
				ppc64.AANDNCC,
				ppc64.AORCC,
				ppc64.AORNCC,
				ppc64.AXORCC,
				ppc64.ASUBCC,
				ppc64.ASUBECC,
				ppc64.ASUBMECC,
				ppc64.ASUBZECC,
				ppc64.AADDCC,
				ppc64.AADDCCC,
				ppc64.AADDECC,
				ppc64.AADDMECC,
				ppc64.AADDZECC,
				ppc64.ARLWMICC,
				ppc64.ARLWNMCC,
				/* don't deal with floating point instructions for now */
				/*
					case AFABS:
					case AFADD:
					case AFADDS:
					case AFCTIW:
					case AFCTIWZ:
					case AFDIV:
					case AFDIVS:
					case AFMADD:
					case AFMADDS:
					case AFMOVD:
					case AFMSUB:
					case AFMSUBS:
					case AFMUL:
					case AFMULS:
					case AFNABS:
					case AFNEG:
					case AFNMADD:
					case AFNMADDS:
					case AFNMSUB:
					case AFNMSUBS:
					case AFRSP:
					case AFSUB:
					case AFSUBS:
					case ACNTLZW:
					case AMTFSB0:
					case AMTFSB1:
				*/
				ppc64.AADD,
				ppc64.AADDV,
				ppc64.AADDC,
				ppc64.AADDCV,
				ppc64.AADDME,
				ppc64.AADDMEV,
				ppc64.AADDE,
				ppc64.AADDEV,
				ppc64.AADDZE,
				ppc64.AADDZEV,
				ppc64.AAND,
				ppc64.AANDN,
				ppc64.ADIVW,
				ppc64.ADIVWV,
				ppc64.ADIVWU,
				ppc64.ADIVWUV,
				ppc64.ADIVD,
				ppc64.ADIVDV,
				ppc64.ADIVDU,
				ppc64.ADIVDUV,
				ppc64.AEQV,
				ppc64.AEXTSB,
				ppc64.AEXTSH,
				ppc64.AEXTSW,
				ppc64.AMULHW,
				ppc64.AMULHWU,
				ppc64.AMULLW,
				ppc64.AMULLWV,
				ppc64.AMULHD,
				ppc64.AMULHDU,
				ppc64.AMULLD,
				ppc64.AMULLDV,
				ppc64.ANAND,
				ppc64.ANEG,
				ppc64.ANEGV,
				ppc64.ANOR,
				ppc64.AOR,
				ppc64.AORN,
				ppc64.AREM,
				ppc64.AREMV,
				ppc64.AREMU,
				ppc64.AREMUV,
				ppc64.AREMD,
				ppc64.AREMDV,
				ppc64.AREMDU,
				ppc64.AREMDUV,
				ppc64.ARLWMI,
				ppc64.ARLWNM,
				ppc64.ASLW,
				ppc64.ASRAW,
				ppc64.ASRW,
				ppc64.ASLD,
				ppc64.ASRAD,
				ppc64.ASRD,
				ppc64.ASUB,
				ppc64.ASUBV,
				ppc64.ASUBC,
				ppc64.ASUBCV,
				ppc64.ASUBME,
				ppc64.ASUBMEV,
				ppc64.ASUBE,
				ppc64.ASUBEV,
				ppc64.ASUBZE,
				ppc64.ASUBZEV,
				ppc64.AXOR:
				t = variant2as(p1.As, as2variant(p1.As)|V_CC)
			}

			if gc.Debug['D'] != 0 {
				fmt.Printf("cmp %v; %v -> ", p1, p)
			}
			p1.As = t
			if gc.Debug['D'] != 0 {
				fmt.Printf("%v\n", p1)
			}
			excise(r)
			continue
		}
	}

ret:
	gc.Flowend(g)
}

// excise deletes the instruction in r by rewriting it to a no-op.
func excise(r *gc.Flow) {
	p := r.Prog
	if gc.Debug['P'] != 0 && gc.Debug['v'] != 0 {
		fmt.Printf("%v ===delete===\n", p)
	}
	obj.Nopout(p)
	gc.Ostats.Ndelmov++
}

// regzer returns true if a's value is 0 (a is R0 or $0)
func regzer(a *obj.Addr) bool {
	if a.Type == obj.TYPE_CONST || a.Type == obj.TYPE_ADDR {
		if a.Sym == nil && a.Reg == 0 {
			if a.Offset == 0 {
				return true
			}
		}
	}
	return a.Type == obj.TYPE_REG && a.Reg == ppc64.REGZERO
}

// regtyp reports whether a is a general-purpose or floating-point
// register other than R0 (the zero register).
func regtyp(a *obj.Addr) bool {
	// TODO(rsc): Floating point register exclusions?
	return a.Type == obj.TYPE_REG && ppc64.REG_R0 <= a.Reg && a.Reg <= ppc64.REG_F31 && a.Reg != ppc64.REGZERO
}
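// The sketch below is illustrative only and not part of the original
// file: it shows how regzer and regtyp classify the operand forms this
// pass cares about. The obj.Addr values are built by hand, and
// ppc64.REG_R3 is assumed to be an ordinary general-purpose register
// constant from cmd/internal/obj/ppc64.
func exampleOperandClasses() {
	zeroConst := obj.Addr{Type: obj.TYPE_CONST, Offset: 0}      // $0
	zeroReg := obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REGZERO} // R0
	gpr := obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R3}      // R3

	// regzer accepts both spellings of zero; regtyp rejects R0 so the
	// zero register is never chosen as a copy-propagation operand.
	fmt.Println(regzer(&zeroConst), regzer(&zeroReg), regzer(&gpr)) // true true false
	fmt.Println(regtyp(&zeroConst), regtyp(&zeroReg), regtyp(&gpr)) // false false true
}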
/*
 * the idea is to substitute
 * one register for another
 * from one MOV to another
 *	MOV	a, R1
 *	ADD	b, R1	/ no use of R2
 *	MOV	R1, R2
 * would be converted to
 *	MOV	a, R2
 *	ADD	b, R2
 *	MOV	R2, R1
 * hopefully, then the former or latter MOV
 * will be eliminated by copy propagation.
 *
 * r0 (the argument, not the register) is the MOV at the end of the
 * above sequences. This returns true if it modified any instructions.
 */
func subprop(r0 *gc.Flow) bool {
	p := r0.Prog
	v1 := &p.From
	if !regtyp(v1) {
		return false
	}
	v2 := &p.To
	if !regtyp(v2) {
		return false
	}
	for r := gc.Uniqp(r0); r != nil; r = gc.Uniqp(r) {
		if gc.Uniqs(r) == nil {
			break
		}
		p = r.Prog
		if p.As == obj.AVARDEF || p.As == obj.AVARKILL {
			continue
		}
		if p.Info.Flags&gc.Call != 0 {
			return false
		}

		if p.Info.Flags&(gc.RightRead|gc.RightWrite) == gc.RightWrite {
			if p.To.Type == v1.Type {
				if p.To.Reg == v1.Reg {
					copysub(&p.To, v1, v2, true)
					if gc.Debug['P'] != 0 {
						fmt.Printf("gotit: %v->%v\n%v", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), r.Prog)
						if p.From.Type == v2.Type {
							fmt.Printf(" excise")
						}
						fmt.Printf("\n")
					}

					for r = gc.Uniqs(r); r != r0; r = gc.Uniqs(r) {
						p = r.Prog
						copysub(&p.From, v1, v2, true)
						copysub1(p, v1, v2, true)
						copysub(&p.To, v1, v2, true)
						if gc.Debug['P'] != 0 {
							fmt.Printf("%v\n", r.Prog)
						}
					}

					v1.Reg, v2.Reg = v2.Reg, v1.Reg
					if gc.Debug['P'] != 0 {
						fmt.Printf("%v last\n", r.Prog)
					}
					return true
				}
			}
		}

		if copyau(&p.From, v2) || copyau1(p, v2) || copyau(&p.To, v2) {
			break
		}
		if copysub(&p.From, v1, v2, false) || copysub1(p, v1, v2, false) || copysub(&p.To, v1, v2, false) {
			break
		}
	}

	return false
}

/*
 * The idea is to remove redundant copies.
 *	v1->v2	F=0
 *	(use v2	s/v2/v1/)*
 *	set v1	F=1
 *	use v2	return fail (v1->v2 move must remain)
 * -----------------
 *	v1->v2	F=0
 *	(use v2	s/v2/v1/)*
 *	set v1	F=1
 *	set v2	return success (caller can remove v1->v2 move)
 */
func copyprop(r0 *gc.Flow) bool {
	p := r0.Prog
	v1 := &p.From
	v2 := &p.To
	if copyas(v1, v2) {
		if gc.Debug['P'] != 0 {
			fmt.Printf("eliminating self-move: %v\n", r0.Prog)
		}
		return true
	}

	gactive++
	if gc.Debug['P'] != 0 {
		fmt.Printf("trying to eliminate %v->%v move from:\n%v\n", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), r0.Prog)
	}
	return copy1(v1, v2, r0.S1, false)
}
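// Illustrative sketch, not in the original file: copyprop's self-move
// check above is just copyas on the move's two operands. The Prog is
// built by hand, and ppc64.REG_R3 is assumed to be a plain GPR constant.
func exampleSelfMove() {
	p := new(obj.Prog)
	p.As = ppc64.AMOVD
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R3}
	p.To = p.From

	// MOVD R3, R3 copies a register onto itself, so copyprop would
	// report it as trivially eliminable without calling copy1.
	fmt.Println(copyas(&p.From, &p.To)) // true
}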
// copy1 replaces uses of v2 with v1 starting at r and returns true if
// all uses were rewritten.
func copy1(v1 *obj.Addr, v2 *obj.Addr, r *gc.Flow, f bool) bool {
	if uint32(r.Active) == gactive {
		if gc.Debug['P'] != 0 {
			fmt.Printf("act set; return 1\n")
		}
		return true
	}

	r.Active = int32(gactive)
	if gc.Debug['P'] != 0 {
		fmt.Printf("copy1 replace %v with %v f=%v\n", gc.Ctxt.Dconv(v2), gc.Ctxt.Dconv(v1), f)
	}
	for ; r != nil; r = r.S1 {
		p := r.Prog
		if gc.Debug['P'] != 0 {
			fmt.Printf("%v", p)
		}
		if !f && gc.Uniqp(r) == nil {
			// Multiple predecessors; conservatively
			// assume v1 was set on other path
			f = true

			if gc.Debug['P'] != 0 {
				fmt.Printf("; merge; f=%v", f)
			}
		}

		switch t := copyu(p, v2, nil); t {
		case 2: /* rar, can't split */
			if gc.Debug['P'] != 0 {
				fmt.Printf("; %v rar; return 0\n", gc.Ctxt.Dconv(v2))
			}
			return false

		case 3: /* set */
			if gc.Debug['P'] != 0 {
				fmt.Printf("; %v set; return 1\n", gc.Ctxt.Dconv(v2))
			}
			return true

		case 1, /* used, substitute */
			4: /* use and set */
			if f {
				if gc.Debug['P'] == 0 {
					return false
				}
				if t == 4 {
					fmt.Printf("; %v used+set and f=%v; return 0\n", gc.Ctxt.Dconv(v2), f)
				} else {
					fmt.Printf("; %v used and f=%v; return 0\n", gc.Ctxt.Dconv(v2), f)
				}
				return false
			}

			if copyu(p, v2, v1) != 0 {
				if gc.Debug['P'] != 0 {
					fmt.Printf("; sub fail; return 0\n")
				}
				return false
			}

			if gc.Debug['P'] != 0 {
				fmt.Printf("; sub %v->%v\n => %v", gc.Ctxt.Dconv(v2), gc.Ctxt.Dconv(v1), p)
			}
			if t == 4 {
				if gc.Debug['P'] != 0 {
					fmt.Printf("; %v used+set; return 1\n", gc.Ctxt.Dconv(v2))
				}
				return true
			}
		}

		if !f {
			t := copyu(p, v1, nil)
			if t == 2 || t == 3 || t == 4 {
				f = true
				if gc.Debug['P'] != 0 {
					fmt.Printf("; %v set and !f; f=%v", gc.Ctxt.Dconv(v1), f)
				}
			}
		}

		if gc.Debug['P'] != 0 {
			fmt.Printf("\n")
		}
		if r.S2 != nil {
			if !copy1(v1, v2, r.S2, f) {
				return false
			}
		}
	}

	return true
}
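// Illustrative sketch, not in the original file: the numeric codes that
// copy1 switches on above come from copyu in inspection mode (s == nil).
// The three-operand ADD is built by hand; ppc64.REG_R3 through
// ppc64.REG_R5 are assumed to be ordinary GPR constants.
func exampleCopyuCodes() {
	p := new(obj.Prog)
	p.As = ppc64.AADD
	p.From = obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R3} // ADD R3, R4, R5
	p.Reg = ppc64.REG_R4
	p.To = obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R5}

	src := obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R3}
	dst := obj.Addr{Type: obj.TYPE_REG, Reg: ppc64.REG_R5}
	fmt.Println(copyu(p, &src, nil)) // 1: R3 is only read by this ADD
	fmt.Println(copyu(p, &dst, nil)) // 3: R5 is only written by this ADD
}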
// If s==nil, copyu returns the set/use of v in p; otherwise, it
// modifies p to replace reads of v with reads of s and returns 0 for
// success or non-zero for failure.
//
// If s==nil, copyu returns one of the following values:
//	1 if v only used
//	2 if v is set and used in one address (read-alter-rewrite;
//	  can't substitute)
//	3 if v is only set
//	4 if v is set in one address and used in another (so addresses
//	  can be rewritten independently)
//	0 otherwise (not touched)
func copyu(p *obj.Prog, v *obj.Addr, s *obj.Addr) int {
	if p.From3Type() != obj.TYPE_NONE {
		// 9g never generates a from3
		fmt.Printf("copyu: from3 (%v) not implemented\n", gc.Ctxt.Dconv(p.From3))
	}

	switch p.As {
	default:
		fmt.Printf("copyu: can't find %v\n", obj.Aconv(p.As))
		return 2

	case obj.ANOP, /* read p->from, write p->to */
		ppc64.AMOVH,
		ppc64.AMOVHZ,
		ppc64.AMOVB,
		ppc64.AMOVBZ,
		ppc64.AMOVW,
		ppc64.AMOVWZ,
		ppc64.AMOVD,
		ppc64.ANEG,
		ppc64.ANEGCC,
		ppc64.AADDME,
		ppc64.AADDMECC,
		ppc64.AADDZE,
		ppc64.AADDZECC,
		ppc64.ASUBME,
		ppc64.ASUBMECC,
		ppc64.ASUBZE,
		ppc64.ASUBZECC,
		ppc64.AFCTIW,
		ppc64.AFCTIWZ,
		ppc64.AFCTID,
		ppc64.AFCTIDZ,
		ppc64.AFCFID,
		ppc64.AFCFIDCC,
		ppc64.AFMOVS,
		ppc64.AFMOVD,
		ppc64.AFRSP,
		ppc64.AFNEG,
		ppc64.AFNEGCC,
		ppc64.AFSQRT:
		if s != nil {
			if copysub(&p.From, v, s, true) {
				return 1
			}

			// Update only indirect uses of v in p->to
			if !copyas(&p.To, v) {
				if copysub(&p.To, v, s, true) {
					return 1
				}
			}
			return 0
		}

		if copyas(&p.To, v) {
			// Fix up implicit from
			if p.From.Type == obj.TYPE_NONE {
				p.From = p.To
			}
			if copyau(&p.From, v) {
				return 4
			}
			return 3
		}

		if copyau(&p.From, v) {
			return 1
		}
		if copyau(&p.To, v) {
			// p->to only indirectly uses v
			return 1
		}

		return 0

	case ppc64.AMOVBU, /* rar p->from, write p->to or read p->from, rar p->to */
		ppc64.AMOVBZU,
		ppc64.AMOVHU,
		ppc64.AMOVHZU,
		ppc64.AMOVWZU,
		ppc64.AMOVDU:
		if p.From.Type == obj.TYPE_MEM {
			if copyas(&p.From, v) {
				// No s!=nil check; need to fail
				// anyway in that case
				return 2
			}

			if s != nil {
				if copysub(&p.To, v, s, true) {
					return 1
				}
				return 0
			}

			if copyas(&p.To, v) {
				return 3
			}
		} else if p.To.Type == obj.TYPE_MEM {
			if copyas(&p.To, v) {
				return 2
			}
			if s != nil {
				if copysub(&p.From, v, s, true) {
					return 1
				}
				return 0
			}

			if copyau(&p.From, v) {
				return 1
			}
		} else {
			fmt.Printf("copyu: bad %v\n", p)
		}

		return 0

	case ppc64.ARLWMI, /* read p->from, read p->reg, rar p->to */
		ppc64.ARLWMICC:
		if copyas(&p.To, v) {
			return 2
		}
		fallthrough

	/* fall through */
	case ppc64.AADD,
		/* read p->from, read p->reg, write p->to */
		ppc64.AADDC,
		ppc64.AADDE,
		ppc64.ASUB,
		ppc64.ASLW,
		ppc64.ASRW,
		ppc64.ASRAW,
		ppc64.ASLD,
		ppc64.ASRD,
		ppc64.ASRAD,
		ppc64.AOR,
		ppc64.AORCC,
		ppc64.AORN,
		ppc64.AORNCC,
		ppc64.AAND,
		ppc64.AANDCC,
		ppc64.AANDN,
		ppc64.AANDNCC,
		ppc64.ANAND,
		ppc64.ANANDCC,
		ppc64.ANOR,
		ppc64.ANORCC,
		ppc64.AXOR,
		ppc64.AMULHW,
		ppc64.AMULHWU,
		ppc64.AMULLW,
		ppc64.AMULLD,
		ppc64.ADIVW,
		ppc64.ADIVD,
		ppc64.ADIVWU,
		ppc64.ADIVDU,
		ppc64.AREM,
		ppc64.AREMU,
		ppc64.AREMD,
		ppc64.AREMDU,
		ppc64.ARLWNM,
		ppc64.ARLWNMCC,
		ppc64.AFADDS,
		ppc64.AFADD,
		ppc64.AFSUBS,
		ppc64.AFSUB,
		ppc64.AFMULS,
		ppc64.AFMUL,
		ppc64.AFDIVS,
		ppc64.AFDIV:
		if s != nil {
			if copysub(&p.From, v, s, true) {
				return 1
			}
			if copysub1(p, v, s, true) {
				return 1
			}

			// Update only indirect uses of v in p->to
			if !copyas(&p.To, v) {
				if copysub(&p.To, v, s, true) {
					return 1
				}
			}
			return 0
		}

		if copyas(&p.To, v) {
			if p.Reg == 0 {
				// Fix up implicit reg (e.g., ADD
				// R3,R4 -> ADD R3,R4,R4) so we can
				// update reg and to separately.
				p.Reg = p.To.Reg
			}

			if copyau(&p.From, v) {
				return 4
			}
			if copyau1(p, v) {
				return 4
			}
			return 3
		}

		if copyau(&p.From, v) {
			return 1
		}
		if copyau1(p, v) {
			return 1
		}
		if copyau(&p.To, v) {
			return 1
		}
		return 0

	case ppc64.ABEQ,
		ppc64.ABGT,
		ppc64.ABGE,
		ppc64.ABLT,
		ppc64.ABLE,
		ppc64.ABNE,
		ppc64.ABVC,
		ppc64.ABVS:
		return 0

	case obj.ACHECKNIL, /* read p->from */
		ppc64.ACMP, /* read p->from, read p->to */
		ppc64.ACMPU,
		ppc64.ACMPW,
		ppc64.ACMPWU,
		ppc64.AFCMPO,
		ppc64.AFCMPU:
		if s != nil {
			if copysub(&p.From, v, s, true) {
				return 1
			}
			if copysub(&p.To, v, s, true) {
				return 1
			}
			return 0
		}

		if copyau(&p.From, v) {
			return 1
		}
		if copyau(&p.To, v) {
			return 1
		}
		return 0

	// 9g never generates a branch to a GPR (this isn't
	// even a normal instruction; liblink turns it into a
	// mov and a branch).
	case ppc64.ABR: /* read p->to */
		if s != nil {
			if copysub(&p.To, v, s, true) {
				return 1
			}
			return 0
		}

		if copyau(&p.To, v) {
			return 1
		}
		return 0

	case obj.ARET: /* funny */
		if s != nil {
			return 0
		}

		// All registers die at this point, so claim
		// everything is set (and not used).
		return 3

	case ppc64.ABL: /* funny */
		if v.Type == obj.TYPE_REG {
			// TODO(rsc): REG_R0 and REG_F0 used to be
			// (when register numbers started at 0) exregoffset and exfregoffset,
			// which are unset entirely.
			// It's strange that this handles R0 and F0 differently from the other
			// registers. Possible failure to optimize?
			if ppc64.REG_R0 < v.Reg && v.Reg <= ppc64.REGEXT {
				return 2
			}
			if v.Reg == ppc64.REGARG {
				return 2
			}
			if ppc64.REG_F0 < v.Reg && v.Reg <= ppc64.FREGEXT {
				return 2
			}
		}

		if p.From.Type == obj.TYPE_REG && v.Type == obj.TYPE_REG && p.From.Reg == v.Reg {
			return 2
		}

		if s != nil {
			if copysub(&p.To, v, s, true) {
				return 1
			}
			return 0
		}
		if copyau(&p.To, v) {
			return 4
		}
		return 3

	// R0 is zero, used by DUFFZERO, cannot be substituted.
	// R3 is ptr to memory, used and set, cannot be substituted.
	case obj.ADUFFZERO:
		if v.Type == obj.TYPE_REG {
			if v.Reg == 0 {
				return 1
			}
			if v.Reg == 3 {
				return 2
			}
		}

		return 0

	// R3, R4 are ptr to src, dst, used and set, cannot be substituted.
	// R5 is scratch, set by DUFFCOPY, cannot be substituted.
	case obj.ADUFFCOPY:
		if v.Type == obj.TYPE_REG {
			if v.Reg == 3 || v.Reg == 4 {
				return 2
			}
			if v.Reg == 5 {
				return 3
			}
		}

		return 0

	case obj.ATEXT: /* funny */
		if v.Type == obj.TYPE_REG {
			if v.Reg == ppc64.REGARG {
				return 3
			}
		}
		return 0

	case obj.APCDATA,
		obj.AFUNCDATA,
		obj.AVARDEF,
		obj.AVARKILL,
		obj.AVARLIVE,
		obj.AUSEFIELD:
		return 0
	}
}

// copyas returns true if a and v address the same register.
//
// If a is the from operand, this means this operation reads the
// register in v. If a is the to operand, this means this operation
// writes the register in v.
func copyas(a *obj.Addr, v *obj.Addr) bool {
	return regtyp(v) && a.Type == v.Type && a.Reg == v.Reg
}

// copyau returns true if a either directly or indirectly addresses the
// same register as v.
//
// If a is the from operand, this means this operation reads the
// register in v. If a is the to operand, this means the operation
// either reads or writes the register in v (if !copyas(a, v), then
// the operation reads the register in v).
func copyau(a *obj.Addr, v *obj.Addr) bool {
	if copyas(a, v) {
		return true
	}
	if v.Type == obj.TYPE_REG {
		if a.Type == obj.TYPE_MEM || (a.Type == obj.TYPE_ADDR && a.Reg != 0) {
			if v.Reg == a.Reg {
				return true
			}
		}
	}
	return false
}

// copyau1 returns true if p->reg references the same register as v and v
// is a direct reference.
func copyau1(p *obj.Prog, v *obj.Addr) bool {
	return regtyp(v) && v.Reg != 0 && p.Reg == v.Reg
}

// copysub replaces v with s in a if f==true or indicates whether it could if f==false.
// Returns true on failure to substitute (it always succeeds on ppc64).
// TODO(dfc) remove unused return value and callers where f=false.
func copysub(a *obj.Addr, v *obj.Addr, s *obj.Addr, f bool) bool {
	if f && copyau(a, v) {
		a.Reg = s.Reg
	}
	return false
}

// copysub1 replaces v with s in p1->reg if f==true or indicates whether it could if f==false.
// Returns true on failure to substitute (it always succeeds on ppc64).
// TODO(dfc) remove unused return value and callers where f=false.
func copysub1(p1 *obj.Prog, v *obj.Addr, s *obj.Addr, f bool) bool {
	if f && copyau1(p1, v) {
		p1.Reg = s.Reg
	}
	return false
}

func sameaddr(a *obj.Addr, v *obj.Addr) bool {
	if a.Type != v.Type {
		return false
	}
	if regtyp(v) && a.Reg == v.Reg {
		return true
	}
	if v.Type == obj.NAME_AUTO || v.Type == obj.NAME_PARAM {
		if v.Offset == a.Offset {
			return true
		}
	}
	return false
}

// smallindir reports whether a is a small (offset < 4096) memory
// indirection through the register in reg.
func smallindir(a *obj.Addr, reg *obj.Addr) bool {
	return reg.Type == obj.TYPE_REG && a.Type == obj.TYPE_MEM && a.Reg == reg.Reg && 0 <= a.Offset && a.Offset < 4096
}

// stackaddr reports whether a is the stack pointer register.
func stackaddr(a *obj.Addr) bool {
	return a.Type == obj.TYPE_REG && a.Reg == ppc64.REGSP
}
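// Illustrative sketch, not part of the original file: the CMP-elimination
// loop in peep rewrites an opcode to its condition-setting variant via
// variant2as/as2variant from this package's opt.go, exactly as in the
// expression it uses above. The ADD -> ADDCC mapping shown here is the
// expected result for that table, not something asserted by this file.
func exampleCCVariant() {
	op := ppc64.AADD
	cc := variant2as(op, as2variant(op)|V_CC) // expected: ppc64.AADDCC
	fmt.Printf("%s -> %s\n", obj.Aconv(op), obj.Aconv(cc))
}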