// Inferno utils/5c/peep.c
// http://code.google.com/p/inferno-os/source/browse/utils/5c/peep.c
//
//	Copyright © 1994-1999 Lucent Technologies Inc.  All rights reserved.
//	Portions Copyright © 1995-1997 C H Forsyth (forsyth@terzarima.net)
//	Portions Copyright © 1997-1999 Vita Nuova Limited
//	Portions Copyright © 2000-2007 Vita Nuova Holdings Limited (www.vitanuova.com)
//	Portions Copyright © 2004,2006 Bruce Ellis
//	Portions Copyright © 2005-2007 C H Forsyth (forsyth@terzarima.net)
//	Revisions Copyright © 2000-2007 Lucent Technologies Inc. and others
//	Portions Copyright © 2009 The Go Authors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
30 31 package arm 32 33 import ( 34 "cmd/compile/internal/gc" 35 "cmd/internal/obj" 36 "cmd/internal/obj/arm" 37 "fmt" 38 ) 39 40 var gactive uint32 41 42 // UNUSED 43 func peep(firstp *obj.Prog) { 44 g := (*gc.Graph)(gc.Flowstart(firstp, nil)) 45 if g == nil { 46 return 47 } 48 gactive = 0 49 50 var r *gc.Flow 51 var p *obj.Prog 52 var t int 53 loop1: 54 if gc.Debug['P'] != 0 && gc.Debug['v'] != 0 { 55 gc.Dumpit("loop1", g.Start, 0) 56 } 57 58 t = 0 59 for r = g.Start; r != nil; r = r.Link { 60 p = r.Prog 61 switch p.As { 62 /* 63 * elide shift into TYPE_SHIFT operand of subsequent instruction 64 */ 65 // if(shiftprop(r)) { 66 // excise(r); 67 // t++; 68 // break; 69 // } 70 case arm.ASLL, 71 arm.ASRL, 72 arm.ASRA: 73 break 74 75 case arm.AMOVB, 76 arm.AMOVH, 77 arm.AMOVW, 78 arm.AMOVF, 79 arm.AMOVD: 80 if regtyp(&p.From) { 81 if p.From.Type == p.To.Type && isfloatreg(&p.From) == isfloatreg(&p.To) { 82 if p.Scond == arm.C_SCOND_NONE { 83 if copyprop(g, r) { 84 excise(r) 85 t++ 86 break 87 } 88 89 if subprop(r) && copyprop(g, r) { 90 excise(r) 91 t++ 92 break 93 } 94 } 95 } 96 } 97 98 case arm.AMOVHS, 99 arm.AMOVHU, 100 arm.AMOVBS, 101 arm.AMOVBU: 102 if p.From.Type == obj.TYPE_REG { 103 if shortprop(r) { 104 t++ 105 } 106 } 107 } 108 } 109 110 /* 111 if(p->scond == C_SCOND_NONE) 112 if(regtyp(&p->to)) 113 if(isdconst(&p->from)) { 114 constprop(&p->from, &p->to, r->s1); 115 } 116 break; 117 */ 118 if t != 0 { 119 goto loop1 120 } 121 122 for r := (*gc.Flow)(g.Start); r != nil; r = r.Link { 123 p = r.Prog 124 switch p.As { 125 /* 126 * EOR -1,x,y => MVN x,y 127 */ 128 case arm.AEOR: 129 if isdconst(&p.From) && p.From.Offset == -1 { 130 p.As = arm.AMVN 131 p.From.Type = obj.TYPE_REG 132 if p.Reg != 0 { 133 p.From.Reg = p.Reg 134 } else { 135 p.From.Reg = p.To.Reg 136 } 137 p.Reg = 0 138 } 139 } 140 } 141 142 for r := (*gc.Flow)(g.Start); r != nil; r = r.Link { 143 p = r.Prog 144 switch p.As { 145 case arm.AMOVW, 146 arm.AMOVB, 147 arm.AMOVBS, 148 arm.AMOVBU: 149 if 
p.From.Type == obj.TYPE_MEM && p.From.Offset == 0 { 150 xtramodes(g, r, &p.From) 151 } else if p.To.Type == obj.TYPE_MEM && p.To.Offset == 0 { 152 xtramodes(g, r, &p.To) 153 } else { 154 continue 155 } 156 } 157 } 158 159 // case ACMP: 160 // /* 161 // * elide CMP $0,x if calculation of x can set condition codes 162 // */ 163 // if(isdconst(&p->from) || p->from.offset != 0) 164 // continue; 165 // r2 = r->s1; 166 // if(r2 == nil) 167 // continue; 168 // t = r2->prog->as; 169 // switch(t) { 170 // default: 171 // continue; 172 // case ABEQ: 173 // case ABNE: 174 // case ABMI: 175 // case ABPL: 176 // break; 177 // case ABGE: 178 // t = ABPL; 179 // break; 180 // case ABLT: 181 // t = ABMI; 182 // break; 183 // case ABHI: 184 // t = ABNE; 185 // break; 186 // case ABLS: 187 // t = ABEQ; 188 // break; 189 // } 190 // r1 = r; 191 // do 192 // r1 = uniqp(r1); 193 // while (r1 != nil && r1->prog->as == ANOP); 194 // if(r1 == nil) 195 // continue; 196 // p1 = r1->prog; 197 // if(p1->to.type != TYPE_REG) 198 // continue; 199 // if(p1->to.reg != p->reg) 200 // if(!(p1->as == AMOVW && p1->from.type == TYPE_REG && p1->from.reg == p->reg)) 201 // continue; 202 // 203 // switch(p1->as) { 204 // default: 205 // continue; 206 // case AMOVW: 207 // if(p1->from.type != TYPE_REG) 208 // continue; 209 // case AAND: 210 // case AEOR: 211 // case AORR: 212 // case ABIC: 213 // case AMVN: 214 // case ASUB: 215 // case ARSB: 216 // case AADD: 217 // case AADC: 218 // case ASBC: 219 // case ARSC: 220 // break; 221 // } 222 // p1->scond |= C_SBIT; 223 // r2->prog->as = t; 224 // excise(r); 225 // continue; 226 227 // predicate(g); 228 229 gc.Flowend(g) 230 } 231 232 func regtyp(a *obj.Addr) bool { 233 return a.Type == obj.TYPE_REG && (arm.REG_R0 <= a.Reg && a.Reg <= arm.REG_R15 || arm.REG_F0 <= a.Reg && a.Reg <= arm.REG_F15) 234 } 235 236 /* 237 * the idea is to substitute 238 * one register for another 239 * from one MOV to another 240 * MOV a, R0 241 * ADD b, R0 / no use of R1 242 * MOV 
R0, R1 243 * would be converted to 244 * MOV a, R1 245 * ADD b, R1 246 * MOV R1, R0 247 * hopefully, then the former or latter MOV 248 * will be eliminated by copy propagation. 249 */ 250 func subprop(r0 *gc.Flow) bool { 251 p := (*obj.Prog)(r0.Prog) 252 v1 := (*obj.Addr)(&p.From) 253 if !regtyp(v1) { 254 return false 255 } 256 v2 := (*obj.Addr)(&p.To) 257 if !regtyp(v2) { 258 return false 259 } 260 for r := gc.Uniqp(r0); r != nil; r = gc.Uniqp(r) { 261 if gc.Uniqs(r) == nil { 262 break 263 } 264 p = r.Prog 265 if p.As == obj.AVARDEF || p.As == obj.AVARKILL { 266 continue 267 } 268 if p.Info.Flags&gc.Call != 0 { 269 return false 270 } 271 272 // TODO(rsc): Whatever invalidated the info should have done this call. 273 proginfo(p) 274 275 if (p.Info.Flags&gc.CanRegRead != 0) && p.To.Type == obj.TYPE_REG { 276 p.Info.Flags |= gc.RegRead 277 p.Info.Flags &^= (gc.CanRegRead | gc.RightRead) 278 p.Reg = p.To.Reg 279 } 280 281 switch p.As { 282 case arm.AMULLU, 283 arm.AMULA, 284 arm.AMVN: 285 return false 286 } 287 288 if p.Info.Flags&(gc.RightRead|gc.RightWrite) == gc.RightWrite { 289 if p.To.Type == v1.Type { 290 if p.To.Reg == v1.Reg { 291 if p.Scond == arm.C_SCOND_NONE { 292 copysub(&p.To, v1, v2, 1) 293 if gc.Debug['P'] != 0 { 294 fmt.Printf("gotit: %v->%v\n%v", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), r.Prog) 295 if p.From.Type == v2.Type { 296 fmt.Printf(" excise") 297 } 298 fmt.Printf("\n") 299 } 300 301 for r = gc.Uniqs(r); r != r0; r = gc.Uniqs(r) { 302 p = r.Prog 303 copysub(&p.From, v1, v2, 1) 304 copysub1(p, v1, v2, 1) 305 copysub(&p.To, v1, v2, 1) 306 if gc.Debug['P'] != 0 { 307 fmt.Printf("%v\n", r.Prog) 308 } 309 } 310 311 t := int(int(v1.Reg)) 312 v1.Reg = v2.Reg 313 v2.Reg = int16(t) 314 if gc.Debug['P'] != 0 { 315 fmt.Printf("%v last\n", r.Prog) 316 } 317 return true 318 } 319 } 320 } 321 } 322 323 if copyau(&p.From, v2) || copyau1(p, v2) || copyau(&p.To, v2) { 324 break 325 } 326 if copysub(&p.From, v1, v2, 0) != 0 || copysub1(p, v1, v2, 0) != 0 || 
copysub(&p.To, v1, v2, 0) != 0 { 327 break 328 } 329 } 330 331 return false 332 } 333 334 /* 335 * The idea is to remove redundant copies. 336 * v1->v2 F=0 337 * (use v2 s/v2/v1/)* 338 * set v1 F=1 339 * use v2 return fail 340 * ----------------- 341 * v1->v2 F=0 342 * (use v2 s/v2/v1/)* 343 * set v1 F=1 344 * set v2 return success 345 */ 346 func copyprop(g *gc.Graph, r0 *gc.Flow) bool { 347 p := (*obj.Prog)(r0.Prog) 348 v1 := (*obj.Addr)(&p.From) 349 v2 := (*obj.Addr)(&p.To) 350 if copyas(v1, v2) { 351 return true 352 } 353 gactive++ 354 return copy1(v1, v2, r0.S1, 0) 355 } 356 357 func copy1(v1 *obj.Addr, v2 *obj.Addr, r *gc.Flow, f int) bool { 358 if uint32(r.Active) == gactive { 359 if gc.Debug['P'] != 0 { 360 fmt.Printf("act set; return 1\n") 361 } 362 return true 363 } 364 365 r.Active = int32(gactive) 366 if gc.Debug['P'] != 0 { 367 fmt.Printf("copy %v->%v f=%d\n", gc.Ctxt.Dconv(v1), gc.Ctxt.Dconv(v2), f) 368 } 369 var t int 370 var p *obj.Prog 371 for ; r != nil; r = r.S1 { 372 p = r.Prog 373 if gc.Debug['P'] != 0 { 374 fmt.Printf("%v", p) 375 } 376 if f == 0 && gc.Uniqp(r) == nil { 377 f = 1 378 if gc.Debug['P'] != 0 { 379 fmt.Printf("; merge; f=%d", f) 380 } 381 } 382 383 t = copyu(p, v2, nil) 384 switch t { 385 case 2: /* rar, can't split */ 386 if gc.Debug['P'] != 0 { 387 fmt.Printf("; %vrar; return 0\n", gc.Ctxt.Dconv(v2)) 388 } 389 return false 390 391 case 3: /* set */ 392 if gc.Debug['P'] != 0 { 393 fmt.Printf("; %vset; return 1\n", gc.Ctxt.Dconv(v2)) 394 } 395 return true 396 397 case 1, /* used, substitute */ 398 4: /* use and set */ 399 if f != 0 { 400 if gc.Debug['P'] == 0 { 401 return false 402 } 403 if t == 4 { 404 fmt.Printf("; %vused+set and f=%d; return 0\n", gc.Ctxt.Dconv(v2), f) 405 } else { 406 fmt.Printf("; %vused and f=%d; return 0\n", gc.Ctxt.Dconv(v2), f) 407 } 408 return false 409 } 410 411 if copyu(p, v2, v1) != 0 { 412 if gc.Debug['P'] != 0 { 413 fmt.Printf("; sub fail; return 0\n") 414 } 415 return false 416 } 417 418 if 
gc.Debug['P'] != 0 { 419 fmt.Printf("; sub%v/%v", gc.Ctxt.Dconv(v2), gc.Ctxt.Dconv(v1)) 420 } 421 if t == 4 { 422 if gc.Debug['P'] != 0 { 423 fmt.Printf("; %vused+set; return 1\n", gc.Ctxt.Dconv(v2)) 424 } 425 return true 426 } 427 } 428 429 if f == 0 { 430 t = copyu(p, v1, nil) 431 if f == 0 && (t == 2 || t == 3 || t == 4) { 432 f = 1 433 if gc.Debug['P'] != 0 { 434 fmt.Printf("; %vset and !f; f=%d", gc.Ctxt.Dconv(v1), f) 435 } 436 } 437 } 438 439 if gc.Debug['P'] != 0 { 440 fmt.Printf("\n") 441 } 442 if r.S2 != nil { 443 if !copy1(v1, v2, r.S2, f) { 444 return false 445 } 446 } 447 } 448 449 return true 450 } 451 452 // UNUSED 453 /* 454 * The idea is to remove redundant constants. 455 * $c1->v1 456 * ($c1->v2 s/$c1/v1)* 457 * set v1 return 458 * The v1->v2 should be eliminated by copy propagation. 459 */ 460 func constprop(c1 *obj.Addr, v1 *obj.Addr, r *gc.Flow) { 461 if gc.Debug['P'] != 0 { 462 fmt.Printf("constprop %v->%v\n", gc.Ctxt.Dconv(c1), gc.Ctxt.Dconv(v1)) 463 } 464 var p *obj.Prog 465 for ; r != nil; r = r.S1 { 466 p = r.Prog 467 if gc.Debug['P'] != 0 { 468 fmt.Printf("%v", p) 469 } 470 if gc.Uniqp(r) == nil { 471 if gc.Debug['P'] != 0 { 472 fmt.Printf("; merge; return\n") 473 } 474 return 475 } 476 477 if p.As == arm.AMOVW && copyas(&p.From, c1) { 478 if gc.Debug['P'] != 0 { 479 fmt.Printf("; sub%v/%v", gc.Ctxt.Dconv(&p.From), gc.Ctxt.Dconv(v1)) 480 } 481 p.From = *v1 482 } else if copyu(p, v1, nil) > 1 { 483 if gc.Debug['P'] != 0 { 484 fmt.Printf("; %vset; return\n", gc.Ctxt.Dconv(v1)) 485 } 486 return 487 } 488 489 if gc.Debug['P'] != 0 { 490 fmt.Printf("\n") 491 } 492 if r.S2 != nil { 493 constprop(c1, v1, r.S2) 494 } 495 } 496 } 497 498 /* 499 * shortprop eliminates redundant zero/sign extensions. 500 * 501 * MOVBS x, R 502 * <no use R> 503 * MOVBS R, R' 504 * 505 * changed to 506 * 507 * MOVBS x, R 508 * ... 509 * MOVB R, R' (compiled to mov) 510 * 511 * MOVBS above can be a MOVBS, MOVBU, MOVHS or MOVHU. 
512 */ 513 func shortprop(r *gc.Flow) bool { 514 p := (*obj.Prog)(r.Prog) 515 r1 := (*gc.Flow)(findpre(r, &p.From)) 516 if r1 == nil { 517 return false 518 } 519 520 p1 := (*obj.Prog)(r1.Prog) 521 if p1.As == p.As { 522 // Two consecutive extensions. 523 goto gotit 524 } 525 526 if p1.As == arm.AMOVW && isdconst(&p1.From) && p1.From.Offset >= 0 && p1.From.Offset < 128 { 527 // Loaded an immediate. 528 goto gotit 529 } 530 531 return false 532 533 gotit: 534 if gc.Debug['P'] != 0 { 535 fmt.Printf("shortprop\n%v\n%v", p1, p) 536 } 537 switch p.As { 538 case arm.AMOVBS, 539 arm.AMOVBU: 540 p.As = arm.AMOVB 541 542 case arm.AMOVHS, 543 arm.AMOVHU: 544 p.As = arm.AMOVH 545 } 546 547 if gc.Debug['P'] != 0 { 548 fmt.Printf(" => %v\n", obj.Aconv(int(p.As))) 549 } 550 return true 551 } 552 553 // UNUSED 554 /* 555 * ASLL x,y,w 556 * .. (not use w, not set x y w) 557 * AXXX w,a,b (a != w) 558 * .. (not use w) 559 * (set w) 560 * ----------- changed to 561 * .. 562 * AXXX (x<<y),a,b 563 * .. 564 */ 565 func shiftprop(r *gc.Flow) bool { 566 p := (*obj.Prog)(r.Prog) 567 if p.To.Type != obj.TYPE_REG { 568 if gc.Debug['P'] != 0 { 569 fmt.Printf("\tBOTCH: result not reg; FAILURE\n") 570 } 571 return false 572 } 573 574 n := int(int(p.To.Reg)) 575 a := obj.Addr(obj.Addr{}) 576 if p.Reg != 0 && p.Reg != p.To.Reg { 577 a.Type = obj.TYPE_REG 578 a.Reg = p.Reg 579 } 580 581 if gc.Debug['P'] != 0 { 582 fmt.Printf("shiftprop\n%v", p) 583 } 584 r1 := (*gc.Flow)(r) 585 var p1 *obj.Prog 586 for { 587 /* find first use of shift result; abort if shift operands or result are changed */ 588 r1 = gc.Uniqs(r1) 589 590 if r1 == nil { 591 if gc.Debug['P'] != 0 { 592 fmt.Printf("\tbranch; FAILURE\n") 593 } 594 return false 595 } 596 597 if gc.Uniqp(r1) == nil { 598 if gc.Debug['P'] != 0 { 599 fmt.Printf("\tmerge; FAILURE\n") 600 } 601 return false 602 } 603 604 p1 = r1.Prog 605 if gc.Debug['P'] != 0 { 606 fmt.Printf("\n%v", p1) 607 } 608 switch copyu(p1, &p.To, nil) { 609 case 0: /* not used or set 
*/ 610 if (p.From.Type == obj.TYPE_REG && copyu(p1, &p.From, nil) > 1) || (a.Type == obj.TYPE_REG && copyu(p1, &a, nil) > 1) { 611 if gc.Debug['P'] != 0 { 612 fmt.Printf("\targs modified; FAILURE\n") 613 } 614 return false 615 } 616 617 continue 618 case 3: /* set, not used */ 619 { 620 if gc.Debug['P'] != 0 { 621 fmt.Printf("\tBOTCH: noref; FAILURE\n") 622 } 623 return false 624 } 625 } 626 627 break 628 } 629 630 /* check whether substitution can be done */ 631 switch p1.As { 632 default: 633 if gc.Debug['P'] != 0 { 634 fmt.Printf("\tnon-dpi; FAILURE\n") 635 } 636 return false 637 638 case arm.AAND, 639 arm.AEOR, 640 arm.AADD, 641 arm.AADC, 642 arm.AORR, 643 arm.ASUB, 644 arm.ASBC, 645 arm.ARSB, 646 arm.ARSC: 647 if int(p1.Reg) == n || (p1.Reg == 0 && p1.To.Type == obj.TYPE_REG && int(p1.To.Reg) == n) { 648 if p1.From.Type != obj.TYPE_REG { 649 if gc.Debug['P'] != 0 { 650 fmt.Printf("\tcan't swap; FAILURE\n") 651 } 652 return false 653 } 654 655 p1.Reg = p1.From.Reg 656 p1.From.Reg = int16(n) 657 switch p1.As { 658 case arm.ASUB: 659 p1.As = arm.ARSB 660 661 case arm.ARSB: 662 p1.As = arm.ASUB 663 664 case arm.ASBC: 665 p1.As = arm.ARSC 666 667 case arm.ARSC: 668 p1.As = arm.ASBC 669 } 670 671 if gc.Debug['P'] != 0 { 672 fmt.Printf("\t=>%v", p1) 673 } 674 } 675 fallthrough 676 677 case arm.ABIC, 678 arm.ATST, 679 arm.ACMP, 680 arm.ACMN: 681 if int(p1.Reg) == n { 682 if gc.Debug['P'] != 0 { 683 fmt.Printf("\tcan't swap; FAILURE\n") 684 } 685 return false 686 } 687 688 if p1.Reg == 0 && int(p1.To.Reg) == n { 689 if gc.Debug['P'] != 0 { 690 fmt.Printf("\tshift result used twice; FAILURE\n") 691 } 692 return false 693 } 694 695 // case AMVN: 696 if p1.From.Type == obj.TYPE_SHIFT { 697 if gc.Debug['P'] != 0 { 698 fmt.Printf("\tshift result used in shift; FAILURE\n") 699 } 700 return false 701 } 702 703 if p1.From.Type != obj.TYPE_REG || int(p1.From.Reg) != n { 704 if gc.Debug['P'] != 0 { 705 fmt.Printf("\tBOTCH: where is it used?; FAILURE\n") 706 } 707 return false 
708 } 709 } 710 711 /* check whether shift result is used subsequently */ 712 p2 := (*obj.Prog)(p1) 713 714 if int(p1.To.Reg) != n { 715 var p1 *obj.Prog 716 for { 717 r1 = gc.Uniqs(r1) 718 if r1 == nil { 719 if gc.Debug['P'] != 0 { 720 fmt.Printf("\tinconclusive; FAILURE\n") 721 } 722 return false 723 } 724 725 p1 = r1.Prog 726 if gc.Debug['P'] != 0 { 727 fmt.Printf("\n%v", p1) 728 } 729 switch copyu(p1, &p.To, nil) { 730 case 0: /* not used or set */ 731 continue 732 733 case 3: /* set, not used */ 734 break 735 736 default: /* used */ 737 if gc.Debug['P'] != 0 { 738 fmt.Printf("\treused; FAILURE\n") 739 } 740 return false 741 } 742 743 break 744 } 745 } 746 747 /* make the substitution */ 748 p2.From.Reg = 0 749 750 o := int(int(p.Reg)) 751 if o == 0 { 752 o = int(p.To.Reg) 753 } 754 o &= 15 755 756 switch p.From.Type { 757 case obj.TYPE_CONST: 758 o |= int((p.From.Offset & 0x1f) << 7) 759 760 case obj.TYPE_REG: 761 o |= 1<<4 | (int(p.From.Reg)&15)<<8 762 } 763 764 switch p.As { 765 case arm.ASLL: 766 o |= 0 << 5 767 768 case arm.ASRL: 769 o |= 1 << 5 770 771 case arm.ASRA: 772 o |= 2 << 5 773 } 774 775 p2.From = obj.Addr{} 776 p2.From.Type = obj.TYPE_SHIFT 777 p2.From.Offset = int64(o) 778 if gc.Debug['P'] != 0 { 779 fmt.Printf("\t=>%v\tSUCCEED\n", p2) 780 } 781 return true 782 } 783 784 /* 785 * findpre returns the last instruction mentioning v 786 * before r. It must be a set, and there must be 787 * a unique path from that instruction to r. 788 */ 789 func findpre(r *gc.Flow, v *obj.Addr) *gc.Flow { 790 var r1 *gc.Flow 791 792 for r1 = gc.Uniqp(r); r1 != nil; r, r1 = r1, gc.Uniqp(r1) { 793 if gc.Uniqs(r1) != r { 794 return nil 795 } 796 switch copyu(r1.Prog, v, nil) { 797 case 1, /* used */ 798 2: /* read-alter-rewrite */ 799 return nil 800 801 case 3, /* set */ 802 4: /* set and used */ 803 return r1 804 } 805 } 806 807 return nil 808 } 809 810 /* 811 * findinc finds ADD instructions with a constant 812 * argument which falls within the immed_12 range. 
813 */ 814 func findinc(r *gc.Flow, r2 *gc.Flow, v *obj.Addr) *gc.Flow { 815 var r1 *gc.Flow 816 var p *obj.Prog 817 818 for r1 = gc.Uniqs(r); r1 != nil && r1 != r2; r, r1 = r1, gc.Uniqs(r1) { 819 if gc.Uniqp(r1) != r { 820 return nil 821 } 822 switch copyu(r1.Prog, v, nil) { 823 case 0: /* not touched */ 824 continue 825 826 case 4: /* set and used */ 827 p = r1.Prog 828 829 if p.As == arm.AADD { 830 if isdconst(&p.From) { 831 if p.From.Offset > -4096 && p.From.Offset < 4096 { 832 return r1 833 } 834 } 835 } 836 fallthrough 837 838 default: 839 return nil 840 } 841 } 842 843 return nil 844 } 845 846 func nochange(r *gc.Flow, r2 *gc.Flow, p *obj.Prog) bool { 847 if r == r2 { 848 return true 849 } 850 n := int(0) 851 var a [3]obj.Addr 852 if p.Reg != 0 && p.Reg != p.To.Reg { 853 a[n].Type = obj.TYPE_REG 854 a[n].Reg = p.Reg 855 n++ 856 } 857 858 switch p.From.Type { 859 case obj.TYPE_SHIFT: 860 a[n].Type = obj.TYPE_REG 861 a[n].Reg = int16(arm.REG_R0 + (p.From.Offset & 0xf)) 862 n++ 863 fallthrough 864 865 case obj.TYPE_REG: 866 a[n].Type = obj.TYPE_REG 867 a[n].Reg = p.From.Reg 868 n++ 869 } 870 871 if n == 0 { 872 return true 873 } 874 var i int 875 for ; r != nil && r != r2; r = gc.Uniqs(r) { 876 p = r.Prog 877 for i = 0; i < n; i++ { 878 if copyu(p, &a[i], nil) > 1 { 879 return false 880 } 881 } 882 } 883 884 return true 885 } 886 887 func findu1(r *gc.Flow, v *obj.Addr) bool { 888 for ; r != nil; r = r.S1 { 889 if r.Active != 0 { 890 return false 891 } 892 r.Active = 1 893 switch copyu(r.Prog, v, nil) { 894 case 1, /* used */ 895 2, /* read-alter-rewrite */ 896 4: /* set and used */ 897 return true 898 899 case 3: /* set */ 900 return false 901 } 902 903 if r.S2 != nil { 904 if findu1(r.S2, v) { 905 return true 906 } 907 } 908 } 909 910 return false 911 } 912 913 func finduse(g *gc.Graph, r *gc.Flow, v *obj.Addr) bool { 914 for r1 := (*gc.Flow)(g.Start); r1 != nil; r1 = r1.Link { 915 r1.Active = 0 916 } 917 return findu1(r, v) 918 } 919 920 /* 921 * xtramodes 
enables the ARM post increment and 922 * shift offset addressing modes to transform 923 * MOVW 0(R3),R1 924 * ADD $4,R3,R3 925 * into 926 * MOVW.P 4(R3),R1 927 * and 928 * ADD R0,R1 929 * MOVBU 0(R1),R0 930 * into 931 * MOVBU R0<<0(R1),R0 932 */ 933 func xtramodes(g *gc.Graph, r *gc.Flow, a *obj.Addr) bool { 934 p := (*obj.Prog)(r.Prog) 935 v := obj.Addr(*a) 936 v.Type = obj.TYPE_REG 937 r1 := (*gc.Flow)(findpre(r, &v)) 938 if r1 != nil { 939 p1 := r1.Prog 940 if p1.To.Type == obj.TYPE_REG && p1.To.Reg == v.Reg { 941 switch p1.As { 942 case arm.AADD: 943 if p1.Scond&arm.C_SBIT != 0 { 944 // avoid altering ADD.S/ADC sequences. 945 break 946 } 947 948 if p1.From.Type == obj.TYPE_REG || (p1.From.Type == obj.TYPE_SHIFT && p1.From.Offset&(1<<4) == 0 && ((p.As != arm.AMOVB && p.As != arm.AMOVBS) || (a == &p.From && p1.From.Offset&^0xf == 0))) || ((p1.From.Type == obj.TYPE_ADDR || p1.From.Type == obj.TYPE_CONST) && p1.From.Offset > -4096 && p1.From.Offset < 4096) { 949 if nochange(gc.Uniqs(r1), r, p1) { 950 if a != &p.From || v.Reg != p.To.Reg { 951 if finduse(g, r.S1, &v) { 952 if p1.Reg == 0 || p1.Reg == v.Reg { 953 /* pre-indexing */ 954 p.Scond |= arm.C_WBIT 955 } else { 956 return false 957 } 958 } 959 } 960 961 switch p1.From.Type { 962 /* register offset */ 963 case obj.TYPE_REG: 964 if gc.Nacl { 965 return false 966 } 967 *a = obj.Addr{} 968 a.Type = obj.TYPE_SHIFT 969 a.Offset = int64(p1.From.Reg) & 15 970 971 /* scaled register offset */ 972 case obj.TYPE_SHIFT: 973 if gc.Nacl { 974 return false 975 } 976 *a = obj.Addr{} 977 a.Type = obj.TYPE_SHIFT 978 fallthrough 979 980 /* immediate offset */ 981 case obj.TYPE_CONST, 982 obj.TYPE_ADDR: 983 a.Offset = p1.From.Offset 984 } 985 986 if p1.Reg != 0 { 987 a.Reg = p1.Reg 988 } 989 excise(r1) 990 return true 991 } 992 } 993 994 case arm.AMOVW: 995 if p1.From.Type == obj.TYPE_REG { 996 r2 := (*gc.Flow)(findinc(r1, r, &p1.From)) 997 if r2 != nil { 998 var r3 *gc.Flow 999 for r3 = gc.Uniqs(r2); r3.Prog.As == obj.ANOP; r3 
= gc.Uniqs(r3) { 1000 } 1001 if r3 == r { 1002 /* post-indexing */ 1003 p1 := r2.Prog 1004 1005 a.Reg = p1.To.Reg 1006 a.Offset = p1.From.Offset 1007 p.Scond |= arm.C_PBIT 1008 if !finduse(g, r, &r1.Prog.To) { 1009 excise(r1) 1010 } 1011 excise(r2) 1012 return true 1013 } 1014 } 1015 } 1016 } 1017 } 1018 } 1019 1020 if a != &p.From || a.Reg != p.To.Reg { 1021 r1 := (*gc.Flow)(findinc(r, nil, &v)) 1022 if r1 != nil { 1023 /* post-indexing */ 1024 p1 := r1.Prog 1025 1026 a.Offset = p1.From.Offset 1027 p.Scond |= arm.C_PBIT 1028 excise(r1) 1029 return true 1030 } 1031 } 1032 1033 return false 1034 } 1035 1036 /* 1037 * return 1038 * 1 if v only used (and substitute), 1039 * 2 if read-alter-rewrite 1040 * 3 if set 1041 * 4 if set and used 1042 * 0 otherwise (not touched) 1043 */ 1044 func copyu(p *obj.Prog, v *obj.Addr, s *obj.Addr) int { 1045 switch p.As { 1046 default: 1047 fmt.Printf("copyu: can't find %v\n", obj.Aconv(int(p.As))) 1048 return 2 1049 1050 case arm.AMOVM: 1051 if v.Type != obj.TYPE_REG { 1052 return 0 1053 } 1054 if p.From.Type == obj.TYPE_CONST { /* read reglist, read/rar */ 1055 if s != nil { 1056 if p.From.Offset&(1<<uint(v.Reg)) != 0 { 1057 return 1 1058 } 1059 if copysub(&p.To, v, s, 1) != 0 { 1060 return 1 1061 } 1062 return 0 1063 } 1064 1065 if copyau(&p.To, v) { 1066 if p.Scond&arm.C_WBIT != 0 { 1067 return 2 1068 } 1069 return 1 1070 } 1071 1072 if p.From.Offset&(1<<uint(v.Reg)) != 0 { 1073 return 1 /* read/rar, write reglist */ 1074 } 1075 } else { 1076 if s != nil { 1077 if p.To.Offset&(1<<uint(v.Reg)) != 0 { 1078 return 1 1079 } 1080 if copysub(&p.From, v, s, 1) != 0 { 1081 return 1 1082 } 1083 return 0 1084 } 1085 1086 if copyau(&p.From, v) { 1087 if p.Scond&arm.C_WBIT != 0 { 1088 return 2 1089 } 1090 if p.To.Offset&(1<<uint(v.Reg)) != 0 { 1091 return 4 1092 } 1093 return 1 1094 } 1095 1096 if p.To.Offset&(1<<uint(v.Reg)) != 0 { 1097 return 3 1098 } 1099 } 1100 1101 return 0 1102 1103 case obj.ANOP, /* read,, write */ 1104 arm.ASQRTD, 
1105 arm.AMOVW, 1106 arm.AMOVF, 1107 arm.AMOVD, 1108 arm.AMOVH, 1109 arm.AMOVHS, 1110 arm.AMOVHU, 1111 arm.AMOVB, 1112 arm.AMOVBS, 1113 arm.AMOVBU, 1114 arm.AMOVFW, 1115 arm.AMOVWF, 1116 arm.AMOVDW, 1117 arm.AMOVWD, 1118 arm.AMOVFD, 1119 arm.AMOVDF: 1120 if p.Scond&(arm.C_WBIT|arm.C_PBIT) != 0 { 1121 if v.Type == obj.TYPE_REG { 1122 if p.From.Type == obj.TYPE_MEM || p.From.Type == obj.TYPE_SHIFT { 1123 if p.From.Reg == v.Reg { 1124 return 2 1125 } 1126 } else { 1127 if p.To.Reg == v.Reg { 1128 return 2 1129 } 1130 } 1131 } 1132 } 1133 1134 if s != nil { 1135 if copysub(&p.From, v, s, 1) != 0 { 1136 return 1 1137 } 1138 if !copyas(&p.To, v) { 1139 if copysub(&p.To, v, s, 1) != 0 { 1140 return 1 1141 } 1142 } 1143 return 0 1144 } 1145 1146 if copyas(&p.To, v) { 1147 if p.Scond != arm.C_SCOND_NONE { 1148 return 2 1149 } 1150 if copyau(&p.From, v) { 1151 return 4 1152 } 1153 return 3 1154 } 1155 1156 if copyau(&p.From, v) { 1157 return 1 1158 } 1159 if copyau(&p.To, v) { 1160 return 1 1161 } 1162 return 0 1163 1164 case arm.AMULLU, /* read, read, write, write */ 1165 arm.AMULL, 1166 arm.AMULA, 1167 arm.AMVN: 1168 return 2 1169 1170 case arm.AADD, /* read, read, write */ 1171 arm.AADC, 1172 arm.ASUB, 1173 arm.ASBC, 1174 arm.ARSB, 1175 arm.ASLL, 1176 arm.ASRL, 1177 arm.ASRA, 1178 arm.AORR, 1179 arm.AAND, 1180 arm.AEOR, 1181 arm.AMUL, 1182 arm.AMULU, 1183 arm.ADIV, 1184 arm.ADIVU, 1185 arm.AMOD, 1186 arm.AMODU, 1187 arm.AADDF, 1188 arm.AADDD, 1189 arm.ASUBF, 1190 arm.ASUBD, 1191 arm.AMULF, 1192 arm.AMULD, 1193 arm.ADIVF, 1194 arm.ADIVD, 1195 obj.ACHECKNIL, 1196 /* read */ 1197 arm.ACMPF, /* read, read, */ 1198 arm.ACMPD, 1199 arm.ACMP, 1200 arm.ACMN, 1201 arm.ATST: 1202 /* read,, */ 1203 if s != nil { 1204 if copysub(&p.From, v, s, 1) != 0 { 1205 return 1 1206 } 1207 if copysub1(p, v, s, 1) != 0 { 1208 return 1 1209 } 1210 if !copyas(&p.To, v) { 1211 if copysub(&p.To, v, s, 1) != 0 { 1212 return 1 1213 } 1214 } 1215 return 0 1216 } 1217 1218 if copyas(&p.To, v) { 1219 if 
p.Scond != arm.C_SCOND_NONE { 1220 return 2 1221 } 1222 if p.Reg == 0 { 1223 p.Reg = p.To.Reg 1224 } 1225 if copyau(&p.From, v) { 1226 return 4 1227 } 1228 if copyau1(p, v) { 1229 return 4 1230 } 1231 return 3 1232 } 1233 1234 if copyau(&p.From, v) { 1235 return 1 1236 } 1237 if copyau1(p, v) { 1238 return 1 1239 } 1240 if copyau(&p.To, v) { 1241 return 1 1242 } 1243 return 0 1244 1245 case arm.ABEQ, /* read, read */ 1246 arm.ABNE, 1247 arm.ABCS, 1248 arm.ABHS, 1249 arm.ABCC, 1250 arm.ABLO, 1251 arm.ABMI, 1252 arm.ABPL, 1253 arm.ABVS, 1254 arm.ABVC, 1255 arm.ABHI, 1256 arm.ABLS, 1257 arm.ABGE, 1258 arm.ABLT, 1259 arm.ABGT, 1260 arm.ABLE: 1261 if s != nil { 1262 if copysub(&p.From, v, s, 1) != 0 { 1263 return 1 1264 } 1265 return copysub1(p, v, s, 1) 1266 } 1267 1268 if copyau(&p.From, v) { 1269 return 1 1270 } 1271 if copyau1(p, v) { 1272 return 1 1273 } 1274 return 0 1275 1276 case arm.AB: /* funny */ 1277 if s != nil { 1278 if copysub(&p.To, v, s, 1) != 0 { 1279 return 1 1280 } 1281 return 0 1282 } 1283 1284 if copyau(&p.To, v) { 1285 return 1 1286 } 1287 return 0 1288 1289 case obj.ARET: /* funny */ 1290 if s != nil { 1291 return 1 1292 } 1293 return 3 1294 1295 case arm.ABL: /* funny */ 1296 if v.Type == obj.TYPE_REG { 1297 // TODO(rsc): REG_R0 and REG_F0 used to be 1298 // (when register numbers started at 0) exregoffset and exfregoffset, 1299 // which are unset entirely. 1300 // It's strange that this handles R0 and F0 differently from the other 1301 // registers. Possible failure to optimize? 
1302 if arm.REG_R0 < v.Reg && v.Reg <= arm.REGEXT { 1303 return 2 1304 } 1305 if v.Reg == arm.REGARG { 1306 return 2 1307 } 1308 if arm.REG_F0 < v.Reg && v.Reg <= arm.FREGEXT { 1309 return 2 1310 } 1311 } 1312 1313 if p.From.Type == obj.TYPE_REG && v.Type == obj.TYPE_REG && p.From.Reg == v.Reg { 1314 return 2 1315 } 1316 1317 if s != nil { 1318 if copysub(&p.To, v, s, 1) != 0 { 1319 return 1 1320 } 1321 return 0 1322 } 1323 1324 if copyau(&p.To, v) { 1325 return 4 1326 } 1327 return 3 1328 1329 // R0 is zero, used by DUFFZERO, cannot be substituted. 1330 // R1 is ptr to memory, used and set, cannot be substituted. 1331 case obj.ADUFFZERO: 1332 if v.Type == obj.TYPE_REG { 1333 if v.Reg == arm.REG_R0 { 1334 return 1 1335 } 1336 if v.Reg == arm.REG_R0+1 { 1337 return 2 1338 } 1339 } 1340 1341 return 0 1342 1343 // R0 is scratch, set by DUFFCOPY, cannot be substituted. 1344 // R1, R2 areptr to src, dst, used and set, cannot be substituted. 1345 case obj.ADUFFCOPY: 1346 if v.Type == obj.TYPE_REG { 1347 if v.Reg == arm.REG_R0 { 1348 return 3 1349 } 1350 if v.Reg == arm.REG_R0+1 || v.Reg == arm.REG_R0+2 { 1351 return 2 1352 } 1353 } 1354 1355 return 0 1356 1357 case obj.ATEXT: /* funny */ 1358 if v.Type == obj.TYPE_REG { 1359 if v.Reg == arm.REGARG { 1360 return 3 1361 } 1362 } 1363 return 0 1364 1365 case obj.APCDATA, 1366 obj.AFUNCDATA, 1367 obj.AVARDEF, 1368 obj.AVARKILL: 1369 return 0 1370 } 1371 } 1372 1373 /* 1374 * direct reference, 1375 * could be set/use depending on 1376 * semantics 1377 */ 1378 func copyas(a *obj.Addr, v *obj.Addr) bool { 1379 if regtyp(v) { 1380 if a.Type == v.Type { 1381 if a.Reg == v.Reg { 1382 return true 1383 } 1384 } 1385 } else if v.Type == obj.TYPE_CONST { /* for constprop */ 1386 if a.Type == v.Type { 1387 if a.Name == v.Name { 1388 if a.Sym == v.Sym { 1389 if a.Reg == v.Reg { 1390 if a.Offset == v.Offset { 1391 return true 1392 } 1393 } 1394 } 1395 } 1396 } 1397 } 1398 1399 return false 1400 } 1401 1402 func sameaddr(a *obj.Addr, v 
*obj.Addr) bool { 1403 if a.Type != v.Type { 1404 return false 1405 } 1406 if regtyp(v) && a.Reg == v.Reg { 1407 return true 1408 } 1409 1410 // TODO(rsc): Change v->type to v->name and enable. 1411 //if(v->type == NAME_AUTO || v->type == NAME_PARAM) { 1412 // if(v->offset == a->offset) 1413 // return 1; 1414 //} 1415 return false 1416 } 1417 1418 /* 1419 * either direct or indirect 1420 */ 1421 func copyau(a *obj.Addr, v *obj.Addr) bool { 1422 if copyas(a, v) { 1423 return true 1424 } 1425 if v.Type == obj.TYPE_REG { 1426 if a.Type == obj.TYPE_ADDR && a.Reg != 0 { 1427 if a.Reg == v.Reg { 1428 return true 1429 } 1430 } else if a.Type == obj.TYPE_MEM { 1431 if a.Reg == v.Reg { 1432 return true 1433 } 1434 } else if a.Type == obj.TYPE_REGREG || a.Type == obj.TYPE_REGREG2 { 1435 if a.Reg == v.Reg { 1436 return true 1437 } 1438 if a.Offset == int64(v.Reg) { 1439 return true 1440 } 1441 } else if a.Type == obj.TYPE_SHIFT { 1442 if a.Offset&0xf == int64(v.Reg-arm.REG_R0) { 1443 return true 1444 } 1445 if (a.Offset&(1<<4) != 0) && (a.Offset>>8)&0xf == int64(v.Reg-arm.REG_R0) { 1446 return true 1447 } 1448 } 1449 } 1450 1451 return false 1452 } 1453 1454 /* 1455 * compare v to the center 1456 * register in p (p->reg) 1457 */ 1458 func copyau1(p *obj.Prog, v *obj.Addr) bool { 1459 if v.Type == obj.TYPE_REG && v.Reg == 0 { 1460 return false 1461 } 1462 return p.Reg == v.Reg 1463 } 1464 1465 /* 1466 * substitute s for v in a 1467 * return failure to substitute 1468 */ 1469 func copysub(a *obj.Addr, v *obj.Addr, s *obj.Addr, f int) int { 1470 if f != 0 { 1471 if copyau(a, v) { 1472 if a.Type == obj.TYPE_SHIFT { 1473 if a.Offset&0xf == int64(v.Reg-arm.REG_R0) { 1474 a.Offset = a.Offset&^0xf | int64(s.Reg)&0xf 1475 } 1476 if (a.Offset&(1<<4) != 0) && (a.Offset>>8)&0xf == int64(v.Reg-arm.REG_R0) { 1477 a.Offset = a.Offset&^(0xf<<8) | (int64(s.Reg)&0xf)<<8 1478 } 1479 } else if a.Type == obj.TYPE_REGREG || a.Type == obj.TYPE_REGREG2 { 1480 if a.Offset == int64(v.Reg) { 1481 
a.Offset = int64(s.Reg) 1482 } 1483 if a.Reg == v.Reg { 1484 a.Reg = s.Reg 1485 } 1486 } else { 1487 a.Reg = s.Reg 1488 } 1489 } 1490 } 1491 1492 return 0 1493 } 1494 1495 func copysub1(p1 *obj.Prog, v *obj.Addr, s *obj.Addr, f int) int { 1496 if f != 0 { 1497 if copyau1(p1, v) { 1498 p1.Reg = s.Reg 1499 } 1500 } 1501 return 0 1502 } 1503 1504 var predinfo = []struct { 1505 opcode int 1506 notopcode int 1507 scond int 1508 notscond int 1509 }{ 1510 {arm.ABEQ, arm.ABNE, 0x0, 0x1}, 1511 {arm.ABNE, arm.ABEQ, 0x1, 0x0}, 1512 {arm.ABCS, arm.ABCC, 0x2, 0x3}, 1513 {arm.ABHS, arm.ABLO, 0x2, 0x3}, 1514 {arm.ABCC, arm.ABCS, 0x3, 0x2}, 1515 {arm.ABLO, arm.ABHS, 0x3, 0x2}, 1516 {arm.ABMI, arm.ABPL, 0x4, 0x5}, 1517 {arm.ABPL, arm.ABMI, 0x5, 0x4}, 1518 {arm.ABVS, arm.ABVC, 0x6, 0x7}, 1519 {arm.ABVC, arm.ABVS, 0x7, 0x6}, 1520 {arm.ABHI, arm.ABLS, 0x8, 0x9}, 1521 {arm.ABLS, arm.ABHI, 0x9, 0x8}, 1522 {arm.ABGE, arm.ABLT, 0xA, 0xB}, 1523 {arm.ABLT, arm.ABGE, 0xB, 0xA}, 1524 {arm.ABGT, arm.ABLE, 0xC, 0xD}, 1525 {arm.ABLE, arm.ABGT, 0xD, 0xC}, 1526 } 1527 1528 type Joininfo struct { 1529 start *gc.Flow 1530 last *gc.Flow 1531 end *gc.Flow 1532 len int 1533 } 1534 1535 const ( 1536 Join = iota 1537 Split 1538 End 1539 Branch 1540 Setcond 1541 Toolong 1542 ) 1543 1544 const ( 1545 Falsecond = iota 1546 Truecond 1547 Delbranch 1548 Keepbranch 1549 ) 1550 1551 func isbranch(p *obj.Prog) bool { 1552 return (arm.ABEQ <= p.As) && (p.As <= arm.ABLE) 1553 } 1554 1555 func predicable(p *obj.Prog) bool { 1556 switch p.As { 1557 case obj.ANOP, 1558 obj.AXXX, 1559 obj.ADATA, 1560 obj.AGLOBL, 1561 obj.ATEXT, 1562 arm.AWORD: 1563 return false 1564 } 1565 1566 if isbranch(p) { 1567 return false 1568 } 1569 return true 1570 } 1571 1572 /* 1573 * Depends on an analysis of the encodings performed by 5l. 1574 * These seem to be all of the opcodes that lead to the "S" bit 1575 * being set in the instruction encodings. 1576 * 1577 * C_SBIT may also have been set explicitly in p->scond. 
 */

// modifiescpsr reports whether p writes the condition flags, either
// implicitly (the opcodes listed below, including ABL since a call may
// clobber them) or explicitly via the S bit in p.Scond.
func modifiescpsr(p *obj.Prog) bool {
	switch p.As {
	case arm.AMULLU,
		arm.AMULA,
		arm.AMULU,
		arm.ADIVU,
		arm.ATEQ,
		arm.ACMN,
		arm.ATST,
		arm.ACMP,
		arm.AMUL,
		arm.ADIV,
		arm.AMOD,
		arm.AMODU,
		arm.ABL:
		return true
	}

	if p.Scond&arm.C_SBIT != 0 {
		return true
	}
	return false
}

/*
 * Find the maximal chain of instructions starting with r which could
 * be executed conditionally
 */
func joinsplit(r *gc.Flow, j *Joininfo) int {
	j.start = r
	j.last = r
	j.len = 0
	for {
		// A node with more than one predecessor is a join point:
		// the chain must stop before it.
		if r.P2 != nil && (r.P1 != nil || r.P2.P2link != nil) {
			j.end = r
			return Join
		}

		// Two successors: a conditional branch splits the flow here.
		if r.S1 != nil && r.S2 != nil {
			j.end = r
			return Split
		}

		j.last = r
		// NOPs do not count toward the chain length.
		if r.Prog.As != obj.ANOP {
			j.len++
		}
		// No successor: end of the instruction stream.
		if r.S1 == nil && r.S2 == nil {
			j.end = r.Link
			return End
		}

		// Only a branch successor remains: the chain ends at a branch.
		if r.S2 != nil {
			j.end = r.S2
			return Branch
		}

		// An instruction that clobbers the condition flags ends the
		// chain, since later instructions would test stale flags.
		if modifiescpsr(r.Prog) {
			j.end = r.S1
			return Setcond
		}

		r = r.S1
		// Chains longer than 4 instructions are considered too long
		// to predicate.
		if j.len >= 4 {
			break
		}
	}

	j.end = r
	return Toolong
}

// successor returns the single successor of r: S1 if present,
// otherwise S2.
func successor(r *gc.Flow) *gc.Flow {
	if r.S1 != nil {
		return r.S1
	} else {
		return r.S2
	}
}

/*
 * applypred rewrites the chain described by j to execute only under the
 * condition implied by the branch rstart. cond (Truecond/Falsecond)
 * selects which sense of the branch condition to apply; branch
 * (Delbranch/Keepbranch) says whether a trailing AB is removed or
 * converted into the corresponding conditional branch.
 */
func applypred(rstart *gc.Flow, j *Joininfo, cond int, branch int) {
	if j.len == 0 {
		return
	}
	// Condition-code bits to install in each predicated instruction.
	var pred int
	if cond == Truecond {
		pred = predinfo[rstart.Prog.As-arm.ABEQ].scond
	} else {
		pred = predinfo[rstart.Prog.As-arm.ABEQ].notscond
	}

	for r := (*gc.Flow)(j.start); ; r = successor(r) {
		if r.Prog.As == arm.AB {
			if r != j.last || branch == Delbranch {
				excise(r)
			} else {
				// Keep the final branch but make it conditional.
				if cond == Truecond {
					r.Prog.As = int16(predinfo[rstart.Prog.As-arm.ABEQ].opcode)
				} else {
					r.Prog.As = int16(predinfo[rstart.Prog.As-arm.ABEQ].notopcode)
				}
			}
		} else if predicable(r.Prog) {
			// Replace only the condition field of scond, preserving
			// the remaining bits.
			r.Prog.Scond = uint8(int(r.Prog.Scond&^arm.C_SCOND) | pred)
		}
		// Make the fall-through edge follow textual order.
		if r.S1 != r.Link {
			r.S1 = r.Link
			r.Link.P1 = r
		}

		if r == j.last {
			break
		}
	}
}

// predicate performs if-conversion over the flow graph: for each
// conditional branch whose arms form suitable chains (per joinsplit),
// it rewrites the arms as conditionally executed instructions and
// removes the branch.
func predicate(g *gc.Graph) {
	var t1 int
	var t2 int
	var j1 Joininfo
	var j2 Joininfo

	for r := (*gc.Flow)(g.Start); r != nil; r = r.Link {
		if isbranch(r.Prog) {
			t1 = joinsplit(r.S1, &j1)
			t2 = joinsplit(r.S2, &j2)
			// The fall-through chain must be immediately followed in
			// textual order by the taken chain.
			if j1.last.Link != j2.start {
				continue
			}
			if j1.end == j2.end {
				// if-then-else shape: predicate both arms with opposite
				// conditions and drop the branch entirely.
				if (t1 == Branch && (t2 == Join || t2 == Setcond)) || (t2 == Join && (t1 == Join || t1 == Setcond)) {
					applypred(r, &j1, Falsecond, Delbranch)
					applypred(r, &j2, Truecond, Delbranch)
					excise(r)
					continue
				}
			}

			// if-then shape: predicate only the fall-through arm.
			if t1 == End || t1 == Branch {
				applypred(r, &j1, Falsecond, Keepbranch)
				excise(r)
				continue
			}
		}
	}
}

// isdconst reports whether a is an integer constant operand.
func isdconst(a *obj.Addr) bool {
	return a.Type == obj.TYPE_CONST
}

// isfloatreg reports whether a names a floating-point register (F0-F15).
func isfloatreg(a *obj.Addr) bool {
	return arm.REG_F0 <= a.Reg && a.Reg <= arm.REG_F15
}

// stackaddr reports whether a is the stack-pointer register.
func stackaddr(a *obj.Addr) bool {
	return regtyp(a) && a.Reg == arm.REGSP
}

// smallindir reports whether a is an indirection through reg with a
// small non-negative offset (0 <= offset < 4096, i.e. one that fits in
// an ARM load/store immediate field).
func smallindir(a *obj.Addr, reg *obj.Addr) bool {
	return reg.Type == obj.TYPE_REG && a.Type == obj.TYPE_MEM && a.Reg == reg.Reg && 0 <= a.Offset && a.Offset < 4096
}

// excise removes the instruction at r by rewriting it into a no-op.
func excise(r *gc.Flow) {
	p := (*obj.Prog)(r.Prog)
	obj.Nopout(p)
}