github.com/gocuntian/go@v0.0.0-20160610041250-fee02d270bf8/src/cmd/compile/internal/gc/cgen.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/obj/ppc64"
	"cmd/internal/sys"
	"fmt"
)

// generate:
//	res = n;
// simplifies and calls Thearch.Gmove.
// if wb is true, need to emit write barriers.
func Cgen(n, res *Node) {
	cgen_wb(n, res, false)
}

func cgen_wb(n, res *Node, wb bool) {
	if Debug['g'] != 0 {
		op := "cgen"
		if wb {
			op = "cgen_wb"
		}
		Dump("\n"+op+"-n", n)
		Dump(op+"-res", res)
	}

	if n == nil || n.Type == nil {
		return
	}

	if res == nil || res.Type == nil {
		Fatalf("cgen: res nil")
	}

	for n.Op == OCONVNOP {
		n = n.Left
	}

	switch n.Op {
	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
		cgen_slice(n, res, wb)
		return

	case OEFACE:
		if res.Op != ONAME || !res.Addable || wb {
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen_eface(n, &n1)
			cgen_wb(&n1, res, wb)
		} else {
			Cgen_eface(n, res)
		}
		return

	case ODOTTYPE:
		cgen_dottype(n, res, nil, wb)
		return

	case OAPPEND:
		cgen_append(n, res)
		return
	}

	if n.Ullman >= UINF {
		if n.Op == OINDREG {
			Fatalf("cgen: this is going to miscompile")
		}
		if res.Ullman >= UINF {
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			cgen_wb(&n1, res, wb)
			return
		}
	}

	if Isfat(n.Type) {
		if n.Type.Width < 0 {
			Fatalf("forgot to compute width for %v", n.Type)
		}
		sgen_wb(n, res, n.Type.Width, wb)
		return
	}

	if !res.Addable {
		if n.Ullman > res.Ullman {
			if Ctxt.Arch.RegSize == 4 && Is64(n.Type) {
				var n1 Node
				Tempname(&n1, n.Type)
				Cgen(n, &n1)
				cgen_wb(&n1, res, wb)
				return
			}

			var n1 Node
			Regalloc(&n1, n.Type, res)
			Cgen(n, &n1)
			if n1.Ullman > res.Ullman {
				Dump("n1", &n1)
				Dump("res", res)
				Fatalf("loop in cgen")
			}

			cgen_wb(&n1, res, wb)
			Regfree(&n1)
			return
		}

		if res.Ullman < UINF {
			if Complexop(n, res) {
				Complexgen(n, res)
				return
			}

			f := true // gen through register
			switch n.Op {
			case OLITERAL:
				if Smallintconst(n) {
					f = false
				}

			case OREGISTER:
				f = false
			}

			if !n.Type.IsComplex() && Ctxt.Arch.RegSize == 8 && !wb {
				a := Thearch.Optoas(OAS, res.Type)
				var addr obj.Addr
				if Thearch.Sudoaddable(a, res, &addr) {
					var p1 *obj.Prog
					if f {
						var n2 Node
						Regalloc(&n2, res.Type, nil)
						Cgen(n, &n2)
						p1 = Thearch.Gins(a, &n2, nil)
						Regfree(&n2)
					} else {
						p1 = Thearch.Gins(a, n, nil)
					}
					p1.To = addr
					if Debug['g'] != 0 {
						fmt.Printf("%v [ignore previous line]\n", p1)
					}
					Thearch.Sudoclean()
					return
				}
			}
		}

		if Ctxt.Arch.Family == sys.I386 {
			// no registers to speak of
			var n1, n2 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			Igen(res, &n2, nil)
			cgen_wb(&n1, &n2, wb)
			Regfree(&n2)
			return
		}

		var n1 Node
		Igen(res, &n1, nil)
		cgen_wb(n, &n1, wb)
		Regfree(&n1)
		return
	}

	// update addressability for string, slice
	// can't do in walk because n->left->addable
	// changes if n->left is an escaping local variable.
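	// For example (illustrative): for a local slice s, len(s) is addressable
	// only while s itself is; if escape analysis moves s to the heap, s is
	// accessed through a pointer and loses direct addressability, so the
	// propagation below has to happen here rather than in walk.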
	switch n.Op {
	case OSPTR, OLEN:
		if n.Left.Type.IsSlice() || n.Left.Type.IsString() {
			n.Addable = n.Left.Addable
		}

	case OCAP:
		if n.Left.Type.IsSlice() {
			n.Addable = n.Left.Addable
		}

	case OITAB:
		n.Addable = n.Left.Addable
	}

	if wb {
		if Simtype[res.Type.Etype] != Tptr {
			Fatalf("cgen_wb of type %v", res.Type)
		}
		if n.Ullman >= UINF {
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			n = &n1
		}
		cgen_wbptr(n, res)
		return
	}

	// Write barrier now handled. Code below this line can ignore wb.

	if Ctxt.Arch.Family == sys.ARM { // TODO(rsc): Maybe more often?
		// if both are addressable, move
		if n.Addable && res.Addable {
			if Is64(n.Type) || Is64(res.Type) || n.Op == OREGISTER || res.Op == OREGISTER || n.Type.IsComplex() || res.Type.IsComplex() {
				Thearch.Gmove(n, res)
			} else {
				var n1 Node
				Regalloc(&n1, n.Type, nil)
				Thearch.Gmove(n, &n1)
				Cgen(&n1, res)
				Regfree(&n1)
			}

			return
		}

		// if both are not addressable, use a temporary.
		if !n.Addable && !res.Addable {
			// could use regalloc here sometimes,
			// but have to check for ullman >= UINF.
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			Cgen(&n1, res)
			return
		}

		// if result is not addressable directly but n is,
		// compute its address and then store via the address.
		if !res.Addable {
			var n1 Node
			Igen(res, &n1, nil)
			Cgen(n, &n1)
			Regfree(&n1)
			return
		}
	}

	if Complexop(n, res) {
		Complexgen(n, res)
		return
	}

	if Ctxt.Arch.InFamily(sys.AMD64, sys.I386, sys.S390X) && n.Addable {
		Thearch.Gmove(n, res)
		return
	}

	if Ctxt.Arch.InFamily(sys.ARM64, sys.MIPS64, sys.PPC64) {
		// if both are addressable, move
		if n.Addable {
			if n.Op == OREGISTER || res.Op == OREGISTER {
				Thearch.Gmove(n, res)
			} else {
				var n1 Node
				Regalloc(&n1, n.Type, nil)
				Thearch.Gmove(n, &n1)
				Cgen(&n1, res)
				Regfree(&n1)
			}
			return
		}
	}

	// if n is sudoaddable generate addr and move
	if Ctxt.Arch.Family == sys.ARM && !Is64(n.Type) && !Is64(res.Type) && !n.Type.IsComplex() && !res.Type.IsComplex() {
		a := Thearch.Optoas(OAS, n.Type)
		var addr obj.Addr
		if Thearch.Sudoaddable(a, n, &addr) {
			if res.Op != OREGISTER {
				var n2 Node
				Regalloc(&n2, res.Type, nil)
				p1 := Thearch.Gins(a, nil, &n2)
				p1.From = addr
				if Debug['g'] != 0 {
					fmt.Printf("%v [ignore previous line]\n", p1)
				}
				Thearch.Gmove(&n2, res)
				Regfree(&n2)
			} else {
				p1 := Thearch.Gins(a, nil, res)
				p1.From = addr
				if Debug['g'] != 0 {
					fmt.Printf("%v [ignore previous line]\n", p1)
				}
			}
			Thearch.Sudoclean()
			return
		}
	}

	nl := n.Left
	nr := n.Right

	if nl != nil && nl.Ullman >= UINF {
		if nr != nil && nr.Ullman >= UINF {
			var n1 Node
			Tempname(&n1, nl.Type)
			Cgen(nl, &n1)
			n2 := *n
			n2.Left = &n1
			Cgen(&n2, res)
			return
		}
	}

	// 64-bit ops are hard on 32-bit machine.
	if Ctxt.Arch.RegSize == 4 && (Is64(n.Type) || Is64(res.Type) || n.Left != nil && Is64(n.Left.Type)) {
		switch n.Op {
		// math goes to cgen64.
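		// Illustrative sketch of what Cgen64 does on a 4-byte-register target:
		// a 64-bit OADD, for instance, becomes an add of the low words followed
		// by an add-with-carry of the high words, and OSUB uses the matching
		// subtract-with-borrow pair.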
317 case OMINUS, 318 OCOM, 319 OADD, 320 OSUB, 321 OMUL, 322 OLROT, 323 OLSH, 324 ORSH, 325 OAND, 326 OOR, 327 OXOR: 328 Thearch.Cgen64(n, res) 329 return 330 } 331 } 332 333 if Thearch.Cgen_float != nil && nl != nil && n.Type.IsFloat() && nl.Type.IsFloat() { 334 Thearch.Cgen_float(n, res) 335 return 336 } 337 338 if !n.Type.IsComplex() && Ctxt.Arch.RegSize == 8 { 339 a := Thearch.Optoas(OAS, n.Type) 340 var addr obj.Addr 341 if Thearch.Sudoaddable(a, n, &addr) { 342 if res.Op == OREGISTER { 343 p1 := Thearch.Gins(a, nil, res) 344 p1.From = addr 345 } else { 346 var n2 Node 347 Regalloc(&n2, n.Type, nil) 348 p1 := Thearch.Gins(a, nil, &n2) 349 p1.From = addr 350 Thearch.Gins(a, &n2, res) 351 Regfree(&n2) 352 } 353 354 Thearch.Sudoclean() 355 return 356 } 357 } 358 359 var a obj.As 360 switch n.Op { 361 default: 362 Dump("cgen", n) 363 Dump("cgen-res", res) 364 Fatalf("cgen: unknown op %v", Nconv(n, FmtShort|FmtSign)) 365 366 case OOROR, OANDAND, 367 OEQ, ONE, 368 OLT, OLE, 369 OGE, OGT, 370 ONOT: 371 Bvgen(n, res, true) 372 return 373 374 case OPLUS: 375 Cgen(nl, res) 376 return 377 378 // unary 379 case OCOM: 380 a := Thearch.Optoas(OXOR, nl.Type) 381 382 var n1 Node 383 Regalloc(&n1, nl.Type, nil) 384 Cgen(nl, &n1) 385 var n2 Node 386 Nodconst(&n2, nl.Type, -1) 387 Thearch.Gins(a, &n2, &n1) 388 cgen_norm(n, &n1, res) 389 return 390 391 case OMINUS: 392 if nl.Type.IsFloat() { 393 nr = Nodintconst(-1) 394 nr = convlit(nr, n.Type) 395 a = Thearch.Optoas(OMUL, nl.Type) 396 goto sbop 397 } 398 399 a := Thearch.Optoas(n.Op, nl.Type) 400 // unary 401 var n1 Node 402 Regalloc(&n1, nl.Type, res) 403 404 Cgen(nl, &n1) 405 if Ctxt.Arch.Family == sys.ARM { 406 var n2 Node 407 Nodconst(&n2, nl.Type, 0) 408 Thearch.Gins(a, &n2, &n1) 409 } else if Ctxt.Arch.Family == sys.ARM64 { 410 Thearch.Gins(a, &n1, &n1) 411 } else { 412 Thearch.Gins(a, nil, &n1) 413 } 414 cgen_norm(n, &n1, res) 415 return 416 417 case OSQRT: 418 var n1 Node 419 Regalloc(&n1, nl.Type, res) 420 Cgen(n.Left, &n1) 421 Thearch.Gins(Thearch.Optoas(OSQRT, nl.Type), &n1, &n1) 422 Thearch.Gmove(&n1, res) 423 Regfree(&n1) 424 return 425 426 case OGETG: 427 Thearch.Getg(res) 428 return 429 430 // symmetric binary 431 case OAND, 432 OOR, 433 OXOR, 434 OADD, 435 OMUL: 436 if n.Op == OMUL && Thearch.Cgen_bmul != nil && Thearch.Cgen_bmul(n.Op, nl, nr, res) { 437 break 438 } 439 a = Thearch.Optoas(n.Op, nl.Type) 440 goto sbop 441 442 // asymmetric binary 443 case OSUB: 444 a = Thearch.Optoas(n.Op, nl.Type) 445 goto abop 446 447 case OHMUL: 448 Thearch.Cgen_hmul(nl, nr, res) 449 450 case OCONV: 451 if Eqtype(n.Type, nl.Type) || Noconv(n.Type, nl.Type) { 452 Cgen(nl, res) 453 return 454 } 455 456 if Ctxt.Arch.Family == sys.I386 { 457 var n1 Node 458 var n2 Node 459 Tempname(&n2, n.Type) 460 Mgen(nl, &n1, res) 461 Thearch.Gmove(&n1, &n2) 462 Thearch.Gmove(&n2, res) 463 Mfree(&n1) 464 break 465 } 466 467 var n1 Node 468 var n2 Node 469 if Ctxt.Arch.Family == sys.ARM { 470 if nl.Addable && !Is64(nl.Type) { 471 Regalloc(&n1, nl.Type, res) 472 Thearch.Gmove(nl, &n1) 473 } else { 474 if n.Type.Width > int64(Widthptr) || Is64(nl.Type) || nl.Type.IsFloat() { 475 Tempname(&n1, nl.Type) 476 } else { 477 Regalloc(&n1, nl.Type, res) 478 } 479 Cgen(nl, &n1) 480 } 481 if n.Type.Width > int64(Widthptr) || Is64(n.Type) || n.Type.IsFloat() { 482 Tempname(&n2, n.Type) 483 } else { 484 Regalloc(&n2, n.Type, nil) 485 } 486 } else { 487 if n.Type.Width > nl.Type.Width { 488 // If loading from memory, do conversion during load, 489 // so as to avoid use of 8-bit 
register in, say, int(*byteptr). 490 switch nl.Op { 491 case ODOT, ODOTPTR, OINDEX, OIND, ONAME: 492 Igen(nl, &n1, res) 493 Regalloc(&n2, n.Type, res) 494 Thearch.Gmove(&n1, &n2) 495 Thearch.Gmove(&n2, res) 496 Regfree(&n2) 497 Regfree(&n1) 498 return 499 } 500 } 501 Regalloc(&n1, nl.Type, res) 502 Regalloc(&n2, n.Type, &n1) 503 Cgen(nl, &n1) 504 } 505 506 // if we do the conversion n1 -> n2 here 507 // reusing the register, then gmove won't 508 // have to allocate its own register. 509 Thearch.Gmove(&n1, &n2) 510 Thearch.Gmove(&n2, res) 511 if n2.Op == OREGISTER { 512 Regfree(&n2) 513 } 514 if n1.Op == OREGISTER { 515 Regfree(&n1) 516 } 517 518 case ODOT, 519 ODOTPTR, 520 OINDEX, 521 OIND: 522 var n1 Node 523 Igen(n, &n1, res) 524 525 Thearch.Gmove(&n1, res) 526 Regfree(&n1) 527 528 // interface table is first word of interface value 529 case OITAB: 530 var n1 Node 531 Igen(nl, &n1, res) 532 533 n1.Type = n.Type 534 Thearch.Gmove(&n1, res) 535 Regfree(&n1) 536 537 case OSPTR: 538 // pointer is the first word of string or slice. 539 if Isconst(nl, CTSTR) { 540 var n1 Node 541 Regalloc(&n1, Types[Tptr], res) 542 p1 := Thearch.Gins(Thearch.Optoas(OAS, n1.Type), nil, &n1) 543 Datastring(nl.Val().U.(string), &p1.From) 544 p1.From.Type = obj.TYPE_ADDR 545 Thearch.Gmove(&n1, res) 546 Regfree(&n1) 547 break 548 } 549 550 var n1 Node 551 Igen(nl, &n1, res) 552 n1.Type = n.Type 553 Thearch.Gmove(&n1, res) 554 Regfree(&n1) 555 556 case OLEN: 557 if nl.Type.IsMap() || nl.Type.IsChan() { 558 // map and chan have len in the first int-sized word. 559 // a zero pointer means zero length 560 var n1 Node 561 Regalloc(&n1, Types[Tptr], res) 562 563 Cgen(nl, &n1) 564 565 var n2 Node 566 Nodconst(&n2, Types[Tptr], 0) 567 p1 := Thearch.Ginscmp(OEQ, Types[Tptr], &n1, &n2, 0) 568 569 n2 = n1 570 n2.Op = OINDREG 571 n2.Type = Types[Simtype[TINT]] 572 Thearch.Gmove(&n2, &n1) 573 574 Patch(p1, Pc) 575 576 Thearch.Gmove(&n1, res) 577 Regfree(&n1) 578 break 579 } 580 581 if nl.Type.IsString() || nl.Type.IsSlice() { 582 // both slice and string have len one pointer into the struct. 583 // a zero pointer means zero length 584 var n1 Node 585 Igen(nl, &n1, res) 586 587 n1.Type = Types[Simtype[TUINT]] 588 n1.Xoffset += int64(Array_nel) 589 Thearch.Gmove(&n1, res) 590 Regfree(&n1) 591 break 592 } 593 594 Fatalf("cgen: OLEN: unknown type %v", Tconv(nl.Type, FmtLong)) 595 596 case OCAP: 597 if nl.Type.IsChan() { 598 // chan has cap in the second int-sized word. 
599 // a zero pointer means zero length 600 var n1 Node 601 Regalloc(&n1, Types[Tptr], res) 602 603 Cgen(nl, &n1) 604 605 var n2 Node 606 Nodconst(&n2, Types[Tptr], 0) 607 p1 := Thearch.Ginscmp(OEQ, Types[Tptr], &n1, &n2, 0) 608 609 n2 = n1 610 n2.Op = OINDREG 611 n2.Xoffset = int64(Widthint) 612 n2.Type = Types[Simtype[TINT]] 613 Thearch.Gmove(&n2, &n1) 614 615 Patch(p1, Pc) 616 617 Thearch.Gmove(&n1, res) 618 Regfree(&n1) 619 break 620 } 621 622 if nl.Type.IsSlice() { 623 var n1 Node 624 Igen(nl, &n1, res) 625 n1.Type = Types[Simtype[TUINT]] 626 n1.Xoffset += int64(Array_cap) 627 Thearch.Gmove(&n1, res) 628 Regfree(&n1) 629 break 630 } 631 632 Fatalf("cgen: OCAP: unknown type %v", Tconv(nl.Type, FmtLong)) 633 634 case OADDR: 635 if n.Bounded { // let race detector avoid nil checks 636 Disable_checknil++ 637 } 638 Agen(nl, res) 639 if n.Bounded { 640 Disable_checknil-- 641 } 642 643 case OCALLMETH: 644 cgen_callmeth(n, 0) 645 cgen_callret(n, res) 646 647 case OCALLINTER: 648 cgen_callinter(n, res, 0) 649 cgen_callret(n, res) 650 651 case OCALLFUNC: 652 cgen_call(n, 0) 653 cgen_callret(n, res) 654 655 case OMOD, ODIV: 656 if n.Type.IsFloat() || Thearch.Dodiv == nil { 657 a = Thearch.Optoas(n.Op, nl.Type) 658 goto abop 659 } 660 661 if nl.Ullman >= nr.Ullman { 662 var n1 Node 663 Regalloc(&n1, nl.Type, res) 664 Cgen(nl, &n1) 665 cgen_div(n.Op, &n1, nr, res) 666 Regfree(&n1) 667 } else { 668 var n2 Node 669 if !Smallintconst(nr) { 670 Regalloc(&n2, nr.Type, res) 671 Cgen(nr, &n2) 672 } else { 673 n2 = *nr 674 } 675 676 cgen_div(n.Op, nl, &n2, res) 677 if n2.Op != OLITERAL { 678 Regfree(&n2) 679 } 680 } 681 682 case OLSH, ORSH, OLROT: 683 Thearch.Cgen_shift(n.Op, n.Bounded, nl, nr, res) 684 } 685 686 return 687 688 // put simplest on right - we'll generate into left 689 // and then adjust it using the computation of right. 690 // constants and variables have the same ullman 691 // count, so look for constants specially. 692 // 693 // an integer constant we can use as an immediate 694 // is simpler than a variable - we can use the immediate 695 // in the adjustment instruction directly - so it goes 696 // on the right. 697 // 698 // other constants, like big integers or floating point 699 // constants, require a mov into a register, so those 700 // might as well go on the left, so we can reuse that 701 // register for the computation. 
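	// For example (illustrative): in x+3 the small constant 3 stays on the
	// right, where it can be folded into the add as an immediate, while in
	// 1e300+x the floating-point constant needs a register load anyway, so
	// it is swapped to the left and that register is reused for the result.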
702 sbop: // symmetric binary 703 if nl.Ullman < nr.Ullman || (nl.Ullman == nr.Ullman && (Smallintconst(nl) || (nr.Op == OLITERAL && !Smallintconst(nr)))) { 704 nl, nr = nr, nl 705 } 706 707 abop: // asymmetric binary 708 var n1 Node 709 var n2 Node 710 if Ctxt.Arch.Family == sys.I386 { 711 // no registers, sigh 712 if Smallintconst(nr) { 713 var n1 Node 714 Mgen(nl, &n1, res) 715 var n2 Node 716 Regalloc(&n2, nl.Type, &n1) 717 Thearch.Gmove(&n1, &n2) 718 Thearch.Gins(a, nr, &n2) 719 Thearch.Gmove(&n2, res) 720 Regfree(&n2) 721 Mfree(&n1) 722 } else if nl.Ullman >= nr.Ullman { 723 var nt Node 724 Tempname(&nt, nl.Type) 725 Cgen(nl, &nt) 726 var n2 Node 727 Mgen(nr, &n2, nil) 728 var n1 Node 729 Regalloc(&n1, nl.Type, res) 730 Thearch.Gmove(&nt, &n1) 731 Thearch.Gins(a, &n2, &n1) 732 Thearch.Gmove(&n1, res) 733 Regfree(&n1) 734 Mfree(&n2) 735 } else { 736 var n2 Node 737 Regalloc(&n2, nr.Type, res) 738 Cgen(nr, &n2) 739 var n1 Node 740 Regalloc(&n1, nl.Type, nil) 741 Cgen(nl, &n1) 742 Thearch.Gins(a, &n2, &n1) 743 Regfree(&n2) 744 Thearch.Gmove(&n1, res) 745 Regfree(&n1) 746 } 747 return 748 } 749 750 if nl.Ullman >= nr.Ullman { 751 Regalloc(&n1, nl.Type, res) 752 Cgen(nl, &n1) 753 754 if Smallintconst(nr) && Ctxt.Arch.Family != sys.MIPS64 && Ctxt.Arch.Family != sys.ARM && Ctxt.Arch.Family != sys.ARM64 && Ctxt.Arch.Family != sys.PPC64 { // TODO(rsc): Check opcode for arm 755 n2 = *nr 756 } else { 757 Regalloc(&n2, nr.Type, nil) 758 Cgen(nr, &n2) 759 } 760 } else { 761 if Smallintconst(nr) && Ctxt.Arch.Family != sys.MIPS64 && Ctxt.Arch.Family != sys.ARM && Ctxt.Arch.Family != sys.ARM64 && Ctxt.Arch.Family != sys.PPC64 { // TODO(rsc): Check opcode for arm 762 n2 = *nr 763 } else { 764 Regalloc(&n2, nr.Type, res) 765 Cgen(nr, &n2) 766 } 767 768 Regalloc(&n1, nl.Type, nil) 769 Cgen(nl, &n1) 770 } 771 772 Thearch.Gins(a, &n2, &n1) 773 if n2.Op != OLITERAL { 774 Regfree(&n2) 775 } 776 cgen_norm(n, &n1, res) 777 } 778 779 var sys_wbptr *Node 780 781 func cgen_wbptr(n, res *Node) { 782 if Curfn != nil { 783 if Curfn.Func.Pragma&Nowritebarrier != 0 { 784 Yyerror("write barrier prohibited") 785 } 786 if Curfn.Func.WBLineno == 0 { 787 Curfn.Func.WBLineno = lineno 788 } 789 } 790 if Debug_wb > 0 { 791 Warn("write barrier") 792 } 793 794 var dst, src Node 795 Igen(res, &dst, nil) 796 if n.Op == OREGISTER { 797 src = *n 798 Regrealloc(&src) 799 } else { 800 Cgenr(n, &src, nil) 801 } 802 803 wbVar := syslook("writeBarrier") 804 wbEnabled := NodSym(ODOT, wbVar, wbVar.Type.Field(0).Sym) 805 wbEnabled = typecheck(wbEnabled, Erv) 806 pbr := Thearch.Ginscmp(ONE, Types[TUINT8], wbEnabled, Nodintconst(0), -1) 807 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, &dst) 808 pjmp := Gbranch(obj.AJMP, nil, 0) 809 Patch(pbr, Pc) 810 var adst Node 811 Agenr(&dst, &adst, &dst) 812 p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &adst, nil) 813 a := &p.To 814 a.Type = obj.TYPE_MEM 815 a.Reg = int16(Thearch.REGSP) 816 a.Offset = Ctxt.FixedFrameSize() 817 p2 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, nil) 818 p2.To = p.To 819 p2.To.Offset += int64(Widthptr) 820 Regfree(&adst) 821 if sys_wbptr == nil { 822 sys_wbptr = writebarrierfn("writebarrierptr", Types[Tptr], Types[Tptr]) 823 } 824 Ginscall(sys_wbptr, 0) 825 Patch(pjmp, Pc) 826 827 Regfree(&dst) 828 Regfree(&src) 829 } 830 831 func cgen_wbfat(n, res *Node) { 832 if Curfn != nil { 833 if Curfn.Func.Pragma&Nowritebarrier != 0 { 834 Yyerror("write barrier prohibited") 835 } 836 if Curfn.Func.WBLineno == 0 { 837 Curfn.Func.WBLineno = lineno 838 } 839 } 
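	// Rough summary of the call set up below: cgen_wbfat marshals arguments
	// for typedmemmove(typ, dst, src), writing the destination and source
	// addresses into consecutive pointer-sized slots above the fixed frame
	// and, because needType is set, the *_type pointer into the slot before
	// them.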
840 if Debug_wb > 0 { 841 Warn("write barrier") 842 } 843 needType := true 844 funcName := "typedmemmove" 845 var dst, src Node 846 if n.Ullman >= res.Ullman { 847 Agenr(n, &src, nil) 848 Agenr(res, &dst, nil) 849 } else { 850 Agenr(res, &dst, nil) 851 Agenr(n, &src, nil) 852 } 853 p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &dst, nil) 854 a := &p.To 855 a.Type = obj.TYPE_MEM 856 a.Reg = int16(Thearch.REGSP) 857 a.Offset = Ctxt.FixedFrameSize() 858 if needType { 859 a.Offset += int64(Widthptr) 860 } 861 p2 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, nil) 862 p2.To = p.To 863 p2.To.Offset += int64(Widthptr) 864 Regfree(&dst) 865 if needType { 866 src.Type = Types[Tptr] 867 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), typename(n.Type), &src) 868 p3 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, nil) 869 p3.To = p2.To 870 p3.To.Offset -= 2 * int64(Widthptr) 871 } 872 Regfree(&src) 873 Ginscall(writebarrierfn(funcName, Types[Tptr], Types[Tptr]), 0) 874 } 875 876 // cgen_norm moves n1 to res, truncating to expected type if necessary. 877 // n1 is a register, and cgen_norm frees it. 878 func cgen_norm(n, n1, res *Node) { 879 switch Ctxt.Arch.Family { 880 case sys.AMD64, sys.I386: 881 // We use sized math, so the result is already truncated. 882 default: 883 switch n.Op { 884 case OADD, OSUB, OMUL, ODIV, OCOM, OMINUS: 885 // TODO(rsc): What about left shift? 886 Thearch.Gins(Thearch.Optoas(OAS, n.Type), n1, n1) 887 } 888 } 889 890 Thearch.Gmove(n1, res) 891 Regfree(n1) 892 } 893 894 func Mgen(n *Node, n1 *Node, rg *Node) { 895 n1.Op = OEMPTY 896 897 if n.Addable { 898 *n1 = *n 899 if n1.Op == OREGISTER || n1.Op == OINDREG { 900 reg[n.Reg-int16(Thearch.REGMIN)]++ 901 } 902 return 903 } 904 905 Tempname(n1, n.Type) 906 Cgen(n, n1) 907 if n.Type.Width <= int64(Widthptr) || n.Type.IsFloat() { 908 n2 := *n1 909 Regalloc(n1, n.Type, rg) 910 Thearch.Gmove(&n2, n1) 911 } 912 } 913 914 func Mfree(n *Node) { 915 if n.Op == OREGISTER { 916 Regfree(n) 917 } 918 } 919 920 // allocate a register (reusing res if possible) and generate 921 // a = n 922 // The caller must call Regfree(a). 923 func Cgenr(n *Node, a *Node, res *Node) { 924 if Debug['g'] != 0 { 925 Dump("cgenr-n", n) 926 } 927 928 if Isfat(n.Type) { 929 Fatalf("cgenr on fat node") 930 } 931 932 if n.Addable { 933 Regalloc(a, n.Type, res) 934 Thearch.Gmove(n, a) 935 return 936 } 937 938 switch n.Op { 939 case ONAME, 940 ODOT, 941 ODOTPTR, 942 OINDEX, 943 OCALLFUNC, 944 OCALLMETH, 945 OCALLINTER: 946 var n1 Node 947 Igen(n, &n1, res) 948 Regalloc(a, n.Type, &n1) 949 Thearch.Gmove(&n1, a) 950 Regfree(&n1) 951 952 default: 953 Regalloc(a, n.Type, res) 954 Cgen(n, a) 955 } 956 } 957 958 // allocate a register (reusing res if possible) and generate 959 // a = &n 960 // The caller must call Regfree(a). 961 // The generated code checks that the result is not nil. 962 func Agenr(n *Node, a *Node, res *Node) { 963 if Debug['g'] != 0 { 964 Dump("\nagenr-n", n) 965 } 966 967 nl := n.Left 968 nr := n.Right 969 970 switch n.Op { 971 case ODOT, ODOTPTR, OCALLFUNC, OCALLMETH, OCALLINTER: 972 var n1 Node 973 Igen(n, &n1, res) 974 Regalloc(a, Types[Tptr], &n1) 975 Agen(&n1, a) 976 Regfree(&n1) 977 978 case OIND: 979 Cgenr(n.Left, a, res) 980 if !n.Left.NonNil { 981 Cgen_checknil(a) 982 } else if Debug_checknil != 0 && n.Lineno > 1 { 983 Warnl(n.Lineno, "removed nil check") 984 } 985 986 case OINDEX: 987 if Ctxt.Arch.Family == sys.ARM { 988 var p2 *obj.Prog // to be patched to panicindex. 
989 w := uint32(n.Type.Width) 990 bounded := Debug['B'] != 0 || n.Bounded 991 var n1 Node 992 var n3 Node 993 if nr.Addable { 994 var tmp Node 995 if !Isconst(nr, CTINT) { 996 Tempname(&tmp, Types[TINT32]) 997 } 998 if !Isconst(nl, CTSTR) { 999 Agenr(nl, &n3, res) 1000 } 1001 if !Isconst(nr, CTINT) { 1002 p2 = Thearch.Cgenindex(nr, &tmp, bounded) 1003 Regalloc(&n1, tmp.Type, nil) 1004 Thearch.Gmove(&tmp, &n1) 1005 } 1006 } else if nl.Addable { 1007 if !Isconst(nr, CTINT) { 1008 var tmp Node 1009 Tempname(&tmp, Types[TINT32]) 1010 p2 = Thearch.Cgenindex(nr, &tmp, bounded) 1011 Regalloc(&n1, tmp.Type, nil) 1012 Thearch.Gmove(&tmp, &n1) 1013 } 1014 1015 if !Isconst(nl, CTSTR) { 1016 Agenr(nl, &n3, res) 1017 } 1018 } else { 1019 var tmp Node 1020 Tempname(&tmp, Types[TINT32]) 1021 p2 = Thearch.Cgenindex(nr, &tmp, bounded) 1022 nr = &tmp 1023 if !Isconst(nl, CTSTR) { 1024 Agenr(nl, &n3, res) 1025 } 1026 Regalloc(&n1, tmp.Type, nil) 1027 Thearch.Gins(Thearch.Optoas(OAS, tmp.Type), &tmp, &n1) 1028 } 1029 1030 // &a is in &n3 (allocated in res) 1031 // i is in &n1 (if not constant) 1032 // w is width 1033 1034 // constant index 1035 if Isconst(nr, CTINT) { 1036 if Isconst(nl, CTSTR) { 1037 Fatalf("constant string constant index") 1038 } 1039 v := uint64(nr.Int64()) 1040 var n2 Node 1041 if nl.Type.IsSlice() || nl.Type.IsString() { 1042 if Debug['B'] == 0 && !n.Bounded { 1043 n1 = n3 1044 n1.Op = OINDREG 1045 n1.Type = Types[Tptr] 1046 n1.Xoffset = int64(Array_nel) 1047 Nodconst(&n2, Types[TUINT32], int64(v)) 1048 p1 := Thearch.Ginscmp(OGT, Types[TUINT32], &n1, &n2, +1) 1049 Ginscall(Panicindex, -1) 1050 Patch(p1, Pc) 1051 } 1052 1053 n1 = n3 1054 n1.Op = OINDREG 1055 n1.Type = Types[Tptr] 1056 n1.Xoffset = int64(Array_array) 1057 Thearch.Gmove(&n1, &n3) 1058 } 1059 1060 Nodconst(&n2, Types[Tptr], int64(v*uint64(w))) 1061 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1062 *a = n3 1063 break 1064 } 1065 1066 var n2 Node 1067 Regalloc(&n2, Types[TINT32], &n1) // i 1068 Thearch.Gmove(&n1, &n2) 1069 Regfree(&n1) 1070 1071 var n4 Node 1072 if Debug['B'] == 0 && !n.Bounded { 1073 // check bounds 1074 if Isconst(nl, CTSTR) { 1075 Nodconst(&n4, Types[TUINT32], int64(len(nl.Val().U.(string)))) 1076 } else if nl.Type.IsSlice() || nl.Type.IsString() { 1077 n1 = n3 1078 n1.Op = OINDREG 1079 n1.Type = Types[Tptr] 1080 n1.Xoffset = int64(Array_nel) 1081 Regalloc(&n4, Types[TUINT32], nil) 1082 Thearch.Gmove(&n1, &n4) 1083 } else { 1084 Nodconst(&n4, Types[TUINT32], nl.Type.NumElem()) 1085 } 1086 p1 := Thearch.Ginscmp(OLT, Types[TUINT32], &n2, &n4, +1) 1087 if n4.Op == OREGISTER { 1088 Regfree(&n4) 1089 } 1090 if p2 != nil { 1091 Patch(p2, Pc) 1092 } 1093 Ginscall(Panicindex, -1) 1094 Patch(p1, Pc) 1095 } 1096 1097 if Isconst(nl, CTSTR) { 1098 Regalloc(&n3, Types[Tptr], res) 1099 p1 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &n3) 1100 Datastring(nl.Val().U.(string), &p1.From) 1101 p1.From.Type = obj.TYPE_ADDR 1102 } else if nl.Type.IsSlice() || nl.Type.IsString() { 1103 n1 = n3 1104 n1.Op = OINDREG 1105 n1.Type = Types[Tptr] 1106 n1.Xoffset = int64(Array_array) 1107 Thearch.Gmove(&n1, &n3) 1108 } 1109 1110 if w == 0 { 1111 // nothing to do 1112 } else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) { 1113 // done by back end 1114 } else if w == 1 { 1115 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1116 } else { 1117 if w&(w-1) == 0 { 1118 // Power of 2. Use shift. 
1119 Thearch.Ginscon(Thearch.Optoas(OLSH, Types[TUINT32]), int64(log2(uint64(w))), &n2) 1120 } else { 1121 // Not a power of 2. Use multiply. 1122 Regalloc(&n4, Types[TUINT32], nil) 1123 Nodconst(&n1, Types[TUINT32], int64(w)) 1124 Thearch.Gmove(&n1, &n4) 1125 Thearch.Gins(Thearch.Optoas(OMUL, Types[TUINT32]), &n4, &n2) 1126 Regfree(&n4) 1127 } 1128 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1129 } 1130 *a = n3 1131 Regfree(&n2) 1132 break 1133 } 1134 if Ctxt.Arch.Family == sys.I386 { 1135 var p2 *obj.Prog // to be patched to panicindex. 1136 w := uint32(n.Type.Width) 1137 bounded := Debug['B'] != 0 || n.Bounded 1138 var n3 Node 1139 var tmp Node 1140 var n1 Node 1141 if nr.Addable { 1142 // Generate &nl first, and move nr into register. 1143 if !Isconst(nl, CTSTR) { 1144 Igen(nl, &n3, res) 1145 } 1146 if !Isconst(nr, CTINT) { 1147 p2 = Thearch.Igenindex(nr, &tmp, bounded) 1148 Regalloc(&n1, tmp.Type, nil) 1149 Thearch.Gmove(&tmp, &n1) 1150 } 1151 } else if nl.Addable { 1152 // Generate nr first, and move &nl into register. 1153 if !Isconst(nr, CTINT) { 1154 p2 = Thearch.Igenindex(nr, &tmp, bounded) 1155 Regalloc(&n1, tmp.Type, nil) 1156 Thearch.Gmove(&tmp, &n1) 1157 } 1158 1159 if !Isconst(nl, CTSTR) { 1160 Igen(nl, &n3, res) 1161 } 1162 } else { 1163 p2 = Thearch.Igenindex(nr, &tmp, bounded) 1164 nr = &tmp 1165 if !Isconst(nl, CTSTR) { 1166 Igen(nl, &n3, res) 1167 } 1168 Regalloc(&n1, tmp.Type, nil) 1169 Thearch.Gins(Thearch.Optoas(OAS, tmp.Type), &tmp, &n1) 1170 } 1171 1172 // For fixed array we really want the pointer in n3. 1173 var n2 Node 1174 if nl.Type.IsArray() { 1175 Regalloc(&n2, Types[Tptr], &n3) 1176 Agen(&n3, &n2) 1177 Regfree(&n3) 1178 n3 = n2 1179 } 1180 1181 // &a[0] is in n3 (allocated in res) 1182 // i is in n1 (if not constant) 1183 // len(a) is in nlen (if needed) 1184 // w is width 1185 1186 // constant index 1187 if Isconst(nr, CTINT) { 1188 if Isconst(nl, CTSTR) { 1189 Fatalf("constant string constant index") // front end should handle 1190 } 1191 v := uint64(nr.Int64()) 1192 if nl.Type.IsSlice() || nl.Type.IsString() { 1193 if Debug['B'] == 0 && !n.Bounded { 1194 nlen := n3 1195 nlen.Type = Types[TUINT32] 1196 nlen.Xoffset += int64(Array_nel) 1197 Nodconst(&n2, Types[TUINT32], int64(v)) 1198 p1 := Thearch.Ginscmp(OGT, Types[TUINT32], &nlen, &n2, +1) 1199 Ginscall(Panicindex, -1) 1200 Patch(p1, Pc) 1201 } 1202 } 1203 1204 // Load base pointer in n2 = n3. 1205 Regalloc(&n2, Types[Tptr], &n3) 1206 1207 n3.Type = Types[Tptr] 1208 n3.Xoffset += int64(Array_array) 1209 Thearch.Gmove(&n3, &n2) 1210 Regfree(&n3) 1211 if v*uint64(w) != 0 { 1212 Nodconst(&n1, Types[Tptr], int64(v*uint64(w))) 1213 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n1, &n2) 1214 } 1215 *a = n2 1216 break 1217 } 1218 1219 // i is in register n1, extend to 32 bits. 
1220 t := Types[TUINT32] 1221 1222 if n1.Type.IsSigned() { 1223 t = Types[TINT32] 1224 } 1225 1226 Regalloc(&n2, t, &n1) // i 1227 Thearch.Gmove(&n1, &n2) 1228 Regfree(&n1) 1229 1230 if Debug['B'] == 0 && !n.Bounded { 1231 // check bounds 1232 t := Types[TUINT32] 1233 1234 var nlen Node 1235 if Isconst(nl, CTSTR) { 1236 Nodconst(&nlen, t, int64(len(nl.Val().U.(string)))) 1237 } else if nl.Type.IsSlice() || nl.Type.IsString() { 1238 nlen = n3 1239 nlen.Type = t 1240 nlen.Xoffset += int64(Array_nel) 1241 } else { 1242 Nodconst(&nlen, t, nl.Type.NumElem()) 1243 } 1244 1245 p1 := Thearch.Ginscmp(OLT, t, &n2, &nlen, +1) 1246 if p2 != nil { 1247 Patch(p2, Pc) 1248 } 1249 Ginscall(Panicindex, -1) 1250 Patch(p1, Pc) 1251 } 1252 1253 if Isconst(nl, CTSTR) { 1254 Regalloc(&n3, Types[Tptr], res) 1255 p1 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &n3) 1256 Datastring(nl.Val().U.(string), &p1.From) 1257 p1.From.Type = obj.TYPE_ADDR 1258 Thearch.Gins(Thearch.Optoas(OADD, n3.Type), &n2, &n3) 1259 goto indexdone1 1260 } 1261 1262 // Load base pointer in n3. 1263 Regalloc(&tmp, Types[Tptr], &n3) 1264 1265 if nl.Type.IsSlice() || nl.Type.IsString() { 1266 n3.Type = Types[Tptr] 1267 n3.Xoffset += int64(Array_array) 1268 Thearch.Gmove(&n3, &tmp) 1269 } 1270 1271 Regfree(&n3) 1272 n3 = tmp 1273 1274 if w == 0 { 1275 // nothing to do 1276 } else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) { 1277 // done by back end 1278 } else if w == 1 { 1279 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1280 } else { 1281 if w&(w-1) == 0 { 1282 // Power of 2. Use shift. 1283 Thearch.Ginscon(Thearch.Optoas(OLSH, Types[TUINT32]), int64(log2(uint64(w))), &n2) 1284 } else { 1285 // Not a power of 2. Use multiply. 1286 Thearch.Ginscon(Thearch.Optoas(OMUL, Types[TUINT32]), int64(w), &n2) 1287 } 1288 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1289 } 1290 1291 indexdone1: 1292 *a = n3 1293 Regfree(&n2) 1294 break 1295 } 1296 1297 freelen := 0 1298 w := uint64(n.Type.Width) 1299 1300 // Generate the non-addressable child first. 1301 var n3 Node 1302 var nlen Node 1303 var tmp Node 1304 var n1 Node 1305 if nr.Addable { 1306 goto irad 1307 } 1308 if nl.Addable { 1309 Cgenr(nr, &n1, nil) 1310 if !Isconst(nl, CTSTR) { 1311 if nl.Type.IsArray() { 1312 Agenr(nl, &n3, res) 1313 } else { 1314 Igen(nl, &nlen, res) 1315 freelen = 1 1316 nlen.Type = Types[Tptr] 1317 nlen.Xoffset += int64(Array_array) 1318 Regalloc(&n3, Types[Tptr], res) 1319 Thearch.Gmove(&nlen, &n3) 1320 nlen.Type = Types[Simtype[TUINT]] 1321 nlen.Xoffset += int64(Array_nel) - int64(Array_array) 1322 } 1323 } 1324 1325 goto index 1326 } 1327 1328 Tempname(&tmp, nr.Type) 1329 Cgen(nr, &tmp) 1330 nr = &tmp 1331 1332 irad: 1333 if !Isconst(nl, CTSTR) { 1334 if nl.Type.IsArray() { 1335 Agenr(nl, &n3, res) 1336 } else { 1337 if !nl.Addable { 1338 if res != nil && res.Op == OREGISTER { // give up res, which we don't need yet. 1339 Regfree(res) 1340 } 1341 1342 // igen will need an addressable node. 
1343 var tmp2 Node 1344 Tempname(&tmp2, nl.Type) 1345 Cgen(nl, &tmp2) 1346 nl = &tmp2 1347 1348 if res != nil && res.Op == OREGISTER { // reacquire res 1349 Regrealloc(res) 1350 } 1351 } 1352 1353 Igen(nl, &nlen, res) 1354 freelen = 1 1355 nlen.Type = Types[Tptr] 1356 nlen.Xoffset += int64(Array_array) 1357 Regalloc(&n3, Types[Tptr], res) 1358 Thearch.Gmove(&nlen, &n3) 1359 nlen.Type = Types[Simtype[TUINT]] 1360 nlen.Xoffset += int64(Array_nel) - int64(Array_array) 1361 } 1362 } 1363 1364 if !Isconst(nr, CTINT) { 1365 Cgenr(nr, &n1, nil) 1366 } 1367 1368 goto index 1369 1370 // &a is in &n3 (allocated in res) 1371 // i is in &n1 (if not constant) 1372 // len(a) is in nlen (if needed) 1373 // w is width 1374 1375 // constant index 1376 index: 1377 if Isconst(nr, CTINT) { 1378 if Isconst(nl, CTSTR) { 1379 Fatalf("constant string constant index") // front end should handle 1380 } 1381 v := uint64(nr.Int64()) 1382 if nl.Type.IsSlice() || nl.Type.IsString() { 1383 if Debug['B'] == 0 && !n.Bounded { 1384 p1 := Thearch.Ginscmp(OGT, Types[Simtype[TUINT]], &nlen, Nodintconst(int64(v)), +1) 1385 Ginscall(Panicindex, -1) 1386 Patch(p1, Pc) 1387 } 1388 1389 Regfree(&nlen) 1390 } 1391 1392 if v*w != 0 { 1393 Thearch.Ginscon(Thearch.Optoas(OADD, Types[Tptr]), int64(v*w), &n3) 1394 } 1395 *a = n3 1396 break 1397 } 1398 1399 // type of the index 1400 t := Types[TUINT64] 1401 1402 if n1.Type.IsSigned() { 1403 t = Types[TINT64] 1404 } 1405 1406 var n2 Node 1407 Regalloc(&n2, t, &n1) // i 1408 Thearch.Gmove(&n1, &n2) 1409 Regfree(&n1) 1410 1411 if Debug['B'] == 0 && !n.Bounded { 1412 // check bounds 1413 t = Types[Simtype[TUINT]] 1414 1415 if Is64(nr.Type) { 1416 t = Types[TUINT64] 1417 } 1418 if Isconst(nl, CTSTR) { 1419 Nodconst(&nlen, t, int64(len(nl.Val().U.(string)))) 1420 } else if nl.Type.IsSlice() || nl.Type.IsString() { 1421 // nlen already initialized 1422 } else { 1423 Nodconst(&nlen, t, nl.Type.NumElem()) 1424 } 1425 1426 p1 := Thearch.Ginscmp(OLT, t, &n2, &nlen, +1) 1427 Ginscall(Panicindex, -1) 1428 Patch(p1, Pc) 1429 } 1430 1431 if Isconst(nl, CTSTR) { 1432 Regalloc(&n3, Types[Tptr], res) 1433 p1 := Thearch.Gins(Thearch.Optoas(OAS, n3.Type), nil, &n3) // XXX was LEAQ! 1434 Datastring(nl.Val().U.(string), &p1.From) 1435 p1.From.Type = obj.TYPE_ADDR 1436 Thearch.Gins(Thearch.Optoas(OADD, n3.Type), &n2, &n3) 1437 goto indexdone 1438 } 1439 1440 if w == 0 { 1441 // nothing to do 1442 } else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) { 1443 // done by back end 1444 } else if w == 1 { 1445 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1446 } else { 1447 if w&(w-1) == 0 { 1448 // Power of 2. Use shift. 1449 Thearch.Ginscon(Thearch.Optoas(OLSH, t), int64(log2(w)), &n2) 1450 } else { 1451 // Not a power of 2. Use multiply. 1452 Thearch.Ginscon(Thearch.Optoas(OMUL, t), int64(w), &n2) 1453 } 1454 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3) 1455 } 1456 1457 indexdone: 1458 *a = n3 1459 Regfree(&n2) 1460 if freelen != 0 { 1461 Regfree(&nlen) 1462 } 1463 1464 default: 1465 Regalloc(a, Types[Tptr], res) 1466 Agen(n, a) 1467 } 1468 } 1469 1470 // log2 returns the logarithm base 2 of n. n must be a power of 2. 1471 func log2(n uint64) int { 1472 x := 0 1473 for n>>uint(x) != 1 { 1474 x++ 1475 } 1476 return x 1477 } 1478 1479 // generate: 1480 // res = &n; 1481 // The generated code checks that the result is not nil. 
1482 func Agen(n *Node, res *Node) { 1483 if Debug['g'] != 0 { 1484 Dump("\nagen-res", res) 1485 Dump("agen-r", n) 1486 } 1487 1488 if n == nil || n.Type == nil { 1489 return 1490 } 1491 1492 for n.Op == OCONVNOP { 1493 n = n.Left 1494 } 1495 1496 if Isconst(n, CTNIL) && n.Type.Width > int64(Widthptr) { 1497 // Use of a nil interface or nil slice. 1498 // Create a temporary we can take the address of and read. 1499 // The generated code is just going to panic, so it need not 1500 // be terribly efficient. See issue 3670. 1501 var n1 Node 1502 Tempname(&n1, n.Type) 1503 1504 Gvardef(&n1) 1505 Thearch.Clearfat(&n1) 1506 var n2 Node 1507 Regalloc(&n2, Types[Tptr], res) 1508 var n3 Node 1509 n3.Op = OADDR 1510 n3.Left = &n1 1511 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &n3, &n2) 1512 Thearch.Gmove(&n2, res) 1513 Regfree(&n2) 1514 return 1515 } 1516 1517 if n.Op == OINDREG && n.Xoffset == 0 { 1518 // Generate MOVW R0, R1 instead of MOVW $0(R0), R1. 1519 // This allows better move propagation in the back ends 1520 // (and maybe it helps the processor). 1521 n1 := *n 1522 n1.Op = OREGISTER 1523 n1.Type = res.Type 1524 Thearch.Gmove(&n1, res) 1525 return 1526 } 1527 1528 if n.Addable { 1529 if n.Op == OREGISTER { 1530 Fatalf("agen OREGISTER") 1531 } 1532 var n1 Node 1533 n1.Op = OADDR 1534 n1.Left = n 1535 var n2 Node 1536 Regalloc(&n2, Types[Tptr], res) 1537 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &n1, &n2) 1538 Thearch.Gmove(&n2, res) 1539 Regfree(&n2) 1540 return 1541 } 1542 1543 nl := n.Left 1544 1545 switch n.Op { 1546 default: 1547 Dump("bad agen", n) 1548 Fatalf("agen: unknown op %v", Nconv(n, FmtShort|FmtSign)) 1549 1550 case OCALLMETH: 1551 cgen_callmeth(n, 0) 1552 cgen_aret(n, res) 1553 1554 case OCALLINTER: 1555 cgen_callinter(n, res, 0) 1556 cgen_aret(n, res) 1557 1558 case OCALLFUNC: 1559 cgen_call(n, 0) 1560 cgen_aret(n, res) 1561 1562 case OEFACE, ODOTTYPE, OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1563 var n1 Node 1564 Tempname(&n1, n.Type) 1565 Cgen(n, &n1) 1566 Agen(&n1, res) 1567 1568 case OINDEX: 1569 var n1 Node 1570 Agenr(n, &n1, res) 1571 Thearch.Gmove(&n1, res) 1572 Regfree(&n1) 1573 1574 case OIND: 1575 Cgen(nl, res) 1576 if !nl.NonNil { 1577 Cgen_checknil(res) 1578 } else if Debug_checknil != 0 && n.Lineno > 1 { 1579 Warnl(n.Lineno, "removed nil check") 1580 } 1581 1582 case ODOT: 1583 Agen(nl, res) 1584 if n.Xoffset != 0 { 1585 addOffset(res, n.Xoffset) 1586 } 1587 1588 case ODOTPTR: 1589 Cgen(nl, res) 1590 if !nl.NonNil { 1591 Cgen_checknil(res) 1592 } else if Debug_checknil != 0 && n.Lineno > 1 { 1593 Warnl(n.Lineno, "removed nil check") 1594 } 1595 if n.Xoffset != 0 { 1596 addOffset(res, n.Xoffset) 1597 } 1598 } 1599 } 1600 1601 func addOffset(res *Node, offset int64) { 1602 if Ctxt.Arch.InFamily(sys.AMD64, sys.I386) { 1603 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), Nodintconst(offset), res) 1604 return 1605 } 1606 1607 var n1, n2 Node 1608 Regalloc(&n1, Types[Tptr], nil) 1609 Thearch.Gmove(res, &n1) 1610 Regalloc(&n2, Types[Tptr], nil) 1611 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), Nodintconst(offset), &n2) 1612 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n1) 1613 Thearch.Gmove(&n1, res) 1614 Regfree(&n1) 1615 Regfree(&n2) 1616 } 1617 1618 // Igen computes the address &n, stores it in a register r, 1619 // and rewrites a to refer to *r. The chosen r may be the 1620 // stack pointer, it may be borrowed from res, or it may 1621 // be a newly allocated register. 
The caller must call Regfree(a) 1622 // to free r when the address is no longer needed. 1623 // The generated code ensures that &n is not nil. 1624 func Igen(n *Node, a *Node, res *Node) { 1625 if Debug['g'] != 0 { 1626 Dump("\nigen-n", n) 1627 } 1628 1629 switch n.Op { 1630 case ONAME: 1631 if n.Class == PAUTOHEAP { 1632 Dump("igen", n) 1633 Fatalf("bad name") 1634 } 1635 *a = *n 1636 return 1637 1638 case OINDREG: 1639 // Increase the refcount of the register so that igen's caller 1640 // has to call Regfree. 1641 if n.Reg != int16(Thearch.REGSP) { 1642 reg[n.Reg-int16(Thearch.REGMIN)]++ 1643 } 1644 *a = *n 1645 return 1646 1647 case ODOT: 1648 Igen(n.Left, a, res) 1649 a.Xoffset += n.Xoffset 1650 a.Type = n.Type 1651 Fixlargeoffset(a) 1652 return 1653 1654 case ODOTPTR: 1655 Cgenr(n.Left, a, res) 1656 if !n.Left.NonNil { 1657 Cgen_checknil(a) 1658 } else if Debug_checknil != 0 && n.Lineno > 1 { 1659 Warnl(n.Lineno, "removed nil check") 1660 } 1661 a.Op = OINDREG 1662 a.Xoffset += n.Xoffset 1663 a.Type = n.Type 1664 Fixlargeoffset(a) 1665 return 1666 1667 case OCALLFUNC, OCALLMETH, OCALLINTER: 1668 switch n.Op { 1669 case OCALLFUNC: 1670 cgen_call(n, 0) 1671 1672 case OCALLMETH: 1673 cgen_callmeth(n, 0) 1674 1675 case OCALLINTER: 1676 cgen_callinter(n, nil, 0) 1677 } 1678 1679 fp := n.Left.Type.Results().Field(0) 1680 *a = Node{} 1681 a.Op = OINDREG 1682 a.Reg = int16(Thearch.REGSP) 1683 a.Addable = true 1684 a.Xoffset = fp.Offset + Ctxt.FixedFrameSize() 1685 a.Type = n.Type 1686 return 1687 1688 case OINDEX: 1689 // Index of fixed-size array by constant can 1690 // put the offset in the addressing. 1691 // Could do the same for slice except that we need 1692 // to use the real index for the bounds checking. 1693 if n.Left.Type.IsArray() || (n.Left.Type.IsPtr() && n.Left.Left.Type.IsArray()) { 1694 if Isconst(n.Right, CTINT) { 1695 // Compute &a. 1696 if !n.Left.Type.IsPtr() { 1697 Igen(n.Left, a, res) 1698 } else { 1699 var n1 Node 1700 Igen(n.Left, &n1, res) 1701 Cgen_checknil(&n1) 1702 Regalloc(a, Types[Tptr], res) 1703 Thearch.Gmove(&n1, a) 1704 Regfree(&n1) 1705 a.Op = OINDREG 1706 } 1707 1708 // Compute &a[i] as &a + i*width. 1709 a.Type = n.Type 1710 1711 a.Xoffset += n.Right.Int64() * n.Type.Width 1712 Fixlargeoffset(a) 1713 return 1714 } 1715 } 1716 } 1717 1718 Agenr(n, a, res) 1719 a.Op = OINDREG 1720 a.Type = n.Type 1721 } 1722 1723 // Bgen generates code for branches: 1724 // 1725 // if n == wantTrue { 1726 // goto to 1727 // } 1728 func Bgen(n *Node, wantTrue bool, likely int, to *obj.Prog) { 1729 bgenx(n, nil, wantTrue, likely, to) 1730 } 1731 1732 // Bvgen generates code for calculating boolean values: 1733 // res = n == wantTrue 1734 func Bvgen(n, res *Node, wantTrue bool) { 1735 if Thearch.Ginsboolval == nil { 1736 // Direct value generation not implemented for this architecture. 1737 // Implement using jumps. 1738 bvgenjump(n, res, wantTrue, true) 1739 return 1740 } 1741 bgenx(n, res, wantTrue, 0, nil) 1742 } 1743 1744 // bvgenjump implements boolean value generation using jumps: 1745 // if n == wantTrue { 1746 // res = 1 1747 // } else { 1748 // res = 0 1749 // } 1750 // geninit controls whether n's Ninit is generated. 
1751 func bvgenjump(n, res *Node, wantTrue, geninit bool) { 1752 init := n.Ninit 1753 if !geninit { 1754 n.Ninit.Set(nil) 1755 } 1756 p1 := Gbranch(obj.AJMP, nil, 0) 1757 p2 := Pc 1758 Thearch.Gmove(Nodbool(true), res) 1759 p3 := Gbranch(obj.AJMP, nil, 0) 1760 Patch(p1, Pc) 1761 Bgen(n, wantTrue, 0, p2) 1762 Thearch.Gmove(Nodbool(false), res) 1763 Patch(p3, Pc) 1764 n.Ninit.MoveNodes(&init) 1765 } 1766 1767 // bgenx is the backend for Bgen and Bvgen. 1768 // If res is nil, it generates a branch. 1769 // Otherwise, it generates a boolean value. 1770 func bgenx(n, res *Node, wantTrue bool, likely int, to *obj.Prog) { 1771 if Debug['g'] != 0 { 1772 fmt.Printf("\nbgenx wantTrue=%t likely=%d to=%v\n", wantTrue, likely, to) 1773 Dump("n", n) 1774 Dump("res", res) 1775 } 1776 1777 genval := res != nil 1778 1779 if n == nil { 1780 n = Nodbool(true) 1781 } 1782 1783 Genlist(n.Ninit) 1784 1785 if n.Type == nil { 1786 n = convlit(n, Types[TBOOL]) 1787 if n.Type == nil { 1788 return 1789 } 1790 } 1791 1792 if !n.Type.IsBoolean() { 1793 Fatalf("bgen: bad type %v for %v", n.Type, n.Op) 1794 } 1795 1796 for n.Op == OCONVNOP { 1797 n = n.Left 1798 Genlist(n.Ninit) 1799 } 1800 1801 if Thearch.Bgen_float != nil && n.Left != nil && n.Left.Type.IsFloat() { 1802 if genval { 1803 bvgenjump(n, res, wantTrue, false) 1804 return 1805 } 1806 Thearch.Bgen_float(n, wantTrue, likely, to) 1807 return 1808 } 1809 1810 switch n.Op { 1811 default: 1812 if genval { 1813 Cgen(n, res) 1814 if !wantTrue { 1815 Thearch.Gins(Thearch.Optoas(OXOR, Types[TUINT8]), Nodintconst(1), res) 1816 } 1817 return 1818 } 1819 1820 var tmp Node 1821 Regalloc(&tmp, n.Type, nil) 1822 Cgen(n, &tmp) 1823 bgenNonZero(&tmp, nil, wantTrue, likely, to) 1824 Regfree(&tmp) 1825 return 1826 1827 case ONAME: 1828 // Some architectures might need a temporary or other help here, 1829 // but they don't support direct generation of a bool value yet. 1830 // We can fix that as we go. 1831 mayNeedTemp := Ctxt.Arch.InFamily(sys.ARM, sys.ARM64, sys.MIPS64, sys.PPC64, sys.S390X) 1832 1833 if genval { 1834 if mayNeedTemp { 1835 Fatalf("genval ONAMES not fully implemented") 1836 } 1837 Cgen(n, res) 1838 if !wantTrue { 1839 Thearch.Gins(Thearch.Optoas(OXOR, Types[TUINT8]), Nodintconst(1), res) 1840 } 1841 return 1842 } 1843 1844 if n.Addable && !mayNeedTemp { 1845 // no need for a temporary 1846 bgenNonZero(n, nil, wantTrue, likely, to) 1847 return 1848 } 1849 var tmp Node 1850 Regalloc(&tmp, n.Type, nil) 1851 Cgen(n, &tmp) 1852 bgenNonZero(&tmp, nil, wantTrue, likely, to) 1853 Regfree(&tmp) 1854 return 1855 1856 case OLITERAL: 1857 // n is a constant. 1858 if !Isconst(n, CTBOOL) { 1859 Fatalf("bgen: non-bool const %v\n", Nconv(n, FmtLong)) 1860 } 1861 if genval { 1862 Cgen(Nodbool(wantTrue == n.Val().U.(bool)), res) 1863 return 1864 } 1865 // If n == wantTrue, jump; otherwise do nothing. 
1866 if wantTrue == n.Val().U.(bool) { 1867 Patch(Gbranch(obj.AJMP, nil, likely), to) 1868 } 1869 return 1870 1871 case OANDAND, OOROR: 1872 and := (n.Op == OANDAND) == wantTrue 1873 if genval { 1874 p1 := Gbranch(obj.AJMP, nil, 0) 1875 p2 := Gbranch(obj.AJMP, nil, 0) 1876 Patch(p2, Pc) 1877 Cgen(Nodbool(!and), res) 1878 p3 := Gbranch(obj.AJMP, nil, 0) 1879 Patch(p1, Pc) 1880 Bgen(n.Left, wantTrue != and, 0, p2) 1881 Bvgen(n.Right, res, wantTrue) 1882 Patch(p3, Pc) 1883 return 1884 } 1885 1886 if and { 1887 p1 := Gbranch(obj.AJMP, nil, 0) 1888 p2 := Gbranch(obj.AJMP, nil, 0) 1889 Patch(p1, Pc) 1890 Bgen(n.Left, !wantTrue, -likely, p2) 1891 Bgen(n.Right, !wantTrue, -likely, p2) 1892 p1 = Gbranch(obj.AJMP, nil, 0) 1893 Patch(p1, to) 1894 Patch(p2, Pc) 1895 } else { 1896 Bgen(n.Left, wantTrue, likely, to) 1897 Bgen(n.Right, wantTrue, likely, to) 1898 } 1899 return 1900 1901 case ONOT: // unary 1902 if n.Left == nil || n.Left.Type == nil { 1903 return 1904 } 1905 bgenx(n.Left, res, !wantTrue, likely, to) 1906 return 1907 1908 case OEQ, ONE, OLT, OGT, OLE, OGE: 1909 if n.Left == nil || n.Left.Type == nil || n.Right == nil || n.Right.Type == nil { 1910 return 1911 } 1912 } 1913 1914 // n.Op is one of OEQ, ONE, OLT, OGT, OLE, OGE 1915 nl := n.Left 1916 nr := n.Right 1917 op := n.Op 1918 1919 if !wantTrue { 1920 if nr.Type.IsFloat() { 1921 // Brcom is not valid on floats when NaN is involved. 1922 ll := n.Ninit // avoid re-genning Ninit 1923 n.Ninit.Set(nil) 1924 if genval { 1925 bgenx(n, res, true, likely, to) 1926 Thearch.Gins(Thearch.Optoas(OXOR, Types[TUINT8]), Nodintconst(1), res) // res = !res 1927 n.Ninit.Set(ll.Slice()) 1928 return 1929 } 1930 p1 := Gbranch(obj.AJMP, nil, 0) 1931 p2 := Gbranch(obj.AJMP, nil, 0) 1932 Patch(p1, Pc) 1933 bgenx(n, res, true, -likely, p2) 1934 Patch(Gbranch(obj.AJMP, nil, 0), to) 1935 Patch(p2, Pc) 1936 n.Ninit.Set(ll.Slice()) 1937 return 1938 } 1939 1940 op = Brcom(op) 1941 } 1942 wantTrue = true 1943 1944 // make simplest on right 1945 if nl.Op == OLITERAL || (nl.Ullman < nr.Ullman && nl.Ullman < UINF) { 1946 op = Brrev(op) 1947 nl, nr = nr, nl 1948 } 1949 1950 if nl.Type.IsSlice() || nl.Type.IsInterface() { 1951 // front end should only leave cmp to literal nil 1952 if (op != OEQ && op != ONE) || nr.Op != OLITERAL { 1953 if nl.Type.IsSlice() { 1954 Yyerror("illegal slice comparison") 1955 } else { 1956 Yyerror("illegal interface comparison") 1957 } 1958 return 1959 } 1960 1961 var ptr Node 1962 Igen(nl, &ptr, nil) 1963 if nl.Type.IsSlice() { 1964 ptr.Xoffset += int64(Array_array) 1965 } 1966 ptr.Type = Types[Tptr] 1967 var tmp Node 1968 Regalloc(&tmp, ptr.Type, &ptr) 1969 Cgen(&ptr, &tmp) 1970 Regfree(&ptr) 1971 bgenNonZero(&tmp, res, op == OEQ != wantTrue, likely, to) 1972 Regfree(&tmp) 1973 return 1974 } 1975 1976 if nl.Type.IsComplex() { 1977 complexbool(op, nl, nr, res, wantTrue, likely, to) 1978 return 1979 } 1980 1981 if Ctxt.Arch.RegSize == 4 && Is64(nr.Type) { 1982 if genval { 1983 // TODO: Teach Cmp64 to generate boolean values and remove this. 
1984 bvgenjump(n, res, wantTrue, false) 1985 return 1986 } 1987 if !nl.Addable || Isconst(nl, CTINT) { 1988 nl = CgenTemp(nl) 1989 } 1990 if !nr.Addable { 1991 nr = CgenTemp(nr) 1992 } 1993 Thearch.Cmp64(nl, nr, op, likely, to) 1994 return 1995 } 1996 1997 if nr.Ullman >= UINF { 1998 var n1 Node 1999 Regalloc(&n1, nl.Type, nil) 2000 Cgen(nl, &n1) 2001 nl = &n1 2002 2003 var tmp Node 2004 Tempname(&tmp, nl.Type) 2005 Thearch.Gmove(&n1, &tmp) 2006 Regfree(&n1) 2007 2008 var n2 Node 2009 Regalloc(&n2, nr.Type, nil) 2010 Cgen(nr, &n2) 2011 nr = &n2 2012 2013 Regalloc(&n1, nl.Type, nil) 2014 Cgen(&tmp, &n1) 2015 Regfree(&n1) 2016 Regfree(&n2) 2017 } else { 2018 var n1 Node 2019 if !nl.Addable && Ctxt.Arch.Family == sys.I386 { 2020 Tempname(&n1, nl.Type) 2021 } else { 2022 Regalloc(&n1, nl.Type, nil) 2023 defer Regfree(&n1) 2024 } 2025 Cgen(nl, &n1) 2026 nl = &n1 2027 2028 if Smallintconst(nr) && Ctxt.Arch.Family != sys.MIPS64 && Ctxt.Arch.Family != sys.PPC64 { 2029 Thearch.Gins(Thearch.Optoas(OCMP, nr.Type), nl, nr) 2030 bins(nr.Type, res, op, likely, to) 2031 return 2032 } 2033 2034 if !nr.Addable && Ctxt.Arch.Family == sys.I386 { 2035 nr = CgenTemp(nr) 2036 } 2037 2038 var n2 Node 2039 Regalloc(&n2, nr.Type, nil) 2040 Cgen(nr, &n2) 2041 nr = &n2 2042 Regfree(&n2) 2043 } 2044 2045 l, r := nl, nr 2046 2047 // On x86, only < and <= work right with NaN; reverse if needed 2048 if Ctxt.Arch.Family == sys.AMD64 && nl.Type.IsFloat() && (op == OGT || op == OGE) { 2049 l, r = r, l 2050 op = Brrev(op) 2051 } 2052 2053 // MIPS does not have CMP instruction 2054 if Ctxt.Arch.Family == sys.MIPS64 { 2055 p := Thearch.Ginscmp(op, nr.Type, l, r, likely) 2056 Patch(p, to) 2057 return 2058 } 2059 2060 // Do the comparison. 2061 Thearch.Gins(Thearch.Optoas(OCMP, nr.Type), l, r) 2062 2063 // Handle floating point special cases. 2064 // Note that 8g has Bgen_float and is handled above. 
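	// Background for the AMD64 float cases below (a sketch of the usual x86
	// semantics): an unordered comparison, i.e. one involving NaN, sets the
	// parity flag, so equality must also require "no parity" ("neither NE
	// nor P") and inequality must also accept "parity" ("either NE or P").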
2065 if nl.Type.IsFloat() { 2066 switch Ctxt.Arch.Family { 2067 case sys.ARM: 2068 if genval { 2069 Fatalf("genval 5g Isfloat special cases not implemented") 2070 } 2071 switch n.Op { 2072 case ONE: 2073 Patch(Gbranch(Thearch.Optoas(OPS, nr.Type), nr.Type, likely), to) 2074 Patch(Gbranch(Thearch.Optoas(op, nr.Type), nr.Type, likely), to) 2075 default: 2076 p := Gbranch(Thearch.Optoas(OPS, nr.Type), nr.Type, -likely) 2077 Patch(Gbranch(Thearch.Optoas(op, nr.Type), nr.Type, likely), to) 2078 Patch(p, Pc) 2079 } 2080 return 2081 case sys.AMD64: 2082 switch n.Op { 2083 case OEQ: 2084 // neither NE nor P 2085 if genval { 2086 var reg Node 2087 Regalloc(®, Types[TBOOL], nil) 2088 Thearch.Ginsboolval(Thearch.Optoas(OEQ, nr.Type), ®) 2089 Thearch.Ginsboolval(Thearch.Optoas(OPC, nr.Type), res) 2090 Thearch.Gins(Thearch.Optoas(OAND, Types[TBOOL]), ®, res) 2091 Regfree(®) 2092 } else { 2093 p1 := Gbranch(Thearch.Optoas(ONE, nr.Type), nil, -likely) 2094 p2 := Gbranch(Thearch.Optoas(OPS, nr.Type), nil, -likely) 2095 Patch(Gbranch(obj.AJMP, nil, 0), to) 2096 Patch(p1, Pc) 2097 Patch(p2, Pc) 2098 } 2099 return 2100 case ONE: 2101 // either NE or P 2102 if genval { 2103 var reg Node 2104 Regalloc(®, Types[TBOOL], nil) 2105 Thearch.Ginsboolval(Thearch.Optoas(ONE, nr.Type), ®) 2106 Thearch.Ginsboolval(Thearch.Optoas(OPS, nr.Type), res) 2107 Thearch.Gins(Thearch.Optoas(OOR, Types[TBOOL]), ®, res) 2108 Regfree(®) 2109 } else { 2110 Patch(Gbranch(Thearch.Optoas(ONE, nr.Type), nil, likely), to) 2111 Patch(Gbranch(Thearch.Optoas(OPS, nr.Type), nil, likely), to) 2112 } 2113 return 2114 } 2115 case sys.ARM64, sys.PPC64: 2116 if genval { 2117 Fatalf("genval 7g, 9g Isfloat special cases not implemented") 2118 } 2119 switch n.Op { 2120 // On arm64 and ppc64, <= and >= mishandle NaN. Must decompose into < or > and =. 2121 // TODO(josh): Convert a <= b to b > a instead? 2122 case OLE, OGE: 2123 if op == OLE { 2124 op = OLT 2125 } else { 2126 op = OGT 2127 } 2128 Patch(Gbranch(Thearch.Optoas(op, nr.Type), nr.Type, likely), to) 2129 Patch(Gbranch(Thearch.Optoas(OEQ, nr.Type), nr.Type, likely), to) 2130 return 2131 } 2132 } 2133 } 2134 2135 // Not a special case. Insert the conditional jump or value gen. 2136 bins(nr.Type, res, op, likely, to) 2137 } 2138 2139 func bgenNonZero(n, res *Node, wantTrue bool, likely int, to *obj.Prog) { 2140 // TODO: Optimize on systems that can compare to zero easily. 2141 var op Op = ONE 2142 if !wantTrue { 2143 op = OEQ 2144 } 2145 2146 // MIPS does not have CMP instruction 2147 if Thearch.LinkArch.Family == sys.MIPS64 { 2148 p := Gbranch(Thearch.Optoas(op, n.Type), n.Type, likely) 2149 Naddr(&p.From, n) 2150 Patch(p, to) 2151 return 2152 } 2153 2154 var zero Node 2155 Nodconst(&zero, n.Type, 0) 2156 Thearch.Gins(Thearch.Optoas(OCMP, n.Type), n, &zero) 2157 bins(n.Type, res, op, likely, to) 2158 } 2159 2160 // bins inserts an instruction to handle the result of a compare. 2161 // If res is non-nil, it inserts appropriate value generation instructions. 2162 // If res is nil, it inserts a branch to to. 2163 func bins(typ *Type, res *Node, op Op, likely int, to *obj.Prog) { 2164 a := Thearch.Optoas(op, typ) 2165 if res != nil { 2166 // value gen 2167 Thearch.Ginsboolval(a, res) 2168 } else { 2169 // jump 2170 Patch(Gbranch(a, typ, likely), to) 2171 } 2172 } 2173 2174 // stkof returns n's offset from SP if n is on the stack 2175 // (either a local variable or the return value from a function call 2176 // or the arguments to a function call). 
2177 // If n is not on the stack, stkof returns -1000. 2178 // If n is on the stack but in an unknown location 2179 // (due to array index arithmetic), stkof returns +1000. 2180 // 2181 // NOTE(rsc): It is possible that the ODOT and OINDEX cases 2182 // are not relevant here, since it shouldn't be possible for them 2183 // to be involved in an overlapping copy. Only function results 2184 // from one call and the arguments to the next can overlap in 2185 // any non-trivial way. If they can be dropped, then this function 2186 // becomes much simpler and also more trustworthy. 2187 // The fact that it works at all today is probably due to the fact 2188 // that ODOT and OINDEX are irrelevant. 2189 func stkof(n *Node) int64 { 2190 switch n.Op { 2191 case OINDREG: 2192 if n.Reg != int16(Thearch.REGSP) { 2193 return -1000 // not on stack 2194 } 2195 return n.Xoffset 2196 2197 case ODOT: 2198 t := n.Left.Type 2199 if t.IsPtr() { 2200 break 2201 } 2202 off := stkof(n.Left) 2203 if off == -1000 || off == +1000 { 2204 return off 2205 } 2206 return off + n.Xoffset 2207 2208 case OINDEX: 2209 t := n.Left.Type 2210 if !t.IsArray() { 2211 break 2212 } 2213 off := stkof(n.Left) 2214 if off == -1000 || off == +1000 { 2215 return off 2216 } 2217 if Isconst(n.Right, CTINT) { 2218 return off + t.Elem().Width*n.Right.Int64() 2219 } 2220 return +1000 // on stack but not sure exactly where 2221 2222 case OCALLMETH, OCALLINTER, OCALLFUNC: 2223 t := n.Left.Type 2224 if t.IsPtr() { 2225 t = t.Elem() 2226 } 2227 2228 f := t.Results().Field(0) 2229 if f != nil { 2230 return f.Offset + Ctxt.FixedFrameSize() 2231 } 2232 } 2233 2234 // botch - probably failing to recognize address 2235 // arithmetic on the above. eg INDEX and DOT 2236 return -1000 // not on stack 2237 } 2238 2239 // block copy: 2240 // memmove(&ns, &n, w); 2241 // if wb is true, needs write barrier. 2242 func sgen_wb(n *Node, ns *Node, w int64, wb bool) { 2243 if Debug['g'] != 0 { 2244 op := "sgen" 2245 if wb { 2246 op = "sgen-wb" 2247 } 2248 fmt.Printf("\n%s w=%d\n", op, w) 2249 Dump("r", n) 2250 Dump("res", ns) 2251 } 2252 2253 if n.Ullman >= UINF && ns.Ullman >= UINF { 2254 Fatalf("sgen UINF") 2255 } 2256 2257 if w < 0 { 2258 Fatalf("sgen copy %d", w) 2259 } 2260 2261 // If copying .args, that's all the results, so record definition sites 2262 // for them for the liveness analysis. 2263 if ns.Op == ONAME && ns.Sym.Name == ".args" { 2264 for _, ln := range Curfn.Func.Dcl { 2265 if ln.Class == PPARAMOUT { 2266 Gvardef(ln) 2267 } 2268 } 2269 } 2270 2271 // Avoid taking the address for simple enough types. 2272 if componentgen_wb(n, ns, wb) { 2273 return 2274 } 2275 2276 if w == 0 { 2277 // evaluate side effects only 2278 var nodr Node 2279 Regalloc(&nodr, Types[Tptr], nil) 2280 Agen(ns, &nodr) 2281 Agen(n, &nodr) 2282 Regfree(&nodr) 2283 return 2284 } 2285 2286 // offset on the stack 2287 osrc := stkof(n) 2288 odst := stkof(ns) 2289 2290 if odst != -1000 { 2291 // on stack, write barrier not needed after all 2292 wb = false 2293 } 2294 2295 if osrc != -1000 && odst != -1000 && (osrc == 1000 || odst == 1000) || wb && osrc != -1000 { 2296 // osrc and odst both on stack, and at least one is in 2297 // an unknown position. Could generate code to test 2298 // for forward/backward copy, but instead just copy 2299 // to a temporary location first. 2300 // 2301 // OR: write barrier needed and source is on stack. 2302 // Invoking the write barrier will use the stack to prepare its call. 2303 // Copy to temporary. 
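	// For example (illustrative): copying the fat result of one call straight
	// into the outgoing argument area of the next call may overlap on the
	// stack, so the value is staged through the temporary allocated below.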
2304 var tmp Node 2305 Tempname(&tmp, n.Type) 2306 sgen_wb(n, &tmp, w, false) 2307 sgen_wb(&tmp, ns, w, wb) 2308 return 2309 } 2310 2311 if wb { 2312 cgen_wbfat(n, ns) 2313 return 2314 } 2315 2316 Thearch.Blockcopy(n, ns, osrc, odst, w) 2317 } 2318 2319 // generate: 2320 // call f 2321 // proc=-1 normal call but no return 2322 // proc=0 normal call 2323 // proc=1 goroutine run in new proc 2324 // proc=2 defer call save away stack 2325 // proc=3 normal call to C pointer (not Go func value) 2326 func Ginscall(f *Node, proc int) { 2327 if f.Type != nil { 2328 extra := int32(0) 2329 if proc == 1 || proc == 2 { 2330 extra = 2 * int32(Widthptr) 2331 } 2332 Setmaxarg(f.Type, extra) 2333 } 2334 2335 switch proc { 2336 default: 2337 Fatalf("Ginscall: bad proc %d", proc) 2338 2339 case 0, // normal call 2340 -1: // normal call but no return 2341 if f.Op == ONAME && f.Class == PFUNC { 2342 if f == Deferreturn { 2343 // Deferred calls will appear to be returning to the CALL 2344 // deferreturn(SB) that we are about to emit. However, the 2345 // stack scanning code will think that the instruction 2346 // before the CALL is executing. To avoid the scanning 2347 // code making bad assumptions (both cosmetic such as 2348 // showing the wrong line number and fatal, such as being 2349 // confused over whether a stack slot contains a pointer 2350 // or a scalar) insert an actual hardware NOP that will 2351 // have the right line number. This is different from 2352 // obj.ANOP, which is a virtual no-op that doesn't make it 2353 // into the instruction stream. 2354 Thearch.Ginsnop() 2355 2356 if Thearch.LinkArch.Family == sys.PPC64 { 2357 // On ppc64, when compiling Go into position 2358 // independent code on ppc64le we insert an 2359 // instruction to reload the TOC pointer from the 2360 // stack as well. See the long comment near 2361 // jmpdefer in runtime/asm_ppc64.s for why. 2362 // If the MOVD is not needed, insert a hardware NOP 2363 // so that the same number of instructions are used 2364 // on ppc64 in both shared and non-shared modes. 
2365 					if Ctxt.Flag_shared {
2366 						p := Thearch.Gins(ppc64.AMOVD, nil, nil)
2367 						p.From.Type = obj.TYPE_MEM
2368 						p.From.Offset = 24
2369 						p.From.Reg = ppc64.REGSP
2370 						p.To.Type = obj.TYPE_REG
2371 						p.To.Reg = ppc64.REG_R2
2372 					} else {
2373 						Thearch.Ginsnop()
2374 					}
2375 				}
2376 			}
2377 
2378 			p := Thearch.Gins(obj.ACALL, nil, f)
2379 			Afunclit(&p.To, f)
2380 			if proc == -1 || Noreturn(p) {
2381 				Thearch.Gins(obj.AUNDEF, nil, nil)
2382 			}
2383 			break
2384 		}
2385 
2386 		var reg Node
2387 		Nodreg(&reg, Types[Tptr], Thearch.REGCTXT)
2388 		var r1 Node
2389 		Nodreg(&r1, Types[Tptr], Thearch.REGCALLX)
2390 		Thearch.Gmove(f, &reg)
2391 		reg.Op = OINDREG
2392 		Thearch.Gmove(&reg, &r1)
2393 		reg.Op = OREGISTER
2394 		Thearch.Gins(obj.ACALL, &reg, &r1)
2395 
2396 	case 3: // normal call of c function pointer
2397 		Thearch.Gins(obj.ACALL, nil, f)
2398 
2399 	case 1, // call in new proc (go)
2400 		2: // deferred call (defer)
2401 		var stk Node
2402 
2403 		// size of arguments at 0(SP)
2404 		stk.Op = OINDREG
2405 		stk.Reg = int16(Thearch.REGSP)
2406 		stk.Xoffset = Ctxt.FixedFrameSize()
2407 		Thearch.Ginscon(Thearch.Optoas(OAS, Types[TINT32]), int64(Argsize(f.Type)), &stk)
2408 
2409 		// FuncVal* at 8(SP)
2410 		stk.Xoffset = int64(Widthptr) + Ctxt.FixedFrameSize()
2411 
2412 		var reg Node
2413 		Nodreg(&reg, Types[Tptr], Thearch.REGCALLX2)
2414 		Thearch.Gmove(f, &reg)
2415 		Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &reg, &stk)
2416 
2417 		if proc == 1 {
2418 			Ginscall(Newproc, 0)
2419 		} else {
2420 			if !hasdefer {
2421 				Fatalf("hasdefer=0 but has defer")
2422 			}
2423 			Ginscall(Deferproc, 0)
2424 		}
2425 
2426 		if proc == 2 {
2427 			Nodreg(&reg, Types[TINT32], Thearch.REGRETURN)
2428 			p := Thearch.Ginscmp(OEQ, Types[TINT32], &reg, Nodintconst(0), +1)
2429 			cgen_ret(nil)
2430 			Patch(p, Pc)
2431 		}
2432 	}
2433 }
2434 
2435 // n is call to interface method.
2436 // generate res = n.
2437 func cgen_callinter(n *Node, res *Node, proc int) {
2438 	i := n.Left
2439 	if i.Op != ODOTINTER {
2440 		Fatalf("cgen_callinter: not ODOTINTER %v", i.Op)
2441 	}
2442 
2443 	i = i.Left // interface
2444 
2445 	if !i.Addable {
2446 		var tmpi Node
2447 		Tempname(&tmpi, i.Type)
2448 		Cgen(i, &tmpi)
2449 		i = &tmpi
2450 	}
2451 
2452 	Genlist(n.List) // assign the args
2453 
2454 	// i is now addable, prepare an indirected
2455 	// register to hold its address.
2456 	var nodi Node
2457 	Igen(i, &nodi, res) // REG = &inter
2458 
2459 	var nodsp Node
2460 	Nodindreg(&nodsp, Types[Tptr], Thearch.REGSP)
2461 	nodsp.Xoffset = Ctxt.FixedFrameSize()
2462 	if proc != 0 {
2463 		nodsp.Xoffset += 2 * int64(Widthptr) // leave room for size & fn
2464 	}
2465 	nodi.Type = Types[Tptr]
2466 	nodi.Xoffset += int64(Widthptr)
2467 	Cgen(&nodi, &nodsp) // {0, 8(nacl), or 16}(SP) = 8(REG) -- i.data
2468 
2469 	var nodo Node
2470 	Regalloc(&nodo, Types[Tptr], res)
2471 
2472 	nodi.Type = Types[Tptr]
2473 	nodi.Xoffset -= int64(Widthptr)
2474 	Cgen(&nodi, &nodo) // REG = 0(REG) -- i.tab
2475 	Regfree(&nodi)
2476 
2477 	var nodr Node
2478 	Regalloc(&nodr, Types[Tptr], &nodo)
2479 	if n.Left.Xoffset == BADWIDTH {
2480 		Fatalf("cgen_callinter: badwidth")
2481 	}
2482 	Cgen_checknil(&nodo) // in case offset is huge
2483 	nodo.Op = OINDREG
2484 	nodo.Xoffset = n.Left.Xoffset + 3*int64(Widthptr) + 8
2485 	if proc == 0 {
2486 		// plain call: use direct c function pointer - more efficient
2487 		Cgen(&nodo, &nodr) // REG = 32+offset(REG) -- i.tab->fun[f]
2488 		proc = 3
2489 	} else {
2490 		// go/defer. generate go func value.
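		// (Illustrative note, not in the original source: newproc and
		// deferproc take a *funcval, a pointer to a word holding the code
		// pointer. The itab's fun[f] slot already has exactly that shape,
		// so its address, taken below, can stand in for the func value.)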
2491 Agen(&nodo, &nodr) // REG = &(32+offset(REG)) -- i.tab->fun[f] 2492 } 2493 2494 nodr.Type = n.Left.Type 2495 Ginscall(&nodr, proc) 2496 2497 Regfree(&nodr) 2498 Regfree(&nodo) 2499 } 2500 2501 // generate function call; 2502 // proc=0 normal call 2503 // proc=1 goroutine run in new proc 2504 // proc=2 defer call save away stack 2505 func cgen_call(n *Node, proc int) { 2506 if n == nil { 2507 return 2508 } 2509 2510 var afun Node 2511 if n.Left.Ullman >= UINF { 2512 // if name involves a fn call 2513 // precompute the address of the fn 2514 Tempname(&afun, Types[Tptr]) 2515 2516 Cgen(n.Left, &afun) 2517 } 2518 2519 Genlist(n.List) // assign the args 2520 t := n.Left.Type 2521 2522 // call tempname pointer 2523 if n.Left.Ullman >= UINF { 2524 var nod Node 2525 Regalloc(&nod, Types[Tptr], nil) 2526 Cgen_as(&nod, &afun) 2527 nod.Type = t 2528 Ginscall(&nod, proc) 2529 Regfree(&nod) 2530 return 2531 } 2532 2533 // call pointer 2534 if n.Left.Op != ONAME || n.Left.Class != PFUNC { 2535 var nod Node 2536 Regalloc(&nod, Types[Tptr], nil) 2537 Cgen_as(&nod, n.Left) 2538 nod.Type = t 2539 Ginscall(&nod, proc) 2540 Regfree(&nod) 2541 return 2542 } 2543 2544 // call direct 2545 n.Left.Name.Method = true 2546 2547 Ginscall(n.Left, proc) 2548 } 2549 2550 // call to n has already been generated. 2551 // generate: 2552 // res = return value from call. 2553 func cgen_callret(n *Node, res *Node) { 2554 t := n.Left.Type 2555 if t.Etype == TPTR32 || t.Etype == TPTR64 { 2556 t = t.Elem() 2557 } 2558 2559 fp := t.Results().Field(0) 2560 if fp == nil { 2561 Fatalf("cgen_callret: nil") 2562 } 2563 2564 var nod Node 2565 nod.Op = OINDREG 2566 nod.Reg = int16(Thearch.REGSP) 2567 nod.Addable = true 2568 2569 nod.Xoffset = fp.Offset + Ctxt.FixedFrameSize() 2570 nod.Type = fp.Type 2571 Cgen_as(res, &nod) 2572 } 2573 2574 // call to n has already been generated. 2575 // generate: 2576 // res = &return value from call. 2577 func cgen_aret(n *Node, res *Node) { 2578 t := n.Left.Type 2579 if t.IsPtr() { 2580 t = t.Elem() 2581 } 2582 2583 fp := t.Results().Field(0) 2584 if fp == nil { 2585 Fatalf("cgen_aret: nil") 2586 } 2587 2588 var nod1 Node 2589 nod1.Op = OINDREG 2590 nod1.Reg = int16(Thearch.REGSP) 2591 nod1.Addable = true 2592 nod1.Xoffset = fp.Offset + Ctxt.FixedFrameSize() 2593 nod1.Type = fp.Type 2594 2595 if res.Op != OREGISTER { 2596 var nod2 Node 2597 Regalloc(&nod2, Types[Tptr], res) 2598 Agen(&nod1, &nod2) 2599 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &nod2, res) 2600 Regfree(&nod2) 2601 } else { 2602 Agen(&nod1, res) 2603 } 2604 } 2605 2606 // generate return. 2607 // n->left is assignments to return values. 2608 func cgen_ret(n *Node) { 2609 if n != nil { 2610 Genlist(n.List) // copy out args 2611 } 2612 if hasdefer { 2613 Ginscall(Deferreturn, 0) 2614 } 2615 Genlist(Curfn.Func.Exit) 2616 p := Thearch.Gins(obj.ARET, nil, nil) 2617 if n != nil && n.Op == ORETJMP { 2618 p.To.Type = obj.TYPE_MEM 2619 p.To.Name = obj.NAME_EXTERN 2620 p.To.Sym = Linksym(n.Left.Sym) 2621 } 2622 } 2623 2624 // hasHMUL64 reports whether the architecture supports 64-bit 2625 // signed and unsigned high multiplication (OHMUL). 2626 func hasHMUL64() bool { 2627 switch Ctxt.Arch.Family { 2628 case sys.AMD64, sys.S390X, sys.ARM64: 2629 return true 2630 case sys.ARM, sys.I386, sys.MIPS64, sys.PPC64: 2631 return false 2632 } 2633 Fatalf("unknown architecture") 2634 return false 2635 } 2636 2637 // hasRROTC64 reports whether the architecture supports 64-bit 2638 // rotate through carry instructions (ORROTC). 
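// (Editorial note, not in the original comment: on amd64 ORROTC lowers to
// the RCR/RCL rotate-through-carry instructions; no other port listed below
// exposes an equivalent here.)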
2639 func hasRROTC64() bool { 2640 switch Ctxt.Arch.Family { 2641 case sys.AMD64: 2642 return true 2643 case sys.ARM, sys.ARM64, sys.I386, sys.MIPS64, sys.PPC64, sys.S390X: 2644 return false 2645 } 2646 Fatalf("unknown architecture") 2647 return false 2648 } 2649 2650 func hasRightShiftWithCarry() bool { 2651 switch Ctxt.Arch.Family { 2652 case sys.ARM64: 2653 return true 2654 case sys.AMD64, sys.ARM, sys.I386, sys.MIPS64, sys.PPC64, sys.S390X: 2655 return false 2656 } 2657 Fatalf("unknown architecture") 2658 return false 2659 } 2660 2661 func hasAddSetCarry() bool { 2662 switch Ctxt.Arch.Family { 2663 case sys.ARM64: 2664 return true 2665 case sys.AMD64, sys.ARM, sys.I386, sys.MIPS64, sys.PPC64, sys.S390X: 2666 return false 2667 } 2668 Fatalf("unknown architecture") 2669 return false 2670 } 2671 2672 // generate division according to op, one of: 2673 // res = nl / nr 2674 // res = nl % nr 2675 func cgen_div(op Op, nl *Node, nr *Node, res *Node) { 2676 var w int 2677 2678 // Architectures need to support 64-bit high multiplications 2679 // (OHMUL) in order to perform divide by constant optimizations. 2680 if nr.Op != OLITERAL || !hasHMUL64() { 2681 goto longdiv 2682 } 2683 w = int(nl.Type.Width * 8) 2684 2685 // Front end handled 32-bit division. We only need to handle 64-bit. 2686 // Try to do division using multiplication: (2^w)/d. 2687 // See Hacker's Delight, chapter 10. 2688 switch Simtype[nl.Type.Etype] { 2689 default: 2690 goto longdiv 2691 2692 case TUINT64: 2693 var m Magic 2694 m.W = w 2695 m.Ud = uint64(nr.Int64()) 2696 Umagic(&m) 2697 if m.Bad != 0 { 2698 break 2699 } 2700 2701 // In order to add the numerator we need to be able to 2702 // avoid overflow. This is done by shifting the result of the 2703 // addition right by 1 and inserting the carry bit into 2704 // the MSB. For now this needs the RROTC instruction. 2705 // TODO(mundaym): Hacker's Delight 2nd ed. chapter 10 proposes 2706 // an alternative sequence of instructions for architectures 2707 // (TODO: MIPS64, PPC64, S390X) that do not have a shift 2708 // right with carry instruction. 2709 if m.Ua != 0 && !hasRROTC64() && !hasRightShiftWithCarry() { 2710 goto longdiv 2711 } 2712 if op == OMOD { 2713 goto longmod 2714 } 2715 2716 var n1 Node 2717 Cgenr(nl, &n1, nil) 2718 var n2 Node 2719 Nodconst(&n2, nl.Type, int64(m.Um)) 2720 var n3 Node 2721 Regalloc(&n3, nl.Type, res) 2722 Thearch.Cgen_hmul(&n1, &n2, &n3) 2723 2724 if m.Ua != 0 { 2725 // Need to add numerator accounting for overflow. 
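			// Illustrative restatement (not in the original source): with
			// t = hmul(nl, Um) already in n3, the quotient is the 65-bit
			// sum (t + nl) shifted right by S. The add can carry out of 64
			// bits, so that carry must be fed back in as the new MSB before
			// the remaining shift by S-1; the RROTC / shift-with-carry
			// variants below are just two ways of doing that.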
2726 if hasAddSetCarry() { 2727 Thearch.AddSetCarry(&n1, &n3, &n3) 2728 } else { 2729 Thearch.Gins(Thearch.Optoas(OADD, nl.Type), &n1, &n3) 2730 } 2731 2732 if !hasRROTC64() { 2733 Thearch.RightShiftWithCarry(&n3, uint(m.S), &n3) 2734 } else { 2735 Nodconst(&n2, nl.Type, 1) 2736 Thearch.Gins(Thearch.Optoas(ORROTC, nl.Type), &n2, &n3) 2737 Nodconst(&n2, nl.Type, int64(m.S)-1) 2738 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) 2739 } 2740 } else { 2741 Nodconst(&n2, nl.Type, int64(m.S)) 2742 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) // shift dx 2743 } 2744 2745 Thearch.Gmove(&n3, res) 2746 Regfree(&n1) 2747 Regfree(&n3) 2748 return 2749 2750 case TINT64: 2751 var m Magic 2752 m.W = w 2753 m.Sd = nr.Int64() 2754 Smagic(&m) 2755 if m.Bad != 0 { 2756 break 2757 } 2758 if op == OMOD { 2759 goto longmod 2760 } 2761 2762 var n1 Node 2763 Cgenr(nl, &n1, res) 2764 var n2 Node 2765 Nodconst(&n2, nl.Type, m.Sm) 2766 var n3 Node 2767 Regalloc(&n3, nl.Type, nil) 2768 Thearch.Cgen_hmul(&n1, &n2, &n3) 2769 2770 if m.Sm < 0 { 2771 // Need to add numerator (cannot overflow). 2772 Thearch.Gins(Thearch.Optoas(OADD, nl.Type), &n1, &n3) 2773 } 2774 2775 Nodconst(&n2, nl.Type, int64(m.S)) 2776 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) // shift n3 2777 2778 Nodconst(&n2, nl.Type, int64(w)-1) 2779 2780 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n1) // -1 iff num is neg 2781 Thearch.Gins(Thearch.Optoas(OSUB, nl.Type), &n1, &n3) // added 2782 2783 if m.Sd < 0 { 2784 // This could probably be removed by factoring it into 2785 // the multiplier. 2786 Thearch.Gins(Thearch.Optoas(OMINUS, nl.Type), nil, &n3) 2787 } 2788 2789 Thearch.Gmove(&n3, res) 2790 Regfree(&n1) 2791 Regfree(&n3) 2792 return 2793 } 2794 2795 goto longdiv 2796 2797 // Division and mod using (slow) hardware instruction. 2798 longdiv: 2799 Thearch.Dodiv(op, nl, nr, res) 2800 2801 return 2802 2803 // Mod using formula A%B = A-(A/B*B) but 2804 // we know that there is a fast algorithm for A/B. 2805 longmod: 2806 var n1 Node 2807 Regalloc(&n1, nl.Type, res) 2808 2809 Cgen(nl, &n1) 2810 var n2 Node 2811 Regalloc(&n2, nl.Type, nil) 2812 cgen_div(ODIV, &n1, nr, &n2) 2813 a := Thearch.Optoas(OMUL, nl.Type) 2814 2815 if !Smallintconst(nr) { 2816 var n3 Node 2817 Regalloc(&n3, nl.Type, nil) 2818 Cgen(nr, &n3) 2819 Thearch.Gins(a, &n3, &n2) 2820 Regfree(&n3) 2821 } else { 2822 Thearch.Gins(a, nr, &n2) 2823 } 2824 Thearch.Gins(Thearch.Optoas(OSUB, nl.Type), &n2, &n1) 2825 Thearch.Gmove(&n1, res) 2826 Regfree(&n1) 2827 Regfree(&n2) 2828 } 2829 2830 func Fixlargeoffset(n *Node) { 2831 if n == nil { 2832 return 2833 } 2834 if n.Op != OINDREG { 2835 return 2836 } 2837 if n.Reg == int16(Thearch.REGSP) { // stack offset cannot be large 2838 return 2839 } 2840 if n.Xoffset != int64(int32(n.Xoffset)) { 2841 // offset too large, add to register instead. 2842 a := *n 2843 2844 a.Op = OREGISTER 2845 a.Type = Types[Tptr] 2846 a.Xoffset = 0 2847 Cgen_checknil(&a) 2848 Thearch.Ginscon(Thearch.Optoas(OADD, Types[Tptr]), n.Xoffset, &a) 2849 n.Xoffset = 0 2850 } 2851 } 2852 2853 func cgen_append(n, res *Node) { 2854 if Debug['g'] != 0 { 2855 Dump("cgen_append-n", n) 2856 Dump("cgen_append-res", res) 2857 } 2858 for _, n1 := range n.List.Slice() { 2859 if n1.Ullman >= UINF { 2860 Fatalf("append with function call arguments") 2861 } 2862 } 2863 2864 // res = append(src, x, y, z) 2865 // 2866 // If res and src are the same, we can avoid writing to base and cap 2867 // unless we grow the underlying array. 
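	// Illustrative example (added for exposition, not in the original
	// source):
	//
	//	s = append(s, x) // res == src: on the no-grow path only s.len
	//	                 // needs to be stored back
	//	t = append(s, x) // res != src: t's base, len and cap must all be
	//	                 // written (a "full update")
	//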
2868 needFullUpdate := !samesafeexpr(res, n.List.First()) 2869 2870 // Copy src triple into base, len, cap. 2871 base := temp(Types[Tptr]) 2872 len := temp(Types[TUINT]) 2873 cap := temp(Types[TUINT]) 2874 2875 var src Node 2876 Igen(n.List.First(), &src, nil) 2877 src.Type = Types[Tptr] 2878 Thearch.Gmove(&src, base) 2879 src.Type = Types[TUINT] 2880 src.Xoffset += int64(Widthptr) 2881 Thearch.Gmove(&src, len) 2882 src.Xoffset += int64(Widthptr) 2883 Thearch.Gmove(&src, cap) 2884 2885 // if len+argc <= cap goto L1 2886 var rlen Node 2887 Regalloc(&rlen, Types[TUINT], nil) 2888 Thearch.Gmove(len, &rlen) 2889 Thearch.Ginscon(Thearch.Optoas(OADD, Types[TUINT]), int64(n.List.Len()-1), &rlen) 2890 p := Thearch.Ginscmp(OLE, Types[TUINT], &rlen, cap, +1) 2891 // Note: rlen and src are Regrealloc'ed below at the target of the 2892 // branch we just emitted; do not reuse these Go variables for 2893 // other purposes. They need to still describe the same things 2894 // below that they describe right here. 2895 Regfree(&src) 2896 2897 // base, len, cap = growslice(type, base, len, cap, newlen) 2898 var arg Node 2899 arg.Op = OINDREG 2900 arg.Reg = int16(Thearch.REGSP) 2901 arg.Addable = true 2902 arg.Xoffset = Ctxt.FixedFrameSize() 2903 arg.Type = Ptrto(Types[TUINT8]) 2904 Cgen(typename(res.Type.Elem()), &arg) 2905 arg.Xoffset += int64(Widthptr) 2906 2907 arg.Type = Types[Tptr] 2908 Cgen(base, &arg) 2909 arg.Xoffset += int64(Widthptr) 2910 2911 arg.Type = Types[TUINT] 2912 Cgen(len, &arg) 2913 arg.Xoffset += int64(Widthptr) 2914 2915 arg.Type = Types[TUINT] 2916 Cgen(cap, &arg) 2917 arg.Xoffset += int64(Widthptr) 2918 2919 arg.Type = Types[TUINT] 2920 Cgen(&rlen, &arg) 2921 arg.Xoffset += int64(Widthptr) 2922 Regfree(&rlen) 2923 2924 fn := syslook("growslice") 2925 fn = substArgTypes(fn, res.Type.Elem(), res.Type.Elem()) 2926 Ginscall(fn, 0) 2927 2928 if Widthptr == 4 && Widthreg == 8 { 2929 arg.Xoffset += 4 2930 } 2931 2932 arg.Type = Types[Tptr] 2933 Cgen(&arg, base) 2934 arg.Xoffset += int64(Widthptr) 2935 2936 arg.Type = Types[TUINT] 2937 Cgen(&arg, len) 2938 arg.Xoffset += int64(Widthptr) 2939 2940 arg.Type = Types[TUINT] 2941 Cgen(&arg, cap) 2942 2943 // Update res with base, len+argc, cap. 2944 if needFullUpdate { 2945 if Debug_append > 0 { 2946 Warn("append: full update") 2947 } 2948 Patch(p, Pc) 2949 } 2950 if res.Op == ONAME { 2951 Gvardef(res) 2952 } 2953 var dst, r1 Node 2954 Igen(res, &dst, nil) 2955 dst.Type = Types[TUINT] 2956 dst.Xoffset += int64(Widthptr) 2957 Regalloc(&r1, Types[TUINT], nil) 2958 Thearch.Gmove(len, &r1) 2959 Thearch.Ginscon(Thearch.Optoas(OADD, Types[TUINT]), int64(n.List.Len()-1), &r1) 2960 Thearch.Gmove(&r1, &dst) 2961 Regfree(&r1) 2962 dst.Xoffset += int64(Widthptr) 2963 Thearch.Gmove(cap, &dst) 2964 dst.Type = Types[Tptr] 2965 dst.Xoffset -= 2 * int64(Widthptr) 2966 cgen_wb(base, &dst, needwritebarrier(&dst, base)) 2967 Regfree(&dst) 2968 2969 if !needFullUpdate { 2970 if Debug_append > 0 { 2971 Warn("append: len-only update") 2972 } 2973 // goto L2; 2974 // L1: 2975 // update len only 2976 // L2: 2977 q := Gbranch(obj.AJMP, nil, 0) 2978 Patch(p, Pc) 2979 // At the goto above, src refers to cap and rlen holds the new len 2980 if src.Op == OREGISTER || src.Op == OINDREG { 2981 Regrealloc(&src) 2982 } 2983 Regrealloc(&rlen) 2984 src.Xoffset -= int64(Widthptr) 2985 Thearch.Gmove(&rlen, &src) 2986 Regfree(&src) 2987 Regfree(&rlen) 2988 Patch(q, Pc) 2989 } 2990 2991 // Copy data into place. 
2992 // Could do write barrier check around entire copy instead of each element. 2993 // Could avoid reloading registers on each iteration if we know the cgen_wb 2994 // is not going to use a write barrier. 2995 i := 0 2996 var r2 Node 2997 for _, n2 := range n.List.Slice()[1:] { 2998 Regalloc(&r1, Types[Tptr], nil) 2999 Thearch.Gmove(base, &r1) 3000 Regalloc(&r2, Types[TUINT], nil) 3001 Thearch.Gmove(len, &r2) 3002 if i > 0 { 3003 Thearch.Gins(Thearch.Optoas(OADD, Types[TUINT]), Nodintconst(int64(i)), &r2) 3004 } 3005 w := res.Type.Elem().Width 3006 if Thearch.AddIndex != nil && Thearch.AddIndex(&r2, w, &r1) { 3007 // r1 updated by back end 3008 } else if w == 1 { 3009 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &r2, &r1) 3010 } else { 3011 Thearch.Ginscon(Thearch.Optoas(OMUL, Types[TUINT]), w, &r2) 3012 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &r2, &r1) 3013 } 3014 Regfree(&r2) 3015 3016 r1.Op = OINDREG 3017 r1.Type = res.Type.Elem() 3018 cgen_wb(n2, &r1, needwritebarrier(&r1, n2)) 3019 Regfree(&r1) 3020 i++ 3021 } 3022 } 3023 3024 // Generate res = n, where n is x[i:j] or x[i:j:k]. 3025 // If wb is true, need write barrier updating res's base pointer. 3026 // On systems with 32-bit ints, i, j, k are guaranteed to be 32-bit values. 3027 func cgen_slice(n, res *Node, wb bool) { 3028 if Debug['g'] != 0 { 3029 Dump("cgen_slice-n", n) 3030 Dump("cgen_slice-res", res) 3031 } 3032 3033 needFullUpdate := !samesafeexpr(n.Left, res) 3034 3035 // orderexpr has made sure that x is safe (but possibly expensive) 3036 // and i, j, k are cheap. On a system with registers (anything but 386) 3037 // we can evaluate x first and then know we have enough registers 3038 // for i, j, k as well. 3039 var x, xbase, xlen, xcap, i, j, k Node 3040 if n.Op != OSLICEARR && n.Op != OSLICE3ARR { 3041 Igen(n.Left, &x, nil) 3042 } 3043 3044 indexRegType := Types[TUINT] 3045 if Widthreg > Widthptr { // amd64p32 3046 indexRegType = Types[TUINT64] 3047 } 3048 3049 // On most systems, we use registers. 3050 // The 386 has basically no registers, so substitute functions 3051 // that can work with temporaries instead. 
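	// (Descriptive note, not in the original source: the substitutes below
	// keep the index values in stack temporaries and, for each arithmetic
	// op, spill through a scratch register -- load the temporary, apply the
	// instruction, store the result back.)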
3052 regalloc := Regalloc 3053 ginscon := Thearch.Ginscon 3054 gins := Thearch.Gins 3055 if Thearch.LinkArch.Family == sys.I386 { 3056 regalloc = func(n *Node, t *Type, reuse *Node) { 3057 Tempname(n, t) 3058 } 3059 ginscon = func(as obj.As, c int64, n *Node) { 3060 var n1 Node 3061 Regalloc(&n1, n.Type, n) 3062 Thearch.Gmove(n, &n1) 3063 Thearch.Ginscon(as, c, &n1) 3064 Thearch.Gmove(&n1, n) 3065 Regfree(&n1) 3066 } 3067 gins = func(as obj.As, f, t *Node) *obj.Prog { 3068 var n1 Node 3069 Regalloc(&n1, t.Type, t) 3070 Thearch.Gmove(t, &n1) 3071 Thearch.Gins(as, f, &n1) 3072 Thearch.Gmove(&n1, t) 3073 Regfree(&n1) 3074 return nil 3075 } 3076 } 3077 3078 panics := make([]*obj.Prog, 0, 6) // 3 loads + 3 checks 3079 3080 loadlen := func() { 3081 if xlen.Op != 0 { 3082 return 3083 } 3084 if n.Op == OSLICEARR || n.Op == OSLICE3ARR { 3085 Nodconst(&xlen, indexRegType, n.Left.Type.Elem().NumElem()) 3086 return 3087 } 3088 if n.Op == OSLICESTR && Isconst(n.Left, CTSTR) { 3089 Nodconst(&xlen, indexRegType, int64(len(n.Left.Val().U.(string)))) 3090 return 3091 } 3092 regalloc(&xlen, indexRegType, nil) 3093 x.Xoffset += int64(Widthptr) 3094 x.Type = Types[TUINT] 3095 Thearch.Gmove(&x, &xlen) 3096 x.Xoffset -= int64(Widthptr) 3097 } 3098 3099 loadcap := func() { 3100 if xcap.Op != 0 { 3101 return 3102 } 3103 if n.Op == OSLICEARR || n.Op == OSLICE3ARR || n.Op == OSLICESTR { 3104 loadlen() 3105 xcap = xlen 3106 if xcap.Op == OREGISTER { 3107 Regrealloc(&xcap) 3108 } 3109 return 3110 } 3111 regalloc(&xcap, indexRegType, nil) 3112 x.Xoffset += 2 * int64(Widthptr) 3113 x.Type = Types[TUINT] 3114 Thearch.Gmove(&x, &xcap) 3115 x.Xoffset -= 2 * int64(Widthptr) 3116 } 3117 3118 x1, x2, x3 := n.SliceBounds() // unevaluated index arguments 3119 3120 // load computes src into targ, but if src refers to the len or cap of n.Left, 3121 // load copies those from xlen, xcap, loading xlen if needed. 3122 // If targ.Op == OREGISTER on return, it must be Regfreed, 3123 // but it should not be modified without first checking whether it is 3124 // xlen or xcap's register. 3125 load := func(src, targ *Node) { 3126 if src == nil { 3127 return 3128 } 3129 switch src.Op { 3130 case OLITERAL: 3131 *targ = *src 3132 return 3133 case OLEN: 3134 // NOTE(rsc): This doesn't actually trigger, because order.go 3135 // has pulled all the len and cap calls into separate assignments 3136 // to temporaries. There are tests in test/sliceopt.go that could 3137 // be enabled if this is fixed. 3138 if samesafeexpr(n.Left, src.Left) { 3139 if Debug_slice > 0 { 3140 Warn("slice: reuse len") 3141 } 3142 loadlen() 3143 *targ = xlen 3144 if targ.Op == OREGISTER { 3145 Regrealloc(targ) 3146 } 3147 return 3148 } 3149 case OCAP: 3150 // NOTE(rsc): This doesn't actually trigger; see note in case OLEN above. 
3151 if samesafeexpr(n.Left, src.Left) { 3152 if Debug_slice > 0 { 3153 Warn("slice: reuse cap") 3154 } 3155 loadcap() 3156 *targ = xcap 3157 if targ.Op == OREGISTER { 3158 Regrealloc(targ) 3159 } 3160 return 3161 } 3162 } 3163 if i.Op != 0 && samesafeexpr(x1, src) { 3164 if Debug_slice > 0 { 3165 Warn("slice: reuse 1st index") 3166 } 3167 *targ = i 3168 if targ.Op == OREGISTER { 3169 Regrealloc(targ) 3170 } 3171 return 3172 } 3173 if j.Op != 0 && samesafeexpr(x2, src) { 3174 if Debug_slice > 0 { 3175 Warn("slice: reuse 2nd index") 3176 } 3177 *targ = j 3178 if targ.Op == OREGISTER { 3179 Regrealloc(targ) 3180 } 3181 return 3182 } 3183 if Thearch.Cgenindex != nil { 3184 regalloc(targ, indexRegType, nil) 3185 p := Thearch.Cgenindex(src, targ, false) 3186 if p != nil { 3187 panics = append(panics, p) 3188 } 3189 } else if Thearch.Igenindex != nil { 3190 p := Thearch.Igenindex(src, targ, false) 3191 if p != nil { 3192 panics = append(panics, p) 3193 } 3194 } else { 3195 regalloc(targ, indexRegType, nil) 3196 var tmp Node 3197 Cgenr(src, &tmp, targ) 3198 Thearch.Gmove(&tmp, targ) 3199 Regfree(&tmp) 3200 } 3201 } 3202 3203 load(x1, &i) 3204 load(x2, &j) 3205 load(x3, &k) 3206 3207 // i defaults to 0. 3208 if i.Op == 0 { 3209 Nodconst(&i, indexRegType, 0) 3210 } 3211 3212 // j defaults to len(x) 3213 if j.Op == 0 { 3214 loadlen() 3215 j = xlen 3216 if j.Op == OREGISTER { 3217 Regrealloc(&j) 3218 } 3219 } 3220 3221 // k defaults to cap(x) 3222 // Only need to load it if we're recalculating cap or doing a full update. 3223 if k.Op == 0 && n.Op != OSLICESTR && (!iszero(&i) || needFullUpdate) { 3224 loadcap() 3225 k = xcap 3226 if k.Op == OREGISTER { 3227 Regrealloc(&k) 3228 } 3229 } 3230 3231 // Check constant indexes for negative values, and against constant length if known. 3232 // The func obvious below checks for out-of-order constant indexes. 3233 var bound int64 = -1 3234 if n.Op == OSLICEARR || n.Op == OSLICE3ARR { 3235 bound = n.Left.Type.Elem().NumElem() 3236 } else if n.Op == OSLICESTR && Isconst(n.Left, CTSTR) { 3237 bound = int64(len(n.Left.Val().U.(string))) 3238 } 3239 if Isconst(&i, CTINT) { 3240 if i.Val().U.(*Mpint).CmpInt64(0) < 0 || bound >= 0 && i.Val().U.(*Mpint).CmpInt64(bound) > 0 { 3241 Yyerror("slice index out of bounds") 3242 } 3243 } 3244 if Isconst(&j, CTINT) { 3245 if j.Val().U.(*Mpint).CmpInt64(0) < 0 || bound >= 0 && j.Val().U.(*Mpint).CmpInt64(bound) > 0 { 3246 Yyerror("slice index out of bounds") 3247 } 3248 } 3249 if Isconst(&k, CTINT) { 3250 if k.Val().U.(*Mpint).CmpInt64(0) < 0 || bound >= 0 && k.Val().U.(*Mpint).CmpInt64(bound) > 0 { 3251 Yyerror("slice index out of bounds") 3252 } 3253 } 3254 3255 // same reports whether n1 and n2 are the same register or constant. 3256 same := func(n1, n2 *Node) bool { 3257 return n1.Op == OREGISTER && n2.Op == OREGISTER && n1.Reg == n2.Reg || 3258 n1.Op == ONAME && n2.Op == ONAME && n1.Orig == n2.Orig && n1.Type == n2.Type && n1.Xoffset == n2.Xoffset || 3259 n1.Op == OLITERAL && n2.Op == OLITERAL && n1.Val().U.(*Mpint).Cmp(n2.Val().U.(*Mpint)) == 0 3260 } 3261 3262 // obvious reports whether n1 <= n2 is obviously true, 3263 // and it calls Yyerror if n1 <= n2 is obviously false. 
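	// For example (an illustrative note, not in the original comment):
	// x[i:] needs no check for its second index, because the default j is
	// len(x) and len(x) <= cap(x) always holds, while constant out-of-order
	// indexes such as x[5:3] are reported right here at compile time.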
3264 obvious := func(n1, n2 *Node) bool { 3265 if Debug['B'] != 0 { // -B disables bounds checks 3266 return true 3267 } 3268 if same(n1, n2) { 3269 return true // n1 == n2 3270 } 3271 if iszero(n1) { 3272 return true // using unsigned compare, so 0 <= n2 always true 3273 } 3274 if xlen.Op != 0 && same(n1, &xlen) && xcap.Op != 0 && same(n2, &xcap) { 3275 return true // len(x) <= cap(x) always true 3276 } 3277 if Isconst(n1, CTINT) && Isconst(n2, CTINT) { 3278 if n1.Val().U.(*Mpint).Cmp(n2.Val().U.(*Mpint)) <= 0 { 3279 return true // n1, n2 constants such that n1 <= n2 3280 } 3281 Yyerror("slice index out of bounds") 3282 return true 3283 } 3284 return false 3285 } 3286 3287 compare := func(n1, n2 *Node) { 3288 // n1 might be a 64-bit constant, even on 32-bit architectures, 3289 // but it will be represented in 32 bits. 3290 if Ctxt.Arch.RegSize == 4 && Is64(n1.Type) { 3291 if n1.Val().U.(*Mpint).CmpInt64(1<<31) >= 0 { 3292 Fatalf("missed slice out of bounds check") 3293 } 3294 var tmp Node 3295 Nodconst(&tmp, indexRegType, n1.Int64()) 3296 n1 = &tmp 3297 } 3298 p := Thearch.Ginscmp(OGT, indexRegType, n1, n2, -1) 3299 panics = append(panics, p) 3300 } 3301 3302 loadcap() 3303 max := &xcap 3304 if k.Op != 0 && (n.Op == OSLICE3 || n.Op == OSLICE3ARR) { 3305 if obvious(&k, max) { 3306 if Debug_slice > 0 { 3307 Warn("slice: omit check for 3rd index") 3308 } 3309 } else { 3310 compare(&k, max) 3311 } 3312 max = &k 3313 } 3314 if j.Op != 0 { 3315 if obvious(&j, max) { 3316 if Debug_slice > 0 { 3317 Warn("slice: omit check for 2nd index") 3318 } 3319 } else { 3320 compare(&j, max) 3321 } 3322 max = &j 3323 } 3324 if i.Op != 0 { 3325 if obvious(&i, max) { 3326 if Debug_slice > 0 { 3327 Warn("slice: omit check for 1st index") 3328 } 3329 } else { 3330 compare(&i, max) 3331 } 3332 max = &i 3333 } 3334 if k.Op != 0 && i.Op != 0 { 3335 obvious(&i, &k) // emit compile-time error for x[3:n:2] 3336 } 3337 3338 if len(panics) > 0 { 3339 p := Gbranch(obj.AJMP, nil, 0) 3340 for _, q := range panics { 3341 Patch(q, Pc) 3342 } 3343 Ginscall(panicslice, -1) 3344 Patch(p, Pc) 3345 } 3346 3347 // Checks are done. 3348 // Compute new len as j-i, cap as k-i. 3349 // If i and j are same register, len is constant 0. 3350 // If i and k are same register, cap is constant 0. 3351 // If j and k are same register, len and cap are same. 3352 3353 // Done with xlen and xcap. 3354 // Now safe to modify j and k even if they alias xlen, xcap. 3355 if xlen.Op == OREGISTER { 3356 Regfree(&xlen) 3357 } 3358 if xcap.Op == OREGISTER { 3359 Regfree(&xcap) 3360 } 3361 3362 // are j and k the same value? 
3363 sameJK := same(&j, &k) 3364 3365 if i.Op != 0 { 3366 // j -= i 3367 if same(&i, &j) { 3368 if Debug_slice > 0 { 3369 Warn("slice: result len == 0") 3370 } 3371 if j.Op == OREGISTER { 3372 Regfree(&j) 3373 } 3374 Nodconst(&j, indexRegType, 0) 3375 } else { 3376 switch j.Op { 3377 case OLITERAL: 3378 if Isconst(&i, CTINT) { 3379 Nodconst(&j, indexRegType, j.Int64()-i.Int64()) 3380 if Debug_slice > 0 { 3381 Warn("slice: result len == %d", j.Int64()) 3382 } 3383 break 3384 } 3385 fallthrough 3386 case ONAME: 3387 if !istemp(&j) { 3388 var r Node 3389 regalloc(&r, indexRegType, nil) 3390 Thearch.Gmove(&j, &r) 3391 j = r 3392 } 3393 fallthrough 3394 case OREGISTER: 3395 if i.Op == OLITERAL { 3396 v := i.Int64() 3397 if v != 0 { 3398 ginscon(Thearch.Optoas(OSUB, indexRegType), v, &j) 3399 } 3400 } else { 3401 gins(Thearch.Optoas(OSUB, indexRegType), &i, &j) 3402 } 3403 } 3404 } 3405 3406 // k -= i if k different from j and cap is needed.j 3407 // (The modifications to j above cannot affect i: if j and i were aliased, 3408 // we replace j with a constant 0 instead of doing a subtraction, 3409 // leaving i unmodified.) 3410 if k.Op == 0 { 3411 if Debug_slice > 0 && n.Op != OSLICESTR { 3412 Warn("slice: result cap not computed") 3413 } 3414 // no need 3415 } else if same(&i, &k) { 3416 if k.Op == OREGISTER { 3417 Regfree(&k) 3418 } 3419 Nodconst(&k, indexRegType, 0) 3420 if Debug_slice > 0 { 3421 Warn("slice: result cap == 0") 3422 } 3423 } else if sameJK { 3424 if Debug_slice > 0 { 3425 Warn("slice: result cap == result len") 3426 } 3427 // k and j were the same value; make k-i the same as j-i. 3428 if k.Op == OREGISTER { 3429 Regfree(&k) 3430 } 3431 k = j 3432 if k.Op == OREGISTER { 3433 Regrealloc(&k) 3434 } 3435 } else { 3436 switch k.Op { 3437 case OLITERAL: 3438 if Isconst(&i, CTINT) { 3439 Nodconst(&k, indexRegType, k.Int64()-i.Int64()) 3440 if Debug_slice > 0 { 3441 Warn("slice: result cap == %d", k.Int64()) 3442 } 3443 break 3444 } 3445 fallthrough 3446 case ONAME: 3447 if !istemp(&k) { 3448 var r Node 3449 regalloc(&r, indexRegType, nil) 3450 Thearch.Gmove(&k, &r) 3451 k = r 3452 } 3453 fallthrough 3454 case OREGISTER: 3455 if same(&i, &k) { 3456 Regfree(&k) 3457 Nodconst(&k, indexRegType, 0) 3458 if Debug_slice > 0 { 3459 Warn("slice: result cap == 0") 3460 } 3461 } else if i.Op == OLITERAL { 3462 v := i.Int64() 3463 if v != 0 { 3464 ginscon(Thearch.Optoas(OSUB, indexRegType), v, &k) 3465 } 3466 } else { 3467 gins(Thearch.Optoas(OSUB, indexRegType), &i, &k) 3468 } 3469 } 3470 } 3471 } 3472 3473 adjustBase := true 3474 if i.Op == 0 || iszero(&i) { 3475 if Debug_slice > 0 { 3476 Warn("slice: skip base adjustment for 1st index 0") 3477 } 3478 adjustBase = false 3479 } else if k.Op != 0 && iszero(&k) || k.Op == 0 && iszero(&j) { 3480 if Debug_slice > 0 { 3481 if n.Op == OSLICESTR { 3482 Warn("slice: skip base adjustment for string len == 0") 3483 } else { 3484 Warn("slice: skip base adjustment for cap == 0") 3485 } 3486 } 3487 adjustBase = false 3488 } 3489 3490 if !adjustBase && !needFullUpdate { 3491 if Debug_slice > 0 { 3492 if k.Op != 0 { 3493 Warn("slice: len/cap-only update") 3494 } else { 3495 Warn("slice: len-only update") 3496 } 3497 } 3498 if i.Op == OREGISTER { 3499 Regfree(&i) 3500 } 3501 // Write len (and cap if needed) back to x. 
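		// (Descriptive note, not in the original source: this branch stores
		// only the len and cap words of x and never touches the base
		// pointer, so the wb flag is irrelevant here; and with
		// needFullUpdate false, x refers to the same slice as res.)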
3502 x.Xoffset += int64(Widthptr) 3503 x.Type = Types[TUINT] 3504 Thearch.Gmove(&j, &x) 3505 x.Xoffset -= int64(Widthptr) 3506 if k.Op != 0 { 3507 x.Xoffset += 2 * int64(Widthptr) 3508 x.Type = Types[TUINT] 3509 Thearch.Gmove(&k, &x) 3510 x.Xoffset -= 2 * int64(Widthptr) 3511 } 3512 Regfree(&x) 3513 } else { 3514 // Compute new base. May smash i. 3515 if n.Op == OSLICEARR || n.Op == OSLICE3ARR { 3516 Cgenr(n.Left, &xbase, nil) 3517 Cgen_checknil(&xbase) 3518 } else { 3519 var ptr *Type 3520 if n.Op == OSLICESTR { 3521 ptr = ptrToUint8 3522 } else { 3523 ptr = Ptrto(n.Type.Elem()) 3524 } 3525 regalloc(&xbase, ptr, nil) 3526 x.Type = xbase.Type 3527 Thearch.Gmove(&x, &xbase) 3528 Regfree(&x) 3529 } 3530 if i.Op != 0 && adjustBase { 3531 // Branch around the base adjustment if the resulting cap will be 0. 3532 var p *obj.Prog 3533 size := &k 3534 if k.Op == 0 { 3535 size = &j 3536 } 3537 if Isconst(size, CTINT) { 3538 // zero was checked above, must be non-zero. 3539 } else { 3540 var tmp Node 3541 Nodconst(&tmp, indexRegType, 0) 3542 p = Thearch.Ginscmp(OEQ, indexRegType, size, &tmp, -1) 3543 } 3544 var w int64 3545 if n.Op == OSLICESTR { 3546 w = 1 // res is string, elem size is 1 (byte) 3547 } else { 3548 w = res.Type.Elem().Width // res is []T, elem size is T.width 3549 } 3550 if Isconst(&i, CTINT) { 3551 ginscon(Thearch.Optoas(OADD, xbase.Type), i.Int64()*w, &xbase) 3552 } else if Thearch.AddIndex != nil && Thearch.AddIndex(&i, w, &xbase) { 3553 // done by back end 3554 } else if w == 1 { 3555 gins(Thearch.Optoas(OADD, xbase.Type), &i, &xbase) 3556 } else { 3557 if i.Op == ONAME && !istemp(&i) { 3558 var tmp Node 3559 Tempname(&tmp, i.Type) 3560 Thearch.Gmove(&i, &tmp) 3561 i = tmp 3562 } 3563 ginscon(Thearch.Optoas(OMUL, i.Type), w, &i) 3564 gins(Thearch.Optoas(OADD, xbase.Type), &i, &xbase) 3565 } 3566 if p != nil { 3567 Patch(p, Pc) 3568 } 3569 } 3570 if i.Op == OREGISTER { 3571 Regfree(&i) 3572 } 3573 3574 // Write len, cap, base to result. 3575 if res.Op == ONAME { 3576 Gvardef(res) 3577 } 3578 Igen(res, &x, nil) 3579 x.Xoffset += int64(Widthptr) 3580 x.Type = Types[TUINT] 3581 Thearch.Gmove(&j, &x) 3582 x.Xoffset -= int64(Widthptr) 3583 if k.Op != 0 { 3584 x.Xoffset += 2 * int64(Widthptr) 3585 Thearch.Gmove(&k, &x) 3586 x.Xoffset -= 2 * int64(Widthptr) 3587 } 3588 x.Type = xbase.Type 3589 cgen_wb(&xbase, &x, wb) 3590 Regfree(&xbase) 3591 Regfree(&x) 3592 } 3593 3594 if j.Op == OREGISTER { 3595 Regfree(&j) 3596 } 3597 if k.Op == OREGISTER { 3598 Regfree(&k) 3599 } 3600 }
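// The helper below is an illustrative sketch added for exposition; it is not
// part of the original compiler source, and the name sliceParts is invented
// here. It restates in plain Go the arithmetic cgen_slice arranges for
// s[i:j:k] on a slice whose element width is w: len = j-i, cap = k-i, and the
// base pointer advances by i*w unless the result cannot be indexed anyway
// (the "skip base adjustment" cases above).
func sliceParts(base uintptr, i, j, k, w int64) (newBase uintptr, newLen, newCap int64) {
	newLen = j - i
	newCap = k - i
	newBase = base
	if i != 0 && newCap != 0 {
		// Mirrors adjustBase: move the pointer only when the first index
		// is nonzero and the resulting cap is nonzero.
		newBase += uintptr(i * w)
	}
	return newBase, newLen, newCap
}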