github.com/sbinet/go@v0.0.0-20160827155028-54d7de7dd62b/src/cmd/compile/internal/gc/cgen.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/obj/ppc64"
	"cmd/internal/sys"
	"fmt"
)

// generate:
//	res = n;
// simplifies and calls Thearch.Gmove.
// if wb is true, need to emit write barriers.
func Cgen(n, res *Node) {
	cgen_wb(n, res, false)
}

func cgen_wb(n, res *Node, wb bool) {
	if Debug['g'] != 0 {
		op := "cgen"
		if wb {
			op = "cgen_wb"
		}
		Dump("\n"+op+"-n", n)
		Dump(op+"-res", res)
	}

	if n == nil || n.Type == nil {
		return
	}

	if res == nil || res.Type == nil {
		Fatalf("cgen: res nil")
	}

	for n.Op == OCONVNOP {
		n = n.Left
	}

	switch n.Op {
	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
		cgen_slice(n, res, wb)
		return

	case OEFACE:
		if res.Op != ONAME || !res.Addable || wb {
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen_eface(n, &n1)
			cgen_wb(&n1, res, wb)
		} else {
			Cgen_eface(n, res)
		}
		return

	case ODOTTYPE:
		cgen_dottype(n, res, nil, wb)
		return

	case OAPPEND:
		cgen_append(n, res)
		return
	}

	if n.Ullman >= UINF {
		if n.Op == OINDREG {
			Fatalf("cgen: this is going to miscompile")
		}
		if res.Ullman >= UINF {
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			cgen_wb(&n1, res, wb)
			return
		}
	}

	if Isfat(n.Type) {
		if n.Type.Width < 0 {
			Fatalf("forgot to compute width for %v", n.Type)
		}
		sgen_wb(n, res, n.Type.Width, wb)
		return
	}

	if !res.Addable {
		if n.Ullman > res.Ullman {
			if Ctxt.Arch.RegSize == 4 && Is64(n.Type) {
				var n1 Node
				Tempname(&n1, n.Type)
				Cgen(n, &n1)
				cgen_wb(&n1, res, wb)
				return
			}

			var n1 Node
			Regalloc(&n1, n.Type, res)
			Cgen(n, &n1)
			if n1.Ullman > res.Ullman {
				Dump("n1", &n1)
				Dump("res", res)
				Fatalf("loop in cgen")
			}

			cgen_wb(&n1, res, wb)
			Regfree(&n1)
			return
		}

		if res.Ullman < UINF {
			if Complexop(n, res) {
				Complexgen(n, res)
				return
			}

			f := true // gen through register
			switch n.Op {
			case OLITERAL:
				if Smallintconst(n) {
					f = false
				}

			case OREGISTER:
				f = false
			}

			if !n.Type.IsComplex() && Ctxt.Arch.RegSize == 8 && !wb {
				a := Thearch.Optoas(OAS, res.Type)
				var addr obj.Addr
				if Thearch.Sudoaddable(a, res, &addr) {
					var p1 *obj.Prog
					if f {
						var n2 Node
						Regalloc(&n2, res.Type, nil)
						Cgen(n, &n2)
						p1 = Thearch.Gins(a, &n2, nil)
						Regfree(&n2)
					} else {
						p1 = Thearch.Gins(a, n, nil)
					}
					p1.To = addr
					if Debug['g'] != 0 {
						fmt.Printf("%v [ignore previous line]\n", p1)
					}
					Thearch.Sudoclean()
					return
				}
			}
		}

		if Ctxt.Arch.Family == sys.I386 {
			// no registers to speak of
			var n1, n2 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			Igen(res, &n2, nil)
			cgen_wb(&n1, &n2, wb)
			Regfree(&n2)
			return
		}

		var n1 Node
		Igen(res, &n1, nil)
		cgen_wb(n, &n1, wb)
		Regfree(&n1)
		return
	}

	// update addressability for string, slice
	// can't do in walk because n->left->addable
	// changes if n->left is an escaping local variable.
	switch n.Op {
	case OSPTR, OLEN:
		if n.Left.Type.IsSlice() || n.Left.Type.IsString() {
			n.Addable = n.Left.Addable
		}

	case OCAP:
		if n.Left.Type.IsSlice() {
			n.Addable = n.Left.Addable
		}

	case OITAB, OIDATA:
		n.Addable = n.Left.Addable
	}

	if wb {
		if Simtype[res.Type.Etype] != Tptr {
			Fatalf("cgen_wb of type %v", res.Type)
		}
		if n.Ullman >= UINF {
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			n = &n1
		}
		cgen_wbptr(n, res)
		return
	}

	// Write barrier now handled. Code below this line can ignore wb.

	if Ctxt.Arch.Family == sys.ARM { // TODO(rsc): Maybe more often?
		// if both are addressable, move
		if n.Addable && res.Addable {
			if Is64(n.Type) || Is64(res.Type) || n.Op == OREGISTER || res.Op == OREGISTER || n.Type.IsComplex() || res.Type.IsComplex() {
				Thearch.Gmove(n, res)
			} else {
				var n1 Node
				Regalloc(&n1, n.Type, nil)
				Thearch.Gmove(n, &n1)
				Cgen(&n1, res)
				Regfree(&n1)
			}

			return
		}

		// if both are not addressable, use a temporary.
		if !n.Addable && !res.Addable {
			// could use regalloc here sometimes,
			// but have to check for ullman >= UINF.
			var n1 Node
			Tempname(&n1, n.Type)
			Cgen(n, &n1)
			Cgen(&n1, res)
			return
		}

		// if result is not addressable directly but n is,
		// compute its address and then store via the address.
		if !res.Addable {
			var n1 Node
			Igen(res, &n1, nil)
			Cgen(n, &n1)
			Regfree(&n1)
			return
		}
	}

	if Complexop(n, res) {
		Complexgen(n, res)
		return
	}

	if Ctxt.Arch.InFamily(sys.AMD64, sys.I386, sys.S390X) && n.Addable {
		Thearch.Gmove(n, res)
		return
	}

	if Ctxt.Arch.InFamily(sys.ARM64, sys.MIPS64, sys.PPC64) {
		// if both are addressable, move
		if n.Addable {
			if n.Op == OREGISTER || res.Op == OREGISTER {
				Thearch.Gmove(n, res)
			} else {
				var n1 Node
				Regalloc(&n1, n.Type, nil)
				Thearch.Gmove(n, &n1)
				Cgen(&n1, res)
				Regfree(&n1)
			}
			return
		}
	}

	// if n is sudoaddable generate addr and move
	if Ctxt.Arch.Family == sys.ARM && !Is64(n.Type) && !Is64(res.Type) && !n.Type.IsComplex() && !res.Type.IsComplex() {
		a := Thearch.Optoas(OAS, n.Type)
		var addr obj.Addr
		if Thearch.Sudoaddable(a, n, &addr) {
			if res.Op != OREGISTER {
				var n2 Node
				Regalloc(&n2, res.Type, nil)
				p1 := Thearch.Gins(a, nil, &n2)
				p1.From = addr
				if Debug['g'] != 0 {
					fmt.Printf("%v [ignore previous line]\n", p1)
				}
				Thearch.Gmove(&n2, res)
				Regfree(&n2)
			} else {
				p1 := Thearch.Gins(a, nil, res)
				p1.From = addr
				if Debug['g'] != 0 {
					fmt.Printf("%v [ignore previous line]\n", p1)
				}
			}
			Thearch.Sudoclean()
			return
		}
	}

	nl := n.Left
	nr := n.Right

	if nl != nil && nl.Ullman >= UINF {
		if nr != nil && nr.Ullman >= UINF {
			var n1 Node
			Tempname(&n1, nl.Type)
			Cgen(nl, &n1)
			n2 := *n
			n2.Left = &n1
			Cgen(&n2, res)
			return
		}
	}

	// 64-bit ops are hard on 32-bit machine.
	if Ctxt.Arch.RegSize == 4 && (Is64(n.Type) || Is64(res.Type) || n.Left != nil && Is64(n.Left.Type)) {
		switch n.Op {
		// math goes to cgen64.
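		// (Added note: on these 32-bit targets Thearch.Cgen64 expands each
		// of the operations below into a sequence of 32-bit instructions.)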
		case OMINUS,
			OCOM,
			OADD,
			OSUB,
			OMUL,
			OLROT,
			OLSH,
			ORSH,
			OAND,
			OOR,
			OXOR:
			Thearch.Cgen64(n, res)
			return
		}
	}

	if Thearch.Cgen_float != nil && nl != nil && n.Type.IsFloat() && nl.Type.IsFloat() {
		Thearch.Cgen_float(n, res)
		return
	}

	if !n.Type.IsComplex() && Ctxt.Arch.RegSize == 8 {
		a := Thearch.Optoas(OAS, n.Type)
		var addr obj.Addr
		if Thearch.Sudoaddable(a, n, &addr) {
			if res.Op == OREGISTER {
				p1 := Thearch.Gins(a, nil, res)
				p1.From = addr
			} else {
				var n2 Node
				Regalloc(&n2, n.Type, nil)
				p1 := Thearch.Gins(a, nil, &n2)
				p1.From = addr
				Thearch.Gins(a, &n2, res)
				Regfree(&n2)
			}

			Thearch.Sudoclean()
			return
		}
	}

	var a obj.As
	switch n.Op {
	default:
		Dump("cgen", n)
		Dump("cgen-res", res)
		Fatalf("cgen: unknown op %v", Nconv(n, FmtShort|FmtSign))

	case OOROR, OANDAND,
		OEQ, ONE,
		OLT, OLE,
		OGE, OGT,
		ONOT:
		Bvgen(n, res, true)
		return

	case OPLUS:
		Cgen(nl, res)
		return

	// unary
	case OCOM:
		a := Thearch.Optoas(OXOR, nl.Type)

		var n1 Node
		Regalloc(&n1, nl.Type, nil)
		Cgen(nl, &n1)
		var n2 Node
		Nodconst(&n2, nl.Type, -1)
		Thearch.Gins(a, &n2, &n1)
		cgen_norm(n, &n1, res)
		return

	case OMINUS:
		if nl.Type.IsFloat() {
			nr = Nodintconst(-1)
			nr = convlit(nr, n.Type)
			a = Thearch.Optoas(OMUL, nl.Type)
			goto sbop
		}

		a := Thearch.Optoas(n.Op, nl.Type)
		// unary
		var n1 Node
		Regalloc(&n1, nl.Type, res)

		Cgen(nl, &n1)
		if Ctxt.Arch.Family == sys.ARM {
			var n2 Node
			Nodconst(&n2, nl.Type, 0)
			Thearch.Gins(a, &n2, &n1)
		} else if Ctxt.Arch.Family == sys.ARM64 {
			Thearch.Gins(a, &n1, &n1)
		} else {
			Thearch.Gins(a, nil, &n1)
		}
		cgen_norm(n, &n1, res)
		return

	case OSQRT:
		var n1 Node
		Regalloc(&n1, nl.Type, res)
		Cgen(n.Left, &n1)
		Thearch.Gins(Thearch.Optoas(OSQRT, nl.Type), &n1, &n1)
		Thearch.Gmove(&n1, res)
		Regfree(&n1)
		return

	case OGETG:
		Thearch.Getg(res)
		return

	// symmetric binary
	case OAND,
		OOR,
		OXOR,
		OADD,
		OMUL:
		if n.Op == OMUL && Thearch.Cgen_bmul != nil && Thearch.Cgen_bmul(n.Op, nl, nr, res) {
			break
		}
		a = Thearch.Optoas(n.Op, nl.Type)
		goto sbop

	// asymmetric binary
	case OSUB:
		a = Thearch.Optoas(n.Op, nl.Type)
		goto abop

	case OHMUL:
		Thearch.Cgen_hmul(nl, nr, res)

	case OCONV:
		if Eqtype(n.Type, nl.Type) || Noconv(n.Type, nl.Type) {
			Cgen(nl, res)
			return
		}

		if Ctxt.Arch.Family == sys.I386 {
			var n1 Node
			var n2 Node
			Tempname(&n2, n.Type)
			Mgen(nl, &n1, res)
			Thearch.Gmove(&n1, &n2)
			Thearch.Gmove(&n2, res)
			Mfree(&n1)
			break
		}

		var n1 Node
		var n2 Node
		if Ctxt.Arch.Family == sys.ARM {
			if nl.Addable && !Is64(nl.Type) {
				Regalloc(&n1, nl.Type, res)
				Thearch.Gmove(nl, &n1)
			} else {
				if n.Type.Width > int64(Widthptr) || Is64(nl.Type) || nl.Type.IsFloat() {
					Tempname(&n1, nl.Type)
				} else {
					Regalloc(&n1, nl.Type, res)
				}
				Cgen(nl, &n1)
			}
			if n.Type.Width > int64(Widthptr) || Is64(n.Type) || n.Type.IsFloat() {
				Tempname(&n2, n.Type)
			} else {
				Regalloc(&n2, n.Type, nil)
			}
		} else {
			if n.Type.Width > nl.Type.Width {
				// If loading from memory, do conversion during load,
				// so as to avoid use of 8-bit
				// register in, say, int(*byteptr).
				switch nl.Op {
				case ODOT, ODOTPTR, OINDEX, OIND, ONAME:
					Igen(nl, &n1, res)
					Regalloc(&n2, n.Type, res)
					Thearch.Gmove(&n1, &n2)
					Thearch.Gmove(&n2, res)
					Regfree(&n2)
					Regfree(&n1)
					return
				}
			}
			Regalloc(&n1, nl.Type, res)
			Regalloc(&n2, n.Type, &n1)
			Cgen(nl, &n1)
		}

		// if we do the conversion n1 -> n2 here
		// reusing the register, then gmove won't
		// have to allocate its own register.
		Thearch.Gmove(&n1, &n2)
		Thearch.Gmove(&n2, res)
		if n2.Op == OREGISTER {
			Regfree(&n2)
		}
		if n1.Op == OREGISTER {
			Regfree(&n1)
		}

	case ODOT,
		ODOTPTR,
		OINDEX,
		OIND:
		var n1 Node
		Igen(n, &n1, res)

		Thearch.Gmove(&n1, res)
		Regfree(&n1)

	case OITAB:
		// interface table is first word of interface value
		var n1 Node
		Igen(nl, &n1, res)
		n1.Type = n.Type
		Thearch.Gmove(&n1, res)
		Regfree(&n1)

	case OIDATA:
		// interface data is second word of interface value
		var n1 Node
		Igen(nl, &n1, res)
		n1.Type = n.Type
		n1.Xoffset += int64(Widthptr)
		Thearch.Gmove(&n1, res)
		Regfree(&n1)

	case OSPTR:
		// pointer is the first word of string or slice.
		if Isconst(nl, CTSTR) {
			var n1 Node
			Regalloc(&n1, Types[Tptr], res)
			p1 := Thearch.Gins(Thearch.Optoas(OAS, n1.Type), nil, &n1)
			Datastring(nl.Val().U.(string), &p1.From)
			p1.From.Type = obj.TYPE_ADDR
			Thearch.Gmove(&n1, res)
			Regfree(&n1)
			break
		}

		var n1 Node
		Igen(nl, &n1, res)
		n1.Type = n.Type
		Thearch.Gmove(&n1, res)
		Regfree(&n1)

	case OLEN:
		if nl.Type.IsMap() || nl.Type.IsChan() {
			// map and chan have len in the first int-sized word.
			// a zero pointer means zero length
			var n1 Node
			Regalloc(&n1, Types[Tptr], res)

			Cgen(nl, &n1)

			var n2 Node
			Nodconst(&n2, Types[Tptr], 0)
			p1 := Thearch.Ginscmp(OEQ, Types[Tptr], &n1, &n2, 0)

			n2 = n1
			n2.Op = OINDREG
			n2.Type = Types[Simtype[TINT]]
			Thearch.Gmove(&n2, &n1)

			Patch(p1, Pc)

			Thearch.Gmove(&n1, res)
			Regfree(&n1)
			break
		}

		if nl.Type.IsString() || nl.Type.IsSlice() {
			// both slice and string have len one pointer into the struct.
			// a zero pointer means zero length
			var n1 Node
			Igen(nl, &n1, res)

			n1.Type = Types[Simtype[TUINT]]
			n1.Xoffset += int64(Array_nel)
			Thearch.Gmove(&n1, res)
			Regfree(&n1)
			break
		}

		Fatalf("cgen: OLEN: unknown type %v", Tconv(nl.Type, FmtLong))

	case OCAP:
		if nl.Type.IsChan() {
			// chan has cap in the second int-sized word.
			// a zero pointer means zero length
			var n1 Node
			Regalloc(&n1, Types[Tptr], res)

			Cgen(nl, &n1)

			var n2 Node
			Nodconst(&n2, Types[Tptr], 0)
			p1 := Thearch.Ginscmp(OEQ, Types[Tptr], &n1, &n2, 0)

			n2 = n1
			n2.Op = OINDREG
			n2.Xoffset = int64(Widthint)
			n2.Type = Types[Simtype[TINT]]
			Thearch.Gmove(&n2, &n1)

			Patch(p1, Pc)

			Thearch.Gmove(&n1, res)
			Regfree(&n1)
			break
		}

		if nl.Type.IsSlice() {
			var n1 Node
			Igen(nl, &n1, res)
			n1.Type = Types[Simtype[TUINT]]
			n1.Xoffset += int64(Array_cap)
			Thearch.Gmove(&n1, res)
			Regfree(&n1)
			break
		}

		Fatalf("cgen: OCAP: unknown type %v", Tconv(nl.Type, FmtLong))

	case OADDR:
		if n.Bounded { // let race detector avoid nil checks
			Disable_checknil++
		}
		Agen(nl, res)
		if n.Bounded {
			Disable_checknil--
		}

	case OCALLMETH:
		cgen_callmeth(n, 0)
		cgen_callret(n, res)

	case OCALLINTER:
		cgen_callinter(n, res, 0)
		cgen_callret(n, res)

	case OCALLFUNC:
		cgen_call(n, 0)
		cgen_callret(n, res)

	case OMOD, ODIV:
		if n.Type.IsFloat() || Thearch.Dodiv == nil {
			a = Thearch.Optoas(n.Op, nl.Type)
			goto abop
		}

		if nl.Ullman >= nr.Ullman {
			var n1 Node
			Regalloc(&n1, nl.Type, res)
			Cgen(nl, &n1)
			cgen_div(n.Op, &n1, nr, res)
			Regfree(&n1)
		} else {
			var n2 Node
			if !Smallintconst(nr) {
				Regalloc(&n2, nr.Type, res)
				Cgen(nr, &n2)
			} else {
				n2 = *nr
			}

			cgen_div(n.Op, nl, &n2, res)
			if n2.Op != OLITERAL {
				Regfree(&n2)
			}
		}

	case OLSH, ORSH, OLROT:
		Thearch.Cgen_shift(n.Op, n.Bounded, nl, nr, res)
	}

	return

	// put simplest on right - we'll generate into left
	// and then adjust it using the computation of right.
	// constants and variables have the same ullman
	// count, so look for constants specially.
	//
	// an integer constant we can use as an immediate
	// is simpler than a variable - we can use the immediate
	// in the adjustment instruction directly - so it goes
	// on the right.
	//
	// other constants, like big integers or floating point
	// constants, require a mov into a register, so those
	// might as well go on the left, so we can reuse that
	// register for the computation.
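	//
	// (Added example: in x+3 the small integer constant stays on the right
	// so it can be used as an immediate by the adjustment instruction, while
	// a large or floating-point constant is swapped to the left so the
	// register that loads it can be reused for the result.)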
sbop: // symmetric binary
	if nl.Ullman < nr.Ullman || (nl.Ullman == nr.Ullman && (Smallintconst(nl) || (nr.Op == OLITERAL && !Smallintconst(nr)))) {
		nl, nr = nr, nl
	}

abop: // asymmetric binary
	var n1 Node
	var n2 Node
	if Ctxt.Arch.Family == sys.I386 {
		// no registers, sigh
		if Smallintconst(nr) {
			var n1 Node
			Mgen(nl, &n1, res)
			var n2 Node
			Regalloc(&n2, nl.Type, &n1)
			Thearch.Gmove(&n1, &n2)
			Thearch.Gins(a, nr, &n2)
			Thearch.Gmove(&n2, res)
			Regfree(&n2)
			Mfree(&n1)
		} else if nl.Ullman >= nr.Ullman {
			var nt Node
			Tempname(&nt, nl.Type)
			Cgen(nl, &nt)
			var n2 Node
			Mgen(nr, &n2, nil)
			var n1 Node
			Regalloc(&n1, nl.Type, res)
			Thearch.Gmove(&nt, &n1)
			Thearch.Gins(a, &n2, &n1)
			Thearch.Gmove(&n1, res)
			Regfree(&n1)
			Mfree(&n2)
		} else {
			var n2 Node
			Regalloc(&n2, nr.Type, res)
			Cgen(nr, &n2)
			var n1 Node
			Regalloc(&n1, nl.Type, nil)
			Cgen(nl, &n1)
			Thearch.Gins(a, &n2, &n1)
			Regfree(&n2)
			Thearch.Gmove(&n1, res)
			Regfree(&n1)
		}
		return
	}

	if nl.Ullman >= nr.Ullman {
		Regalloc(&n1, nl.Type, res)
		Cgen(nl, &n1)

		if Smallintconst(nr) && Ctxt.Arch.Family != sys.MIPS64 && Ctxt.Arch.Family != sys.ARM && Ctxt.Arch.Family != sys.ARM64 && Ctxt.Arch.Family != sys.PPC64 { // TODO(rsc): Check opcode for arm
			n2 = *nr
		} else {
			Regalloc(&n2, nr.Type, nil)
			Cgen(nr, &n2)
		}
	} else {
		if Smallintconst(nr) && Ctxt.Arch.Family != sys.MIPS64 && Ctxt.Arch.Family != sys.ARM && Ctxt.Arch.Family != sys.ARM64 && Ctxt.Arch.Family != sys.PPC64 { // TODO(rsc): Check opcode for arm
			n2 = *nr
		} else {
			Regalloc(&n2, nr.Type, res)
			Cgen(nr, &n2)
		}

		Regalloc(&n1, nl.Type, nil)
		Cgen(nl, &n1)
	}

	Thearch.Gins(a, &n2, &n1)
	if n2.Op != OLITERAL {
		Regfree(&n2)
	}
	cgen_norm(n, &n1, res)
}

var sys_wbptr *Node

func cgen_wbptr(n, res *Node) {
	if Curfn != nil {
		if Curfn.Func.Pragma&Nowritebarrier != 0 {
			Yyerror("write barrier prohibited")
		}
		if Curfn.Func.WBLineno == 0 {
			Curfn.Func.WBLineno = lineno
		}
	}
	if Debug_wb > 0 {
		Warn("write barrier")
	}

	var dst, src Node
	Igen(res, &dst, nil)
	if n.Op == OREGISTER {
		src = *n
		Regrealloc(&src)
	} else {
		Cgenr(n, &src, nil)
	}

	wbVar := syslook("writeBarrier")
	wbEnabled := NodSym(ODOT, wbVar, wbVar.Type.Field(0).Sym)
	wbEnabled = typecheck(wbEnabled, Erv)
	pbr := Thearch.Ginscmp(ONE, Types[TUINT8], wbEnabled, Nodintconst(0), -1)
	Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, &dst)
	pjmp := Gbranch(obj.AJMP, nil, 0)
	Patch(pbr, Pc)
	var adst Node
	Agenr(&dst, &adst, &dst)
	p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &adst, nil)
	a := &p.To
	a.Type = obj.TYPE_MEM
	a.Reg = int16(Thearch.REGSP)
	a.Offset = Ctxt.FixedFrameSize()
	p2 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, nil)
	p2.To = p.To
	p2.To.Offset += int64(Widthptr)
	Regfree(&adst)
	if sys_wbptr == nil {
		sys_wbptr = writebarrierfn("writebarrierptr", Types[Tptr], Types[Tptr])
	}
	Ginscall(sys_wbptr, 0)
	Patch(pjmp, Pc)

	Regfree(&dst)
	Regfree(&src)
}

func cgen_wbfat(n, res *Node) {
	if Curfn != nil {
		if Curfn.Func.Pragma&Nowritebarrier != 0 {
			Yyerror("write barrier prohibited")
		}
		if Curfn.Func.WBLineno == 0 {
			Curfn.Func.WBLineno = lineno
		}
	}
	if Debug_wb > 0 {
		Warn("write barrier")
	}
	needType := true
	funcName := "typedmemmove"
	var dst, src Node
	if n.Ullman >= res.Ullman {
		Agenr(n, &src, nil)
		Agenr(res, &dst, nil)
	} else {
		Agenr(res, &dst, nil)
		Agenr(n, &src, nil)
	}
	p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &dst, nil)
	a := &p.To
	a.Type = obj.TYPE_MEM
	a.Reg = int16(Thearch.REGSP)
	a.Offset = Ctxt.FixedFrameSize()
	if needType {
		a.Offset += int64(Widthptr)
	}
	p2 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, nil)
	p2.To = p.To
	p2.To.Offset += int64(Widthptr)
	Regfree(&dst)
	if needType {
		src.Type = Types[Tptr]
		Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), typename(n.Type), &src)
		p3 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &src, nil)
		p3.To = p2.To
		p3.To.Offset -= 2 * int64(Widthptr)
	}
	Regfree(&src)
	Ginscall(writebarrierfn(funcName, Types[Tptr], Types[Tptr]), 0)
}

// cgen_norm moves n1 to res, truncating to expected type if necessary.
// n1 is a register, and cgen_norm frees it.
func cgen_norm(n, n1, res *Node) {
	switch Ctxt.Arch.Family {
	case sys.AMD64, sys.I386:
		// We use sized math, so the result is already truncated.
	default:
		switch n.Op {
		case OADD, OSUB, OMUL, ODIV, OCOM, OMINUS:
			// TODO(rsc): What about left shift?
			Thearch.Gins(Thearch.Optoas(OAS, n.Type), n1, n1)
		}
	}

	Thearch.Gmove(n1, res)
	Regfree(n1)
}

func Mgen(n *Node, n1 *Node, rg *Node) {
	n1.Op = OEMPTY

	if n.Addable {
		*n1 = *n
		if n1.Op == OREGISTER || n1.Op == OINDREG {
			reg[n.Reg-int16(Thearch.REGMIN)]++
		}
		return
	}

	Tempname(n1, n.Type)
	Cgen(n, n1)
	if n.Type.Width <= int64(Widthptr) || n.Type.IsFloat() {
		n2 := *n1
		Regalloc(n1, n.Type, rg)
		Thearch.Gmove(&n2, n1)
	}
}

func Mfree(n *Node) {
	if n.Op == OREGISTER {
		Regfree(n)
	}
}

// allocate a register (reusing res if possible) and generate
// a = n
// The caller must call Regfree(a).
func Cgenr(n *Node, a *Node, res *Node) {
	if Debug['g'] != 0 {
		Dump("cgenr-n", n)
	}

	if Isfat(n.Type) {
		Fatalf("cgenr on fat node")
	}

	if n.Addable {
		Regalloc(a, n.Type, res)
		Thearch.Gmove(n, a)
		return
	}

	switch n.Op {
	case ONAME,
		ODOT,
		ODOTPTR,
		OINDEX,
		OCALLFUNC,
		OCALLMETH,
		OCALLINTER:
		var n1 Node
		Igen(n, &n1, res)
		Regalloc(a, n.Type, &n1)
		Thearch.Gmove(&n1, a)
		Regfree(&n1)

	default:
		Regalloc(a, n.Type, res)
		Cgen(n, a)
	}
}

// allocate a register (reusing res if possible) and generate
// a = &n
// The caller must call Regfree(a).
// The generated code checks that the result is not nil.
func Agenr(n *Node, a *Node, res *Node) {
	if Debug['g'] != 0 {
		Dump("\nagenr-n", n)
	}

	nl := n.Left
	nr := n.Right

	switch n.Op {
	case ODOT, ODOTPTR, OCALLFUNC, OCALLMETH, OCALLINTER:
		var n1 Node
		Igen(n, &n1, res)
		Regalloc(a, Types[Tptr], &n1)
		Agen(&n1, a)
		Regfree(&n1)

	case OIND:
		Cgenr(n.Left, a, res)
		if !n.Left.NonNil {
			Cgen_checknil(a)
		} else if Debug_checknil != 0 && n.Lineno > 1 {
			Warnl(n.Lineno, "removed nil check")
		}

	case OINDEX:
		if Ctxt.Arch.Family == sys.ARM {
			var p2 *obj.Prog // to be patched to panicindex.
			w := uint32(n.Type.Width)
			bounded := Debug['B'] != 0 || n.Bounded
			var n1 Node
			var n3 Node
			if nr.Addable {
				var tmp Node
				if !Isconst(nr, CTINT) {
					Tempname(&tmp, Types[TINT32])
				}
				if !Isconst(nl, CTSTR) {
					Agenr(nl, &n3, res)
				}
				if !Isconst(nr, CTINT) {
					p2 = Thearch.Cgenindex(nr, &tmp, bounded)
					Regalloc(&n1, tmp.Type, nil)
					Thearch.Gmove(&tmp, &n1)
				}
			} else if nl.Addable {
				if !Isconst(nr, CTINT) {
					var tmp Node
					Tempname(&tmp, Types[TINT32])
					p2 = Thearch.Cgenindex(nr, &tmp, bounded)
					Regalloc(&n1, tmp.Type, nil)
					Thearch.Gmove(&tmp, &n1)
				}

				if !Isconst(nl, CTSTR) {
					Agenr(nl, &n3, res)
				}
			} else {
				var tmp Node
				Tempname(&tmp, Types[TINT32])
				p2 = Thearch.Cgenindex(nr, &tmp, bounded)
				nr = &tmp
				if !Isconst(nl, CTSTR) {
					Agenr(nl, &n3, res)
				}
				Regalloc(&n1, tmp.Type, nil)
				Thearch.Gins(Thearch.Optoas(OAS, tmp.Type), &tmp, &n1)
			}

			// &a is in &n3 (allocated in res)
			// i is in &n1 (if not constant)
			// w is width

			// constant index
			if Isconst(nr, CTINT) {
				if Isconst(nl, CTSTR) {
					Fatalf("constant string constant index")
				}
				v := uint64(nr.Int64())
				var n2 Node
				if nl.Type.IsSlice() || nl.Type.IsString() {
					if Debug['B'] == 0 && !n.Bounded {
						n1 = n3
						n1.Op = OINDREG
						n1.Type = Types[Tptr]
						n1.Xoffset = int64(Array_nel)
						Nodconst(&n2, Types[TUINT32], int64(v))
						p1 := Thearch.Ginscmp(OGT, Types[TUINT32], &n1, &n2, +1)
						Ginscall(Panicindex, -1)
						Patch(p1, Pc)
					}

					n1 = n3
					n1.Op = OINDREG
					n1.Type = Types[Tptr]
					n1.Xoffset = int64(Array_array)
					Thearch.Gmove(&n1, &n3)
				}

				Nodconst(&n2, Types[Tptr], int64(v*uint64(w)))
				Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
				*a = n3
				break
			}

			var n2 Node
			Regalloc(&n2, Types[TINT32], &n1) // i
			Thearch.Gmove(&n1, &n2)
			Regfree(&n1)

			var n4 Node
			if Debug['B'] == 0 && !n.Bounded {
				// check bounds
				if Isconst(nl, CTSTR) {
					Nodconst(&n4, Types[TUINT32], int64(len(nl.Val().U.(string))))
				} else if nl.Type.IsSlice() || nl.Type.IsString() {
					n1 = n3
					n1.Op = OINDREG
					n1.Type = Types[Tptr]
					n1.Xoffset = int64(Array_nel)
					Regalloc(&n4, Types[TUINT32], nil)
					Thearch.Gmove(&n1, &n4)
				} else {
					Nodconst(&n4, Types[TUINT32], nl.Type.NumElem())
				}
				p1 := Thearch.Ginscmp(OLT, Types[TUINT32], &n2, &n4, +1)
				if n4.Op == OREGISTER {
					Regfree(&n4)
				}
				if p2 != nil {
					Patch(p2, Pc)
				}
				Ginscall(Panicindex, -1)
				Patch(p1, Pc)
			}

			if Isconst(nl, CTSTR) {
				Regalloc(&n3, Types[Tptr], res)
				p1 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &n3)
				Datastring(nl.Val().U.(string), &p1.From)
				p1.From.Type = obj.TYPE_ADDR
			} else if nl.Type.IsSlice() || nl.Type.IsString() {
				n1 = n3
				n1.Op = OINDREG
				n1.Type = Types[Tptr]
				n1.Xoffset = int64(Array_array)
				Thearch.Gmove(&n1, &n3)
			}

			if w == 0 {
				// nothing to do
			} else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) {
				// done by back end
			} else if w == 1 {
				Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
			} else {
				if w&(w-1) == 0 {
					// Power of 2. Use shift.
					Thearch.Ginscon(Thearch.Optoas(OLSH, Types[TUINT32]), int64(log2(uint64(w))), &n2)
				} else {
					// Not a power of 2. Use multiply.
					Regalloc(&n4, Types[TUINT32], nil)
					Nodconst(&n1, Types[TUINT32], int64(w))
					Thearch.Gmove(&n1, &n4)
					Thearch.Gins(Thearch.Optoas(OMUL, Types[TUINT32]), &n4, &n2)
					Regfree(&n4)
				}
				Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
			}
			*a = n3
			Regfree(&n2)
			break
		}
		if Ctxt.Arch.Family == sys.I386 {
			var p2 *obj.Prog // to be patched to panicindex.
			w := uint32(n.Type.Width)
			bounded := Debug['B'] != 0 || n.Bounded
			var n3 Node
			var tmp Node
			var n1 Node
			if nr.Addable {
				// Generate &nl first, and move nr into register.
				if !Isconst(nl, CTSTR) {
					Igen(nl, &n3, res)
				}
				if !Isconst(nr, CTINT) {
					p2 = Thearch.Igenindex(nr, &tmp, bounded)
					Regalloc(&n1, tmp.Type, nil)
					Thearch.Gmove(&tmp, &n1)
				}
			} else if nl.Addable {
				// Generate nr first, and move &nl into register.
				if !Isconst(nr, CTINT) {
					p2 = Thearch.Igenindex(nr, &tmp, bounded)
					Regalloc(&n1, tmp.Type, nil)
					Thearch.Gmove(&tmp, &n1)
				}

				if !Isconst(nl, CTSTR) {
					Igen(nl, &n3, res)
				}
			} else {
				p2 = Thearch.Igenindex(nr, &tmp, bounded)
				nr = &tmp
				if !Isconst(nl, CTSTR) {
					Igen(nl, &n3, res)
				}
				Regalloc(&n1, tmp.Type, nil)
				Thearch.Gins(Thearch.Optoas(OAS, tmp.Type), &tmp, &n1)
			}

			// For fixed array we really want the pointer in n3.
			var n2 Node
			if nl.Type.IsArray() {
				Regalloc(&n2, Types[Tptr], &n3)
				Agen(&n3, &n2)
				Regfree(&n3)
				n3 = n2
			}

			// &a[0] is in n3 (allocated in res)
			// i is in n1 (if not constant)
			// len(a) is in nlen (if needed)
			// w is width

			// constant index
			if Isconst(nr, CTINT) {
				if Isconst(nl, CTSTR) {
					Fatalf("constant string constant index") // front end should handle
				}
				v := uint64(nr.Int64())
				if nl.Type.IsSlice() || nl.Type.IsString() {
					if Debug['B'] == 0 && !n.Bounded {
						nlen := n3
						nlen.Type = Types[TUINT32]
						nlen.Xoffset += int64(Array_nel)
						Nodconst(&n2, Types[TUINT32], int64(v))
						p1 := Thearch.Ginscmp(OGT, Types[TUINT32], &nlen, &n2, +1)
						Ginscall(Panicindex, -1)
						Patch(p1, Pc)
					}
				}

				// Load base pointer in n2 = n3.
				Regalloc(&n2, Types[Tptr], &n3)

				n3.Type = Types[Tptr]
				n3.Xoffset += int64(Array_array)
				Thearch.Gmove(&n3, &n2)
				Regfree(&n3)
				if v*uint64(w) != 0 {
					Nodconst(&n1, Types[Tptr], int64(v*uint64(w)))
					Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n1, &n2)
				}
				*a = n2
				break
			}

			// i is in register n1, extend to 32 bits.
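			// (Added note: a signed 32-bit type is chosen below when the
			// index is signed, so the widening move extends it correctly.)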
			t := Types[TUINT32]

			if n1.Type.IsSigned() {
				t = Types[TINT32]
			}

			Regalloc(&n2, t, &n1) // i
			Thearch.Gmove(&n1, &n2)
			Regfree(&n1)

			if Debug['B'] == 0 && !n.Bounded {
				// check bounds
				t := Types[TUINT32]

				var nlen Node
				if Isconst(nl, CTSTR) {
					Nodconst(&nlen, t, int64(len(nl.Val().U.(string))))
				} else if nl.Type.IsSlice() || nl.Type.IsString() {
					nlen = n3
					nlen.Type = t
					nlen.Xoffset += int64(Array_nel)
				} else {
					Nodconst(&nlen, t, nl.Type.NumElem())
				}

				p1 := Thearch.Ginscmp(OLT, t, &n2, &nlen, +1)
				if p2 != nil {
					Patch(p2, Pc)
				}
				Ginscall(Panicindex, -1)
				Patch(p1, Pc)
			}

			if Isconst(nl, CTSTR) {
				Regalloc(&n3, Types[Tptr], res)
				p1 := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &n3)
				Datastring(nl.Val().U.(string), &p1.From)
				p1.From.Type = obj.TYPE_ADDR
				Thearch.Gins(Thearch.Optoas(OADD, n3.Type), &n2, &n3)
				goto indexdone1
			}

			// Load base pointer in n3.
			Regalloc(&tmp, Types[Tptr], &n3)

			if nl.Type.IsSlice() || nl.Type.IsString() {
				n3.Type = Types[Tptr]
				n3.Xoffset += int64(Array_array)
				Thearch.Gmove(&n3, &tmp)
			}

			Regfree(&n3)
			n3 = tmp

			if w == 0 {
				// nothing to do
			} else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) {
				// done by back end
			} else if w == 1 {
				Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
			} else {
				if w&(w-1) == 0 {
					// Power of 2. Use shift.
					Thearch.Ginscon(Thearch.Optoas(OLSH, Types[TUINT32]), int64(log2(uint64(w))), &n2)
				} else {
					// Not a power of 2. Use multiply.
					Thearch.Ginscon(Thearch.Optoas(OMUL, Types[TUINT32]), int64(w), &n2)
				}
				Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
			}

		indexdone1:
			*a = n3
			Regfree(&n2)
			break
		}

		freelen := 0
		w := uint64(n.Type.Width)

		// Generate the non-addressable child first.
		var n3 Node
		var nlen Node
		var tmp Node
		var n1 Node
		if nr.Addable {
			goto irad
		}
		if nl.Addable {
			Cgenr(nr, &n1, nil)
			if !Isconst(nl, CTSTR) {
				if nl.Type.IsArray() {
					Agenr(nl, &n3, res)
				} else {
					Igen(nl, &nlen, res)
					freelen = 1
					nlen.Type = Types[Tptr]
					nlen.Xoffset += int64(Array_array)
					Regalloc(&n3, Types[Tptr], res)
					Thearch.Gmove(&nlen, &n3)
					nlen.Type = Types[Simtype[TUINT]]
					nlen.Xoffset += int64(Array_nel) - int64(Array_array)
				}
			}

			goto index
		}

		Tempname(&tmp, nr.Type)
		Cgen(nr, &tmp)
		nr = &tmp

	irad:
		if !Isconst(nl, CTSTR) {
			if nl.Type.IsArray() {
				Agenr(nl, &n3, res)
			} else {
				if !nl.Addable {
					if res != nil && res.Op == OREGISTER { // give up res, which we don't need yet.
						Regfree(res)
					}

					// igen will need an addressable node.
					var tmp2 Node
					Tempname(&tmp2, nl.Type)
					Cgen(nl, &tmp2)
					nl = &tmp2

					if res != nil && res.Op == OREGISTER { // reacquire res
						Regrealloc(res)
					}
				}

				Igen(nl, &nlen, res)
				freelen = 1
				nlen.Type = Types[Tptr]
				nlen.Xoffset += int64(Array_array)
				Regalloc(&n3, Types[Tptr], res)
				Thearch.Gmove(&nlen, &n3)
				nlen.Type = Types[Simtype[TUINT]]
				nlen.Xoffset += int64(Array_nel) - int64(Array_array)
			}
		}

		if !Isconst(nr, CTINT) {
			Cgenr(nr, &n1, nil)
		}

		goto index

		// &a is in &n3 (allocated in res)
		// i is in &n1 (if not constant)
		// len(a) is in nlen (if needed)
		// w is width

		// constant index
	index:
		if Isconst(nr, CTINT) {
			if Isconst(nl, CTSTR) {
				Fatalf("constant string constant index") // front end should handle
			}
			v := uint64(nr.Int64())
			if nl.Type.IsSlice() || nl.Type.IsString() {
				if Debug['B'] == 0 && !n.Bounded {
					p1 := Thearch.Ginscmp(OGT, Types[Simtype[TUINT]], &nlen, Nodintconst(int64(v)), +1)
					Ginscall(Panicindex, -1)
					Patch(p1, Pc)
				}

				Regfree(&nlen)
			}

			if v*w != 0 {
				Thearch.Ginscon(Thearch.Optoas(OADD, Types[Tptr]), int64(v*w), &n3)
			}
			*a = n3
			break
		}

		// type of the index
		t := Types[TUINT64]

		if n1.Type.IsSigned() {
			t = Types[TINT64]
		}

		var n2 Node
		Regalloc(&n2, t, &n1) // i
		Thearch.Gmove(&n1, &n2)
		Regfree(&n1)

		if Debug['B'] == 0 && !n.Bounded {
			// check bounds
			t = Types[Simtype[TUINT]]

			if Is64(nr.Type) {
				t = Types[TUINT64]
			}
			if Isconst(nl, CTSTR) {
				Nodconst(&nlen, t, int64(len(nl.Val().U.(string))))
			} else if nl.Type.IsSlice() || nl.Type.IsString() {
				// nlen already initialized
			} else {
				Nodconst(&nlen, t, nl.Type.NumElem())
			}

			p1 := Thearch.Ginscmp(OLT, t, &n2, &nlen, +1)
			Ginscall(Panicindex, -1)
			Patch(p1, Pc)
		}

		if Isconst(nl, CTSTR) {
			Regalloc(&n3, Types[Tptr], res)
			p1 := Thearch.Gins(Thearch.Optoas(OAS, n3.Type), nil, &n3) // XXX was LEAQ!
			Datastring(nl.Val().U.(string), &p1.From)
			p1.From.Type = obj.TYPE_ADDR
			Thearch.Gins(Thearch.Optoas(OADD, n3.Type), &n2, &n3)
			goto indexdone
		}

		if w == 0 {
			// nothing to do
		} else if Thearch.AddIndex != nil && Thearch.AddIndex(&n2, int64(w), &n3) {
			// done by back end
		} else if w == 1 {
			Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
		} else {
			if w&(w-1) == 0 {
				// Power of 2. Use shift.
				Thearch.Ginscon(Thearch.Optoas(OLSH, t), int64(log2(w)), &n2)
			} else {
				// Not a power of 2. Use multiply.
				Thearch.Ginscon(Thearch.Optoas(OMUL, t), int64(w), &n2)
			}
			Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n3)
		}

	indexdone:
		*a = n3
		Regfree(&n2)
		if freelen != 0 {
			Regfree(&nlen)
		}

	default:
		Regalloc(a, Types[Tptr], res)
		Agen(n, a)
	}
}

// log2 returns the logarithm base 2 of n. n must be a power of 2.
func log2(n uint64) int {
	x := 0
	for n>>uint(x) != 1 {
		x++
	}
	return x
}

// generate:
// res = &n;
// The generated code checks that the result is not nil.
func Agen(n *Node, res *Node) {
	if Debug['g'] != 0 {
		Dump("\nagen-res", res)
		Dump("agen-r", n)
	}

	if n == nil || n.Type == nil {
		return
	}

	for n.Op == OCONVNOP {
		n = n.Left
	}

	if Isconst(n, CTNIL) && n.Type.Width > int64(Widthptr) {
		// Use of a nil interface or nil slice.
		// Create a temporary we can take the address of and read.
		// The generated code is just going to panic, so it need not
		// be terribly efficient. See issue 3670.
		var n1 Node
		Tempname(&n1, n.Type)

		Gvardef(&n1)
		Thearch.Clearfat(&n1)
		var n2 Node
		Regalloc(&n2, Types[Tptr], res)
		var n3 Node
		n3.Op = OADDR
		n3.Left = &n1
		Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &n3, &n2)
		Thearch.Gmove(&n2, res)
		Regfree(&n2)
		return
	}

	if n.Op == OINDREG && n.Xoffset == 0 {
		// Generate MOVW R0, R1 instead of MOVW $0(R0), R1.
		// This allows better move propagation in the back ends
		// (and maybe it helps the processor).
		n1 := *n
		n1.Op = OREGISTER
		n1.Type = res.Type
		Thearch.Gmove(&n1, res)
		return
	}

	if n.Addable {
		if n.Op == OREGISTER {
			Fatalf("agen OREGISTER")
		}
		var n1 Node
		n1.Op = OADDR
		n1.Left = n
		var n2 Node
		Regalloc(&n2, Types[Tptr], res)
		Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &n1, &n2)
		Thearch.Gmove(&n2, res)
		Regfree(&n2)
		return
	}

	nl := n.Left

	switch n.Op {
	default:
		Dump("bad agen", n)
		Fatalf("agen: unknown op %v", Nconv(n, FmtShort|FmtSign))

	case OCALLMETH:
		cgen_callmeth(n, 0)
		cgen_aret(n, res)

	case OCALLINTER:
		cgen_callinter(n, res, 0)
		cgen_aret(n, res)

	case OCALLFUNC:
		cgen_call(n, 0)
		cgen_aret(n, res)

	case OEFACE, ODOTTYPE, OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
		var n1 Node
		Tempname(&n1, n.Type)
		Cgen(n, &n1)
		Agen(&n1, res)

	case OINDEX:
		var n1 Node
		Agenr(n, &n1, res)
		Thearch.Gmove(&n1, res)
		Regfree(&n1)

	case OIND:
		Cgen(nl, res)
		if !nl.NonNil {
			Cgen_checknil(res)
		} else if Debug_checknil != 0 && n.Lineno > 1 {
			Warnl(n.Lineno, "removed nil check")
		}

	case ODOT:
		Agen(nl, res)
		if n.Xoffset != 0 {
			addOffset(res, n.Xoffset)
		}

	case ODOTPTR:
		Cgen(nl, res)
		if !nl.NonNil {
			Cgen_checknil(res)
		} else if Debug_checknil != 0 && n.Lineno > 1 {
			Warnl(n.Lineno, "removed nil check")
		}
		if n.Xoffset != 0 {
			addOffset(res, n.Xoffset)
		}
	}
}

func addOffset(res *Node, offset int64) {
	if Ctxt.Arch.InFamily(sys.AMD64, sys.I386) {
		Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), Nodintconst(offset), res)
		return
	}

	var n1, n2 Node
	Regalloc(&n1, Types[Tptr], nil)
	Thearch.Gmove(res, &n1)
	Regalloc(&n2, Types[Tptr], nil)
	Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), Nodintconst(offset), &n2)
	Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &n2, &n1)
	Thearch.Gmove(&n1, res)
	Regfree(&n1)
	Regfree(&n2)
}

// Igen computes the address &n, stores it in a register r,
// and rewrites a to refer to *r. The chosen r may be the
// stack pointer, it may be borrowed from res, or it may
// be a newly allocated register.
// The caller must call Regfree(a)
// to free r when the address is no longer needed.
// The generated code ensures that &n is not nil.
func Igen(n *Node, a *Node, res *Node) {
	if Debug['g'] != 0 {
		Dump("\nigen-n", n)
	}

	switch n.Op {
	case ONAME:
		if n.Class == PAUTOHEAP {
			Dump("igen", n)
			Fatalf("bad name")
		}
		*a = *n
		return

	case OINDREG:
		// Increase the refcount of the register so that igen's caller
		// has to call Regfree.
		if n.Reg != int16(Thearch.REGSP) {
			reg[n.Reg-int16(Thearch.REGMIN)]++
		}
		*a = *n
		return

	case ODOT:
		Igen(n.Left, a, res)
		a.Xoffset += n.Xoffset
		a.Type = n.Type
		Fixlargeoffset(a)
		return

	case ODOTPTR:
		Cgenr(n.Left, a, res)
		if !n.Left.NonNil {
			Cgen_checknil(a)
		} else if Debug_checknil != 0 && n.Lineno > 1 {
			Warnl(n.Lineno, "removed nil check")
		}
		a.Op = OINDREG
		a.Xoffset += n.Xoffset
		a.Type = n.Type
		Fixlargeoffset(a)
		return

	case OCALLFUNC, OCALLMETH, OCALLINTER:
		switch n.Op {
		case OCALLFUNC:
			cgen_call(n, 0)

		case OCALLMETH:
			cgen_callmeth(n, 0)

		case OCALLINTER:
			cgen_callinter(n, nil, 0)
		}

		fp := n.Left.Type.Results().Field(0)
		*a = Node{}
		a.Op = OINDREG
		a.Reg = int16(Thearch.REGSP)
		a.Addable = true
		a.Xoffset = fp.Offset + Ctxt.FixedFrameSize()
		a.Type = n.Type
		return

	case OINDEX:
		// Index of fixed-size array by constant can
		// put the offset in the addressing.
		// Could do the same for slice except that we need
		// to use the real index for the bounds checking.
		if n.Left.Type.IsArray() || (n.Left.Type.IsPtr() && n.Left.Left.Type.IsArray()) {
			if Isconst(n.Right, CTINT) {
				// Compute &a.
				if !n.Left.Type.IsPtr() {
					Igen(n.Left, a, res)
				} else {
					var n1 Node
					Igen(n.Left, &n1, res)
					Cgen_checknil(&n1)
					Regalloc(a, Types[Tptr], res)
					Thearch.Gmove(&n1, a)
					Regfree(&n1)
					a.Op = OINDREG
				}

				// Compute &a[i] as &a + i*width.
				a.Type = n.Type

				a.Xoffset += n.Right.Int64() * n.Type.Width
				Fixlargeoffset(a)
				return
			}
		}
	}

	Agenr(n, a, res)
	a.Op = OINDREG
	a.Type = n.Type
}

// Bgen generates code for branches:
//
//	if n == wantTrue {
//		goto to
//	}
func Bgen(n *Node, wantTrue bool, likely int, to *obj.Prog) {
	bgenx(n, nil, wantTrue, likely, to)
}

// Bvgen generates code for calculating boolean values:
//	res = n == wantTrue
func Bvgen(n, res *Node, wantTrue bool) {
	if Thearch.Ginsboolval == nil {
		// Direct value generation not implemented for this architecture.
		// Implement using jumps.
		bvgenjump(n, res, wantTrue, true)
		return
	}
	bgenx(n, res, wantTrue, 0, nil)
}

// bvgenjump implements boolean value generation using jumps:
//	if n == wantTrue {
//		res = 1
//	} else {
//		res = 0
//	}
// geninit controls whether n's Ninit is generated.
func bvgenjump(n, res *Node, wantTrue, geninit bool) {
	init := n.Ninit
	if !geninit {
		n.Ninit.Set(nil)
	}
	p1 := Gbranch(obj.AJMP, nil, 0)
	p2 := Pc
	Thearch.Gmove(Nodbool(true), res)
	p3 := Gbranch(obj.AJMP, nil, 0)
	Patch(p1, Pc)
	Bgen(n, wantTrue, 0, p2)
	Thearch.Gmove(Nodbool(false), res)
	Patch(p3, Pc)
	n.Ninit.MoveNodes(&init)
}

// bgenx is the backend for Bgen and Bvgen.
// If res is nil, it generates a branch.
// Otherwise, it generates a boolean value.
func bgenx(n, res *Node, wantTrue bool, likely int, to *obj.Prog) {
	if Debug['g'] != 0 {
		fmt.Printf("\nbgenx wantTrue=%t likely=%d to=%v\n", wantTrue, likely, to)
		Dump("n", n)
		Dump("res", res)
	}

	genval := res != nil

	if n == nil {
		n = Nodbool(true)
	}

	Genlist(n.Ninit)

	if n.Type == nil {
		n = convlit(n, Types[TBOOL])
		if n.Type == nil {
			return
		}
	}

	if !n.Type.IsBoolean() {
		Fatalf("bgen: bad type %v for %v", n.Type, n.Op)
	}

	for n.Op == OCONVNOP {
		n = n.Left
		Genlist(n.Ninit)
	}

	if Thearch.Bgen_float != nil && n.Left != nil && n.Left.Type.IsFloat() {
		if genval {
			bvgenjump(n, res, wantTrue, false)
			return
		}
		Thearch.Bgen_float(n, wantTrue, likely, to)
		return
	}

	switch n.Op {
	default:
		if genval {
			Cgen(n, res)
			if !wantTrue {
				Thearch.Gins(Thearch.Optoas(OXOR, Types[TUINT8]), Nodintconst(1), res)
			}
			return
		}

		var tmp Node
		Regalloc(&tmp, n.Type, nil)
		Cgen(n, &tmp)
		bgenNonZero(&tmp, nil, wantTrue, likely, to)
		Regfree(&tmp)
		return

	case ONAME:
		// Some architectures might need a temporary or other help here,
		// but they don't support direct generation of a bool value yet.
		// We can fix that as we go.
		mayNeedTemp := Ctxt.Arch.InFamily(sys.ARM, sys.ARM64, sys.MIPS64, sys.PPC64, sys.S390X)

		if genval {
			if mayNeedTemp {
				Fatalf("genval ONAMES not fully implemented")
			}
			Cgen(n, res)
			if !wantTrue {
				Thearch.Gins(Thearch.Optoas(OXOR, Types[TUINT8]), Nodintconst(1), res)
			}
			return
		}

		if n.Addable && !mayNeedTemp {
			// no need for a temporary
			bgenNonZero(n, nil, wantTrue, likely, to)
			return
		}
		var tmp Node
		Regalloc(&tmp, n.Type, nil)
		Cgen(n, &tmp)
		bgenNonZero(&tmp, nil, wantTrue, likely, to)
		Regfree(&tmp)
		return

	case OLITERAL:
		// n is a constant.
		if !Isconst(n, CTBOOL) {
			Fatalf("bgen: non-bool const %v\n", Nconv(n, FmtLong))
		}
		if genval {
			Cgen(Nodbool(wantTrue == n.Val().U.(bool)), res)
			return
		}
		// If n == wantTrue, jump; otherwise do nothing.
		if wantTrue == n.Val().U.(bool) {
			Patch(Gbranch(obj.AJMP, nil, likely), to)
		}
		return

	case OANDAND, OOROR:
		and := (n.Op == OANDAND) == wantTrue
		if genval {
			p1 := Gbranch(obj.AJMP, nil, 0)
			p2 := Gbranch(obj.AJMP, nil, 0)
			Patch(p2, Pc)
			Cgen(Nodbool(!and), res)
			p3 := Gbranch(obj.AJMP, nil, 0)
			Patch(p1, Pc)
			Bgen(n.Left, wantTrue != and, 0, p2)
			Bvgen(n.Right, res, wantTrue)
			Patch(p3, Pc)
			return
		}

		if and {
			p1 := Gbranch(obj.AJMP, nil, 0)
			p2 := Gbranch(obj.AJMP, nil, 0)
			Patch(p1, Pc)
			Bgen(n.Left, !wantTrue, -likely, p2)
			Bgen(n.Right, !wantTrue, -likely, p2)
			p1 = Gbranch(obj.AJMP, nil, 0)
			Patch(p1, to)
			Patch(p2, Pc)
		} else {
			Bgen(n.Left, wantTrue, likely, to)
			Bgen(n.Right, wantTrue, likely, to)
		}
		return

	case ONOT: // unary
		if n.Left == nil || n.Left.Type == nil {
			return
		}
		bgenx(n.Left, res, !wantTrue, likely, to)
		return

	case OEQ, ONE, OLT, OGT, OLE, OGE:
		if n.Left == nil || n.Left.Type == nil || n.Right == nil || n.Right.Type == nil {
			return
		}
	}

	// n.Op is one of OEQ, ONE, OLT, OGT, OLE, OGE
	nl := n.Left
	nr := n.Right
	op := n.Op

	if !wantTrue {
		if nr.Type.IsFloat() {
			// Brcom is not valid on floats when NaN is involved.
			ll := n.Ninit // avoid re-genning Ninit
			n.Ninit.Set(nil)
			if genval {
				bgenx(n, res, true, likely, to)
				Thearch.Gins(Thearch.Optoas(OXOR, Types[TUINT8]), Nodintconst(1), res) // res = !res
				n.Ninit.Set(ll.Slice())
				return
			}
			p1 := Gbranch(obj.AJMP, nil, 0)
			p2 := Gbranch(obj.AJMP, nil, 0)
			Patch(p1, Pc)
			bgenx(n, res, true, -likely, p2)
			Patch(Gbranch(obj.AJMP, nil, 0), to)
			Patch(p2, Pc)
			n.Ninit.Set(ll.Slice())
			return
		}

		op = Brcom(op)
	}
	wantTrue = true

	// make simplest on right
	if nl.Op == OLITERAL || (nl.Ullman < nr.Ullman && nl.Ullman < UINF) {
		op = Brrev(op)
		nl, nr = nr, nl
	}

	if nl.Type.IsSlice() || nl.Type.IsInterface() {
		// front end should only leave cmp to literal nil
		if (op != OEQ && op != ONE) || nr.Op != OLITERAL {
			if nl.Type.IsSlice() {
				Yyerror("illegal slice comparison")
			} else {
				Yyerror("illegal interface comparison")
			}
			return
		}

		var ptr Node
		Igen(nl, &ptr, nil)
		if nl.Type.IsSlice() {
			ptr.Xoffset += int64(Array_array)
		}
		ptr.Type = Types[Tptr]
		var tmp Node
		Regalloc(&tmp, ptr.Type, &ptr)
		Cgen(&ptr, &tmp)
		Regfree(&ptr)
		bgenNonZero(&tmp, res, op == OEQ != wantTrue, likely, to)
		Regfree(&tmp)
		return
	}

	if nl.Type.IsComplex() {
		complexbool(op, nl, nr, res, wantTrue, likely, to)
		return
	}

	if Ctxt.Arch.RegSize == 4 && Is64(nr.Type) {
		if genval {
			// TODO: Teach Cmp64 to generate boolean values and remove this.
			bvgenjump(n, res, wantTrue, false)
			return
		}
		if !nl.Addable || Isconst(nl, CTINT) {
			nl = CgenTemp(nl)
		}
		if !nr.Addable {
			nr = CgenTemp(nr)
		}
		Thearch.Cmp64(nl, nr, op, likely, to)
		return
	}

	if nr.Ullman >= UINF {
		var n1 Node
		Regalloc(&n1, nl.Type, nil)
		Cgen(nl, &n1)
		nl = &n1

		var tmp Node
		Tempname(&tmp, nl.Type)
		Thearch.Gmove(&n1, &tmp)
		Regfree(&n1)

		var n2 Node
		Regalloc(&n2, nr.Type, nil)
		Cgen(nr, &n2)
		nr = &n2

		Regalloc(&n1, nl.Type, nil)
		Cgen(&tmp, &n1)
		Regfree(&n1)
		Regfree(&n2)
	} else {
		var n1 Node
		if !nl.Addable && Ctxt.Arch.Family == sys.I386 {
			Tempname(&n1, nl.Type)
		} else {
			Regalloc(&n1, nl.Type, nil)
			defer Regfree(&n1)
		}
		Cgen(nl, &n1)
		nl = &n1

		if Smallintconst(nr) && Ctxt.Arch.Family != sys.MIPS64 && Ctxt.Arch.Family != sys.PPC64 {
			Thearch.Gins(Thearch.Optoas(OCMP, nr.Type), nl, nr)
			bins(nr.Type, res, op, likely, to)
			return
		}

		if !nr.Addable && Ctxt.Arch.Family == sys.I386 {
			nr = CgenTemp(nr)
		}

		var n2 Node
		Regalloc(&n2, nr.Type, nil)
		Cgen(nr, &n2)
		nr = &n2
		Regfree(&n2)
	}

	l, r := nl, nr

	// On x86, only < and <= work right with NaN; reverse if needed
	if Ctxt.Arch.Family == sys.AMD64 && nl.Type.IsFloat() && (op == OGT || op == OGE) {
		l, r = r, l
		op = Brrev(op)
	}

	// MIPS does not have CMP instruction
	if Ctxt.Arch.Family == sys.MIPS64 {
		p := Thearch.Ginscmp(op, nr.Type, l, r, likely)
		Patch(p, to)
		return
	}

	// Do the comparison.
	Thearch.Gins(Thearch.Optoas(OCMP, nr.Type), l, r)

	// Handle floating point special cases.
	// Note that 8g has Bgen_float and is handled above.
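	// (Added note: the branches below account for unordered (NaN) compare
	// results: the parity condition on amd64, the PS condition on arm, and
	// <=/>= decomposed into </> plus == on arm64 and ppc64.)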
	if nl.Type.IsFloat() {
		switch Ctxt.Arch.Family {
		case sys.ARM:
			if genval {
				Fatalf("genval 5g Isfloat special cases not implemented")
			}
			switch n.Op {
			case ONE:
				Patch(Gbranch(Thearch.Optoas(OPS, nr.Type), nr.Type, likely), to)
				Patch(Gbranch(Thearch.Optoas(op, nr.Type), nr.Type, likely), to)
			default:
				p := Gbranch(Thearch.Optoas(OPS, nr.Type), nr.Type, -likely)
				Patch(Gbranch(Thearch.Optoas(op, nr.Type), nr.Type, likely), to)
				Patch(p, Pc)
			}
			return
		case sys.AMD64:
			switch n.Op {
			case OEQ:
				// neither NE nor P
				if genval {
					var reg Node
					Regalloc(&reg, Types[TBOOL], nil)
					Thearch.Ginsboolval(Thearch.Optoas(OEQ, nr.Type), &reg)
					Thearch.Ginsboolval(Thearch.Optoas(OPC, nr.Type), res)
					Thearch.Gins(Thearch.Optoas(OAND, Types[TBOOL]), &reg, res)
					Regfree(&reg)
				} else {
					p1 := Gbranch(Thearch.Optoas(ONE, nr.Type), nil, -likely)
					p2 := Gbranch(Thearch.Optoas(OPS, nr.Type), nil, -likely)
					Patch(Gbranch(obj.AJMP, nil, 0), to)
					Patch(p1, Pc)
					Patch(p2, Pc)
				}
				return
			case ONE:
				// either NE or P
				if genval {
					var reg Node
					Regalloc(&reg, Types[TBOOL], nil)
					Thearch.Ginsboolval(Thearch.Optoas(ONE, nr.Type), &reg)
					Thearch.Ginsboolval(Thearch.Optoas(OPS, nr.Type), res)
					Thearch.Gins(Thearch.Optoas(OOR, Types[TBOOL]), &reg, res)
					Regfree(&reg)
				} else {
					Patch(Gbranch(Thearch.Optoas(ONE, nr.Type), nil, likely), to)
					Patch(Gbranch(Thearch.Optoas(OPS, nr.Type), nil, likely), to)
				}
				return
			}
		case sys.ARM64, sys.PPC64:
			if genval {
				Fatalf("genval 7g, 9g Isfloat special cases not implemented")
			}
			switch n.Op {
			// On arm64 and ppc64, <= and >= mishandle NaN. Must decompose into < or > and =.
			// TODO(josh): Convert a <= b to b > a instead?
			case OLE, OGE:
				if op == OLE {
					op = OLT
				} else {
					op = OGT
				}
				Patch(Gbranch(Thearch.Optoas(op, nr.Type), nr.Type, likely), to)
				Patch(Gbranch(Thearch.Optoas(OEQ, nr.Type), nr.Type, likely), to)
				return
			}
		}
	}

	// Not a special case. Insert the conditional jump or value gen.
	bins(nr.Type, res, op, likely, to)
}

func bgenNonZero(n, res *Node, wantTrue bool, likely int, to *obj.Prog) {
	// TODO: Optimize on systems that can compare to zero easily.
	var op Op = ONE
	if !wantTrue {
		op = OEQ
	}

	// MIPS does not have CMP instruction
	if Thearch.LinkArch.Family == sys.MIPS64 {
		p := Gbranch(Thearch.Optoas(op, n.Type), n.Type, likely)
		Naddr(&p.From, n)
		Patch(p, to)
		return
	}

	var zero Node
	Nodconst(&zero, n.Type, 0)
	Thearch.Gins(Thearch.Optoas(OCMP, n.Type), n, &zero)
	bins(n.Type, res, op, likely, to)
}

// bins inserts an instruction to handle the result of a compare.
// If res is non-nil, it inserts appropriate value generation instructions.
// If res is nil, it inserts a branch to to.
func bins(typ *Type, res *Node, op Op, likely int, to *obj.Prog) {
	a := Thearch.Optoas(op, typ)
	if res != nil {
		// value gen
		Thearch.Ginsboolval(a, res)
	} else {
		// jump
		Patch(Gbranch(a, typ, likely), to)
	}
}

// stkof returns n's offset from SP if n is on the stack
// (either a local variable or the return value from a function call
// or the arguments to a function call).
// If n is not on the stack, stkof returns -1000.
// If n is on the stack but in an unknown location
// (due to array index arithmetic), stkof returns +1000.
//
// NOTE(rsc): It is possible that the ODOT and OINDEX cases
// are not relevant here, since it shouldn't be possible for them
// to be involved in an overlapping copy. Only function results
// from one call and the arguments to the next can overlap in
// any non-trivial way. If they can be dropped, then this function
// becomes much simpler and also more trustworthy.
// The fact that it works at all today is probably due to the fact
// that ODOT and OINDEX are irrelevant.
func stkof(n *Node) int64 {
	switch n.Op {
	case OINDREG:
		if n.Reg != int16(Thearch.REGSP) {
			return -1000 // not on stack
		}
		return n.Xoffset

	case ODOT:
		t := n.Left.Type
		if t.IsPtr() {
			break
		}
		off := stkof(n.Left)
		if off == -1000 || off == +1000 {
			return off
		}
		return off + n.Xoffset

	case OINDEX:
		t := n.Left.Type
		if !t.IsArray() {
			break
		}
		off := stkof(n.Left)
		if off == -1000 || off == +1000 {
			return off
		}
		if Isconst(n.Right, CTINT) {
			return off + t.Elem().Width*n.Right.Int64()
		}
		return +1000 // on stack but not sure exactly where

	case OCALLMETH, OCALLINTER, OCALLFUNC:
		t := n.Left.Type
		if t.IsPtr() {
			t = t.Elem()
		}

		f := t.Results().Field(0)
		if f != nil {
			return f.Offset + Ctxt.FixedFrameSize()
		}
	}

	// botch - probably failing to recognize address
	// arithmetic on the above. eg INDEX and DOT
	return -1000 // not on stack
}

// block copy:
// memmove(&ns, &n, w);
// if wb is true, needs write barrier.
func sgen_wb(n *Node, ns *Node, w int64, wb bool) {
	if Debug['g'] != 0 {
		op := "sgen"
		if wb {
			op = "sgen-wb"
		}
		fmt.Printf("\n%s w=%d\n", op, w)
		Dump("r", n)
		Dump("res", ns)
	}

	if n.Ullman >= UINF && ns.Ullman >= UINF {
		Fatalf("sgen UINF")
	}

	if w < 0 {
		Fatalf("sgen copy %d", w)
	}

	// If copying .args, that's all the results, so record definition sites
	// for them for the liveness analysis.
	if ns.Op == ONAME && ns.Sym.Name == ".args" {
		for _, ln := range Curfn.Func.Dcl {
			if ln.Class == PPARAMOUT {
				Gvardef(ln)
			}
		}
	}

	// Avoid taking the address for simple enough types.
	if componentgen_wb(n, ns, wb) {
		return
	}

	if w == 0 {
		// evaluate side effects only
		var nodr Node
		Regalloc(&nodr, Types[Tptr], nil)
		Agen(ns, &nodr)
		Agen(n, &nodr)
		Regfree(&nodr)
		return
	}

	// offset on the stack
	osrc := stkof(n)
	odst := stkof(ns)

	if odst != -1000 {
		// on stack, write barrier not needed after all
		wb = false
	}

	if osrc != -1000 && odst != -1000 && (osrc == 1000 || odst == 1000) || wb && osrc != -1000 {
		// osrc and odst both on stack, and at least one is in
		// an unknown position. Could generate code to test
		// for forward/backward copy, but instead just copy
		// to a temporary location first.
		//
		// OR: write barrier needed and source is on stack.
		// Invoking the write barrier will use the stack to prepare its call.
		// Copy to temporary.
		var tmp Node
		Tempname(&tmp, n.Type)
		sgen_wb(n, &tmp, w, false)
		sgen_wb(&tmp, ns, w, wb)
		return
	}

	if wb {
		cgen_wbfat(n, ns)
		return
	}

	Thearch.Blockcopy(n, ns, osrc, odst, w)
}

// generate:
// call f
// proc=-1 normal call but no return
// proc=0 normal call
// proc=1 goroutine run in new proc
// proc=2 defer call save away stack
// proc=3 normal call to C pointer (not Go func value)
func Ginscall(f *Node, proc int) {
	if f.Type != nil {
		extra := int32(0)
		if proc == 1 || proc == 2 {
			extra = 2 * int32(Widthptr)
		}
		Setmaxarg(f.Type, extra)
	}

	switch proc {
	default:
		Fatalf("Ginscall: bad proc %d", proc)

	case 0, // normal call
		-1: // normal call but no return
		if f.Op == ONAME && f.Class == PFUNC {
			if f == Deferreturn {
				// Deferred calls will appear to be returning to the CALL
				// deferreturn(SB) that we are about to emit. However, the
				// stack scanning code will think that the instruction
				// before the CALL is executing. To avoid the scanning
				// code making bad assumptions (both cosmetic such as
				// showing the wrong line number and fatal, such as being
				// confused over whether a stack slot contains a pointer
				// or a scalar) insert an actual hardware NOP that will
				// have the right line number. This is different from
				// obj.ANOP, which is a virtual no-op that doesn't make it
				// into the instruction stream.
				Thearch.Ginsnop()

				if Thearch.LinkArch.Family == sys.PPC64 {
					// On ppc64, when compiling Go into position
					// independent code on ppc64le we insert an
					// instruction to reload the TOC pointer from the
					// stack as well. See the long comment near
					// jmpdefer in runtime/asm_ppc64.s for why.
					// If the MOVD is not needed, insert a hardware NOP
					// so that the same number of instructions are used
					// on ppc64 in both shared and non-shared modes.
2373 if Ctxt.Flag_shared {
2374 p := Thearch.Gins(ppc64.AMOVD, nil, nil)
2375 p.From.Type = obj.TYPE_MEM
2376 p.From.Offset = 24
2377 p.From.Reg = ppc64.REGSP
2378 p.To.Type = obj.TYPE_REG
2379 p.To.Reg = ppc64.REG_R2
2380 } else {
2381 Thearch.Ginsnop()
2382 }
2383 }
2384 }
2385
2386 p := Thearch.Gins(obj.ACALL, nil, f)
2387 Afunclit(&p.To, f)
2388 if proc == -1 || Noreturn(p) {
2389 Thearch.Gins(obj.AUNDEF, nil, nil)
2390 }
2391 break
2392 }
2393
2394 var reg Node
2395 Nodreg(&reg, Types[Tptr], Thearch.REGCTXT)
2396 var r1 Node
2397 Nodreg(&r1, Types[Tptr], Thearch.REGCALLX)
2398 Thearch.Gmove(f, &reg)
2399 reg.Op = OINDREG
2400 Thearch.Gmove(&reg, &r1)
2401 reg.Op = OREGISTER
2402 Thearch.Gins(obj.ACALL, &reg, &r1)
2403
2404 case 3: // normal call of c function pointer
2405 Thearch.Gins(obj.ACALL, nil, f)
2406
2407 case 1, // call in new proc (go)
2408 2: // deferred call (defer)
2409 var stk Node
2410
2411 // size of arguments at 0(SP)
2412 stk.Op = OINDREG
2413 stk.Reg = int16(Thearch.REGSP)
2414 stk.Xoffset = Ctxt.FixedFrameSize()
2415 Thearch.Ginscon(Thearch.Optoas(OAS, Types[TINT32]), int64(Argsize(f.Type)), &stk)
2416
2417 // FuncVal* at 8(SP)
2418 stk.Xoffset = int64(Widthptr) + Ctxt.FixedFrameSize()
2419
2420 var reg Node
2421 Nodreg(&reg, Types[Tptr], Thearch.REGCALLX2)
2422 Thearch.Gmove(f, &reg)
2423 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &reg, &stk)
2424
2425 if proc == 1 {
2426 Ginscall(Newproc, 0)
2427 } else {
2428 if !hasdefer {
2429 Fatalf("hasdefer=0 but has defer")
2430 }
2431 Ginscall(Deferproc, 0)
2432 }
2433
2434 if proc == 2 {
2435 Nodreg(&reg, Types[TINT32], Thearch.REGRETURN)
2436 p := Thearch.Ginscmp(OEQ, Types[TINT32], &reg, Nodintconst(0), +1)
2437 cgen_ret(nil)
2438 Patch(p, Pc)
2439 }
2440 }
2441 }
2442
2443 // n is call to interface method.
2444 // generate res = n.
2445 func cgen_callinter(n *Node, res *Node, proc int) {
2446 i := n.Left
2447 if i.Op != ODOTINTER {
2448 Fatalf("cgen_callinter: not ODOTINTER %v", i.Op)
2449 }
2450
2451 i = i.Left // interface
2452
2453 if !i.Addable {
2454 var tmpi Node
2455 Tempname(&tmpi, i.Type)
2456 Cgen(i, &tmpi)
2457 i = &tmpi
2458 }
2459
2460 Genlist(n.List) // assign the args
2461
2462 // i is now addable, prepare an indirected
2463 // register to hold its address.
2464 var nodi Node
2465 Igen(i, &nodi, res) // REG = &inter
2466
2467 var nodsp Node
2468 Nodindreg(&nodsp, Types[Tptr], Thearch.REGSP)
2469 nodsp.Xoffset = Ctxt.FixedFrameSize()
2470 if proc != 0 {
2471 nodsp.Xoffset += 2 * int64(Widthptr) // leave room for size & fn
2472 }
2473 nodi.Type = Types[Tptr]
2474 nodi.Xoffset += int64(Widthptr)
2475 Cgen(&nodi, &nodsp) // {0, 8(nacl), or 16}(SP) = 8(REG) -- i.data
2476
2477 var nodo Node
2478 Regalloc(&nodo, Types[Tptr], res)
2479
2480 nodi.Type = Types[Tptr]
2481 nodi.Xoffset -= int64(Widthptr)
2482 Cgen(&nodi, &nodo) // REG = 0(REG) -- i.tab
2483 Regfree(&nodi)
2484
2485 var nodr Node
2486 Regalloc(&nodr, Types[Tptr], &nodo)
2487 if n.Left.Xoffset == BADWIDTH {
2488 Fatalf("cgen_callinter: badwidth")
2489 }
2490 Cgen_checknil(&nodo) // in case offset is huge
2491 nodo.Op = OINDREG
2492 nodo.Xoffset = n.Left.Xoffset + 3*int64(Widthptr) + 8
2493 if proc == 0 {
2494 // plain call: use direct c function pointer - more efficient
2495 Cgen(&nodo, &nodr) // REG = 32+offset(REG) -- i.tab->fun[f]
2496 proc = 3
2497 } else {
2498 // go/defer. generate go func value.
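// The offset computed above (n.Left.Xoffset + 3*Widthptr + 8) is the
// position of fun[f] inside the runtime itab of this vintage, roughly:
//
//	type itab struct {
//		inter  *interfacetype
//		_type  *_type
//		link   *itab
//		bad    int32
//		unused int32
//		fun    [1]uintptr // method pointers start here
//	}
//
// For a plain call the method pointer itself is loaded (Cgen above); for
// go/defer the address of that slot is taken instead (Agen below), since
// those paths expect a func value shaped like a *funcval.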
2499 Agen(&nodo, &nodr) // REG = &(32+offset(REG)) -- i.tab->fun[f] 2500 } 2501 2502 nodr.Type = n.Left.Type 2503 Ginscall(&nodr, proc) 2504 2505 Regfree(&nodr) 2506 Regfree(&nodo) 2507 } 2508 2509 // generate function call; 2510 // proc=0 normal call 2511 // proc=1 goroutine run in new proc 2512 // proc=2 defer call save away stack 2513 func cgen_call(n *Node, proc int) { 2514 if n == nil { 2515 return 2516 } 2517 2518 var afun Node 2519 if n.Left.Ullman >= UINF { 2520 // if name involves a fn call 2521 // precompute the address of the fn 2522 Tempname(&afun, Types[Tptr]) 2523 2524 Cgen(n.Left, &afun) 2525 } 2526 2527 Genlist(n.List) // assign the args 2528 t := n.Left.Type 2529 2530 // call tempname pointer 2531 if n.Left.Ullman >= UINF { 2532 var nod Node 2533 Regalloc(&nod, Types[Tptr], nil) 2534 Cgen_as(&nod, &afun) 2535 nod.Type = t 2536 Ginscall(&nod, proc) 2537 Regfree(&nod) 2538 return 2539 } 2540 2541 // call pointer 2542 if n.Left.Op != ONAME || n.Left.Class != PFUNC { 2543 var nod Node 2544 Regalloc(&nod, Types[Tptr], nil) 2545 Cgen_as(&nod, n.Left) 2546 nod.Type = t 2547 Ginscall(&nod, proc) 2548 Regfree(&nod) 2549 return 2550 } 2551 2552 // call direct 2553 n.Left.Name.Method = true 2554 2555 Ginscall(n.Left, proc) 2556 } 2557 2558 // call to n has already been generated. 2559 // generate: 2560 // res = return value from call. 2561 func cgen_callret(n *Node, res *Node) { 2562 t := n.Left.Type 2563 if t.Etype == TPTR32 || t.Etype == TPTR64 { 2564 t = t.Elem() 2565 } 2566 2567 fp := t.Results().Field(0) 2568 if fp == nil { 2569 Fatalf("cgen_callret: nil") 2570 } 2571 2572 var nod Node 2573 nod.Op = OINDREG 2574 nod.Reg = int16(Thearch.REGSP) 2575 nod.Addable = true 2576 2577 nod.Xoffset = fp.Offset + Ctxt.FixedFrameSize() 2578 nod.Type = fp.Type 2579 Cgen_as(res, &nod) 2580 } 2581 2582 // call to n has already been generated. 2583 // generate: 2584 // res = &return value from call. 2585 func cgen_aret(n *Node, res *Node) { 2586 t := n.Left.Type 2587 if t.IsPtr() { 2588 t = t.Elem() 2589 } 2590 2591 fp := t.Results().Field(0) 2592 if fp == nil { 2593 Fatalf("cgen_aret: nil") 2594 } 2595 2596 var nod1 Node 2597 nod1.Op = OINDREG 2598 nod1.Reg = int16(Thearch.REGSP) 2599 nod1.Addable = true 2600 nod1.Xoffset = fp.Offset + Ctxt.FixedFrameSize() 2601 nod1.Type = fp.Type 2602 2603 if res.Op != OREGISTER { 2604 var nod2 Node 2605 Regalloc(&nod2, Types[Tptr], res) 2606 Agen(&nod1, &nod2) 2607 Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), &nod2, res) 2608 Regfree(&nod2) 2609 } else { 2610 Agen(&nod1, res) 2611 } 2612 } 2613 2614 // generate return. 2615 // n->left is assignments to return values. 2616 func cgen_ret(n *Node) { 2617 if n != nil { 2618 Genlist(n.List) // copy out args 2619 } 2620 if hasdefer { 2621 Ginscall(Deferreturn, 0) 2622 } 2623 Genlist(Curfn.Func.Exit) 2624 p := Thearch.Gins(obj.ARET, nil, nil) 2625 if n != nil && n.Op == ORETJMP { 2626 p.To.Type = obj.TYPE_MEM 2627 p.To.Name = obj.NAME_EXTERN 2628 p.To.Sym = Linksym(n.Left.Sym) 2629 } 2630 } 2631 2632 // hasHMUL64 reports whether the architecture supports 64-bit 2633 // signed and unsigned high multiplication (OHMUL). 2634 func hasHMUL64() bool { 2635 switch Ctxt.Arch.Family { 2636 case sys.AMD64, sys.S390X, sys.ARM64: 2637 return true 2638 case sys.ARM, sys.I386, sys.MIPS64, sys.PPC64: 2639 return false 2640 } 2641 Fatalf("unknown architecture") 2642 return false 2643 } 2644 2645 // hasRROTC64 reports whether the architecture supports 64-bit 2646 // rotate through carry instructions (ORROTC). 
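// In the divide-by-constant code below this maps to a sequence like
// (amd64 mnemonics, sketch only):
//
//	ADDQ num, q       // may carry out of bit 63
//	RCRQ $1, q        // rotate right through carry: carry -> bit 63
//	SHRQ $(s-1), q    // finish the shift by s
//
// which computes (q + num) >> s without losing the 65th bit.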
2647 func hasRROTC64() bool { 2648 switch Ctxt.Arch.Family { 2649 case sys.AMD64: 2650 return true 2651 case sys.ARM, sys.ARM64, sys.I386, sys.MIPS64, sys.PPC64, sys.S390X: 2652 return false 2653 } 2654 Fatalf("unknown architecture") 2655 return false 2656 } 2657 2658 func hasRightShiftWithCarry() bool { 2659 switch Ctxt.Arch.Family { 2660 case sys.ARM64: 2661 return true 2662 case sys.AMD64, sys.ARM, sys.I386, sys.MIPS64, sys.PPC64, sys.S390X: 2663 return false 2664 } 2665 Fatalf("unknown architecture") 2666 return false 2667 } 2668 2669 func hasAddSetCarry() bool { 2670 switch Ctxt.Arch.Family { 2671 case sys.ARM64: 2672 return true 2673 case sys.AMD64, sys.ARM, sys.I386, sys.MIPS64, sys.PPC64, sys.S390X: 2674 return false 2675 } 2676 Fatalf("unknown architecture") 2677 return false 2678 } 2679 2680 // generate division according to op, one of: 2681 // res = nl / nr 2682 // res = nl % nr 2683 func cgen_div(op Op, nl *Node, nr *Node, res *Node) { 2684 var w int 2685 2686 // Architectures need to support 64-bit high multiplications 2687 // (OHMUL) in order to perform divide by constant optimizations. 2688 if nr.Op != OLITERAL || !hasHMUL64() { 2689 goto longdiv 2690 } 2691 w = int(nl.Type.Width * 8) 2692 2693 // Front end handled 32-bit division. We only need to handle 64-bit. 2694 // Try to do division using multiplication: (2^w)/d. 2695 // See Hacker's Delight, chapter 10. 2696 switch Simtype[nl.Type.Etype] { 2697 default: 2698 goto longdiv 2699 2700 case TUINT64: 2701 var m Magic 2702 m.W = w 2703 m.Ud = uint64(nr.Int64()) 2704 Umagic(&m) 2705 if m.Bad != 0 { 2706 break 2707 } 2708 2709 // In order to add the numerator we need to be able to 2710 // avoid overflow. This is done by shifting the result of the 2711 // addition right by 1 and inserting the carry bit into 2712 // the MSB. For now this needs the RROTC instruction. 2713 // TODO(mundaym): Hacker's Delight 2nd ed. chapter 10 proposes 2714 // an alternative sequence of instructions for architectures 2715 // (TODO: MIPS64, PPC64, S390X) that do not have a shift 2716 // right with carry instruction. 2717 if m.Ua != 0 && !hasRROTC64() && !hasRightShiftWithCarry() { 2718 goto longdiv 2719 } 2720 if op == OMOD { 2721 goto longmod 2722 } 2723 2724 var n1 Node 2725 Cgenr(nl, &n1, nil) 2726 var n2 Node 2727 Nodconst(&n2, nl.Type, int64(m.Um)) 2728 var n3 Node 2729 Regalloc(&n3, nl.Type, res) 2730 Thearch.Cgen_hmul(&n1, &n2, &n3) 2731 2732 if m.Ua != 0 { 2733 // Need to add numerator accounting for overflow. 
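// (When the magic multiplier fits without the fixup, m.Ua == 0 and the
// quotient is just the high product shifted; for example
//
//	q = hi64(x * 0xAAAAAAAAAAAAAAAB) >> 1   // x / 3 for any uint64 x
//
// Divisors such as 7 would need a 65-bit multiplier, so for them the
// numerator is added back in here with the carry preserved.)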
2734 if hasAddSetCarry() { 2735 Thearch.AddSetCarry(&n1, &n3, &n3) 2736 } else { 2737 Thearch.Gins(Thearch.Optoas(OADD, nl.Type), &n1, &n3) 2738 } 2739 2740 if !hasRROTC64() { 2741 Thearch.RightShiftWithCarry(&n3, uint(m.S), &n3) 2742 } else { 2743 Nodconst(&n2, nl.Type, 1) 2744 Thearch.Gins(Thearch.Optoas(ORROTC, nl.Type), &n2, &n3) 2745 Nodconst(&n2, nl.Type, int64(m.S)-1) 2746 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) 2747 } 2748 } else { 2749 Nodconst(&n2, nl.Type, int64(m.S)) 2750 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) // shift dx 2751 } 2752 2753 Thearch.Gmove(&n3, res) 2754 Regfree(&n1) 2755 Regfree(&n3) 2756 return 2757 2758 case TINT64: 2759 var m Magic 2760 m.W = w 2761 m.Sd = nr.Int64() 2762 Smagic(&m) 2763 if m.Bad != 0 { 2764 break 2765 } 2766 if op == OMOD { 2767 goto longmod 2768 } 2769 2770 var n1 Node 2771 Cgenr(nl, &n1, res) 2772 var n2 Node 2773 Nodconst(&n2, nl.Type, m.Sm) 2774 var n3 Node 2775 Regalloc(&n3, nl.Type, nil) 2776 Thearch.Cgen_hmul(&n1, &n2, &n3) 2777 2778 if m.Sm < 0 { 2779 // Need to add numerator (cannot overflow). 2780 Thearch.Gins(Thearch.Optoas(OADD, nl.Type), &n1, &n3) 2781 } 2782 2783 Nodconst(&n2, nl.Type, int64(m.S)) 2784 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n3) // shift n3 2785 2786 Nodconst(&n2, nl.Type, int64(w)-1) 2787 2788 Thearch.Gins(Thearch.Optoas(ORSH, nl.Type), &n2, &n1) // -1 iff num is neg 2789 Thearch.Gins(Thearch.Optoas(OSUB, nl.Type), &n1, &n3) // added 2790 2791 if m.Sd < 0 { 2792 // This could probably be removed by factoring it into 2793 // the multiplier. 2794 Thearch.Gins(Thearch.Optoas(OMINUS, nl.Type), nil, &n3) 2795 } 2796 2797 Thearch.Gmove(&n3, res) 2798 Regfree(&n1) 2799 Regfree(&n3) 2800 return 2801 } 2802 2803 goto longdiv 2804 2805 // Division and mod using (slow) hardware instruction. 2806 longdiv: 2807 Thearch.Dodiv(op, nl, nr, res) 2808 2809 return 2810 2811 // Mod using formula A%B = A-(A/B*B) but 2812 // we know that there is a fast algorithm for A/B. 2813 longmod: 2814 var n1 Node 2815 Regalloc(&n1, nl.Type, res) 2816 2817 Cgen(nl, &n1) 2818 var n2 Node 2819 Regalloc(&n2, nl.Type, nil) 2820 cgen_div(ODIV, &n1, nr, &n2) 2821 a := Thearch.Optoas(OMUL, nl.Type) 2822 2823 if !Smallintconst(nr) { 2824 var n3 Node 2825 Regalloc(&n3, nl.Type, nil) 2826 Cgen(nr, &n3) 2827 Thearch.Gins(a, &n3, &n2) 2828 Regfree(&n3) 2829 } else { 2830 Thearch.Gins(a, nr, &n2) 2831 } 2832 Thearch.Gins(Thearch.Optoas(OSUB, nl.Type), &n2, &n1) 2833 Thearch.Gmove(&n1, res) 2834 Regfree(&n1) 2835 Regfree(&n2) 2836 } 2837 2838 func Fixlargeoffset(n *Node) { 2839 if n == nil { 2840 return 2841 } 2842 if n.Op != OINDREG { 2843 return 2844 } 2845 if n.Reg == int16(Thearch.REGSP) { // stack offset cannot be large 2846 return 2847 } 2848 if n.Xoffset != int64(int32(n.Xoffset)) { 2849 // offset too large, add to register instead. 2850 a := *n 2851 2852 a.Op = OREGISTER 2853 a.Type = Types[Tptr] 2854 a.Xoffset = 0 2855 Cgen_checknil(&a) 2856 Thearch.Ginscon(Thearch.Optoas(OADD, Types[Tptr]), n.Xoffset, &a) 2857 n.Xoffset = 0 2858 } 2859 } 2860 2861 func cgen_append(n, res *Node) { 2862 if Debug['g'] != 0 { 2863 Dump("cgen_append-n", n) 2864 Dump("cgen_append-res", res) 2865 } 2866 for _, n1 := range n.List.Slice() { 2867 if n1.Ullman >= UINF { 2868 Fatalf("append with function call arguments") 2869 } 2870 } 2871 2872 // res = append(src, x, y, z) 2873 // 2874 // If res and src are the same, we can avoid writing to base and cap 2875 // unless we grow the underlying array. 
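// For example:
//
//	s = append(s, x)   // res == src: if the result still fits in cap(s),
//	                   // only the length word needs to be rewritten
//	t = append(s, x)   // res != src: base, len and cap of t must all be
//	                   // stored (the full update below)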
2876 needFullUpdate := !samesafeexpr(res, n.List.First()) 2877 2878 // Copy src triple into base, len, cap. 2879 base := temp(Types[Tptr]) 2880 len := temp(Types[TUINT]) 2881 cap := temp(Types[TUINT]) 2882 2883 var src Node 2884 Igen(n.List.First(), &src, nil) 2885 src.Type = Types[Tptr] 2886 Thearch.Gmove(&src, base) 2887 src.Type = Types[TUINT] 2888 src.Xoffset += int64(Widthptr) 2889 Thearch.Gmove(&src, len) 2890 src.Xoffset += int64(Widthptr) 2891 Thearch.Gmove(&src, cap) 2892 2893 // if len+argc <= cap goto L1 2894 var rlen Node 2895 Regalloc(&rlen, Types[TUINT], nil) 2896 Thearch.Gmove(len, &rlen) 2897 Thearch.Ginscon(Thearch.Optoas(OADD, Types[TUINT]), int64(n.List.Len()-1), &rlen) 2898 p := Thearch.Ginscmp(OLE, Types[TUINT], &rlen, cap, +1) 2899 // Note: rlen and src are Regrealloc'ed below at the target of the 2900 // branch we just emitted; do not reuse these Go variables for 2901 // other purposes. They need to still describe the same things 2902 // below that they describe right here. 2903 Regfree(&src) 2904 2905 // base, len, cap = growslice(type, base, len, cap, newlen) 2906 var arg Node 2907 arg.Op = OINDREG 2908 arg.Reg = int16(Thearch.REGSP) 2909 arg.Addable = true 2910 arg.Xoffset = Ctxt.FixedFrameSize() 2911 arg.Type = Ptrto(Types[TUINT8]) 2912 Cgen(typename(res.Type.Elem()), &arg) 2913 arg.Xoffset += int64(Widthptr) 2914 2915 arg.Type = Types[Tptr] 2916 Cgen(base, &arg) 2917 arg.Xoffset += int64(Widthptr) 2918 2919 arg.Type = Types[TUINT] 2920 Cgen(len, &arg) 2921 arg.Xoffset += int64(Widthptr) 2922 2923 arg.Type = Types[TUINT] 2924 Cgen(cap, &arg) 2925 arg.Xoffset += int64(Widthptr) 2926 2927 arg.Type = Types[TUINT] 2928 Cgen(&rlen, &arg) 2929 arg.Xoffset += int64(Widthptr) 2930 Regfree(&rlen) 2931 2932 fn := syslook("growslice") 2933 fn = substArgTypes(fn, res.Type.Elem(), res.Type.Elem()) 2934 Ginscall(fn, 0) 2935 2936 if Widthptr == 4 && Widthreg == 8 { 2937 arg.Xoffset += 4 2938 } 2939 2940 arg.Type = Types[Tptr] 2941 Cgen(&arg, base) 2942 arg.Xoffset += int64(Widthptr) 2943 2944 arg.Type = Types[TUINT] 2945 Cgen(&arg, len) 2946 arg.Xoffset += int64(Widthptr) 2947 2948 arg.Type = Types[TUINT] 2949 Cgen(&arg, cap) 2950 2951 // Update res with base, len+argc, cap. 2952 if needFullUpdate { 2953 if Debug_append > 0 { 2954 Warn("append: full update") 2955 } 2956 Patch(p, Pc) 2957 } 2958 if res.Op == ONAME { 2959 Gvardef(res) 2960 } 2961 var dst, r1 Node 2962 Igen(res, &dst, nil) 2963 dst.Type = Types[TUINT] 2964 dst.Xoffset += int64(Widthptr) 2965 Regalloc(&r1, Types[TUINT], nil) 2966 Thearch.Gmove(len, &r1) 2967 Thearch.Ginscon(Thearch.Optoas(OADD, Types[TUINT]), int64(n.List.Len()-1), &r1) 2968 Thearch.Gmove(&r1, &dst) 2969 Regfree(&r1) 2970 dst.Xoffset += int64(Widthptr) 2971 Thearch.Gmove(cap, &dst) 2972 dst.Type = Types[Tptr] 2973 dst.Xoffset -= 2 * int64(Widthptr) 2974 cgen_wb(base, &dst, needwritebarrier(&dst, base)) 2975 Regfree(&dst) 2976 2977 if !needFullUpdate { 2978 if Debug_append > 0 { 2979 Warn("append: len-only update") 2980 } 2981 // goto L2; 2982 // L1: 2983 // update len only 2984 // L2: 2985 q := Gbranch(obj.AJMP, nil, 0) 2986 Patch(p, Pc) 2987 // At the goto above, src refers to cap and rlen holds the new len 2988 if src.Op == OREGISTER || src.Op == OINDREG { 2989 Regrealloc(&src) 2990 } 2991 Regrealloc(&rlen) 2992 src.Xoffset -= int64(Widthptr) 2993 Thearch.Gmove(&rlen, &src) 2994 Regfree(&src) 2995 Regfree(&rlen) 2996 Patch(q, Pc) 2997 } 2998 2999 // Copy data into place. 
3000 // Could do write barrier check around entire copy instead of each element. 3001 // Could avoid reloading registers on each iteration if we know the cgen_wb 3002 // is not going to use a write barrier. 3003 i := 0 3004 var r2 Node 3005 for _, n2 := range n.List.Slice()[1:] { 3006 Regalloc(&r1, Types[Tptr], nil) 3007 Thearch.Gmove(base, &r1) 3008 Regalloc(&r2, Types[TUINT], nil) 3009 Thearch.Gmove(len, &r2) 3010 if i > 0 { 3011 Thearch.Gins(Thearch.Optoas(OADD, Types[TUINT]), Nodintconst(int64(i)), &r2) 3012 } 3013 w := res.Type.Elem().Width 3014 if Thearch.AddIndex != nil && Thearch.AddIndex(&r2, w, &r1) { 3015 // r1 updated by back end 3016 } else if w == 1 { 3017 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &r2, &r1) 3018 } else { 3019 Thearch.Ginscon(Thearch.Optoas(OMUL, Types[TUINT]), w, &r2) 3020 Thearch.Gins(Thearch.Optoas(OADD, Types[Tptr]), &r2, &r1) 3021 } 3022 Regfree(&r2) 3023 3024 r1.Op = OINDREG 3025 r1.Type = res.Type.Elem() 3026 cgen_wb(n2, &r1, needwritebarrier(&r1, n2)) 3027 Regfree(&r1) 3028 i++ 3029 } 3030 } 3031 3032 // Generate res = n, where n is x[i:j] or x[i:j:k]. 3033 // If wb is true, need write barrier updating res's base pointer. 3034 // On systems with 32-bit ints, i, j, k are guaranteed to be 32-bit values. 3035 func cgen_slice(n, res *Node, wb bool) { 3036 if Debug['g'] != 0 { 3037 Dump("cgen_slice-n", n) 3038 Dump("cgen_slice-res", res) 3039 } 3040 3041 needFullUpdate := !samesafeexpr(n.Left, res) 3042 3043 // orderexpr has made sure that x is safe (but possibly expensive) 3044 // and i, j, k are cheap. On a system with registers (anything but 386) 3045 // we can evaluate x first and then know we have enough registers 3046 // for i, j, k as well. 3047 var x, xbase, xlen, xcap, i, j, k Node 3048 if n.Op != OSLICEARR && n.Op != OSLICE3ARR { 3049 Igen(n.Left, &x, nil) 3050 } 3051 3052 indexRegType := Types[TUINT] 3053 if Widthreg > Widthptr { // amd64p32 3054 indexRegType = Types[TUINT64] 3055 } 3056 3057 // On most systems, we use registers. 3058 // The 386 has basically no registers, so substitute functions 3059 // that can work with temporaries instead. 
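// The substitutes keep the working values in stack temporaries and bounce
// each arithmetic step through a scratch register, so that a step such as
// "j -= 4" comes out roughly as (register and stack offset illustrative):
//
//	MOVL j-8(SP), AX
//	SUBL $4, AX
//	MOVL AX, j-8(SP)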
3060 regalloc := Regalloc 3061 ginscon := Thearch.Ginscon 3062 gins := Thearch.Gins 3063 if Thearch.LinkArch.Family == sys.I386 { 3064 regalloc = func(n *Node, t *Type, reuse *Node) { 3065 Tempname(n, t) 3066 } 3067 ginscon = func(as obj.As, c int64, n *Node) { 3068 var n1 Node 3069 Regalloc(&n1, n.Type, n) 3070 Thearch.Gmove(n, &n1) 3071 Thearch.Ginscon(as, c, &n1) 3072 Thearch.Gmove(&n1, n) 3073 Regfree(&n1) 3074 } 3075 gins = func(as obj.As, f, t *Node) *obj.Prog { 3076 var n1 Node 3077 Regalloc(&n1, t.Type, t) 3078 Thearch.Gmove(t, &n1) 3079 Thearch.Gins(as, f, &n1) 3080 Thearch.Gmove(&n1, t) 3081 Regfree(&n1) 3082 return nil 3083 } 3084 } 3085 3086 panics := make([]*obj.Prog, 0, 6) // 3 loads + 3 checks 3087 3088 loadlen := func() { 3089 if xlen.Op != 0 { 3090 return 3091 } 3092 if n.Op == OSLICEARR || n.Op == OSLICE3ARR { 3093 Nodconst(&xlen, indexRegType, n.Left.Type.Elem().NumElem()) 3094 return 3095 } 3096 if n.Op == OSLICESTR && Isconst(n.Left, CTSTR) { 3097 Nodconst(&xlen, indexRegType, int64(len(n.Left.Val().U.(string)))) 3098 return 3099 } 3100 regalloc(&xlen, indexRegType, nil) 3101 x.Xoffset += int64(Widthptr) 3102 x.Type = Types[TUINT] 3103 Thearch.Gmove(&x, &xlen) 3104 x.Xoffset -= int64(Widthptr) 3105 } 3106 3107 loadcap := func() { 3108 if xcap.Op != 0 { 3109 return 3110 } 3111 if n.Op == OSLICEARR || n.Op == OSLICE3ARR || n.Op == OSLICESTR { 3112 loadlen() 3113 xcap = xlen 3114 if xcap.Op == OREGISTER { 3115 Regrealloc(&xcap) 3116 } 3117 return 3118 } 3119 regalloc(&xcap, indexRegType, nil) 3120 x.Xoffset += 2 * int64(Widthptr) 3121 x.Type = Types[TUINT] 3122 Thearch.Gmove(&x, &xcap) 3123 x.Xoffset -= 2 * int64(Widthptr) 3124 } 3125 3126 x1, x2, x3 := n.SliceBounds() // unevaluated index arguments 3127 3128 // load computes src into targ, but if src refers to the len or cap of n.Left, 3129 // load copies those from xlen, xcap, loading xlen if needed. 3130 // If targ.Op == OREGISTER on return, it must be Regfreed, 3131 // but it should not be modified without first checking whether it is 3132 // xlen or xcap's register. 3133 load := func(src, targ *Node) { 3134 if src == nil { 3135 return 3136 } 3137 switch src.Op { 3138 case OLITERAL: 3139 *targ = *src 3140 return 3141 case OLEN: 3142 // NOTE(rsc): This doesn't actually trigger, because order.go 3143 // has pulled all the len and cap calls into separate assignments 3144 // to temporaries. There are tests in test/sliceopt.go that could 3145 // be enabled if this is fixed. 3146 if samesafeexpr(n.Left, src.Left) { 3147 if Debug_slice > 0 { 3148 Warn("slice: reuse len") 3149 } 3150 loadlen() 3151 *targ = xlen 3152 if targ.Op == OREGISTER { 3153 Regrealloc(targ) 3154 } 3155 return 3156 } 3157 case OCAP: 3158 // NOTE(rsc): This doesn't actually trigger; see note in case OLEN above. 
3159 if samesafeexpr(n.Left, src.Left) { 3160 if Debug_slice > 0 { 3161 Warn("slice: reuse cap") 3162 } 3163 loadcap() 3164 *targ = xcap 3165 if targ.Op == OREGISTER { 3166 Regrealloc(targ) 3167 } 3168 return 3169 } 3170 } 3171 if i.Op != 0 && samesafeexpr(x1, src) { 3172 if Debug_slice > 0 { 3173 Warn("slice: reuse 1st index") 3174 } 3175 *targ = i 3176 if targ.Op == OREGISTER { 3177 Regrealloc(targ) 3178 } 3179 return 3180 } 3181 if j.Op != 0 && samesafeexpr(x2, src) { 3182 if Debug_slice > 0 { 3183 Warn("slice: reuse 2nd index") 3184 } 3185 *targ = j 3186 if targ.Op == OREGISTER { 3187 Regrealloc(targ) 3188 } 3189 return 3190 } 3191 if Thearch.Cgenindex != nil { 3192 regalloc(targ, indexRegType, nil) 3193 p := Thearch.Cgenindex(src, targ, false) 3194 if p != nil { 3195 panics = append(panics, p) 3196 } 3197 } else if Thearch.Igenindex != nil { 3198 p := Thearch.Igenindex(src, targ, false) 3199 if p != nil { 3200 panics = append(panics, p) 3201 } 3202 } else { 3203 regalloc(targ, indexRegType, nil) 3204 var tmp Node 3205 Cgenr(src, &tmp, targ) 3206 Thearch.Gmove(&tmp, targ) 3207 Regfree(&tmp) 3208 } 3209 } 3210 3211 load(x1, &i) 3212 load(x2, &j) 3213 load(x3, &k) 3214 3215 // i defaults to 0. 3216 if i.Op == 0 { 3217 Nodconst(&i, indexRegType, 0) 3218 } 3219 3220 // j defaults to len(x) 3221 if j.Op == 0 { 3222 loadlen() 3223 j = xlen 3224 if j.Op == OREGISTER { 3225 Regrealloc(&j) 3226 } 3227 } 3228 3229 // k defaults to cap(x) 3230 // Only need to load it if we're recalculating cap or doing a full update. 3231 if k.Op == 0 && n.Op != OSLICESTR && (!iszero(&i) || needFullUpdate) { 3232 loadcap() 3233 k = xcap 3234 if k.Op == OREGISTER { 3235 Regrealloc(&k) 3236 } 3237 } 3238 3239 // Check constant indexes for negative values, and against constant length if known. 3240 // The func obvious below checks for out-of-order constant indexes. 3241 var bound int64 = -1 3242 if n.Op == OSLICEARR || n.Op == OSLICE3ARR { 3243 bound = n.Left.Type.Elem().NumElem() 3244 } else if n.Op == OSLICESTR && Isconst(n.Left, CTSTR) { 3245 bound = int64(len(n.Left.Val().U.(string))) 3246 } 3247 if Isconst(&i, CTINT) { 3248 if i.Val().U.(*Mpint).CmpInt64(0) < 0 || bound >= 0 && i.Val().U.(*Mpint).CmpInt64(bound) > 0 { 3249 Yyerror("slice index out of bounds") 3250 } 3251 } 3252 if Isconst(&j, CTINT) { 3253 if j.Val().U.(*Mpint).CmpInt64(0) < 0 || bound >= 0 && j.Val().U.(*Mpint).CmpInt64(bound) > 0 { 3254 Yyerror("slice index out of bounds") 3255 } 3256 } 3257 if Isconst(&k, CTINT) { 3258 if k.Val().U.(*Mpint).CmpInt64(0) < 0 || bound >= 0 && k.Val().U.(*Mpint).CmpInt64(bound) > 0 { 3259 Yyerror("slice index out of bounds") 3260 } 3261 } 3262 3263 // same reports whether n1 and n2 are the same register or constant. 3264 same := func(n1, n2 *Node) bool { 3265 return n1.Op == OREGISTER && n2.Op == OREGISTER && n1.Reg == n2.Reg || 3266 n1.Op == ONAME && n2.Op == ONAME && n1.Orig == n2.Orig && n1.Type == n2.Type && n1.Xoffset == n2.Xoffset || 3267 n1.Op == OLITERAL && n2.Op == OLITERAL && n1.Val().U.(*Mpint).Cmp(n2.Val().U.(*Mpint)) == 0 3268 } 3269 3270 // obvious reports whether n1 <= n2 is obviously true, 3271 // and it calls Yyerror if n1 <= n2 is obviously false. 
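// For instance, for the checks emitted below:
//
//	_ = x[i:]      // j, k default to len(x), cap(x): the j <= k and
//	               // k <= cap(x) comparisons vanish, only i is checked
//	_ = x[3:n:2]   // 3 <= 2 is obviously false: reported at compile time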
3272 obvious := func(n1, n2 *Node) bool { 3273 if Debug['B'] != 0 { // -B disables bounds checks 3274 return true 3275 } 3276 if same(n1, n2) { 3277 return true // n1 == n2 3278 } 3279 if iszero(n1) { 3280 return true // using unsigned compare, so 0 <= n2 always true 3281 } 3282 if xlen.Op != 0 && same(n1, &xlen) && xcap.Op != 0 && same(n2, &xcap) { 3283 return true // len(x) <= cap(x) always true 3284 } 3285 if Isconst(n1, CTINT) && Isconst(n2, CTINT) { 3286 if n1.Val().U.(*Mpint).Cmp(n2.Val().U.(*Mpint)) <= 0 { 3287 return true // n1, n2 constants such that n1 <= n2 3288 } 3289 Yyerror("slice index out of bounds") 3290 return true 3291 } 3292 return false 3293 } 3294 3295 compare := func(n1, n2 *Node) { 3296 // n1 might be a 64-bit constant, even on 32-bit architectures, 3297 // but it will be represented in 32 bits. 3298 if Ctxt.Arch.RegSize == 4 && Is64(n1.Type) { 3299 if n1.Val().U.(*Mpint).CmpInt64(1<<31) >= 0 { 3300 Fatalf("missed slice out of bounds check") 3301 } 3302 var tmp Node 3303 Nodconst(&tmp, indexRegType, n1.Int64()) 3304 n1 = &tmp 3305 } 3306 p := Thearch.Ginscmp(OGT, indexRegType, n1, n2, -1) 3307 panics = append(panics, p) 3308 } 3309 3310 loadcap() 3311 max := &xcap 3312 if k.Op != 0 && (n.Op == OSLICE3 || n.Op == OSLICE3ARR) { 3313 if obvious(&k, max) { 3314 if Debug_slice > 0 { 3315 Warn("slice: omit check for 3rd index") 3316 } 3317 } else { 3318 compare(&k, max) 3319 } 3320 max = &k 3321 } 3322 if j.Op != 0 { 3323 if obvious(&j, max) { 3324 if Debug_slice > 0 { 3325 Warn("slice: omit check for 2nd index") 3326 } 3327 } else { 3328 compare(&j, max) 3329 } 3330 max = &j 3331 } 3332 if i.Op != 0 { 3333 if obvious(&i, max) { 3334 if Debug_slice > 0 { 3335 Warn("slice: omit check for 1st index") 3336 } 3337 } else { 3338 compare(&i, max) 3339 } 3340 max = &i 3341 } 3342 if k.Op != 0 && i.Op != 0 { 3343 obvious(&i, &k) // emit compile-time error for x[3:n:2] 3344 } 3345 3346 if len(panics) > 0 { 3347 p := Gbranch(obj.AJMP, nil, 0) 3348 for _, q := range panics { 3349 Patch(q, Pc) 3350 } 3351 Ginscall(panicslice, -1) 3352 Patch(p, Pc) 3353 } 3354 3355 // Checks are done. 3356 // Compute new len as j-i, cap as k-i. 3357 // If i and j are same register, len is constant 0. 3358 // If i and k are same register, cap is constant 0. 3359 // If j and k are same register, len and cap are same. 3360 3361 // Done with xlen and xcap. 3362 // Now safe to modify j and k even if they alias xlen, xcap. 3363 if xlen.Op == OREGISTER { 3364 Regfree(&xlen) 3365 } 3366 if xcap.Op == OREGISTER { 3367 Regfree(&xcap) 3368 } 3369 3370 // are j and k the same value? 
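// (For a concrete case, s[2:7:8] reaches this point with i=2, j=7, k=8 and
// the subtractions below leave len=5, cap=6. And for a form like
//
//	_ = x[i:m:m]   // 2nd and 3rd index load as the same value
//
// the new cap is simply taken from the new len instead of repeating the
// k -= i subtraction.)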
3371 sameJK := same(&j, &k)
3372
3373 if i.Op != 0 {
3374 // j -= i
3375 if same(&i, &j) {
3376 if Debug_slice > 0 {
3377 Warn("slice: result len == 0")
3378 }
3379 if j.Op == OREGISTER {
3380 Regfree(&j)
3381 }
3382 Nodconst(&j, indexRegType, 0)
3383 } else {
3384 switch j.Op {
3385 case OLITERAL:
3386 if Isconst(&i, CTINT) {
3387 Nodconst(&j, indexRegType, j.Int64()-i.Int64())
3388 if Debug_slice > 0 {
3389 Warn("slice: result len == %d", j.Int64())
3390 }
3391 break
3392 }
3393 fallthrough
3394 case ONAME:
3395 if !istemp(&j) {
3396 var r Node
3397 regalloc(&r, indexRegType, nil)
3398 Thearch.Gmove(&j, &r)
3399 j = r
3400 }
3401 fallthrough
3402 case OREGISTER:
3403 if i.Op == OLITERAL {
3404 v := i.Int64()
3405 if v != 0 {
3406 ginscon(Thearch.Optoas(OSUB, indexRegType), v, &j)
3407 }
3408 } else {
3409 gins(Thearch.Optoas(OSUB, indexRegType), &i, &j)
3410 }
3411 }
3412 }
3413
3414 // k -= i if k is different from j and cap is needed.
3415 // (The modifications to j above cannot affect i: if j and i were aliased,
3416 // we replace j with a constant 0 instead of doing a subtraction,
3417 // leaving i unmodified.)
3418 if k.Op == 0 {
3419 if Debug_slice > 0 && n.Op != OSLICESTR {
3420 Warn("slice: result cap not computed")
3421 }
3422 // no need
3423 } else if same(&i, &k) {
3424 if k.Op == OREGISTER {
3425 Regfree(&k)
3426 }
3427 Nodconst(&k, indexRegType, 0)
3428 if Debug_slice > 0 {
3429 Warn("slice: result cap == 0")
3430 }
3431 } else if sameJK {
3432 if Debug_slice > 0 {
3433 Warn("slice: result cap == result len")
3434 }
3435 // k and j were the same value; make k-i the same as j-i.
3436 if k.Op == OREGISTER {
3437 Regfree(&k)
3438 }
3439 k = j
3440 if k.Op == OREGISTER {
3441 Regrealloc(&k)
3442 }
3443 } else {
3444 switch k.Op {
3445 case OLITERAL:
3446 if Isconst(&i, CTINT) {
3447 Nodconst(&k, indexRegType, k.Int64()-i.Int64())
3448 if Debug_slice > 0 {
3449 Warn("slice: result cap == %d", k.Int64())
3450 }
3451 break
3452 }
3453 fallthrough
3454 case ONAME:
3455 if !istemp(&k) {
3456 var r Node
3457 regalloc(&r, indexRegType, nil)
3458 Thearch.Gmove(&k, &r)
3459 k = r
3460 }
3461 fallthrough
3462 case OREGISTER:
3463 if same(&i, &k) {
3464 Regfree(&k)
3465 Nodconst(&k, indexRegType, 0)
3466 if Debug_slice > 0 {
3467 Warn("slice: result cap == 0")
3468 }
3469 } else if i.Op == OLITERAL {
3470 v := i.Int64()
3471 if v != 0 {
3472 ginscon(Thearch.Optoas(OSUB, indexRegType), v, &k)
3473 }
3474 } else {
3475 gins(Thearch.Optoas(OSUB, indexRegType), &i, &k)
3476 }
3477 }
3478 }
3479 }
3480
3481 adjustBase := true
3482 if i.Op == 0 || iszero(&i) {
3483 if Debug_slice > 0 {
3484 Warn("slice: skip base adjustment for 1st index 0")
3485 }
3486 adjustBase = false
3487 } else if k.Op != 0 && iszero(&k) || k.Op == 0 && iszero(&j) {
3488 if Debug_slice > 0 {
3489 if n.Op == OSLICESTR {
3490 Warn("slice: skip base adjustment for string len == 0")
3491 } else {
3492 Warn("slice: skip base adjustment for cap == 0")
3493 }
3494 }
3495 adjustBase = false
3496 }
3497
3498 if !adjustBase && !needFullUpdate {
3499 if Debug_slice > 0 {
3500 if k.Op != 0 {
3501 Warn("slice: len/cap-only update")
3502 } else {
3503 Warn("slice: len-only update")
3504 }
3505 }
3506 if i.Op == OREGISTER {
3507 Regfree(&i)
3508 }
3509 // Write len (and cap if needed) back to x.
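// This is the in-place fast path: for something like
//
//	s = s[:n]   // i is 0 and the result is written back to x itself
//
// only the len word (and, for a 3-index slice, the cap word) of the
// existing header is stored; the base pointer is not touched.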
3510 x.Xoffset += int64(Widthptr) 3511 x.Type = Types[TUINT] 3512 Thearch.Gmove(&j, &x) 3513 x.Xoffset -= int64(Widthptr) 3514 if k.Op != 0 { 3515 x.Xoffset += 2 * int64(Widthptr) 3516 x.Type = Types[TUINT] 3517 Thearch.Gmove(&k, &x) 3518 x.Xoffset -= 2 * int64(Widthptr) 3519 } 3520 Regfree(&x) 3521 } else { 3522 // Compute new base. May smash i. 3523 if n.Op == OSLICEARR || n.Op == OSLICE3ARR { 3524 Cgenr(n.Left, &xbase, nil) 3525 Cgen_checknil(&xbase) 3526 } else { 3527 var ptr *Type 3528 if n.Op == OSLICESTR { 3529 ptr = ptrToUint8 3530 } else { 3531 ptr = Ptrto(n.Type.Elem()) 3532 } 3533 regalloc(&xbase, ptr, nil) 3534 x.Type = xbase.Type 3535 Thearch.Gmove(&x, &xbase) 3536 Regfree(&x) 3537 } 3538 if i.Op != 0 && adjustBase { 3539 // Branch around the base adjustment if the resulting cap will be 0. 3540 var p *obj.Prog 3541 size := &k 3542 if k.Op == 0 { 3543 size = &j 3544 } 3545 if Isconst(size, CTINT) { 3546 // zero was checked above, must be non-zero. 3547 } else { 3548 var tmp Node 3549 Nodconst(&tmp, indexRegType, 0) 3550 p = Thearch.Ginscmp(OEQ, indexRegType, size, &tmp, -1) 3551 } 3552 var w int64 3553 if n.Op == OSLICESTR { 3554 w = 1 // res is string, elem size is 1 (byte) 3555 } else { 3556 w = res.Type.Elem().Width // res is []T, elem size is T.width 3557 } 3558 if Isconst(&i, CTINT) { 3559 ginscon(Thearch.Optoas(OADD, xbase.Type), i.Int64()*w, &xbase) 3560 } else if Thearch.AddIndex != nil && Thearch.AddIndex(&i, w, &xbase) { 3561 // done by back end 3562 } else if w == 1 { 3563 gins(Thearch.Optoas(OADD, xbase.Type), &i, &xbase) 3564 } else { 3565 if i.Op == ONAME && !istemp(&i) { 3566 var tmp Node 3567 Tempname(&tmp, i.Type) 3568 Thearch.Gmove(&i, &tmp) 3569 i = tmp 3570 } 3571 ginscon(Thearch.Optoas(OMUL, i.Type), w, &i) 3572 gins(Thearch.Optoas(OADD, xbase.Type), &i, &xbase) 3573 } 3574 if p != nil { 3575 Patch(p, Pc) 3576 } 3577 } 3578 if i.Op == OREGISTER { 3579 Regfree(&i) 3580 } 3581 3582 // Write len, cap, base to result. 3583 if res.Op == ONAME { 3584 Gvardef(res) 3585 } 3586 Igen(res, &x, nil) 3587 x.Xoffset += int64(Widthptr) 3588 x.Type = Types[TUINT] 3589 Thearch.Gmove(&j, &x) 3590 x.Xoffset -= int64(Widthptr) 3591 if k.Op != 0 { 3592 x.Xoffset += 2 * int64(Widthptr) 3593 Thearch.Gmove(&k, &x) 3594 x.Xoffset -= 2 * int64(Widthptr) 3595 } 3596 x.Type = xbase.Type 3597 cgen_wb(&xbase, &x, wb) 3598 Regfree(&xbase) 3599 Regfree(&x) 3600 } 3601 3602 if j.Op == OREGISTER { 3603 Regfree(&j) 3604 } 3605 if k.Op == OREGISTER { 3606 Regfree(&k) 3607 } 3608 }
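// A source-level model of the header arithmetic cgen_slice produces for
// s[i:j:k], as a standalone sketch (sliceHeader and slice3 are illustrative
// names, not part of this package):
//
//	type sliceHeader struct {
//		base     uintptr
//		len, cap int
//	}
//
//	func slice3(s sliceHeader, elemSize uintptr, i, j, k int) sliceHeader {
//		// Same comparison chain as above: k <= cap, j <= k, i <= j,
//		// all unsigned so that negative indexes fail as well.
//		if uint(k) > uint(s.cap) || uint(j) > uint(k) || uint(i) > uint(j) {
//			panic("slice bounds out of range")
//		}
//		base := s.base
//		// Base adjustment is skipped when i == 0 or the new cap is 0,
//		// mirroring adjustBase above.
//		if i != 0 && k-i != 0 {
//			base += uintptr(i) * elemSize
//		}
//		return sliceHeader{base, j - i, k - i}
//	}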