github.com/riscv/riscv-go@v0.0.0-20200123204226-124ebd6fcc8e/src/cmd/compile/internal/gc/walk.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const (
	tmpstringbufsize = 32
)

func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used {
			ln.Name.Defn.Left.Used = true
		}
	}

	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class != PAUTO && ln.Class != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Used {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Used {
				continue
			}
			lineno = defn.Left.Pos
			yyerror("%v declared and not used", ln.Sym)
			defn.Left.Used = true // suppress repeats
		} else {
			lineno = ln.Pos
			yyerror("%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adjustargs adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	var arg *Node
	var lhs *Node

	callfunc := n.Left
	for _, arg = range callfunc.List.Slice() {
		if arg.Op != OAS {
			yyerror("call arg not assignment")
		}
		lhs = arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			yyerror("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	yyerror("call arg assign not indreg(SP)");
		lhs.Xoffset += int64(adjust)
	}
}

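// Editor's illustrative note (not part of the original source): a sketch of
// what adjustargs accomplishes, assuming a 64-bit target (Widthptr == 8).
// For a statement such as
//
//	defer f(1, 2)
//
// the already-walked call stores its arguments at SP offsets 0 and 8.
// After adjustargs(n, 2*Widthptr), those OINDREGSP stores are shifted to
// offsets 16 and 24, leaving the first two pointer-sized words of the
// outgoing-argument area free for the size and fn arguments that the
// runtime's deferproc/newproc entry points read ahead of the user
// arguments (see the "make room for size & fn arguments" calls in the
// ODEFER and OPROC cases of walkstmt below).
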
// The result of walkstmt MUST be assigned back to n, e.g.
//	n.Left = walkstmt(n.Left)
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}
	if n.IsStatic { // don't walk, generated by anylit.
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		break

	case ODCL:
		v := n.Left
		if v.Class == PAUTOHEAP {
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
			nn.Colas = true
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		hasdefer = true
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR:
		if n.Left != nil {
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			var cl Class
			for _, ln := range Curfn.Func.Dcl {
				cl = ln.Class
				if cl == PAUTO || cl == PAUTOHEAP {
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			ls := n.List.Slice()
			for i, n := range ls {
				ls[i] = applywritebarrier(n)
			}
			break
		}

		ll := ascompatte(n.Op, nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		walkrange(n)

	case OXFALL:
		yyerror("fallthrough statement out of place")
		n.Op = OFALL
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

func isSmallMakeSlice(n *Node) bool {
	if n.Op != OMAKESLICE {
		return false
	}
	l := n.Left
	r := n.Right
	if r == nil {
		r = l
	}
	t := n.Type

	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types of expressions are calculated.
// compile-time constants are evaluated.
// complex side effects like statements are appended to init
func walkexprlist(s []*Node, init *Nodes) {
	for i := range s {
		s[i] = walkexpr(s[i], init)
	}
}

func walkexprlistsafe(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = safeexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

func walkexprlistcheap(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = cheapexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
func convFuncName(from, to *Type) string {
	tkind := to.iet()
	switch from.iet() {
	case 'I':
		switch tkind {
		case 'I':
			return "convI2I"
		}
	case 'T':
		switch tkind {
		case 'E':
			return "convT2E"
		case 'I':
			return "convT2I"
		}
	}
	Fatalf("unknown conv func %c2%c", from.iet(), to.iet())
	panic("unreachable")
}

// The result of walkexpr MUST be assigned back to n, e.g.
437 // n.Left = walkexpr(n.Left, init) 438 func walkexpr(n *Node, init *Nodes) *Node { 439 if n == nil { 440 return n 441 } 442 443 if init == &n.Ninit { 444 // not okay to use n->ninit when walking n, 445 // because we might replace n with some other node 446 // and would lose the init list. 447 Fatalf("walkexpr init == &n->ninit") 448 } 449 450 if n.Ninit.Len() != 0 { 451 walkstmtlist(n.Ninit.Slice()) 452 init.AppendNodes(&n.Ninit) 453 } 454 455 lno := setlineno(n) 456 457 if Debug['w'] > 1 { 458 Dump("walk-before", n) 459 } 460 461 if n.Typecheck != 1 { 462 Fatalf("missed typecheck: %+v", n) 463 } 464 465 if n.Op == ONAME && n.Class == PAUTOHEAP { 466 nn := nod(OIND, n.Name.Param.Heapaddr, nil) 467 nn = typecheck(nn, Erv) 468 nn = walkexpr(nn, init) 469 nn.Left.NonNil = true 470 return nn 471 } 472 473 opswitch: 474 switch n.Op { 475 default: 476 Dump("walk", n) 477 Fatalf("walkexpr: switch 1 unknown op %+S", n) 478 479 case OTYPE, 480 ONONAME, 481 OINDREGSP, 482 OEMPTY, 483 OGETG: 484 485 case ONOT, 486 OMINUS, 487 OPLUS, 488 OCOM, 489 OREAL, 490 OIMAG, 491 ODOTMETH, 492 ODOTINTER: 493 n.Left = walkexpr(n.Left, init) 494 495 case OIND: 496 n.Left = walkexpr(n.Left, init) 497 498 case ODOT: 499 usefield(n) 500 n.Left = walkexpr(n.Left, init) 501 502 case ODOTPTR: 503 usefield(n) 504 if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 { 505 // No actual copy will be generated, so emit an explicit nil check. 506 n.Left = cheapexpr(n.Left, init) 507 508 checknil(n.Left, init) 509 } 510 511 n.Left = walkexpr(n.Left, init) 512 513 case OEFACE: 514 n.Left = walkexpr(n.Left, init) 515 n.Right = walkexpr(n.Right, init) 516 517 case OSPTR, OITAB, OIDATA: 518 n.Left = walkexpr(n.Left, init) 519 520 case OLEN, OCAP: 521 n.Left = walkexpr(n.Left, init) 522 523 // replace len(*[10]int) with 10. 524 // delayed until now to preserve side effects. 525 t := n.Left.Type 526 527 if t.IsPtr() { 528 t = t.Elem() 529 } 530 if t.IsArray() { 531 safeexpr(n.Left, init) 532 Nodconst(n, n.Type, t.NumElem()) 533 n.Typecheck = 1 534 } 535 536 case OLSH, ORSH: 537 n.Left = walkexpr(n.Left, init) 538 n.Right = walkexpr(n.Right, init) 539 t := n.Left.Type 540 n.Bounded = bounded(n.Right, 8*t.Width) 541 if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { 542 Warn("shift bounds check elided") 543 } 544 545 // Use results from call expression as arguments for complex. 546 case OAND, 547 OSUB, 548 OHMUL, 549 OLT, 550 OLE, 551 OGE, 552 OGT, 553 OADD, 554 OOR, 555 OXOR, 556 OCOMPLEX: 557 if n.Op == OCOMPLEX && n.Left == nil && n.Right == nil { 558 n.Left = n.List.First() 559 n.Right = n.List.Second() 560 } 561 562 n.Left = walkexpr(n.Left, init) 563 n.Right = walkexpr(n.Right, init) 564 565 case OEQ, ONE: 566 n.Left = walkexpr(n.Left, init) 567 n.Right = walkexpr(n.Right, init) 568 569 // Disable safemode while compiling this code: the code we 570 // generate internally can refer to unsafe.Pointer. 571 // In this case it can happen if we need to generate an == 572 // for a struct containing a reflect.Value, which itself has 573 // an unexported field of type unsafe.Pointer. 574 old_safemode := safemode 575 safemode = false 576 n = walkcompare(n, init) 577 safemode = old_safemode 578 579 case OANDAND, OOROR: 580 n.Left = walkexpr(n.Left, init) 581 582 // cannot put side effects from n.Right on init, 583 // because they cannot run before n.Left is checked. 584 // save elsewhere and store on the eventual n.Right. 
585 var ll Nodes 586 587 n.Right = walkexpr(n.Right, &ll) 588 n.Right = addinit(n.Right, ll.Slice()) 589 n = walkinrange(n, init) 590 591 case OPRINT, OPRINTN: 592 walkexprlist(n.List.Slice(), init) 593 n = walkprint(n, init) 594 595 case OPANIC: 596 n = mkcall("gopanic", nil, init, n.Left) 597 598 case ORECOVER: 599 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 600 601 case OLITERAL: 602 n.Addable = true 603 604 case OCLOSUREVAR, OCFUNC: 605 n.Addable = true 606 607 case ONAME: 608 n.Addable = true 609 610 case OCALLINTER: 611 usemethod(n) 612 t := n.Left.Type 613 if n.List.Len() != 0 && n.List.First().Op == OAS { 614 break 615 } 616 n.Left = walkexpr(n.Left, init) 617 walkexprlist(n.List.Slice(), init) 618 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 619 n.List.Set(reorder1(ll)) 620 621 case OCALLFUNC: 622 if n.Left.Op == OCLOSURE { 623 // Transform direct call of a closure to call of a normal function. 624 // transformclosure already did all preparation work. 625 626 // Prepend captured variables to argument list. 627 n.List.Prepend(n.Left.Func.Enter.Slice()...) 628 629 n.Left.Func.Enter.Set(nil) 630 631 // Replace OCLOSURE with ONAME/PFUNC. 632 n.Left = n.Left.Func.Closure.Func.Nname 633 634 // Update type of OCALLFUNC node. 635 // Output arguments had not changed, but their offsets could. 636 if n.Left.Type.Results().NumFields() == 1 { 637 n.Type = n.Left.Type.Results().Field(0).Type 638 } else { 639 n.Type = n.Left.Type.Results() 640 } 641 } 642 643 t := n.Left.Type 644 if n.List.Len() != 0 && n.List.First().Op == OAS { 645 break 646 } 647 648 n.Left = walkexpr(n.Left, init) 649 walkexprlist(n.List.Slice(), init) 650 651 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 652 n.List.Set(reorder1(ll)) 653 654 case OCALLMETH: 655 t := n.Left.Type 656 if n.List.Len() != 0 && n.List.First().Op == OAS { 657 break 658 } 659 n.Left = walkexpr(n.Left, init) 660 walkexprlist(n.List.Slice(), init) 661 ll := ascompatte(n.Op, n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 662 lr := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 663 ll = append(ll, lr...) 664 n.Left.Left = nil 665 ullmancalc(n.Left) 666 n.List.Set(reorder1(ll)) 667 668 case OAS: 669 init.AppendNodes(&n.Ninit) 670 671 n.Left = walkexpr(n.Left, init) 672 n.Left = safeexpr(n.Left, init) 673 674 if oaslit(n, init) { 675 break 676 } 677 678 if n.Right == nil { 679 // TODO(austin): Check all "implicit zeroing" 680 break 681 } 682 683 if !instrumenting && iszero(n.Right) && !needwritebarrier(n.Left, n.Right) { 684 break 685 } 686 687 switch n.Right.Op { 688 default: 689 n.Right = walkexpr(n.Right, init) 690 691 case ORECV: 692 // x = <-c; n.Left is x, n.Right.Left is c. 693 // orderstmt made sure x is addressable. 694 n.Right.Left = walkexpr(n.Right.Left, init) 695 696 n1 := nod(OADDR, n.Left, nil) 697 r := n.Right.Left // the channel 698 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1) 699 n = walkexpr(n, init) 700 break opswitch 701 702 case OAPPEND: 703 // x = append(...) 704 r := n.Right 705 if r.Type.Elem().NotInHeap { 706 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 707 } 708 if r.Isddd { 709 r = appendslice(r, init) // also works for append(slice, string). 710 } else { 711 r = walkappend(r, init, n) 712 } 713 n.Right = r 714 if r.Op == OAPPEND { 715 // Left in place for back end. 716 // Do not add a new write barrier. 717 break opswitch 718 } 719 // Otherwise, lowered for race detector. 
720 // Treat as ordinary assignment. 721 } 722 723 if n.Left != nil && n.Right != nil { 724 static := n.IsStatic 725 n = convas(n, init) 726 n.IsStatic = static 727 n = applywritebarrier(n) 728 } 729 730 case OAS2: 731 init.AppendNodes(&n.Ninit) 732 walkexprlistsafe(n.List.Slice(), init) 733 walkexprlistsafe(n.Rlist.Slice(), init) 734 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 735 ll = reorder3(ll) 736 for i, n := range ll { 737 ll[i] = applywritebarrier(n) 738 } 739 n = liststmt(ll) 740 741 // a,b,... = fn() 742 case OAS2FUNC: 743 init.AppendNodes(&n.Ninit) 744 745 r := n.Rlist.First() 746 walkexprlistsafe(n.List.Slice(), init) 747 r = walkexpr(r, init) 748 749 if isIntrinsicCall(r) { 750 n.Rlist.Set1(r) 751 break 752 } 753 init.Append(r) 754 755 ll := ascompatet(n.Op, n.List, r.Type) 756 for i, n := range ll { 757 ll[i] = applywritebarrier(n) 758 } 759 n = liststmt(ll) 760 761 // x, y = <-c 762 // orderstmt made sure x is addressable. 763 case OAS2RECV: 764 init.AppendNodes(&n.Ninit) 765 766 r := n.Rlist.First() 767 walkexprlistsafe(n.List.Slice(), init) 768 r.Left = walkexpr(r.Left, init) 769 var n1 *Node 770 if isblank(n.List.First()) { 771 n1 = nodnil() 772 } else { 773 n1 = nod(OADDR, n.List.First(), nil) 774 } 775 n1.Etype = 1 // addr does not escape 776 fn := chanfn("chanrecv2", 2, r.Left.Type) 777 ok := n.List.Second() 778 call := mkcall1(fn, ok.Type, init, typename(r.Left.Type), r.Left, n1) 779 n = nod(OAS, ok, call) 780 n = typecheck(n, Etop) 781 782 // a,b = m[i]; 783 case OAS2MAPR: 784 init.AppendNodes(&n.Ninit) 785 786 r := n.Rlist.First() 787 walkexprlistsafe(n.List.Slice(), init) 788 r.Left = walkexpr(r.Left, init) 789 r.Right = walkexpr(r.Right, init) 790 t := r.Left.Type 791 p := "" 792 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 793 switch algtype(t.Key()) { 794 case AMEM32: 795 p = "mapaccess2_fast32" 796 case AMEM64: 797 p = "mapaccess2_fast64" 798 case ASTRING: 799 p = "mapaccess2_faststr" 800 } 801 } 802 803 var key *Node 804 if p != "" { 805 // fast versions take key by value 806 key = r.Right 807 } else { 808 // standard version takes key by reference 809 // orderexpr made sure key is addressable. 810 key = nod(OADDR, r.Right, nil) 811 812 p = "mapaccess2" 813 } 814 815 // from: 816 // a,b = m[i] 817 // to: 818 // var,b = mapaccess2*(t, m, i) 819 // a = *var 820 a := n.List.First() 821 822 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 823 fn := mapfn(p, t) 824 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 825 } else { 826 fn := mapfn("mapaccess2_fat", t) 827 z := zeroaddr(w) 828 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 829 } 830 831 // mapaccess2* returns a typed bool, but due to spec changes, 832 // the boolean result of i.(T) is now untyped so we make it the 833 // same type as the variable on the lhs. 
834 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 835 r.Type.Field(1).Type = ok.Type 836 } 837 n.Rlist.Set1(r) 838 n.Op = OAS2FUNC 839 840 // don't generate a = *var if a is _ 841 if !isblank(a) { 842 var_ := temp(ptrto(t.Val())) 843 var_.Typecheck = 1 844 var_.NonNil = true // mapaccess always returns a non-nil pointer 845 n.List.SetIndex(0, var_) 846 n = walkexpr(n, init) 847 init.Append(n) 848 n = nod(OAS, a, nod(OIND, var_, nil)) 849 } 850 851 n = typecheck(n, Etop) 852 n = walkexpr(n, init) 853 854 case ODELETE: 855 init.AppendNodes(&n.Ninit) 856 map_ := n.List.First() 857 key := n.List.Second() 858 map_ = walkexpr(map_, init) 859 key = walkexpr(key, init) 860 861 // orderstmt made sure key is addressable. 862 key = nod(OADDR, key, nil) 863 864 t := map_.Type 865 n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key) 866 867 case OAS2DOTTYPE: 868 walkexprlistsafe(n.List.Slice(), init) 869 e := n.Rlist.First() // i.(T) 870 e.Left = walkexpr(e.Left, init) 871 872 case ODOTTYPE, ODOTTYPE2: 873 n.Left = walkexpr(n.Left, init) 874 875 case OCONVIFACE: 876 n.Left = walkexpr(n.Left, init) 877 878 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 879 if isdirectiface(n.Left.Type) { 880 var t *Node 881 if n.Type.IsEmptyInterface() { 882 t = typename(n.Left.Type) 883 } else { 884 t = itabname(n.Left.Type, n.Type) 885 } 886 l := nod(OEFACE, t, n.Left) 887 l.Type = n.Type 888 l.Typecheck = n.Typecheck 889 n = l 890 break 891 } 892 893 if staticbytes == nil { 894 staticbytes = newname(Pkglookup("staticbytes", Runtimepkg)) 895 staticbytes.Class = PEXTERN 896 staticbytes.Type = typArray(Types[TUINT8], 256) 897 zerobase = newname(Pkglookup("zerobase", Runtimepkg)) 898 zerobase.Class = PEXTERN 899 zerobase.Type = Types[TUINTPTR] 900 } 901 902 // Optimize convT2{E,I} for many cases in which T is not pointer-shaped, 903 // by using an existing addressable value identical to n.Left 904 // or creating one on the stack. 905 var value *Node 906 switch { 907 case n.Left.Type.Size() == 0: 908 // n.Left is zero-sized. Use zerobase. 909 value = zerobase 910 case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()): 911 // n.Left is a bool/byte. Use staticbytes[n.Left]. 912 value = nod(OINDEX, staticbytes, byteindex(n.Left)) 913 value.Bounded = true 914 case n.Left.Class == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly: 915 // n.Left is a readonly global; use it directly. 916 value = n.Left 917 case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024: 918 // n.Left does not escape. Use a stack temporary initialized to n.Left. 919 value = temp(n.Left.Type) 920 init.Append(typecheck(nod(OAS, value, n.Left), Etop)) 921 } 922 923 if value != nil { 924 // Value is identical to n.Left. 925 // Construct the interface directly: {type/itab, &value}. 926 var t *Node 927 if n.Type.IsEmptyInterface() { 928 t = typename(n.Left.Type) 929 } else { 930 t = itabname(n.Left.Type, n.Type) 931 } 932 l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv)) 933 l.Type = n.Type 934 l.Typecheck = n.Typecheck 935 n = l 936 break 937 } 938 939 // Implement interface to empty interface conversion. 940 // tmp = i.itab 941 // if tmp != nil { 942 // tmp = tmp.type 943 // } 944 // e = iface{tmp, i.data} 945 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 946 // Evaluate the input interface. 
947 c := temp(n.Left.Type) 948 init.Append(nod(OAS, c, n.Left)) 949 950 // Get the itab out of the interface. 951 tmp := temp(ptrto(Types[TUINT8])) 952 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 953 954 // Get the type out of the itab. 955 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 956 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 957 init.Append(nif) 958 959 // Build the result. 960 e := nod(OEFACE, tmp, ifaceData(c, ptrto(Types[TUINT8]))) 961 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 962 e.Typecheck = 1 963 n = e 964 break 965 } 966 967 var ll []*Node 968 if n.Type.IsEmptyInterface() { 969 if !n.Left.Type.IsInterface() { 970 ll = append(ll, typename(n.Left.Type)) 971 } 972 } else { 973 if n.Left.Type.IsInterface() { 974 ll = append(ll, typename(n.Type)) 975 } else { 976 ll = append(ll, itabname(n.Left.Type, n.Type)) 977 } 978 } 979 980 if n.Left.Type.IsInterface() { 981 ll = append(ll, n.Left) 982 } else { 983 // regular types are passed by reference to avoid C vararg calls 984 // orderexpr arranged for n.Left to be a temporary for all 985 // the conversions it could see. comparison of an interface 986 // with a non-interface, especially in a switch on interface value 987 // with non-interface cases, is not visible to orderstmt, so we 988 // have to fall back on allocating a temp here. 989 if islvalue(n.Left) { 990 ll = append(ll, nod(OADDR, n.Left, nil)) 991 } else { 992 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 993 } 994 dowidth(n.Left.Type) 995 } 996 997 fn := syslook(convFuncName(n.Left.Type, n.Type)) 998 fn = substArgTypes(fn, n.Left.Type, n.Type) 999 dowidth(fn.Type) 1000 n = nod(OCALL, fn, nil) 1001 n.List.Set(ll) 1002 n = typecheck(n, Erv) 1003 n = walkexpr(n, init) 1004 1005 case OCONV, OCONVNOP: 1006 if Thearch.LinkArch.Family == sys.ARM || Thearch.LinkArch.Family == sys.MIPS { 1007 if n.Left.Type.IsFloat() { 1008 if n.Type.Etype == TINT64 { 1009 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1010 break 1011 } 1012 1013 if n.Type.Etype == TUINT64 { 1014 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1015 break 1016 } 1017 } 1018 1019 if n.Type.IsFloat() { 1020 if n.Left.Type.Etype == TINT64 { 1021 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1022 break 1023 } 1024 1025 if n.Left.Type.Etype == TUINT64 { 1026 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1027 break 1028 } 1029 } 1030 } 1031 1032 if Thearch.LinkArch.Family == sys.I386 { 1033 if n.Left.Type.IsFloat() { 1034 if n.Type.Etype == TINT64 { 1035 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1036 break 1037 } 1038 1039 if n.Type.Etype == TUINT64 { 1040 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1041 break 1042 } 1043 if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR { 1044 n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1045 break 1046 } 1047 } 1048 if n.Type.IsFloat() { 1049 if n.Left.Type.Etype == TINT64 { 1050 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1051 break 1052 } 1053 1054 if n.Left.Type.Etype == TUINT64 { 1055 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1056 break 1057 } 1058 if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || 
n.Left.Type.Etype == TUINTPTR { 1059 n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type) 1060 break 1061 } 1062 } 1063 } 1064 1065 n.Left = walkexpr(n.Left, init) 1066 1067 case OANDNOT: 1068 n.Left = walkexpr(n.Left, init) 1069 n.Op = OAND 1070 n.Right = nod(OCOM, n.Right, nil) 1071 n.Right = typecheck(n.Right, Erv) 1072 n.Right = walkexpr(n.Right, init) 1073 1074 case OMUL: 1075 n.Left = walkexpr(n.Left, init) 1076 n.Right = walkexpr(n.Right, init) 1077 n = walkmul(n, init) 1078 1079 case ODIV, OMOD: 1080 n.Left = walkexpr(n.Left, init) 1081 n.Right = walkexpr(n.Right, init) 1082 1083 // rewrite complex div into function call. 1084 et := n.Left.Type.Etype 1085 1086 if isComplex[et] && n.Op == ODIV { 1087 t := n.Type 1088 n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128])) 1089 n = conv(n, t) 1090 break 1091 } 1092 1093 // Nothing to do for float divisions. 1094 if isFloat[et] { 1095 break 1096 } 1097 1098 // Try rewriting as shifts or magic multiplies. 1099 n = walkdiv(n, init) 1100 1101 // rewrite 64-bit div and mod into function calls 1102 // on 32-bit architectures. 1103 switch n.Op { 1104 case OMOD, ODIV: 1105 if Widthreg >= 8 || (et != TUINT64 && et != TINT64) { 1106 break opswitch 1107 } 1108 var fn string 1109 if et == TINT64 { 1110 fn = "int64" 1111 } else { 1112 fn = "uint64" 1113 } 1114 if n.Op == ODIV { 1115 fn += "div" 1116 } else { 1117 fn += "mod" 1118 } 1119 n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et])) 1120 } 1121 1122 case OINDEX: 1123 n.Left = walkexpr(n.Left, init) 1124 1125 // save the original node for bounds checking elision. 1126 // If it was a ODIV/OMOD walk might rewrite it. 1127 r := n.Right 1128 1129 n.Right = walkexpr(n.Right, init) 1130 1131 // if range of type cannot exceed static array bound, 1132 // disable bounds check. 1133 if n.Bounded { 1134 break 1135 } 1136 t := n.Left.Type 1137 if t != nil && t.IsPtr() { 1138 t = t.Elem() 1139 } 1140 if t.IsArray() { 1141 n.Bounded = bounded(r, t.NumElem()) 1142 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1143 Warn("index bounds check elided") 1144 } 1145 if smallintconst(n.Right) && !n.Bounded { 1146 yyerror("index out of bounds") 1147 } 1148 } else if Isconst(n.Left, CTSTR) { 1149 n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string)))) 1150 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1151 Warn("index bounds check elided") 1152 } 1153 if smallintconst(n.Right) && !n.Bounded { 1154 yyerror("index out of bounds") 1155 } 1156 } 1157 1158 if Isconst(n.Right, CTINT) { 1159 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1160 yyerror("index out of bounds") 1161 } 1162 } 1163 1164 case OINDEXMAP: 1165 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1166 n.Left = walkexpr(n.Left, init) 1167 n.Right = walkexpr(n.Right, init) 1168 map_ := n.Left 1169 key := n.Right 1170 t := map_.Type 1171 if n.Etype == 1 { 1172 // This m[k] expression is on the left-hand side of an assignment. 1173 // orderexpr made sure key is addressable. 1174 key = nod(OADDR, key, nil) 1175 n = mkcall1(mapfn("mapassign", t), nil, init, typename(t), map_, key) 1176 } else { 1177 // m[k] is not the target of an assignment. 1178 p := "" 1179 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 
1180 switch algtype(t.Key()) { 1181 case AMEM32: 1182 p = "mapaccess1_fast32" 1183 case AMEM64: 1184 p = "mapaccess1_fast64" 1185 case ASTRING: 1186 p = "mapaccess1_faststr" 1187 } 1188 } 1189 1190 if p == "" { 1191 // standard version takes key by reference. 1192 // orderexpr made sure key is addressable. 1193 key = nod(OADDR, key, nil) 1194 p = "mapaccess1" 1195 } 1196 1197 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1198 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key) 1199 } else { 1200 p = "mapaccess1_fat" 1201 z := zeroaddr(w) 1202 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key, z) 1203 } 1204 } 1205 n.Type = ptrto(t.Val()) 1206 n.NonNil = true // mapaccess1* and mapassign always return non-nil pointers. 1207 n = nod(OIND, n, nil) 1208 n.Type = t.Val() 1209 n.Typecheck = 1 1210 1211 case ORECV: 1212 Fatalf("walkexpr ORECV") // should see inside OAS only 1213 1214 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1215 n.Left = walkexpr(n.Left, init) 1216 low, high, max := n.SliceBounds() 1217 low = walkexpr(low, init) 1218 if low != nil && iszero(low) { 1219 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1220 low = nil 1221 } 1222 high = walkexpr(high, init) 1223 max = walkexpr(max, init) 1224 n.SetSliceBounds(low, high, max) 1225 if n.Op.IsSlice3() { 1226 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1227 // Reduce x[i:j:cap(x)] to x[i:j]. 1228 if n.Op == OSLICE3 { 1229 n.Op = OSLICE 1230 } else { 1231 n.Op = OSLICEARR 1232 } 1233 n = reduceSlice(n) 1234 } 1235 } else { 1236 n = reduceSlice(n) 1237 } 1238 1239 case OADDR: 1240 n.Left = walkexpr(n.Left, init) 1241 1242 case ONEW: 1243 if n.Esc == EscNone { 1244 if n.Type.Elem().Width >= 1<<16 { 1245 Fatalf("large ONEW with EscNone: %v", n) 1246 } 1247 r := temp(n.Type.Elem()) 1248 r = nod(OAS, r, nil) // zero temp 1249 r = typecheck(r, Etop) 1250 init.Append(r) 1251 r = nod(OADDR, r.Left, nil) 1252 r = typecheck(r, Erv) 1253 n = r 1254 } else { 1255 n = callnew(n.Type.Elem()) 1256 } 1257 1258 case OCMPSTR: 1259 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1260 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1261 // TODO(marvin): Fix Node.EType type union. 1262 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1263 r = typecheck(r, Erv) 1264 r = walkexpr(r, init) 1265 r.Type = n.Type 1266 n = r 1267 break 1268 } 1269 1270 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1271 var cs, ncs *Node // const string, non-const string 1272 switch { 1273 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1274 // ignore; will be constant evaluated 1275 case Isconst(n.Left, CTSTR): 1276 cs = n.Left 1277 ncs = n.Right 1278 case Isconst(n.Right, CTSTR): 1279 cs = n.Right 1280 ncs = n.Left 1281 } 1282 if cs != nil { 1283 cmp := Op(n.Etype) 1284 // maxRewriteLen was chosen empirically. 1285 // It is the value that minimizes cmd/go file size 1286 // across most architectures. 1287 // See the commit description for CL 26758 for details. 1288 maxRewriteLen := 6 1289 var and Op 1290 switch cmp { 1291 case OEQ: 1292 and = OANDAND 1293 case ONE: 1294 and = OOROR 1295 default: 1296 // Don't do byte-wise comparisons for <, <=, etc. 1297 // They're fairly complicated. 
1298 // Length-only checks are ok, though. 1299 maxRewriteLen = 0 1300 } 1301 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1302 if len(s) > 0 { 1303 ncs = safeexpr(ncs, init) 1304 } 1305 // TODO(marvin): Fix Node.EType type union. 1306 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1307 for i := 0; i < len(s); i++ { 1308 cb := nodintconst(int64(s[i])) 1309 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1310 r = nod(and, r, nod(cmp, ncb, cb)) 1311 } 1312 r = typecheck(r, Erv) 1313 r = walkexpr(r, init) 1314 r.Type = n.Type 1315 n = r 1316 break 1317 } 1318 } 1319 1320 var r *Node 1321 // TODO(marvin): Fix Node.EType type union. 1322 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1323 // prepare for rewrite below 1324 n.Left = cheapexpr(n.Left, init) 1325 n.Right = cheapexpr(n.Right, init) 1326 1327 r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1328 1329 // quick check of len before full compare for == or != 1330 // eqstring assumes that the lengths are equal 1331 // TODO(marvin): Fix Node.EType type union. 1332 if Op(n.Etype) == OEQ { 1333 // len(left) == len(right) && eqstring(left, right) 1334 r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1335 } else { 1336 // len(left) != len(right) || !eqstring(left, right) 1337 r = nod(ONOT, r, nil) 1338 r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1339 } 1340 1341 r = typecheck(r, Erv) 1342 r = walkexpr(r, nil) 1343 } else { 1344 // sys_cmpstring(s1, s2) :: 0 1345 r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1346 // TODO(marvin): Fix Node.EType type union. 1347 r = nod(Op(n.Etype), r, nodintconst(0)) 1348 } 1349 1350 r = typecheck(r, Erv) 1351 if !n.Type.IsBoolean() { 1352 Fatalf("cmp %v", n.Type) 1353 } 1354 r.Type = n.Type 1355 n = r 1356 1357 case OADDSTR: 1358 n = addstr(n, init) 1359 1360 case OAPPEND: 1361 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 1362 Fatalf("append outside assignment") 1363 1364 case OCOPY: 1365 n = copyany(n, init, instrumenting && !compiling_runtime) 1366 1367 // cannot use chanfn - closechan takes any, not chan any 1368 case OCLOSE: 1369 fn := syslook("closechan") 1370 1371 fn = substArgTypes(fn, n.Left.Type) 1372 n = mkcall1(fn, nil, init, n.Left) 1373 1374 case OMAKECHAN: 1375 n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64])) 1376 1377 case OMAKEMAP: 1378 t := n.Type 1379 1380 a := nodnil() // hmap buffer 1381 r := nodnil() // bucket buffer 1382 if n.Esc == EscNone { 1383 // Allocate hmap buffer on stack. 1384 var_ := temp(hmap(t)) 1385 1386 a = nod(OAS, var_, nil) // zero temp 1387 a = typecheck(a, Etop) 1388 init.Append(a) 1389 a = nod(OADDR, var_, nil) 1390 1391 // Allocate one bucket on stack. 1392 // Maximum key/value size is 128 bytes, larger objects 1393 // are stored with an indirection. So max bucket size is 2048+eps. 
1394 var_ = temp(mapbucket(t)) 1395 1396 r = nod(OAS, var_, nil) // zero temp 1397 r = typecheck(r, Etop) 1398 init.Append(r) 1399 r = nod(OADDR, var_, nil) 1400 } 1401 1402 fn := syslook("makemap") 1403 fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val()) 1404 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r) 1405 1406 case OMAKESLICE: 1407 l := n.Left 1408 r := n.Right 1409 if r == nil { 1410 r = safeexpr(l, init) 1411 l = r 1412 } 1413 t := n.Type 1414 if n.Esc == EscNone { 1415 if !isSmallMakeSlice(n) { 1416 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1417 } 1418 // var arr [r]T 1419 // n = arr[:l] 1420 t = typArray(t.Elem(), nonnegintconst(r)) // [r]T 1421 var_ := temp(t) 1422 a := nod(OAS, var_, nil) // zero temp 1423 a = typecheck(a, Etop) 1424 init.Append(a) 1425 r := nod(OSLICE, var_, nil) // arr[:l] 1426 r.SetSliceBounds(nil, l, nil) 1427 r = conv(r, n.Type) // in case n.Type is named. 1428 r = typecheck(r, Erv) 1429 r = walkexpr(r, init) 1430 n = r 1431 } else { 1432 // n escapes; set up a call to makeslice. 1433 // When len and cap can fit into int, use makeslice instead of 1434 // makeslice64, which is faster and shorter on 32 bit platforms. 1435 1436 if t.Elem().NotInHeap { 1437 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1438 } 1439 1440 len, cap := l, r 1441 1442 fnname := "makeslice64" 1443 argtype := Types[TINT64] 1444 1445 // typechecking guarantees that TIDEAL len/cap are positive and fit in an int. 1446 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1447 // will be handled by the negative range checks in makeslice during runtime. 1448 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1449 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1450 fnname = "makeslice" 1451 argtype = Types[TINT] 1452 } 1453 1454 fn := syslook(fnname) 1455 fn = substArgTypes(fn, t.Elem()) // any-1 1456 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1457 } 1458 1459 case ORUNESTR: 1460 a := nodnil() 1461 if n.Esc == EscNone { 1462 t := typArray(Types[TUINT8], 4) 1463 var_ := temp(t) 1464 a = nod(OADDR, var_, nil) 1465 } 1466 1467 // intstring(*[4]byte, rune) 1468 n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64])) 1469 1470 case OARRAYBYTESTR: 1471 a := nodnil() 1472 if n.Esc == EscNone { 1473 // Create temporary buffer for string on stack. 1474 t := typArray(Types[TUINT8], tmpstringbufsize) 1475 1476 a = nod(OADDR, temp(t), nil) 1477 } 1478 1479 // slicebytetostring(*[32]byte, []byte) string; 1480 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1481 1482 // slicebytetostringtmp([]byte) string; 1483 case OARRAYBYTESTRTMP: 1484 n.Left = walkexpr(n.Left, init) 1485 1486 if !instrumenting { 1487 // Let the backend handle OARRAYBYTESTRTMP directly 1488 // to avoid a function call to slicebytetostringtmp. 1489 break 1490 } 1491 1492 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1493 1494 // slicerunetostring(*[32]byte, []rune) string; 1495 case OARRAYRUNESTR: 1496 a := nodnil() 1497 1498 if n.Esc == EscNone { 1499 // Create temporary buffer for string on stack. 
1500 t := typArray(Types[TUINT8], tmpstringbufsize) 1501 1502 a = nod(OADDR, temp(t), nil) 1503 } 1504 1505 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1506 1507 // stringtoslicebyte(*32[byte], string) []byte; 1508 case OSTRARRAYBYTE: 1509 a := nodnil() 1510 1511 if n.Esc == EscNone { 1512 // Create temporary buffer for slice on stack. 1513 t := typArray(Types[TUINT8], tmpstringbufsize) 1514 1515 a = nod(OADDR, temp(t), nil) 1516 } 1517 1518 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING])) 1519 1520 case OSTRARRAYBYTETMP: 1521 // []byte(string) conversion that creates a slice 1522 // referring to the actual string bytes. 1523 // This conversion is handled later by the backend and 1524 // is only for use by internal compiler optimizations 1525 // that know that the slice won't be mutated. 1526 // The only such case today is: 1527 // for i, c := range []byte(string) 1528 n.Left = walkexpr(n.Left, init) 1529 1530 // stringtoslicerune(*[32]rune, string) []rune 1531 case OSTRARRAYRUNE: 1532 a := nodnil() 1533 1534 if n.Esc == EscNone { 1535 // Create temporary buffer for slice on stack. 1536 t := typArray(Types[TINT32], tmpstringbufsize) 1537 1538 a = nod(OADDR, temp(t), nil) 1539 } 1540 1541 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1542 1543 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1544 case OCMPIFACE: 1545 if !eqtype(n.Left.Type, n.Right.Type) { 1546 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1547 } 1548 var fn *Node 1549 if n.Left.Type.IsEmptyInterface() { 1550 fn = syslook("efaceeq") 1551 } else { 1552 fn = syslook("ifaceeq") 1553 } 1554 1555 n.Right = cheapexpr(n.Right, init) 1556 n.Left = cheapexpr(n.Left, init) 1557 fn = substArgTypes(fn, n.Right.Type, n.Left.Type) 1558 r := mkcall1(fn, n.Type, init, n.Left, n.Right) 1559 // TODO(marvin): Fix Node.EType type union. 1560 if Op(n.Etype) == ONE { 1561 r = nod(ONOT, r, nil) 1562 } 1563 1564 // check itable/type before full compare. 1565 // TODO(marvin): Fix Node.EType type union. 1566 if Op(n.Etype) == OEQ { 1567 r = nod(OANDAND, nod(OEQ, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1568 } else { 1569 r = nod(OOROR, nod(ONE, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1570 } 1571 r = typecheck(r, Erv) 1572 r = walkexpr(r, init) 1573 r.Type = n.Type 1574 n = r 1575 1576 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1577 if isStaticCompositeLiteral(n) { 1578 // n can be directly represented in the read-only data section. 1579 // Make direct reference to the static data. See issue 12841. 1580 vstat := staticname(n.Type) 1581 vstat.Name.Readonly = true 1582 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1583 n = vstat 1584 n = typecheck(n, Erv) 1585 break 1586 } 1587 var_ := temp(n.Type) 1588 anylit(n, var_, init) 1589 n = var_ 1590 1591 case OSEND: 1592 n1 := n.Right 1593 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1594 n1 = walkexpr(n1, init) 1595 n1 = nod(OADDR, n1, nil) 1596 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1) 1597 1598 case OCLOSURE: 1599 n = walkclosure(n, init) 1600 1601 case OCALLPART: 1602 n = walkpartialcall(n, init) 1603 } 1604 1605 // Expressions that are constant at run time but not 1606 // considered const by the language spec are not turned into 1607 // constants until walk. For example, if n is y%1 == 0, the 1608 // walk of y%1 may have replaced it by 0. 1609 // Check whether n with its updated args is itself now a constant. 
1610 t := n.Type 1611 1612 evconst(n) 1613 n.Type = t 1614 if n.Op == OLITERAL { 1615 n = typecheck(n, Erv) 1616 } 1617 1618 ullmancalc(n) 1619 1620 if Debug['w'] != 0 && n != nil { 1621 Dump("walk", n) 1622 } 1623 1624 lineno = lno 1625 return n 1626 } 1627 1628 // TODO(josharian): combine this with its caller and simplify 1629 func reduceSlice(n *Node) *Node { 1630 low, high, max := n.SliceBounds() 1631 if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) { 1632 // Reduce x[i:len(x)] to x[i:]. 1633 high = nil 1634 } 1635 n.SetSliceBounds(low, high, max) 1636 if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { 1637 // Reduce x[:] to x. 1638 if Debug_slice > 0 { 1639 Warn("slice: omit slice operation") 1640 } 1641 return n.Left 1642 } 1643 return n 1644 } 1645 1646 func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node { 1647 // convas will turn map assigns into function calls, 1648 // making it impossible for reorder3 to work. 1649 n := nod(OAS, l, r) 1650 1651 if l.Op == OINDEXMAP { 1652 return n 1653 } 1654 1655 return convas(n, init) 1656 } 1657 1658 func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node { 1659 // check assign expression list to 1660 // a expression list. called in 1661 // expr-list = expr-list 1662 1663 // ensure order of evaluation for function calls 1664 for i := range nl { 1665 nl[i] = safeexpr(nl[i], init) 1666 } 1667 for i1 := range nr { 1668 nr[i1] = safeexpr(nr[i1], init) 1669 } 1670 1671 var nn []*Node 1672 i := 0 1673 for ; i < len(nl); i++ { 1674 if i >= len(nr) { 1675 break 1676 } 1677 // Do not generate 'x = x' during return. See issue 4014. 1678 if op == ORETURN && samesafeexpr(nl[i], nr[i]) { 1679 continue 1680 } 1681 nn = append(nn, ascompatee1(op, nl[i], nr[i], init)) 1682 } 1683 1684 // cannot happen: caller checked that lists had same length 1685 if i < len(nl) || i < len(nr) { 1686 var nln, nrn Nodes 1687 nln.Set(nl) 1688 nrn.Set(nr) 1689 yyerror("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.Func.Nname.Sym.Name) 1690 } 1691 return nn 1692 } 1693 1694 // l is an lv and rt is the type of an rv 1695 // return 1 if this implies a function call 1696 // evaluating the lv or a function call 1697 // in the conversion of the types 1698 func fncall(l *Node, rt *Type) bool { 1699 if l.Ullman >= UINF || l.Op == OINDEXMAP { 1700 return true 1701 } 1702 var r Node 1703 if needwritebarrier(l, &r) { 1704 return true 1705 } 1706 if eqtype(l.Type, rt) { 1707 return false 1708 } 1709 return true 1710 } 1711 1712 // check assign type list to 1713 // a expression list. 
called in 1714 // expr-list = func() 1715 func ascompatet(op Op, nl Nodes, nr *Type) []*Node { 1716 r, saver := iterFields(nr) 1717 1718 var nn, mm Nodes 1719 var ullmanOverflow bool 1720 var i int 1721 for i = 0; i < nl.Len(); i++ { 1722 if r == nil { 1723 break 1724 } 1725 l := nl.Index(i) 1726 if isblank(l) { 1727 r = saver.Next() 1728 continue 1729 } 1730 1731 // any lv that causes a fn call must be 1732 // deferred until all the return arguments 1733 // have been pulled from the output arguments 1734 if fncall(l, r.Type) { 1735 tmp := temp(r.Type) 1736 tmp = typecheck(tmp, Erv) 1737 a := nod(OAS, l, tmp) 1738 a = convas(a, &mm) 1739 mm.Append(a) 1740 l = tmp 1741 } 1742 1743 a := nod(OAS, l, nodarg(r, 0)) 1744 a = convas(a, &nn) 1745 ullmancalc(a) 1746 if a.Ullman >= UINF { 1747 Dump("ascompatet ucount", a) 1748 ullmanOverflow = true 1749 } 1750 1751 nn.Append(a) 1752 r = saver.Next() 1753 } 1754 1755 if i < nl.Len() || r != nil { 1756 yyerror("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) 1757 } 1758 1759 if ullmanOverflow { 1760 Fatalf("ascompatet: too many function calls evaluating parameters") 1761 } 1762 return append(nn.Slice(), mm.Slice()...) 1763 } 1764 1765 // package all the arguments that match a ... T parameter into a []T. 1766 func mkdotargslice(lr0, nn []*Node, l *Field, fp int, init *Nodes, ddd *Node) []*Node { 1767 esc := uint16(EscUnknown) 1768 if ddd != nil { 1769 esc = ddd.Esc 1770 } 1771 1772 tslice := typSlice(l.Type.Elem()) 1773 1774 var n *Node 1775 if len(lr0) == 0 { 1776 n = nodnil() 1777 n.Type = tslice 1778 } else { 1779 n = nod(OCOMPLIT, nil, typenod(tslice)) 1780 if ddd != nil && prealloc[ddd] != nil { 1781 prealloc[n] = prealloc[ddd] // temporary to use 1782 } 1783 n.List.Set(lr0) 1784 n.Esc = esc 1785 n = typecheck(n, Erv) 1786 if n.Type == nil { 1787 Fatalf("mkdotargslice: typecheck failed") 1788 } 1789 n = walkexpr(n, init) 1790 } 1791 1792 a := nod(OAS, nodarg(l, fp), n) 1793 nn = append(nn, convas(a, init)) 1794 return nn 1795 } 1796 1797 // helpers for shape errors 1798 func dumptypes(nl *Type, what string) string { 1799 s := "" 1800 for _, l := range nl.Fields().Slice() { 1801 if s != "" { 1802 s += ", " 1803 } 1804 s += fldconv(l, 0) 1805 } 1806 if s == "" { 1807 s = fmt.Sprintf("[no arguments %s]", what) 1808 } 1809 return s 1810 } 1811 1812 func dumpnodetypes(l []*Node, what string) string { 1813 s := "" 1814 for _, r := range l { 1815 if s != "" { 1816 s += ", " 1817 } 1818 s += r.Type.String() 1819 } 1820 if s == "" { 1821 s = fmt.Sprintf("[no arguments %s]", what) 1822 } 1823 return s 1824 } 1825 1826 // check assign expression list to 1827 // a type list. called in 1828 // return expr-list 1829 // func(expr-list) 1830 func ascompatte(op Op, call *Node, isddd bool, nl *Type, lr []*Node, fp int, init *Nodes) []*Node { 1831 lr0 := lr 1832 l, savel := iterFields(nl) 1833 var r *Node 1834 if len(lr) > 0 { 1835 r = lr[0] 1836 } 1837 var nn []*Node 1838 1839 // f(g()) where g has multiple return values 1840 if r != nil && len(lr) <= 1 && r.Type.IsFuncArgStruct() { 1841 // optimization - can do block copy 1842 if eqtypenoname(r.Type, nl) { 1843 arg := nodarg(nl, fp) 1844 r = nod(OCONVNOP, r, nil) 1845 r.Type = arg.Type 1846 nn = []*Node{convas(nod(OAS, arg, r), init)} 1847 goto ret 1848 } 1849 1850 // conversions involved. 1851 // copy into temporaries. 
1852 var alist []*Node 1853 1854 for _, l := range r.Type.Fields().Slice() { 1855 tmp := temp(l.Type) 1856 alist = append(alist, tmp) 1857 } 1858 1859 a := nod(OAS2, nil, nil) 1860 a.List.Set(alist) 1861 a.Rlist.Set(lr) 1862 a = typecheck(a, Etop) 1863 a = walkstmt(a) 1864 init.Append(a) 1865 lr = alist 1866 r = lr[0] 1867 l, savel = iterFields(nl) 1868 } 1869 1870 for { 1871 if l != nil && l.Isddd { 1872 // the ddd parameter must be last 1873 ll := savel.Next() 1874 1875 if ll != nil { 1876 yyerror("... must be last argument") 1877 } 1878 1879 // special case -- 1880 // only if we are assigning a single ddd 1881 // argument to a ddd parameter then it is 1882 // passed through unencapsulated 1883 if r != nil && len(lr) <= 1 && isddd && eqtype(l.Type, r.Type) { 1884 a := nod(OAS, nodarg(l, fp), r) 1885 a = convas(a, init) 1886 nn = append(nn, a) 1887 break 1888 } 1889 1890 // normal case -- make a slice of all 1891 // remaining arguments and pass it to 1892 // the ddd parameter. 1893 nn = mkdotargslice(lr, nn, l, fp, init, call.Right) 1894 1895 break 1896 } 1897 1898 if l == nil || r == nil { 1899 if l != nil || r != nil { 1900 l1 := dumptypes(nl, "expected") 1901 l2 := dumpnodetypes(lr0, "given") 1902 if l != nil { 1903 yyerror("not enough arguments to %v\n\t%s\n\t%s", op, l1, l2) 1904 } else { 1905 yyerror("too many arguments to %v\n\t%s\n\t%s", op, l1, l2) 1906 } 1907 } 1908 1909 break 1910 } 1911 1912 a := nod(OAS, nodarg(l, fp), r) 1913 a = convas(a, init) 1914 nn = append(nn, a) 1915 1916 l = savel.Next() 1917 r = nil 1918 lr = lr[1:] 1919 if len(lr) > 0 { 1920 r = lr[0] 1921 } 1922 } 1923 1924 ret: 1925 for _, n := range nn { 1926 n.Typecheck = 1 1927 } 1928 return nn 1929 } 1930 1931 // generate code for print 1932 func walkprint(nn *Node, init *Nodes) *Node { 1933 var r *Node 1934 var n *Node 1935 var on *Node 1936 var t *Type 1937 var et EType 1938 1939 op := nn.Op 1940 all := nn.List 1941 var calls []*Node 1942 notfirst := false 1943 1944 // Hoist all the argument evaluation up before the lock. 
1945 walkexprlistcheap(all.Slice(), init) 1946 1947 calls = append(calls, mkcall("printlock", nil, init)) 1948 for i1, n1 := range all.Slice() { 1949 if notfirst { 1950 calls = append(calls, mkcall("printsp", nil, init)) 1951 } 1952 1953 notfirst = op == OPRINTN 1954 1955 n = n1 1956 if n.Op == OLITERAL { 1957 switch n.Val().Ctype() { 1958 case CTRUNE: 1959 n = defaultlit(n, runetype) 1960 1961 case CTINT: 1962 n = defaultlit(n, Types[TINT64]) 1963 1964 case CTFLT: 1965 n = defaultlit(n, Types[TFLOAT64]) 1966 } 1967 } 1968 1969 if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL { 1970 n = defaultlit(n, Types[TINT64]) 1971 } 1972 n = defaultlit(n, nil) 1973 all.SetIndex(i1, n) 1974 if n.Type == nil || n.Type.Etype == TFORW { 1975 continue 1976 } 1977 1978 t = n.Type 1979 et = n.Type.Etype 1980 if n.Type.IsInterface() { 1981 if n.Type.IsEmptyInterface() { 1982 on = syslook("printeface") 1983 } else { 1984 on = syslook("printiface") 1985 } 1986 on = substArgTypes(on, n.Type) // any-1 1987 } else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR { 1988 on = syslook("printpointer") 1989 on = substArgTypes(on, n.Type) // any-1 1990 } else if n.Type.IsSlice() { 1991 on = syslook("printslice") 1992 on = substArgTypes(on, n.Type) // any-1 1993 } else if isInt[et] { 1994 if et == TUINT64 { 1995 if (t.Sym.Pkg == Runtimepkg || compiling_runtime) && t.Sym.Name == "hex" { 1996 on = syslook("printhex") 1997 } else { 1998 on = syslook("printuint") 1999 } 2000 } else { 2001 on = syslook("printint") 2002 } 2003 } else if isFloat[et] { 2004 on = syslook("printfloat") 2005 } else if isComplex[et] { 2006 on = syslook("printcomplex") 2007 } else if et == TBOOL { 2008 on = syslook("printbool") 2009 } else if et == TSTRING { 2010 on = syslook("printstring") 2011 } else { 2012 badtype(OPRINT, n.Type, nil) 2013 continue 2014 } 2015 2016 t = on.Type.Params().Field(0).Type 2017 2018 if !eqtype(t, n.Type) { 2019 n = nod(OCONV, n, nil) 2020 n.Type = t 2021 } 2022 2023 r = nod(OCALL, on, nil) 2024 r.List.Append(n) 2025 calls = append(calls, r) 2026 } 2027 2028 if op == OPRINTN { 2029 calls = append(calls, mkcall("printnl", nil, nil)) 2030 } 2031 2032 calls = append(calls, mkcall("printunlock", nil, init)) 2033 2034 typecheckslice(calls, Etop) 2035 walkexprlist(calls, init) 2036 2037 r = nod(OEMPTY, nil, nil) 2038 r = typecheck(r, Etop) 2039 r = walkexpr(r, init) 2040 r.Ninit.Set(calls) 2041 return r 2042 } 2043 2044 func callnew(t *Type) *Node { 2045 if t.NotInHeap { 2046 yyerror("%v is go:notinheap; heap allocation disallowed", t) 2047 } 2048 dowidth(t) 2049 fn := syslook("newobject") 2050 fn = substArgTypes(fn, t) 2051 v := mkcall1(fn, ptrto(t), nil, typename(t)) 2052 v.NonNil = true 2053 return v 2054 } 2055 2056 func iscallret(n *Node) bool { 2057 n = outervalue(n) 2058 return n.Op == OINDREGSP 2059 } 2060 2061 func isstack(n *Node) bool { 2062 n = outervalue(n) 2063 2064 // If n is *autotmp and autotmp = &foo, replace n with foo. 2065 // We introduce such temps when initializing struct literals. 2066 if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() { 2067 defn := n.Left.Name.Defn 2068 if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR { 2069 n = defn.Right.Left 2070 } 2071 } 2072 2073 switch n.Op { 2074 case OINDREGSP: 2075 return true 2076 2077 case ONAME: 2078 switch n.Class { 2079 case PAUTO, PPARAM, PPARAMOUT: 2080 return true 2081 } 2082 } 2083 2084 return false 2085 } 2086 2087 // Do we need a write barrier for the assignment l = r? 
2088 func needwritebarrier(l *Node, r *Node) bool { 2089 if !use_writebarrier { 2090 return false 2091 } 2092 2093 if l == nil || isblank(l) { 2094 return false 2095 } 2096 2097 // No write barrier for write of non-pointers. 2098 dowidth(l.Type) 2099 2100 if !haspointers(l.Type) { 2101 return false 2102 } 2103 2104 // No write barrier for write to stack. 2105 if isstack(l) { 2106 return false 2107 } 2108 2109 // No write barrier if this is a pointer to a go:notinheap 2110 // type, since the write barrier's inheap(ptr) check will fail. 2111 if l.Type.IsPtr() && l.Type.Elem().NotInHeap { 2112 return false 2113 } 2114 2115 // Implicit zeroing is still zeroing, so it needs write 2116 // barriers. In practice, these are all to stack variables 2117 // (even if isstack isn't smart enough to figure that out), so 2118 // they'll be eliminated by the backend. 2119 if r == nil { 2120 return true 2121 } 2122 2123 // Ignore no-op conversions when making decision. 2124 // Ensures that xp = unsafe.Pointer(&x) is treated 2125 // the same as xp = &x. 2126 for r.Op == OCONVNOP { 2127 r = r.Left 2128 } 2129 2130 // TODO: We can eliminate write barriers if we know *both* the 2131 // current and new content of the slot must already be shaded. 2132 // We know a pointer is shaded if it's nil, or points to 2133 // static data, a global (variable or function), or the stack. 2134 // The nil optimization could be particularly useful for 2135 // writes to just-allocated objects. Unfortunately, knowing 2136 // the "current" value of the slot requires flow analysis. 2137 2138 // No write barrier for storing address of stack values, 2139 // which are guaranteed only to be written to the stack. 2140 if r.Op == OADDR && isstack(r.Left) { 2141 return false 2142 } 2143 2144 // Otherwise, be conservative and use write barrier. 2145 return true 2146 } 2147 2148 // TODO(rsc): Perhaps componentgen should run before this. 2149 2150 func applywritebarrier(n *Node) *Node { 2151 if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) { 2152 if Debug_wb > 1 { 2153 Warnl(n.Pos, "marking %v for barrier", n.Left) 2154 } 2155 n.Op = OASWB 2156 return n 2157 } 2158 return n 2159 } 2160 2161 func convas(n *Node, init *Nodes) *Node { 2162 if n.Op != OAS { 2163 Fatalf("convas: not OAS %v", n.Op) 2164 } 2165 2166 n.Typecheck = 1 2167 2168 var lt *Type 2169 var rt *Type 2170 if n.Left == nil || n.Right == nil { 2171 goto out 2172 } 2173 2174 lt = n.Left.Type 2175 rt = n.Right.Type 2176 if lt == nil || rt == nil { 2177 goto out 2178 } 2179 2180 if isblank(n.Left) { 2181 n.Right = defaultlit(n.Right, nil) 2182 goto out 2183 } 2184 2185 if !eqtype(lt, rt) { 2186 n.Right = assignconv(n.Right, lt, "assignment") 2187 n.Right = walkexpr(n.Right, init) 2188 } 2189 2190 out: 2191 ullmancalc(n) 2192 return n 2193 } 2194 2195 // from ascompat[te] 2196 // evaluating actual function arguments. 2197 // f(a,b) 2198 // if there is exactly one function expr, 2199 // then it is done first. 
otherwise must 2200 // make temp variables 2201 func reorder1(all []*Node) []*Node { 2202 c := 0 // function calls 2203 t := 0 // total parameters 2204 2205 for _, n := range all { 2206 t++ 2207 ullmancalc(n) 2208 if n.Ullman >= UINF { 2209 c++ 2210 } 2211 } 2212 2213 if c == 0 || t == 1 { 2214 return all 2215 } 2216 2217 var g []*Node // fncalls assigned to tempnames 2218 var f *Node // last fncall assigned to stack 2219 var r []*Node // non fncalls and tempnames assigned to stack 2220 d := 0 2221 var a *Node 2222 for _, n := range all { 2223 if n.Ullman < UINF { 2224 r = append(r, n) 2225 continue 2226 } 2227 2228 d++ 2229 if d == c { 2230 f = n 2231 continue 2232 } 2233 2234 // make assignment of fncall to tempname 2235 a = temp(n.Right.Type) 2236 2237 a = nod(OAS, a, n.Right) 2238 g = append(g, a) 2239 2240 // put normal arg assignment on list 2241 // with fncall replaced by tempname 2242 n.Right = a.Left 2243 2244 r = append(r, n) 2245 } 2246 2247 if f != nil { 2248 g = append(g, f) 2249 } 2250 return append(g, r...) 2251 } 2252 2253 // from ascompat[ee] 2254 // a,b = c,d 2255 // simultaneous assignment. there cannot 2256 // be later use of an earlier lvalue. 2257 // 2258 // function calls have been removed. 2259 func reorder3(all []*Node) []*Node { 2260 var l *Node 2261 2262 // If a needed expression may be affected by an 2263 // earlier assignment, make an early copy of that 2264 // expression and use the copy instead. 2265 var early []*Node 2266 2267 var mapinit Nodes 2268 for i, n := range all { 2269 l = n.Left 2270 2271 // Save subexpressions needed on left side. 2272 // Drill through non-dereferences. 2273 for { 2274 if l.Op == ODOT || l.Op == OPAREN { 2275 l = l.Left 2276 continue 2277 } 2278 2279 if l.Op == OINDEX && l.Left.Type.IsArray() { 2280 l.Right = reorder3save(l.Right, all, i, &early) 2281 l = l.Left 2282 continue 2283 } 2284 2285 break 2286 } 2287 2288 switch l.Op { 2289 default: 2290 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2291 2292 case ONAME: 2293 break 2294 2295 case OINDEX, OINDEXMAP: 2296 l.Left = reorder3save(l.Left, all, i, &early) 2297 l.Right = reorder3save(l.Right, all, i, &early) 2298 if l.Op == OINDEXMAP { 2299 all[i] = convas(all[i], &mapinit) 2300 } 2301 2302 case OIND, ODOTPTR: 2303 l.Left = reorder3save(l.Left, all, i, &early) 2304 } 2305 2306 // Save expression on right side. 2307 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2308 } 2309 2310 early = append(mapinit.Slice(), early...) 2311 return append(early, all...) 2312 } 2313 2314 // if the evaluation of *np would be affected by the 2315 // assignments in all up to but not including the ith assignment, 2316 // copy into a temporary during *early and 2317 // replace *np with that temp. 2318 // The result of reorder3save MUST be assigned back to n, e.g. 2319 // n.Left = reorder3save(n.Left, all, i, early) 2320 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2321 if !aliased(n, all, i) { 2322 return n 2323 } 2324 2325 q := temp(n.Type) 2326 q = nod(OAS, q, n) 2327 q = typecheck(q, Etop) 2328 *early = append(*early, q) 2329 return q.Left 2330 } 2331 2332 // what's the outer value that a write to n affects? 2333 // outer value means containing struct or array. 
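// For example (a sketch, with x a local struct value, a an array field, s a slice, p a pointer):
//   x.f = v     // outer value is x
//   x.a[i] = v  // outer value is x (ODOT and fixed-size OINDEX are drilled through)
//   s[i] = v    // outer value is s[i] itself (slice indexing is a dereference)
//   p.f = v     // outer value is p.f (ODOTPTR is a dereference too)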
2334 func outervalue(n *Node) *Node { 2335 for { 2336 if n.Op == OXDOT { 2337 Fatalf("OXDOT in walk") 2338 } 2339 if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP { 2340 n = n.Left 2341 continue 2342 } 2343 2344 if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() { 2345 n = n.Left 2346 continue 2347 } 2348 2349 break 2350 } 2351 2352 return n 2353 } 2354 2355 // Is it possible that the computation of n might be 2356 // affected by writes in as up to but not including the ith element? 2357 func aliased(n *Node, all []*Node, i int) bool { 2358 if n == nil { 2359 return false 2360 } 2361 2362 // Treat all fields of a struct as referring to the whole struct. 2363 // We could do better but we would have to keep track of the fields. 2364 for n.Op == ODOT { 2365 n = n.Left 2366 } 2367 2368 // Look for obvious aliasing: a variable being assigned 2369 // during the all list and appearing in n. 2370 // Also record whether there are any writes to main memory. 2371 // Also record whether there are any writes to variables 2372 // whose addresses have been taken. 2373 memwrite := 0 2374 2375 varwrite := 0 2376 var a *Node 2377 for _, an := range all[:i] { 2378 a = outervalue(an.Left) 2379 2380 for a.Op == ODOT { 2381 a = a.Left 2382 } 2383 2384 if a.Op != ONAME { 2385 memwrite = 1 2386 continue 2387 } 2388 2389 switch n.Class { 2390 default: 2391 varwrite = 1 2392 continue 2393 2394 case PAUTO, PPARAM, PPARAMOUT: 2395 if n.Addrtaken { 2396 varwrite = 1 2397 continue 2398 } 2399 2400 if vmatch2(a, n) { 2401 // Direct hit. 2402 return true 2403 } 2404 } 2405 } 2406 2407 // The variables being written do not appear in n. 2408 // However, n might refer to computed addresses 2409 // that are being written. 2410 2411 // If no computed addresses are affected by the writes, no aliasing. 2412 if memwrite == 0 && varwrite == 0 { 2413 return false 2414 } 2415 2416 // If n does not refer to computed addresses 2417 // (that is, if n only refers to variables whose addresses 2418 // have not been taken), no aliasing. 2419 if varexpr(n) { 2420 return false 2421 } 2422 2423 // Otherwise, both the writes and n refer to computed memory addresses. 2424 // Assume that they might conflict. 2425 return true 2426 } 2427 2428 // does the evaluation of n only refer to variables 2429 // whose addresses have not been taken? 2430 // (and no other memory) 2431 func varexpr(n *Node) bool { 2432 if n == nil { 2433 return true 2434 } 2435 2436 switch n.Op { 2437 case OLITERAL: 2438 return true 2439 2440 case ONAME: 2441 switch n.Class { 2442 case PAUTO, PPARAM, PPARAMOUT: 2443 if !n.Addrtaken { 2444 return true 2445 } 2446 } 2447 2448 return false 2449 2450 case OADD, 2451 OSUB, 2452 OOR, 2453 OXOR, 2454 OMUL, 2455 ODIV, 2456 OMOD, 2457 OLSH, 2458 ORSH, 2459 OAND, 2460 OANDNOT, 2461 OPLUS, 2462 OMINUS, 2463 OCOM, 2464 OPAREN, 2465 OANDAND, 2466 OOROR, 2467 OCONV, 2468 OCONVNOP, 2469 OCONVIFACE, 2470 ODOTTYPE: 2471 return varexpr(n.Left) && varexpr(n.Right) 2472 2473 case ODOT: // but not ODOTPTR 2474 // Should have been handled in aliased. 2475 Fatalf("varexpr unexpected ODOT") 2476 } 2477 2478 // Be conservative. 2479 return false 2480 } 2481 2482 // is the name l mentioned in r? 
2483 func vmatch2(l *Node, r *Node) bool { 2484 if r == nil { 2485 return false 2486 } 2487 switch r.Op { 2488 // match each right given left 2489 case ONAME: 2490 return l == r 2491 2492 case OLITERAL: 2493 return false 2494 } 2495 2496 if vmatch2(l, r.Left) { 2497 return true 2498 } 2499 if vmatch2(l, r.Right) { 2500 return true 2501 } 2502 for _, n := range r.List.Slice() { 2503 if vmatch2(l, n) { 2504 return true 2505 } 2506 } 2507 return false 2508 } 2509 2510 // is any name mentioned in l also mentioned in r? 2511 // called by sinit.go 2512 func vmatch1(l *Node, r *Node) bool { 2513 // isolate all left sides 2514 if l == nil || r == nil { 2515 return false 2516 } 2517 switch l.Op { 2518 case ONAME: 2519 switch l.Class { 2520 case PPARAM, PAUTO: 2521 break 2522 2523 // assignment to non-stack variable 2524 // must be delayed if right has function calls. 2525 default: 2526 if r.Ullman >= UINF { 2527 return true 2528 } 2529 } 2530 2531 return vmatch2(l, r) 2532 2533 case OLITERAL: 2534 return false 2535 } 2536 2537 if vmatch1(l.Left, r) { 2538 return true 2539 } 2540 if vmatch1(l.Right, r) { 2541 return true 2542 } 2543 for _, n := range l.List.Slice() { 2544 if vmatch1(n, r) { 2545 return true 2546 } 2547 } 2548 return false 2549 } 2550 2551 // paramstoheap returns code to allocate memory for heap-escaped parameters 2552 // and to copy non-result prameters' values from the stack. 2553 // If out is true, then code is also produced to zero-initialize their 2554 // stack memory addresses. 2555 func paramstoheap(params *Type) []*Node { 2556 var nn []*Node 2557 for _, t := range params.Fields().Slice() { 2558 // For precise stacks, the garbage collector assumes results 2559 // are always live, so zero them always. 2560 if params.StructType().Funarg == FunargResults { 2561 // Defer might stop a panic and show the 2562 // return values as they exist at the time of panic. 2563 // Make sure to zero them on entry to the function. 2564 nn = append(nn, nod(OAS, nodarg(t, 1), nil)) 2565 } 2566 2567 v := t.Nname 2568 if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result 2569 v = nil 2570 } 2571 if v == nil { 2572 continue 2573 } 2574 2575 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil { 2576 nn = append(nn, walkstmt(nod(ODCL, v, nil))) 2577 if stackcopy.Class == PPARAM { 2578 nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop))) 2579 } 2580 } 2581 } 2582 2583 return nn 2584 } 2585 2586 // returnsfromheap returns code to copy values for heap-escaped parameters 2587 // back to the stack. 2588 func returnsfromheap(params *Type) []*Node { 2589 var nn []*Node 2590 for _, t := range params.Fields().Slice() { 2591 v := t.Nname 2592 if v == nil { 2593 continue 2594 } 2595 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class == PPARAMOUT { 2596 nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop))) 2597 } 2598 } 2599 2600 return nn 2601 } 2602 2603 // heapmoves generates code to handle migrating heap-escaped parameters 2604 // between the stack and the heap. The generated code is added to Curfn's 2605 // Enter and Exit lists. 2606 func heapmoves() { 2607 lno := lineno 2608 lineno = Curfn.Pos 2609 nn := paramstoheap(Curfn.Type.Recvs()) 2610 nn = append(nn, paramstoheap(Curfn.Type.Params())...) 2611 nn = append(nn, paramstoheap(Curfn.Type.Results())...) 2612 Curfn.Func.Enter.Append(nn...) 2613 lineno = Curfn.Func.Endlineno 2614 Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...) 
2615 lineno = lno 2616 } 2617 2618 func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node { 2619 if fn.Type == nil || fn.Type.Etype != TFUNC { 2620 Fatalf("mkcall %v %v", fn, fn.Type) 2621 } 2622 2623 n := fn.Type.Params().NumFields() 2624 2625 r := nod(OCALL, fn, nil) 2626 r.List.Set(va[:n]) 2627 if fn.Type.Results().NumFields() > 0 { 2628 r = typecheck(r, Erv|Efnstruct) 2629 } else { 2630 r = typecheck(r, Etop) 2631 } 2632 r = walkexpr(r, init) 2633 r.Type = t 2634 return r 2635 } 2636 2637 func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node { 2638 return vmkcall(syslook(name), t, init, args) 2639 } 2640 2641 func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node { 2642 return vmkcall(fn, t, init, args) 2643 } 2644 2645 func conv(n *Node, t *Type) *Node { 2646 if eqtype(n.Type, t) { 2647 return n 2648 } 2649 n = nod(OCONV, n, nil) 2650 n.Type = t 2651 n = typecheck(n, Erv) 2652 return n 2653 } 2654 2655 // byteindex converts n, which is byte-sized, to a uint8. 2656 // We cannot use conv, because we allow converting bool to uint8 here, 2657 // which is forbidden in user code. 2658 func byteindex(n *Node) *Node { 2659 if eqtype(n.Type, Types[TUINT8]) { 2660 return n 2661 } 2662 n = nod(OCONV, n, nil) 2663 n.Type = Types[TUINT8] 2664 n.Typecheck = 1 2665 return n 2666 } 2667 2668 func chanfn(name string, n int, t *Type) *Node { 2669 if !t.IsChan() { 2670 Fatalf("chanfn %v", t) 2671 } 2672 fn := syslook(name) 2673 switch n { 2674 default: 2675 Fatalf("chanfn %d", n) 2676 case 1: 2677 fn = substArgTypes(fn, t.Elem()) 2678 case 2: 2679 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2680 } 2681 return fn 2682 } 2683 2684 func mapfn(name string, t *Type) *Node { 2685 if !t.IsMap() { 2686 Fatalf("mapfn %v", t) 2687 } 2688 fn := syslook(name) 2689 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2690 return fn 2691 } 2692 2693 func mapfndel(name string, t *Type) *Node { 2694 if !t.IsMap() { 2695 Fatalf("mapfn %v", t) 2696 } 2697 fn := syslook(name) 2698 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2699 return fn 2700 } 2701 2702 func writebarrierfn(name string, l *Type, r *Type) *Node { 2703 fn := syslook(name) 2704 fn = substArgTypes(fn, l, r) 2705 return fn 2706 } 2707 2708 func addstr(n *Node, init *Nodes) *Node { 2709 // orderexpr rewrote OADDSTR to have a list of strings. 2710 c := n.List.Len() 2711 2712 if c < 2 { 2713 yyerror("addstr count %d too small", c) 2714 } 2715 2716 buf := nodnil() 2717 if n.Esc == EscNone { 2718 sz := int64(0) 2719 for _, n1 := range n.List.Slice() { 2720 if n1.Op == OLITERAL { 2721 sz += int64(len(n1.Val().U.(string))) 2722 } 2723 } 2724 2725 // Don't allocate the buffer if the result won't fit. 2726 if sz < tmpstringbufsize { 2727 // Create temporary buffer for result string on stack. 2728 t := typArray(Types[TUINT8], tmpstringbufsize) 2729 2730 buf = nod(OADDR, temp(t), nil) 2731 } 2732 } 2733 2734 // build list of string arguments 2735 args := []*Node{buf} 2736 for _, n2 := range n.List.Slice() { 2737 args = append(args, conv(n2, Types[TSTRING])) 2738 } 2739 2740 var fn string 2741 if c <= 5 { 2742 // small numbers of strings use direct runtime helpers. 2743 // note: orderexpr knows this cutoff too. 2744 fn = fmt.Sprintf("concatstring%d", c) 2745 } else { 2746 // large numbers of strings are passed to the runtime as a slice. 
2747 fn = "concatstrings" 2748 2749 t := typSlice(Types[TSTRING]) 2750 slice := nod(OCOMPLIT, nil, typenod(t)) 2751 if prealloc[n] != nil { 2752 prealloc[slice] = prealloc[n] 2753 } 2754 slice.List.Set(args[1:]) // skip buf arg 2755 args = []*Node{buf, slice} 2756 slice.Esc = EscNone 2757 } 2758 2759 cat := syslook(fn) 2760 r := nod(OCALL, cat, nil) 2761 r.List.Set(args) 2762 r = typecheck(r, Erv) 2763 r = walkexpr(r, init) 2764 r.Type = n.Type 2765 2766 return r 2767 } 2768 2769 // expand append(l1, l2...) to 2770 // init { 2771 // s := l1 2772 // n := len(s) + len(l2) 2773 // // Compare as uint so growslice can panic on overflow. 2774 // if uint(n) > uint(cap(s)) { 2775 // s = growslice(s, n) 2776 // } 2777 // s = s[:n] 2778 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2779 // } 2780 // s 2781 // 2782 // l2 is allowed to be a string. 2783 func appendslice(n *Node, init *Nodes) *Node { 2784 walkexprlistsafe(n.List.Slice(), init) 2785 2786 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2787 // and n are name or literal, but those may index the slice we're 2788 // modifying here. Fix explicitly. 2789 ls := n.List.Slice() 2790 for i1, n1 := range ls { 2791 ls[i1] = cheapexpr(n1, init) 2792 } 2793 2794 l1 := n.List.First() 2795 l2 := n.List.Second() 2796 2797 var l []*Node 2798 2799 // var s []T 2800 s := temp(l1.Type) 2801 l = append(l, nod(OAS, s, l1)) // s = l1 2802 2803 // n := len(s) + len(l2) 2804 nn := temp(Types[TINT]) 2805 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2806 2807 // if uint(n) > uint(cap(s)) 2808 nif := nod(OIF, nil, nil) 2809 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2810 nif.Left.Left.Type = Types[TUINT] 2811 nif.Left.Right.Type = Types[TUINT] 2812 2813 // instantiate growslice(Type*, []any, int) []any 2814 fn := syslook("growslice") 2815 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2816 2817 // s = growslice(T, s, n) 2818 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2819 l = append(l, nif) 2820 2821 // s = s[:n] 2822 nt := nod(OSLICE, s, nil) 2823 nt.SetSliceBounds(nil, nn, nil) 2824 nt.Etype = 1 2825 l = append(l, nod(OAS, s, nt)) 2826 2827 if haspointers(l1.Type.Elem()) { 2828 // copy(s[len(l1):], l2) 2829 nptr1 := nod(OSLICE, s, nil) 2830 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2831 nptr1.Etype = 1 2832 nptr2 := l2 2833 fn := syslook("typedslicecopy") 2834 fn = substArgTypes(fn, l1.Type, l2.Type) 2835 var ln Nodes 2836 ln.Set(l) 2837 nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 2838 l = append(ln.Slice(), nt) 2839 } else if instrumenting && !compiling_runtime { 2840 // rely on runtime to instrument copy. 
2841 // copy(s[len(l1):], l2) 2842 nptr1 := nod(OSLICE, s, nil) 2843 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2844 nptr1.Etype = 1 2845 nptr2 := l2 2846 var fn *Node 2847 if l2.Type.IsString() { 2848 fn = syslook("slicestringcopy") 2849 } else { 2850 fn = syslook("slicecopy") 2851 } 2852 fn = substArgTypes(fn, l1.Type, l2.Type) 2853 var ln Nodes 2854 ln.Set(l) 2855 nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 2856 l = append(ln.Slice(), nt) 2857 } else { 2858 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2859 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 2860 nptr1.Bounded = true 2861 2862 nptr1 = nod(OADDR, nptr1, nil) 2863 2864 nptr2 := nod(OSPTR, l2, nil) 2865 2866 fn := syslook("memmove") 2867 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2868 2869 var ln Nodes 2870 ln.Set(l) 2871 nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln) 2872 2873 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 2874 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 2875 l = append(ln.Slice(), nt) 2876 } 2877 2878 typecheckslice(l, Etop) 2879 walkstmtlist(l) 2880 init.Append(l...) 2881 return s 2882 } 2883 2884 // Rewrite append(src, x, y, z) so that any side effects in 2885 // x, y, z (including runtime panics) are evaluated in 2886 // initialization statements before the append. 2887 // For normal code generation, stop there and leave the 2888 // rest to cgen_append. 2889 // 2890 // For race detector, expand append(src, a [, b]* ) to 2891 // 2892 // init { 2893 // s := src 2894 // const argc = len(args) - 1 2895 // if cap(s) - len(s) < argc { 2896 // s = growslice(s, len(s)+argc) 2897 // } 2898 // n := len(s) 2899 // s = s[:n+argc] 2900 // s[n] = a 2901 // s[n+1] = b 2902 // ... 2903 // } 2904 // s 2905 func walkappend(n *Node, init *Nodes, dst *Node) *Node { 2906 if !samesafeexpr(dst, n.List.First()) { 2907 n.List.SetIndex(0, safeexpr(n.List.Index(0), init)) 2908 n.List.SetIndex(0, walkexpr(n.List.Index(0), init)) 2909 } 2910 walkexprlistsafe(n.List.Slice()[1:], init) 2911 2912 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2913 // and n are name or literal, but those may index the slice we're 2914 // modifying here. Fix explicitly. 2915 // Using cheapexpr also makes sure that the evaluation 2916 // of all arguments (and especially any panics) happen 2917 // before we begin to modify the slice in a visible way. 2918 ls := n.List.Slice()[1:] 2919 for i, n := range ls { 2920 ls[i] = cheapexpr(n, init) 2921 } 2922 2923 nsrc := n.List.First() 2924 2925 argc := n.List.Len() - 1 2926 if argc < 1 { 2927 return nsrc 2928 } 2929 2930 // General case, with no function calls left as arguments. 2931 // Leave for gen, except that instrumentation requires old form. 
2932 if !instrumenting || compiling_runtime { 2933 return n 2934 } 2935 2936 var l []*Node 2937 2938 ns := temp(nsrc.Type) 2939 l = append(l, nod(OAS, ns, nsrc)) // s = src 2940 2941 na := nodintconst(int64(argc)) // const argc 2942 nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc 2943 nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na) 2944 2945 fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T) 2946 fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem()) 2947 2948 nx.Nbody.Set1(nod(OAS, ns, 2949 mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns, 2950 nod(OADD, nod(OLEN, ns, nil), na)))) 2951 2952 l = append(l, nx) 2953 2954 nn := temp(Types[TINT]) 2955 l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s) 2956 2957 nx = nod(OSLICE, ns, nil) // ...s[:n+argc] 2958 nx.SetSliceBounds(nil, nod(OADD, nn, na), nil) 2959 nx.Etype = 1 2960 l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc] 2961 2962 ls = n.List.Slice()[1:] 2963 for i, n := range ls { 2964 nx = nod(OINDEX, ns, nn) // s[n] ... 2965 nx.Bounded = true 2966 l = append(l, nod(OAS, nx, n)) // s[n] = arg 2967 if i+1 < len(ls) { 2968 l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1 2969 } 2970 } 2971 2972 typecheckslice(l, Etop) 2973 walkstmtlist(l) 2974 init.Append(l...) 2975 return ns 2976 } 2977 2978 // Lower copy(a, b) to a memmove call or a runtime call. 2979 // 2980 // init { 2981 // n := len(a) 2982 // if n > len(b) { n = len(b) } 2983 // memmove(a.ptr, b.ptr, n*sizeof(elem(a))) 2984 // } 2985 // n; 2986 // 2987 // Also works if b is a string. 2988 // 2989 func copyany(n *Node, init *Nodes, runtimecall bool) *Node { 2990 if haspointers(n.Left.Type.Elem()) { 2991 fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type) 2992 return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right) 2993 } 2994 2995 if runtimecall { 2996 var fn *Node 2997 if n.Right.Type.IsString() { 2998 fn = syslook("slicestringcopy") 2999 } else { 3000 fn = syslook("slicecopy") 3001 } 3002 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 3003 return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width)) 3004 } 3005 3006 n.Left = walkexpr(n.Left, init) 3007 n.Right = walkexpr(n.Right, init) 3008 nl := temp(n.Left.Type) 3009 nr := temp(n.Right.Type) 3010 var l []*Node 3011 l = append(l, nod(OAS, nl, n.Left)) 3012 l = append(l, nod(OAS, nr, n.Right)) 3013 3014 nfrm := nod(OSPTR, nr, nil) 3015 nto := nod(OSPTR, nl, nil) 3016 3017 nlen := temp(Types[TINT]) 3018 3019 // n = len(to) 3020 l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil))) 3021 3022 // if n > len(frm) { n = len(frm) } 3023 nif := nod(OIF, nil, nil) 3024 3025 nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil)) 3026 nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil))) 3027 l = append(l, nif) 3028 3029 // Call memmove. 3030 fn := syslook("memmove") 3031 3032 fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem()) 3033 nwid := temp(Types[TUINTPTR]) 3034 l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR]))) 3035 nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width)) 3036 l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid)) 3037 3038 typecheckslice(l, Etop) 3039 walkstmtlist(l) 3040 init.Append(l...) 3041 return nlen 3042 } 3043 3044 func eqfor(t *Type, needsize *int) *Node { 3045 // Should only arrive here with large memory or 3046 // a struct/array containing a non-memory field/element. 
3047 // Small memory is handled inline, and single non-memory 3048 // is handled during type check (OCMPSTR etc). 3049 switch a, _ := algtype1(t); a { 3050 case AMEM: 3051 n := syslook("memequal") 3052 n = substArgTypes(n, t, t) 3053 *needsize = 1 3054 return n 3055 case ASPECIAL: 3056 sym := typesymprefix(".eq", t) 3057 n := newname(sym) 3058 n.Class = PFUNC 3059 ntype := nod(OTFUNC, nil, nil) 3060 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3061 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3062 ntype.Rlist.Append(nod(ODCLFIELD, nil, typenod(Types[TBOOL]))) 3063 ntype = typecheck(ntype, Etype) 3064 n.Type = ntype.Type 3065 *needsize = 0 3066 return n 3067 } 3068 Fatalf("eqfor %v", t) 3069 return nil 3070 } 3071 3072 // The result of walkcompare MUST be assigned back to n, e.g. 3073 // n.Left = walkcompare(n.Left, init) 3074 func walkcompare(n *Node, init *Nodes) *Node { 3075 // Given interface value l and concrete value r, rewrite 3076 // l == r 3077 // into types-equal && data-equal. 3078 // This is efficient, avoids allocations, and avoids runtime calls. 3079 var l, r *Node 3080 if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() { 3081 l = n.Left 3082 r = n.Right 3083 } else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() { 3084 l = n.Right 3085 r = n.Left 3086 } 3087 3088 if l != nil { 3089 // Handle both == and !=. 3090 eq := n.Op 3091 var andor Op 3092 if eq == OEQ { 3093 andor = OANDAND 3094 } else { 3095 andor = OOROR 3096 } 3097 // Check for types equal. 3098 // For empty interface, this is: 3099 // l.tab == type(r) 3100 // For non-empty interface, this is: 3101 // l.tab != nil && l.tab._type == type(r) 3102 var eqtype *Node 3103 tab := nod(OITAB, l, nil) 3104 rtyp := typename(r.Type) 3105 if l.Type.IsEmptyInterface() { 3106 tab.Type = ptrto(Types[TUINT8]) 3107 tab.Typecheck = 1 3108 eqtype = nod(eq, tab, rtyp) 3109 } else { 3110 nonnil := nod(brcom(eq), nodnil(), tab) 3111 match := nod(eq, itabType(tab), rtyp) 3112 eqtype = nod(andor, nonnil, match) 3113 } 3114 // Check for data equal. 3115 eqdata := nod(eq, ifaceData(l, r.Type), r) 3116 // Put it all together. 3117 expr := nod(andor, eqtype, eqdata) 3118 n = finishcompare(n, expr, init) 3119 return n 3120 } 3121 3122 // Must be comparison of array or struct. 3123 // Otherwise back end handles it. 3124 // While we're here, decide whether to 3125 // inline or call an eq alg. 3126 t := n.Left.Type 3127 var inline bool 3128 switch t.Etype { 3129 default: 3130 return n 3131 case TARRAY: 3132 inline = t.NumElem() <= 1 || (t.NumElem() <= 4 && issimple[t.Elem().Etype]) 3133 case TSTRUCT: 3134 inline = t.NumFields() <= 4 3135 } 3136 3137 cmpl := n.Left 3138 for cmpl != nil && cmpl.Op == OCONVNOP { 3139 cmpl = cmpl.Left 3140 } 3141 cmpr := n.Right 3142 for cmpr != nil && cmpr.Op == OCONVNOP { 3143 cmpr = cmpr.Left 3144 } 3145 3146 // Chose not to inline. Call equality function directly. 
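// For example (sketch): comparing two [8]int64 values is not inlined (more than four
// elements), so it becomes roughly a call
//   memequal(&a, &b, 64)
// while an ASPECIAL type (say, a large struct with string fields) calls its generated .eq function.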
3147 if !inline { 3148 if !islvalue(cmpl) || !islvalue(cmpr) { 3149 Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) 3150 } 3151 3152 // eq algs take pointers 3153 pl := temp(ptrto(t)) 3154 al := nod(OAS, pl, nod(OADDR, cmpl, nil)) 3155 al.Right.Etype = 1 // addr does not escape 3156 al = typecheck(al, Etop) 3157 init.Append(al) 3158 3159 pr := temp(ptrto(t)) 3160 ar := nod(OAS, pr, nod(OADDR, cmpr, nil)) 3161 ar.Right.Etype = 1 // addr does not escape 3162 ar = typecheck(ar, Etop) 3163 init.Append(ar) 3164 3165 var needsize int 3166 call := nod(OCALL, eqfor(t, &needsize), nil) 3167 call.List.Append(pl) 3168 call.List.Append(pr) 3169 if needsize != 0 { 3170 call.List.Append(nodintconst(t.Width)) 3171 } 3172 res := call 3173 if n.Op != OEQ { 3174 res = nod(ONOT, res, nil) 3175 } 3176 n = finishcompare(n, res, init) 3177 return n 3178 } 3179 3180 // inline: build boolean expression comparing element by element 3181 andor := OANDAND 3182 if n.Op == ONE { 3183 andor = OOROR 3184 } 3185 var expr *Node 3186 compare := func(el, er *Node) { 3187 a := nod(n.Op, el, er) 3188 if expr == nil { 3189 expr = a 3190 } else { 3191 expr = nod(andor, expr, a) 3192 } 3193 } 3194 cmpl = safeexpr(cmpl, init) 3195 cmpr = safeexpr(cmpr, init) 3196 if t.IsStruct() { 3197 for _, f := range t.Fields().Slice() { 3198 sym := f.Sym 3199 if isblanksym(sym) { 3200 continue 3201 } 3202 compare( 3203 nodSym(OXDOT, cmpl, sym), 3204 nodSym(OXDOT, cmpr, sym), 3205 ) 3206 } 3207 } else { 3208 for i := 0; int64(i) < t.NumElem(); i++ { 3209 compare( 3210 nod(OINDEX, cmpl, nodintconst(int64(i))), 3211 nod(OINDEX, cmpr, nodintconst(int64(i))), 3212 ) 3213 } 3214 } 3215 if expr == nil { 3216 expr = nodbool(n.Op == OEQ) 3217 } 3218 n = finishcompare(n, expr, init) 3219 return n 3220 } 3221 3222 // The result of finishcompare MUST be assigned back to n, e.g. 3223 // n.Left = finishcompare(n.Left, x, r, init) 3224 func finishcompare(n, r *Node, init *Nodes) *Node { 3225 // Use nn here to avoid passing r to typecheck. 3226 nn := r 3227 nn = typecheck(nn, Erv) 3228 nn = walkexpr(nn, init) 3229 r = nn 3230 if r.Type != n.Type { 3231 r = nod(OCONVNOP, r, nil) 3232 r.Type = n.Type 3233 r.Typecheck = 1 3234 nn = r 3235 } 3236 return nn 3237 } 3238 3239 func samecheap(a *Node, b *Node) bool { 3240 var ar *Node 3241 var br *Node 3242 for a != nil && b != nil && a.Op == b.Op { 3243 switch a.Op { 3244 default: 3245 return false 3246 3247 case ONAME: 3248 return a == b 3249 3250 case ODOT, ODOTPTR: 3251 if a.Sym != b.Sym { 3252 return false 3253 } 3254 3255 case OINDEX: 3256 ar = a.Right 3257 br = b.Right 3258 if !Isconst(ar, CTINT) || !Isconst(br, CTINT) || ar.Val().U.(*Mpint).Cmp(br.Val().U.(*Mpint)) != 0 { 3259 return false 3260 } 3261 } 3262 3263 a = a.Left 3264 b = b.Left 3265 } 3266 3267 return false 3268 } 3269 3270 // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers. 3271 func (n *Node) isIntOrdering() bool { 3272 switch n.Op { 3273 case OLE, OLT, OGE, OGT: 3274 default: 3275 return false 3276 } 3277 return n.Left.Type.IsInteger() && n.Right.Type.IsInteger() 3278 } 3279 3280 // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10. 3281 // n must be an OANDAND or OOROR node. 3282 // The result of walkinrange MUST be assigned back to n, e.g. 
3283 // n.Left = walkinrange(n.Left) 3284 func walkinrange(n *Node, init *Nodes) *Node { 3285 // We are looking for something equivalent to a opl b OP b opr c, where: 3286 // * a, b, and c have integer type 3287 // * b is side-effect-free 3288 // * opl and opr are each < or ≤ 3289 // * OP is && 3290 l := n.Left 3291 r := n.Right 3292 if !l.isIntOrdering() || !r.isIntOrdering() { 3293 return n 3294 } 3295 3296 // Find b, if it exists, and rename appropriately. 3297 // Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right 3298 // Output is: a opl b(==x) ANDAND/OROR b(==x) opr c 3299 a, opl, b := l.Left, l.Op, l.Right 3300 x, opr, c := r.Left, r.Op, r.Right 3301 for i := 0; ; i++ { 3302 if samesafeexpr(b, x) { 3303 break 3304 } 3305 if i == 3 { 3306 // Tried all permutations and couldn't find an appropriate b == x. 3307 return n 3308 } 3309 if i&1 == 0 { 3310 a, opl, b = b, brrev(opl), a 3311 } else { 3312 x, opr, c = c, brrev(opr), x 3313 } 3314 } 3315 3316 // If n.Op is ||, apply de Morgan. 3317 // Negate the internal ops now; we'll negate the top level op at the end. 3318 // Henceforth assume &&. 3319 negateResult := n.Op == OOROR 3320 if negateResult { 3321 opl = brcom(opl) 3322 opr = brcom(opr) 3323 } 3324 3325 cmpdir := func(o Op) int { 3326 switch o { 3327 case OLE, OLT: 3328 return -1 3329 case OGE, OGT: 3330 return +1 3331 } 3332 Fatalf("walkinrange cmpdir %v", o) 3333 return 0 3334 } 3335 if cmpdir(opl) != cmpdir(opr) { 3336 // Not a range check; something like b < a && b < c. 3337 return n 3338 } 3339 3340 switch opl { 3341 case OGE, OGT: 3342 // We have something like a > b && b ≥ c. 3343 // Switch and reverse ops and rename constants, 3344 // to make it look like a ≤ b && b < c. 3345 a, c = c, a 3346 opl, opr = brrev(opr), brrev(opl) 3347 } 3348 3349 // We must ensure that c-a is non-negative. 3350 // For now, require a and c to be constants. 3351 // In the future, we could also support a == 0 and c == len/cap(...). 3352 // Unfortunately, by this point, most len/cap expressions have been 3353 // stored into temporary variables. 3354 if !Isconst(a, CTINT) || !Isconst(c, CTINT) { 3355 return n 3356 } 3357 3358 if opl == OLT { 3359 // We have a < b && ... 3360 // We need a ≤ b && ... to safely use unsigned comparison tricks. 3361 // If a is not the maximum constant for b's type, 3362 // we can increment a and switch to ≤. 3363 if a.Int64() >= maxintval[b.Type.Etype].Int64() { 3364 return n 3365 } 3366 a = nodintconst(a.Int64() + 1) 3367 opl = OLE 3368 } 3369 3370 bound := c.Int64() - a.Int64() 3371 if bound < 0 { 3372 // Bad news. Something like 5 <= x && x < 3. 3373 // Rare in practice, and we still need to generate side-effects, 3374 // so just leave it alone. 3375 return n 3376 } 3377 3378 // We have a ≤ b && b < c (or a ≤ b && b ≤ c). 3379 // This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a), 3380 // which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a), 3381 // which is equivalent to uint(b-a) < uint(c-a). 3382 ut := b.Type.toUnsigned() 3383 lhs := conv(nod(OSUB, b, a), ut) 3384 rhs := nodintconst(bound) 3385 if negateResult { 3386 // Negate top level. 3387 opr = brcom(opr) 3388 } 3389 cmp := nod(opr, lhs, rhs) 3390 cmp.Pos = n.Pos 3391 cmp = addinit(cmp, l.Ninit.Slice()) 3392 cmp = addinit(cmp, r.Ninit.Slice()) 3393 // Typecheck the AST rooted at cmp... 3394 cmp = typecheck(cmp, Erv) 3395 // ...but then reset cmp's type to match n's type. 
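// Net effect, for example (a sketch, with x an int):
//   4 <= x && x < 10   becomes   uint(x-4) < 6
//   x < 4 || x >= 10   becomes   uint(x-4) >= 6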
3396 cmp.Type = n.Type 3397 cmp = walkexpr(cmp, init) 3398 return cmp 3399 } 3400 3401 // walkmul rewrites integer multiplication by powers of two as shifts. 3402 // The result of walkmul MUST be assigned back to n, e.g. 3403 // n.Left = walkmul(n.Left, init) 3404 func walkmul(n *Node, init *Nodes) *Node { 3405 if !n.Type.IsInteger() { 3406 return n 3407 } 3408 3409 var nr *Node 3410 var nl *Node 3411 if n.Right.Op == OLITERAL { 3412 nl = n.Left 3413 nr = n.Right 3414 } else if n.Left.Op == OLITERAL { 3415 nl = n.Right 3416 nr = n.Left 3417 } else { 3418 return n 3419 } 3420 3421 neg := 0 3422 3423 // x*0 is 0 (and side effects of x). 3424 var pow int 3425 var w int 3426 if nr.Int64() == 0 { 3427 cheapexpr(nl, init) 3428 Nodconst(n, n.Type, 0) 3429 goto ret 3430 } 3431 3432 // nr is a constant. 3433 pow = powtwo(nr) 3434 3435 if pow < 0 { 3436 return n 3437 } 3438 if pow >= 1000 { 3439 // negative power of 2, like -16 3440 neg = 1 3441 3442 pow -= 1000 3443 } 3444 3445 w = int(nl.Type.Width * 8) 3446 if pow+1 >= w { // too big, shouldn't happen 3447 return n 3448 } 3449 3450 nl = cheapexpr(nl, init) 3451 3452 if pow == 0 { 3453 // x*1 is x 3454 n = nl 3455 3456 goto ret 3457 } 3458 3459 n = nod(OLSH, nl, nodintconst(int64(pow))) 3460 3461 ret: 3462 if neg != 0 { 3463 n = nod(OMINUS, n, nil) 3464 } 3465 3466 n = typecheck(n, Erv) 3467 n = walkexpr(n, init) 3468 return n 3469 } 3470 3471 // walkdiv rewrites division by a constant as less expensive 3472 // operations. 3473 // The result of walkdiv MUST be assigned back to n, e.g. 3474 // n.Left = walkdiv(n.Left, init) 3475 func walkdiv(n *Node, init *Nodes) *Node { 3476 // if >= 0, nr is 1<<pow // 1 if nr is negative. 3477 3478 if n.Right.Op != OLITERAL { 3479 return n 3480 } 3481 3482 // nr is a constant. 3483 nl := cheapexpr(n.Left, init) 3484 3485 nr := n.Right 3486 3487 // special cases of mod/div 3488 // by a constant 3489 w := int(nl.Type.Width * 8) 3490 3491 s := 0 // 1 if nr is negative. 3492 pow := powtwo(nr) // if >= 0, nr is 1<<pow 3493 if pow >= 1000 { 3494 // negative power of 2 3495 s = 1 3496 3497 pow -= 1000 3498 } 3499 3500 if pow+1 >= w { 3501 // divisor too large. 3502 return n 3503 } 3504 3505 if pow < 0 { 3506 // try to do division by multiply by (2^w)/d 3507 // see hacker's delight chapter 10 3508 // TODO: support 64-bit magic multiply here. 3509 var m Magic 3510 m.W = w 3511 3512 if nl.Type.IsSigned() { 3513 m.Sd = nr.Int64() 3514 smagic(&m) 3515 } else { 3516 m.Ud = uint64(nr.Int64()) 3517 umagic(&m) 3518 } 3519 3520 if m.Bad != 0 { 3521 return n 3522 } 3523 3524 // We have a quick division method so use it 3525 // for modulo too. 3526 if n.Op == OMOD { 3527 // rewrite as A%B = A - (A/B*B). 3528 n1 := nod(ODIV, nl, nr) 3529 3530 n2 := nod(OMUL, n1, nr) 3531 n = nod(OSUB, nl, n2) 3532 goto ret 3533 } 3534 3535 switch simtype[nl.Type.Etype] { 3536 default: 3537 return n 3538 3539 // n1 = nl * magic >> w (HMUL) 3540 case TUINT8, TUINT16, TUINT32: 3541 var nc Node 3542 3543 Nodconst(&nc, nl.Type, int64(m.Um)) 3544 n1 := nod(OHMUL, nl, &nc) 3545 n1 = typecheck(n1, Erv) 3546 if m.Ua != 0 { 3547 // Select a Go type with (at least) twice the width. 3548 var twide *Type 3549 switch simtype[nl.Type.Etype] { 3550 default: 3551 return n 3552 3553 case TUINT8, TUINT16: 3554 twide = Types[TUINT32] 3555 3556 case TUINT32: 3557 twide = Types[TUINT64] 3558 3559 case TINT8, TINT16: 3560 twide = Types[TINT32] 3561 3562 case TINT32: 3563 twide = Types[TINT64] 3564 } 3565 3566 // add numerator (might overflow). 
3567 // n2 = (n1 + nl) 3568 n2 := nod(OADD, conv(n1, twide), conv(nl, twide)) 3569 3570 // shift by m.s 3571 var nc Node 3572 3573 Nodconst(&nc, Types[TUINT], int64(m.S)) 3574 n = conv(nod(ORSH, n2, &nc), nl.Type) 3575 } else { 3576 // n = n1 >> m.s 3577 var nc Node 3578 3579 Nodconst(&nc, Types[TUINT], int64(m.S)) 3580 n = nod(ORSH, n1, &nc) 3581 } 3582 3583 // n1 = nl * magic >> w 3584 case TINT8, TINT16, TINT32: 3585 var nc Node 3586 3587 Nodconst(&nc, nl.Type, m.Sm) 3588 n1 := nod(OHMUL, nl, &nc) 3589 n1 = typecheck(n1, Erv) 3590 if m.Sm < 0 { 3591 // add the numerator. 3592 n1 = nod(OADD, n1, nl) 3593 } 3594 3595 // shift by m.s 3596 var ns Node 3597 3598 Nodconst(&ns, Types[TUINT], int64(m.S)) 3599 n2 := conv(nod(ORSH, n1, &ns), nl.Type) 3600 3601 // add 1 iff n1 is negative. 3602 var nneg Node 3603 3604 Nodconst(&nneg, Types[TUINT], int64(w)-1) 3605 n3 := nod(ORSH, nl, &nneg) // n4 = -1 iff n1 is negative. 3606 n = nod(OSUB, n2, n3) 3607 3608 // apply sign. 3609 if m.Sd < 0 { 3610 n = nod(OMINUS, n, nil) 3611 } 3612 } 3613 3614 goto ret 3615 } 3616 3617 switch pow { 3618 case 0: 3619 if n.Op == OMOD { 3620 // nl % 1 is zero. 3621 Nodconst(n, n.Type, 0) 3622 } else if s != 0 { 3623 // divide by -1 3624 n.Op = OMINUS 3625 3626 n.Right = nil 3627 } else { 3628 // divide by 1 3629 n = nl 3630 } 3631 3632 default: 3633 if n.Type.IsSigned() { 3634 if n.Op == OMOD { 3635 // signed modulo 2^pow is like ANDing 3636 // with the last pow bits, but if nl < 0, 3637 // nl & (2^pow-1) is (nl+1)%2^pow - 1. 3638 var nc Node 3639 3640 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1) 3641 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0. 3642 if pow == 1 { 3643 n1 = typecheck(n1, Erv) 3644 n1 = cheapexpr(n1, init) 3645 3646 // n = (nl+ε)&1 -ε where ε=1 iff nl<0. 3647 n2 := nod(OSUB, nl, n1) 3648 3649 var nc Node 3650 Nodconst(&nc, nl.Type, 1) 3651 n3 := nod(OAND, n2, &nc) 3652 n = nod(OADD, n3, n1) 3653 } else { 3654 // n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0. 3655 var nc Node 3656 3657 Nodconst(&nc, nl.Type, (1<<uint(pow))-1) 3658 n2 := nod(OAND, n1, &nc) // n2 = 2^pow-1 iff nl<0. 3659 n2 = typecheck(n2, Erv) 3660 n2 = cheapexpr(n2, init) 3661 3662 n3 := nod(OADD, nl, n2) 3663 n4 := nod(OAND, n3, &nc) 3664 n = nod(OSUB, n4, n2) 3665 } 3666 3667 break 3668 } else { 3669 // arithmetic right shift does not give the correct rounding. 3670 // if nl >= 0, nl >> n == nl / nr 3671 // if nl < 0, we want to add 2^n-1 first. 3672 var nc Node 3673 3674 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1) 3675 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0. 3676 if pow == 1 { 3677 // nl+1 is nl-(-1) 3678 n.Left = nod(OSUB, nl, n1) 3679 } else { 3680 // Do a logical right right on -1 to keep pow bits. 
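// For example (sketch): for an int32 nl and nl/4 (pow == 2, w == 32):
//   n1 = nl >> 31           // -1 if nl < 0, else 0
//   n2 = uint32(n1) >> 30   // 3 if nl < 0, else 0
//   n  = (nl + n2) >> 2     // rounds toward zero for negative nl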
3681 var nc Node 3682 3683 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-int64(pow)) 3684 n2 := nod(ORSH, conv(n1, nl.Type.toUnsigned()), &nc) 3685 n.Left = nod(OADD, nl, conv(n2, nl.Type)) 3686 } 3687 3688 // n = (nl + 2^pow-1) >> pow 3689 n.Op = ORSH 3690 3691 var n2 Node 3692 Nodconst(&n2, Types[simtype[TUINT]], int64(pow)) 3693 n.Right = &n2 3694 n.Typecheck = 0 3695 } 3696 3697 if s != 0 { 3698 n = nod(OMINUS, n, nil) 3699 } 3700 break 3701 } 3702 3703 var nc Node 3704 if n.Op == OMOD { 3705 // n = nl & (nr-1) 3706 n.Op = OAND 3707 3708 Nodconst(&nc, nl.Type, nr.Int64()-1) 3709 } else { 3710 // n = nl >> pow 3711 n.Op = ORSH 3712 3713 Nodconst(&nc, Types[simtype[TUINT]], int64(pow)) 3714 } 3715 3716 n.Typecheck = 0 3717 n.Right = &nc 3718 } 3719 3720 goto ret 3721 3722 ret: 3723 n = typecheck(n, Erv) 3724 n = walkexpr(n, init) 3725 return n 3726 } 3727 3728 // return 1 if integer n must be in range [0, max), 0 otherwise 3729 func bounded(n *Node, max int64) bool { 3730 if n.Type == nil || !n.Type.IsInteger() { 3731 return false 3732 } 3733 3734 sign := n.Type.IsSigned() 3735 bits := int32(8 * n.Type.Width) 3736 3737 if smallintconst(n) { 3738 v := n.Int64() 3739 return 0 <= v && v < max 3740 } 3741 3742 switch n.Op { 3743 case OAND: 3744 v := int64(-1) 3745 if smallintconst(n.Left) { 3746 v = n.Left.Int64() 3747 } else if smallintconst(n.Right) { 3748 v = n.Right.Int64() 3749 } 3750 3751 if 0 <= v && v < max { 3752 return true 3753 } 3754 3755 case OMOD: 3756 if !sign && smallintconst(n.Right) { 3757 v := n.Right.Int64() 3758 if 0 <= v && v <= max { 3759 return true 3760 } 3761 } 3762 3763 case ODIV: 3764 if !sign && smallintconst(n.Right) { 3765 v := n.Right.Int64() 3766 for bits > 0 && v >= 2 { 3767 bits-- 3768 v >>= 1 3769 } 3770 } 3771 3772 case ORSH: 3773 if !sign && smallintconst(n.Right) { 3774 v := n.Right.Int64() 3775 if v > int64(bits) { 3776 return true 3777 } 3778 bits -= int32(v) 3779 } 3780 } 3781 3782 if !sign && bits <= 62 && 1<<uint(bits) <= max { 3783 return true 3784 } 3785 3786 return false 3787 } 3788 3789 // usemethod check interface method calls for uses of reflect.Type.Method. 3790 func usemethod(n *Node) { 3791 t := n.Left.Type 3792 3793 // Looking for either of: 3794 // Method(int) reflect.Method 3795 // MethodByName(string) (reflect.Method, bool) 3796 // 3797 // TODO(crawshaw): improve precision of match by working out 3798 // how to check the method name. 3799 if n := t.Params().NumFields(); n != 1 { 3800 return 3801 } 3802 if n := t.Results().NumFields(); n != 1 && n != 2 { 3803 return 3804 } 3805 p0 := t.Params().Field(0) 3806 res0 := t.Results().Field(0) 3807 var res1 *Field 3808 if t.Results().NumFields() == 2 { 3809 res1 = t.Results().Field(1) 3810 } 3811 3812 if res1 == nil { 3813 if p0.Type.Etype != TINT { 3814 return 3815 } 3816 } else { 3817 if !p0.Type.IsString() { 3818 return 3819 } 3820 if !res1.Type.IsBoolean() { 3821 return 3822 } 3823 } 3824 if res0.Type.String() != "reflect.Method" { 3825 return 3826 } 3827 3828 Curfn.Func.ReflectMethod = true 3829 } 3830 3831 func usefield(n *Node) { 3832 if obj.Fieldtrack_enabled == 0 { 3833 return 3834 } 3835 3836 switch n.Op { 3837 default: 3838 Fatalf("usefield %v", n.Op) 3839 3840 case ODOT, ODOTPTR: 3841 break 3842 } 3843 if n.Sym == nil { 3844 // No field name. This DOTPTR was built by the compiler for access 3845 // to runtime data structures. Ignore. 
3846 return 3847 } 3848 3849 t := n.Left.Type 3850 if t.IsPtr() { 3851 t = t.Elem() 3852 } 3853 field := dotField[typeSym{t.Orig, n.Sym}] 3854 if field == nil { 3855 Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) 3856 } 3857 if !strings.Contains(field.Note, "go:\"track\"") { 3858 return 3859 } 3860 3861 outer := n.Left.Type 3862 if outer.IsPtr() { 3863 outer = outer.Elem() 3864 } 3865 if outer.Sym == nil { 3866 yyerror("tracked field must be in named struct type") 3867 } 3868 if !exportname(field.Sym.Name) { 3869 yyerror("tracked field must be exported (upper case)") 3870 } 3871 3872 sym := tracksym(outer, field) 3873 if Curfn.Func.FieldTrack == nil { 3874 Curfn.Func.FieldTrack = make(map[*Sym]struct{}) 3875 } 3876 Curfn.Func.FieldTrack[sym] = struct{}{} 3877 } 3878 3879 func candiscardlist(l Nodes) bool { 3880 for _, n := range l.Slice() { 3881 if !candiscard(n) { 3882 return false 3883 } 3884 } 3885 return true 3886 } 3887 3888 func candiscard(n *Node) bool { 3889 if n == nil { 3890 return true 3891 } 3892 3893 switch n.Op { 3894 default: 3895 return false 3896 3897 // Discardable as long as the subpieces are. 3898 case ONAME, 3899 ONONAME, 3900 OTYPE, 3901 OPACK, 3902 OLITERAL, 3903 OADD, 3904 OSUB, 3905 OOR, 3906 OXOR, 3907 OADDSTR, 3908 OADDR, 3909 OANDAND, 3910 OARRAYBYTESTR, 3911 OARRAYRUNESTR, 3912 OSTRARRAYBYTE, 3913 OSTRARRAYRUNE, 3914 OCAP, 3915 OCMPIFACE, 3916 OCMPSTR, 3917 OCOMPLIT, 3918 OMAPLIT, 3919 OSTRUCTLIT, 3920 OARRAYLIT, 3921 OSLICELIT, 3922 OPTRLIT, 3923 OCONV, 3924 OCONVIFACE, 3925 OCONVNOP, 3926 ODOT, 3927 OEQ, 3928 ONE, 3929 OLT, 3930 OLE, 3931 OGT, 3932 OGE, 3933 OKEY, 3934 OSTRUCTKEY, 3935 OLEN, 3936 OMUL, 3937 OLSH, 3938 ORSH, 3939 OAND, 3940 OANDNOT, 3941 ONEW, 3942 ONOT, 3943 OCOM, 3944 OPLUS, 3945 OMINUS, 3946 OOROR, 3947 OPAREN, 3948 ORUNESTR, 3949 OREAL, 3950 OIMAG, 3951 OCOMPLEX: 3952 break 3953 3954 // Discardable as long as we know it's not division by zero. 3955 case ODIV, OMOD: 3956 if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 { 3957 break 3958 } 3959 if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 { 3960 break 3961 } 3962 return false 3963 3964 // Discardable as long as we know it won't fail because of a bad size. 3965 case OMAKECHAN, OMAKEMAP: 3966 if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 { 3967 break 3968 } 3969 return false 3970 3971 // Difficult to tell what sizes are okay. 3972 case OMAKESLICE: 3973 return false 3974 } 3975 3976 if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) { 3977 return false 3978 } 3979 3980 return true 3981 } 3982 3983 // rewrite 3984 // print(x, y, z) 3985 // into 3986 // func(a1, a2, a3) { 3987 // print(a1, a2, a3) 3988 // }(x, y, z) 3989 // and same for println. 3990 3991 var walkprintfunc_prgen int 3992 3993 // The result of walkprintfunc MUST be assigned back to n, e.g. 
3994 // n.Left = walkprintfunc(n.Left, init) 3995 func walkprintfunc(n *Node, init *Nodes) *Node { 3996 if n.Ninit.Len() != 0 { 3997 walkstmtlist(n.Ninit.Slice()) 3998 init.AppendNodes(&n.Ninit) 3999 } 4000 4001 t := nod(OTFUNC, nil, nil) 4002 num := 0 4003 var printargs []*Node 4004 var a *Node 4005 var buf string 4006 for _, n1 := range n.List.Slice() { 4007 buf = fmt.Sprintf("a%d", num) 4008 num++ 4009 a = nod(ODCLFIELD, newname(lookup(buf)), typenod(n1.Type)) 4010 t.List.Append(a) 4011 printargs = append(printargs, a.Left) 4012 } 4013 4014 fn := nod(ODCLFUNC, nil, nil) 4015 walkprintfunc_prgen++ 4016 buf = fmt.Sprintf("print·%d", walkprintfunc_prgen) 4017 fn.Func.Nname = newname(lookup(buf)) 4018 fn.Func.Nname.Name.Defn = fn 4019 fn.Func.Nname.Name.Param.Ntype = t 4020 declare(fn.Func.Nname, PFUNC) 4021 4022 oldfn := Curfn 4023 Curfn = nil 4024 funchdr(fn) 4025 4026 a = nod(n.Op, nil, nil) 4027 a.List.Set(printargs) 4028 a = typecheck(a, Etop) 4029 a = walkstmt(a) 4030 4031 fn.Nbody.Set1(a) 4032 4033 funcbody(fn) 4034 4035 fn = typecheck(fn, Etop) 4036 typecheckslice(fn.Nbody.Slice(), Etop) 4037 xtop = append(xtop, fn) 4038 Curfn = oldfn 4039 4040 a = nod(OCALL, nil, nil) 4041 a.Left = fn.Func.Nname 4042 a.List.Set(n.List.Slice()) 4043 a = typecheck(a, Etop) 4044 a = walkexpr(a, init) 4045 return a 4046 }
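// For example (a sketch): go println(x, err) becomes roughly
//   go print·1(x, err)
// where print·1(a0, a1) is a generated function (appended to xtop above) whose body is
// the usual walkprint expansion into printlock/printint/.../printunlock calls.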