github.com/muesli/go@v0.0.0-20170208044820-e410d2a81ef2/src/cmd/compile/internal/gc/walk.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to the runtime.
const (
	tmpstringbufsize = 32
)

func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used {
			ln.Name.Defn.Left.Used = true
		}
	}

	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class != PAUTO && ln.Class != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Used {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Used {
				continue
			}
			lineno = defn.Left.Pos
			yyerror("%v declared and not used", ln.Sym)
			defn.Left.Used = true // suppress repeats
		} else {
			lineno = ln.Pos
			yyerror("%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	var arg *Node
	var lhs *Node

	callfunc := n.Left
	for _, arg = range callfunc.List.Slice() {
		if arg.Op != OAS {
			yyerror("call arg not assignment")
		}
		lhs = arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			yyerror("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	yyerror("call arg assign not indreg(SP)");
		lhs.Xoffset += int64(adjust)
	}
}

// The result of walkstmt MUST be assigned back to n, e.g.
143 // n.Left = walkstmt(n.Left) 144 func walkstmt(n *Node) *Node { 145 if n == nil { 146 return n 147 } 148 if n.IsStatic { // don't walk, generated by anylit. 149 return n 150 } 151 152 setlineno(n) 153 154 walkstmtlist(n.Ninit.Slice()) 155 156 switch n.Op { 157 default: 158 if n.Op == ONAME { 159 yyerror("%v is not a top level statement", n.Sym) 160 } else { 161 yyerror("%v is not a top level statement", n.Op) 162 } 163 Dump("nottop", n) 164 165 case OAS, 166 OASOP, 167 OAS2, 168 OAS2DOTTYPE, 169 OAS2RECV, 170 OAS2FUNC, 171 OAS2MAPR, 172 OCLOSE, 173 OCOPY, 174 OCALLMETH, 175 OCALLINTER, 176 OCALL, 177 OCALLFUNC, 178 ODELETE, 179 OSEND, 180 OPRINT, 181 OPRINTN, 182 OPANIC, 183 OEMPTY, 184 ORECOVER, 185 OGETG: 186 if n.Typecheck == 0 { 187 Fatalf("missing typecheck: %+v", n) 188 } 189 wascopy := n.Op == OCOPY 190 init := n.Ninit 191 n.Ninit.Set(nil) 192 n = walkexpr(n, &init) 193 n = addinit(n, init.Slice()) 194 if wascopy && n.Op == OCONVNOP { 195 n.Op = OEMPTY // don't leave plain values as statements. 196 } 197 198 // special case for a receive where we throw away 199 // the value received. 200 case ORECV: 201 if n.Typecheck == 0 { 202 Fatalf("missing typecheck: %+v", n) 203 } 204 init := n.Ninit 205 n.Ninit.Set(nil) 206 207 n.Left = walkexpr(n.Left, &init) 208 n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil()) 209 n = walkexpr(n, &init) 210 211 n = addinit(n, init.Slice()) 212 213 case OBREAK, 214 OCONTINUE, 215 OFALL, 216 OGOTO, 217 OLABEL, 218 ODCLCONST, 219 ODCLTYPE, 220 OCHECKNIL, 221 OVARKILL, 222 OVARLIVE: 223 break 224 225 case ODCL: 226 v := n.Left 227 if v.Class == PAUTOHEAP { 228 if compiling_runtime { 229 yyerror("%v escapes to heap, not allowed in runtime.", v) 230 } 231 if prealloc[v] == nil { 232 prealloc[v] = callnew(v.Type) 233 } 234 nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v]) 235 nn.Colas = true 236 nn = typecheck(nn, Etop) 237 return walkstmt(nn) 238 } 239 240 case OBLOCK: 241 walkstmtlist(n.List.Slice()) 242 243 case OXCASE: 244 yyerror("case statement out of place") 245 n.Op = OCASE 246 fallthrough 247 248 case OCASE: 249 n.Right = walkstmt(n.Right) 250 251 case ODEFER: 252 hasdefer = true 253 switch n.Left.Op { 254 case OPRINT, OPRINTN: 255 n.Left = walkprintfunc(n.Left, &n.Ninit) 256 257 case OCOPY: 258 n.Left = copyany(n.Left, &n.Ninit, true) 259 260 default: 261 n.Left = walkexpr(n.Left, &n.Ninit) 262 } 263 264 // make room for size & fn arguments. 265 adjustargs(n, 2*Widthptr) 266 267 case OFOR: 268 if n.Left != nil { 269 walkstmtlist(n.Left.Ninit.Slice()) 270 init := n.Left.Ninit 271 n.Left.Ninit.Set(nil) 272 n.Left = walkexpr(n.Left, &init) 273 n.Left = addinit(n.Left, init.Slice()) 274 } 275 276 n.Right = walkstmt(n.Right) 277 walkstmtlist(n.Nbody.Slice()) 278 279 case OIF: 280 n.Left = walkexpr(n.Left, &n.Ninit) 281 walkstmtlist(n.Nbody.Slice()) 282 walkstmtlist(n.Rlist.Slice()) 283 284 case OPROC: 285 switch n.Left.Op { 286 case OPRINT, OPRINTN: 287 n.Left = walkprintfunc(n.Left, &n.Ninit) 288 289 case OCOPY: 290 n.Left = copyany(n.Left, &n.Ninit, true) 291 292 default: 293 n.Left = walkexpr(n.Left, &n.Ninit) 294 } 295 296 // make room for size & fn arguments. 
297 adjustargs(n, 2*Widthptr) 298 299 case ORETURN: 300 walkexprlist(n.List.Slice(), &n.Ninit) 301 if n.List.Len() == 0 { 302 break 303 } 304 if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) { 305 // assign to the function out parameters, 306 // so that reorder3 can fix up conflicts 307 var rl []*Node 308 309 var cl Class 310 for _, ln := range Curfn.Func.Dcl { 311 cl = ln.Class 312 if cl == PAUTO || cl == PAUTOHEAP { 313 break 314 } 315 if cl == PPARAMOUT { 316 if ln.isParamStackCopy() { 317 ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil) 318 } 319 rl = append(rl, ln) 320 } 321 } 322 323 if got, want := n.List.Len(), len(rl); got != want { 324 // order should have rewritten multi-value function calls 325 // with explicit OAS2FUNC nodes. 326 Fatalf("expected %v return arguments, have %v", want, got) 327 } 328 329 if samelist(rl, n.List.Slice()) { 330 // special return in disguise 331 n.List.Set(nil) 332 333 break 334 } 335 336 // move function calls out, to make reorder3's job easier. 337 walkexprlistsafe(n.List.Slice(), &n.Ninit) 338 339 ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit) 340 n.List.Set(reorder3(ll)) 341 ls := n.List.Slice() 342 for i, n := range ls { 343 ls[i] = applywritebarrier(n) 344 } 345 break 346 } 347 348 ll := ascompatte(n.Op, nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit) 349 n.List.Set(ll) 350 351 case ORETJMP: 352 break 353 354 case OSELECT: 355 walkselect(n) 356 357 case OSWITCH: 358 walkswitch(n) 359 360 case ORANGE: 361 walkrange(n) 362 363 case OXFALL: 364 yyerror("fallthrough statement out of place") 365 n.Op = OFALL 366 } 367 368 if n.Op == ONAME { 369 Fatalf("walkstmt ended up with name: %+v", n) 370 } 371 return n 372 } 373 374 func isSmallMakeSlice(n *Node) bool { 375 if n.Op != OMAKESLICE { 376 return false 377 } 378 l := n.Left 379 r := n.Right 380 if r == nil { 381 r = l 382 } 383 t := n.Type 384 385 return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width) 386 } 387 388 // walk the whole tree of the body of an 389 // expression or simple statement. 390 // the types expressions are calculated. 391 // compile-time constants are evaluated. 392 // complex side effects like statements are appended to init 393 func walkexprlist(s []*Node, init *Nodes) { 394 for i := range s { 395 s[i] = walkexpr(s[i], init) 396 } 397 } 398 399 func walkexprlistsafe(s []*Node, init *Nodes) { 400 for i, n := range s { 401 s[i] = safeexpr(n, init) 402 s[i] = walkexpr(s[i], init) 403 } 404 } 405 406 func walkexprlistcheap(s []*Node, init *Nodes) { 407 for i, n := range s { 408 s[i] = cheapexpr(n, init) 409 s[i] = walkexpr(s[i], init) 410 } 411 } 412 413 // Build name of function for interface conversion. 414 // Not all names are possible 415 // (e.g., we'll never generate convE2E or convE2I or convI2E). 416 func convFuncName(from, to *Type) string { 417 tkind := to.iet() 418 switch from.iet() { 419 case 'I': 420 switch tkind { 421 case 'I': 422 return "convI2I" 423 } 424 case 'T': 425 switch tkind { 426 case 'E': 427 return "convT2E" 428 case 'I': 429 return "convT2I" 430 } 431 } 432 Fatalf("unknown conv func %c2%c", from.iet(), to.iet()) 433 panic("unreachable") 434 } 435 436 // The result of walkexpr MUST be assigned back to n, e.g. 
437 // n.Left = walkexpr(n.Left, init) 438 func walkexpr(n *Node, init *Nodes) *Node { 439 if n == nil { 440 return n 441 } 442 443 if init == &n.Ninit { 444 // not okay to use n->ninit when walking n, 445 // because we might replace n with some other node 446 // and would lose the init list. 447 Fatalf("walkexpr init == &n->ninit") 448 } 449 450 if n.Ninit.Len() != 0 { 451 walkstmtlist(n.Ninit.Slice()) 452 init.AppendNodes(&n.Ninit) 453 } 454 455 lno := setlineno(n) 456 457 if Debug['w'] > 1 { 458 Dump("walk-before", n) 459 } 460 461 if n.Typecheck != 1 { 462 Fatalf("missed typecheck: %+v", n) 463 } 464 465 if n.Op == ONAME && n.Class == PAUTOHEAP { 466 nn := nod(OIND, n.Name.Param.Heapaddr, nil) 467 nn = typecheck(nn, Erv) 468 nn = walkexpr(nn, init) 469 nn.Left.NonNil = true 470 return nn 471 } 472 473 opswitch: 474 switch n.Op { 475 default: 476 Dump("walk", n) 477 Fatalf("walkexpr: switch 1 unknown op %+S", n) 478 479 case OTYPE, 480 ONONAME, 481 OINDREGSP, 482 OEMPTY, 483 OGETG: 484 485 case ONOT, 486 OMINUS, 487 OPLUS, 488 OCOM, 489 OREAL, 490 OIMAG, 491 ODOTMETH, 492 ODOTINTER: 493 n.Left = walkexpr(n.Left, init) 494 495 case OIND: 496 n.Left = walkexpr(n.Left, init) 497 498 case ODOT: 499 usefield(n) 500 n.Left = walkexpr(n.Left, init) 501 502 case ODOTPTR: 503 usefield(n) 504 if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 { 505 // No actual copy will be generated, so emit an explicit nil check. 506 n.Left = cheapexpr(n.Left, init) 507 508 checknil(n.Left, init) 509 } 510 511 n.Left = walkexpr(n.Left, init) 512 513 case OEFACE: 514 n.Left = walkexpr(n.Left, init) 515 n.Right = walkexpr(n.Right, init) 516 517 case OSPTR, OITAB, OIDATA: 518 n.Left = walkexpr(n.Left, init) 519 520 case OLEN, OCAP: 521 n.Left = walkexpr(n.Left, init) 522 523 // replace len(*[10]int) with 10. 524 // delayed until now to preserve side effects. 525 t := n.Left.Type 526 527 if t.IsPtr() { 528 t = t.Elem() 529 } 530 if t.IsArray() { 531 safeexpr(n.Left, init) 532 Nodconst(n, n.Type, t.NumElem()) 533 n.Typecheck = 1 534 } 535 536 case OLSH, ORSH: 537 n.Left = walkexpr(n.Left, init) 538 n.Right = walkexpr(n.Right, init) 539 t := n.Left.Type 540 n.Bounded = bounded(n.Right, 8*t.Width) 541 if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { 542 Warn("shift bounds check elided") 543 } 544 545 case OAND, 546 OSUB, 547 OHMUL, 548 OMUL, 549 OLT, 550 OLE, 551 OGE, 552 OGT, 553 OADD, 554 OOR, 555 OXOR: 556 n.Left = walkexpr(n.Left, init) 557 n.Right = walkexpr(n.Right, init) 558 559 case OCOMPLEX: 560 // Use results from call expression as arguments for complex. 561 if n.Left == nil && n.Right == nil { 562 n.Left = n.List.First() 563 n.Right = n.List.Second() 564 } 565 n.Left = walkexpr(n.Left, init) 566 n.Right = walkexpr(n.Right, init) 567 568 case OEQ, ONE: 569 n.Left = walkexpr(n.Left, init) 570 n.Right = walkexpr(n.Right, init) 571 572 // Disable safemode while compiling this code: the code we 573 // generate internally can refer to unsafe.Pointer. 574 // In this case it can happen if we need to generate an == 575 // for a struct containing a reflect.Value, which itself has 576 // an unexported field of type unsafe.Pointer. 577 old_safemode := safemode 578 safemode = false 579 n = walkcompare(n, init) 580 safemode = old_safemode 581 582 case OANDAND, OOROR: 583 n.Left = walkexpr(n.Left, init) 584 585 // cannot put side effects from n.Right on init, 586 // because they cannot run before n.Left is checked. 587 // save elsewhere and store on the eventual n.Right. 
588 var ll Nodes 589 590 n.Right = walkexpr(n.Right, &ll) 591 n.Right = addinit(n.Right, ll.Slice()) 592 n = walkinrange(n, init) 593 594 case OPRINT, OPRINTN: 595 walkexprlist(n.List.Slice(), init) 596 n = walkprint(n, init) 597 598 case OPANIC: 599 n = mkcall("gopanic", nil, init, n.Left) 600 601 case ORECOVER: 602 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 603 604 case OLITERAL: 605 n.Addable = true 606 607 case OCLOSUREVAR, OCFUNC: 608 n.Addable = true 609 610 case ONAME: 611 n.Addable = true 612 613 case OCALLINTER: 614 usemethod(n) 615 t := n.Left.Type 616 if n.List.Len() != 0 && n.List.First().Op == OAS { 617 break 618 } 619 n.Left = walkexpr(n.Left, init) 620 walkexprlist(n.List.Slice(), init) 621 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 622 n.List.Set(reorder1(ll)) 623 624 case OCALLFUNC: 625 if n.Left.Op == OCLOSURE { 626 // Transform direct call of a closure to call of a normal function. 627 // transformclosure already did all preparation work. 628 629 // Prepend captured variables to argument list. 630 n.List.Prepend(n.Left.Func.Enter.Slice()...) 631 632 n.Left.Func.Enter.Set(nil) 633 634 // Replace OCLOSURE with ONAME/PFUNC. 635 n.Left = n.Left.Func.Closure.Func.Nname 636 637 // Update type of OCALLFUNC node. 638 // Output arguments had not changed, but their offsets could. 639 if n.Left.Type.Results().NumFields() == 1 { 640 n.Type = n.Left.Type.Results().Field(0).Type 641 } else { 642 n.Type = n.Left.Type.Results() 643 } 644 } 645 646 t := n.Left.Type 647 if n.List.Len() != 0 && n.List.First().Op == OAS { 648 break 649 } 650 651 n.Left = walkexpr(n.Left, init) 652 walkexprlist(n.List.Slice(), init) 653 654 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 655 n.List.Set(reorder1(ll)) 656 657 case OCALLMETH: 658 t := n.Left.Type 659 if n.List.Len() != 0 && n.List.First().Op == OAS { 660 break 661 } 662 n.Left = walkexpr(n.Left, init) 663 walkexprlist(n.List.Slice(), init) 664 ll := ascompatte(n.Op, n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 665 lr := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 666 ll = append(ll, lr...) 667 n.Left.Left = nil 668 ullmancalc(n.Left) 669 n.List.Set(reorder1(ll)) 670 671 case OAS: 672 init.AppendNodes(&n.Ninit) 673 674 n.Left = walkexpr(n.Left, init) 675 n.Left = safeexpr(n.Left, init) 676 677 if oaslit(n, init) { 678 break 679 } 680 681 if n.Right == nil { 682 // TODO(austin): Check all "implicit zeroing" 683 break 684 } 685 686 if !instrumenting && iszero(n.Right) && !needwritebarrier(n.Left, n.Right) { 687 break 688 } 689 690 switch n.Right.Op { 691 default: 692 n.Right = walkexpr(n.Right, init) 693 694 case ORECV: 695 // x = <-c; n.Left is x, n.Right.Left is c. 696 // orderstmt made sure x is addressable. 697 n.Right.Left = walkexpr(n.Right.Left, init) 698 699 n1 := nod(OADDR, n.Left, nil) 700 r := n.Right.Left // the channel 701 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1) 702 n = walkexpr(n, init) 703 break opswitch 704 705 case OAPPEND: 706 // x = append(...) 707 r := n.Right 708 if r.Type.Elem().NotInHeap { 709 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 710 } 711 if r.Isddd { 712 r = appendslice(r, init) // also works for append(slice, string). 713 } else { 714 r = walkappend(r, init, n) 715 } 716 n.Right = r 717 if r.Op == OAPPEND { 718 // Left in place for back end. 719 // Do not add a new write barrier. 720 break opswitch 721 } 722 // Otherwise, lowered for race detector. 
723 // Treat as ordinary assignment. 724 } 725 726 if n.Left != nil && n.Right != nil { 727 static := n.IsStatic 728 n = convas(n, init) 729 n.IsStatic = static 730 n = applywritebarrier(n) 731 } 732 733 case OAS2: 734 init.AppendNodes(&n.Ninit) 735 walkexprlistsafe(n.List.Slice(), init) 736 walkexprlistsafe(n.Rlist.Slice(), init) 737 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 738 ll = reorder3(ll) 739 for i, n := range ll { 740 ll[i] = applywritebarrier(n) 741 } 742 n = liststmt(ll) 743 744 // a,b,... = fn() 745 case OAS2FUNC: 746 init.AppendNodes(&n.Ninit) 747 748 r := n.Rlist.First() 749 walkexprlistsafe(n.List.Slice(), init) 750 r = walkexpr(r, init) 751 752 if isIntrinsicCall(r) { 753 n.Rlist.Set1(r) 754 break 755 } 756 init.Append(r) 757 758 ll := ascompatet(n.Op, n.List, r.Type) 759 for i, n := range ll { 760 ll[i] = applywritebarrier(n) 761 } 762 n = liststmt(ll) 763 764 // x, y = <-c 765 // orderstmt made sure x is addressable. 766 case OAS2RECV: 767 init.AppendNodes(&n.Ninit) 768 769 r := n.Rlist.First() 770 walkexprlistsafe(n.List.Slice(), init) 771 r.Left = walkexpr(r.Left, init) 772 var n1 *Node 773 if isblank(n.List.First()) { 774 n1 = nodnil() 775 } else { 776 n1 = nod(OADDR, n.List.First(), nil) 777 } 778 n1.Etype = 1 // addr does not escape 779 fn := chanfn("chanrecv2", 2, r.Left.Type) 780 ok := n.List.Second() 781 call := mkcall1(fn, ok.Type, init, typename(r.Left.Type), r.Left, n1) 782 n = nod(OAS, ok, call) 783 n = typecheck(n, Etop) 784 785 // a,b = m[i]; 786 case OAS2MAPR: 787 init.AppendNodes(&n.Ninit) 788 789 r := n.Rlist.First() 790 walkexprlistsafe(n.List.Slice(), init) 791 r.Left = walkexpr(r.Left, init) 792 r.Right = walkexpr(r.Right, init) 793 t := r.Left.Type 794 p := "" 795 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 796 switch algtype(t.Key()) { 797 case AMEM32: 798 p = "mapaccess2_fast32" 799 case AMEM64: 800 p = "mapaccess2_fast64" 801 case ASTRING: 802 p = "mapaccess2_faststr" 803 } 804 } 805 806 var key *Node 807 if p != "" { 808 // fast versions take key by value 809 key = r.Right 810 } else { 811 // standard version takes key by reference 812 // orderexpr made sure key is addressable. 813 key = nod(OADDR, r.Right, nil) 814 815 p = "mapaccess2" 816 } 817 818 // from: 819 // a,b = m[i] 820 // to: 821 // var,b = mapaccess2*(t, m, i) 822 // a = *var 823 a := n.List.First() 824 825 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 826 fn := mapfn(p, t) 827 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 828 } else { 829 fn := mapfn("mapaccess2_fat", t) 830 z := zeroaddr(w) 831 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 832 } 833 834 // mapaccess2* returns a typed bool, but due to spec changes, 835 // the boolean result of i.(T) is now untyped so we make it the 836 // same type as the variable on the lhs. 
837 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 838 r.Type.Field(1).Type = ok.Type 839 } 840 n.Rlist.Set1(r) 841 n.Op = OAS2FUNC 842 843 // don't generate a = *var if a is _ 844 if !isblank(a) { 845 var_ := temp(ptrto(t.Val())) 846 var_.Typecheck = 1 847 var_.NonNil = true // mapaccess always returns a non-nil pointer 848 n.List.SetIndex(0, var_) 849 n = walkexpr(n, init) 850 init.Append(n) 851 n = nod(OAS, a, nod(OIND, var_, nil)) 852 } 853 854 n = typecheck(n, Etop) 855 n = walkexpr(n, init) 856 857 case ODELETE: 858 init.AppendNodes(&n.Ninit) 859 map_ := n.List.First() 860 key := n.List.Second() 861 map_ = walkexpr(map_, init) 862 key = walkexpr(key, init) 863 864 // orderstmt made sure key is addressable. 865 key = nod(OADDR, key, nil) 866 867 t := map_.Type 868 n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key) 869 870 case OAS2DOTTYPE: 871 walkexprlistsafe(n.List.Slice(), init) 872 e := n.Rlist.First() // i.(T) 873 e.Left = walkexpr(e.Left, init) 874 875 case ODOTTYPE, ODOTTYPE2: 876 n.Left = walkexpr(n.Left, init) 877 878 case OCONVIFACE: 879 n.Left = walkexpr(n.Left, init) 880 881 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 882 if isdirectiface(n.Left.Type) { 883 var t *Node 884 if n.Type.IsEmptyInterface() { 885 t = typename(n.Left.Type) 886 } else { 887 t = itabname(n.Left.Type, n.Type) 888 } 889 l := nod(OEFACE, t, n.Left) 890 l.Type = n.Type 891 l.Typecheck = n.Typecheck 892 n = l 893 break 894 } 895 896 if staticbytes == nil { 897 staticbytes = newname(Pkglookup("staticbytes", Runtimepkg)) 898 staticbytes.Class = PEXTERN 899 staticbytes.Type = typArray(Types[TUINT8], 256) 900 zerobase = newname(Pkglookup("zerobase", Runtimepkg)) 901 zerobase.Class = PEXTERN 902 zerobase.Type = Types[TUINTPTR] 903 } 904 905 // Optimize convT2{E,I} for many cases in which T is not pointer-shaped, 906 // by using an existing addressable value identical to n.Left 907 // or creating one on the stack. 908 var value *Node 909 switch { 910 case n.Left.Type.Size() == 0: 911 // n.Left is zero-sized. Use zerobase. 912 value = zerobase 913 case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()): 914 // n.Left is a bool/byte. Use staticbytes[n.Left]. 915 value = nod(OINDEX, staticbytes, byteindex(n.Left)) 916 value.Bounded = true 917 case n.Left.Class == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly: 918 // n.Left is a readonly global; use it directly. 919 value = n.Left 920 case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024: 921 // n.Left does not escape. Use a stack temporary initialized to n.Left. 922 value = temp(n.Left.Type) 923 init.Append(typecheck(nod(OAS, value, n.Left), Etop)) 924 } 925 926 if value != nil { 927 // Value is identical to n.Left. 928 // Construct the interface directly: {type/itab, &value}. 929 var t *Node 930 if n.Type.IsEmptyInterface() { 931 t = typename(n.Left.Type) 932 } else { 933 t = itabname(n.Left.Type, n.Type) 934 } 935 l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv)) 936 l.Type = n.Type 937 l.Typecheck = n.Typecheck 938 n = l 939 break 940 } 941 942 // Implement interface to empty interface conversion. 943 // tmp = i.itab 944 // if tmp != nil { 945 // tmp = tmp.type 946 // } 947 // e = iface{tmp, i.data} 948 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 949 // Evaluate the input interface. 
950 c := temp(n.Left.Type) 951 init.Append(nod(OAS, c, n.Left)) 952 953 // Get the itab out of the interface. 954 tmp := temp(ptrto(Types[TUINT8])) 955 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 956 957 // Get the type out of the itab. 958 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 959 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 960 init.Append(nif) 961 962 // Build the result. 963 e := nod(OEFACE, tmp, ifaceData(c, ptrto(Types[TUINT8]))) 964 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 965 e.Typecheck = 1 966 n = e 967 break 968 } 969 970 var ll []*Node 971 if n.Type.IsEmptyInterface() { 972 if !n.Left.Type.IsInterface() { 973 ll = append(ll, typename(n.Left.Type)) 974 } 975 } else { 976 if n.Left.Type.IsInterface() { 977 ll = append(ll, typename(n.Type)) 978 } else { 979 ll = append(ll, itabname(n.Left.Type, n.Type)) 980 } 981 } 982 983 if n.Left.Type.IsInterface() { 984 ll = append(ll, n.Left) 985 } else { 986 // regular types are passed by reference to avoid C vararg calls 987 // orderexpr arranged for n.Left to be a temporary for all 988 // the conversions it could see. comparison of an interface 989 // with a non-interface, especially in a switch on interface value 990 // with non-interface cases, is not visible to orderstmt, so we 991 // have to fall back on allocating a temp here. 992 if islvalue(n.Left) { 993 ll = append(ll, nod(OADDR, n.Left, nil)) 994 } else { 995 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 996 } 997 dowidth(n.Left.Type) 998 } 999 1000 fn := syslook(convFuncName(n.Left.Type, n.Type)) 1001 fn = substArgTypes(fn, n.Left.Type, n.Type) 1002 dowidth(fn.Type) 1003 n = nod(OCALL, fn, nil) 1004 n.List.Set(ll) 1005 n = typecheck(n, Erv) 1006 n = walkexpr(n, init) 1007 1008 case OCONV, OCONVNOP: 1009 if Thearch.LinkArch.Family == sys.ARM || Thearch.LinkArch.Family == sys.MIPS { 1010 if n.Left.Type.IsFloat() { 1011 if n.Type.Etype == TINT64 { 1012 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1013 break 1014 } 1015 1016 if n.Type.Etype == TUINT64 { 1017 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1018 break 1019 } 1020 } 1021 1022 if n.Type.IsFloat() { 1023 if n.Left.Type.Etype == TINT64 { 1024 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1025 break 1026 } 1027 1028 if n.Left.Type.Etype == TUINT64 { 1029 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1030 break 1031 } 1032 } 1033 } 1034 1035 if Thearch.LinkArch.Family == sys.I386 { 1036 if n.Left.Type.IsFloat() { 1037 if n.Type.Etype == TINT64 { 1038 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1039 break 1040 } 1041 1042 if n.Type.Etype == TUINT64 { 1043 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1044 break 1045 } 1046 if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR { 1047 n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1048 break 1049 } 1050 } 1051 if n.Type.IsFloat() { 1052 if n.Left.Type.Etype == TINT64 { 1053 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1054 break 1055 } 1056 1057 if n.Left.Type.Etype == TUINT64 { 1058 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1059 break 1060 } 1061 if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT 
|| n.Left.Type.Etype == TUINTPTR { 1062 n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type) 1063 break 1064 } 1065 } 1066 } 1067 1068 n.Left = walkexpr(n.Left, init) 1069 1070 case OANDNOT: 1071 n.Left = walkexpr(n.Left, init) 1072 n.Op = OAND 1073 n.Right = nod(OCOM, n.Right, nil) 1074 n.Right = typecheck(n.Right, Erv) 1075 n.Right = walkexpr(n.Right, init) 1076 1077 case ODIV, OMOD: 1078 n.Left = walkexpr(n.Left, init) 1079 n.Right = walkexpr(n.Right, init) 1080 1081 // rewrite complex div into function call. 1082 et := n.Left.Type.Etype 1083 1084 if isComplex[et] && n.Op == ODIV { 1085 t := n.Type 1086 n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128])) 1087 n = conv(n, t) 1088 break 1089 } 1090 1091 // Nothing to do for float divisions. 1092 if isFloat[et] { 1093 break 1094 } 1095 1096 // Try rewriting as shifts or magic multiplies. 1097 n = walkdiv(n, init) 1098 1099 // rewrite 64-bit div and mod into function calls 1100 // on 32-bit architectures. 1101 switch n.Op { 1102 case OMOD, ODIV: 1103 if Widthreg >= 8 || (et != TUINT64 && et != TINT64) { 1104 break opswitch 1105 } 1106 var fn string 1107 if et == TINT64 { 1108 fn = "int64" 1109 } else { 1110 fn = "uint64" 1111 } 1112 if n.Op == ODIV { 1113 fn += "div" 1114 } else { 1115 fn += "mod" 1116 } 1117 n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et])) 1118 } 1119 1120 case OINDEX: 1121 n.Left = walkexpr(n.Left, init) 1122 1123 // save the original node for bounds checking elision. 1124 // If it was a ODIV/OMOD walk might rewrite it. 1125 r := n.Right 1126 1127 n.Right = walkexpr(n.Right, init) 1128 1129 // if range of type cannot exceed static array bound, 1130 // disable bounds check. 1131 if n.Bounded { 1132 break 1133 } 1134 t := n.Left.Type 1135 if t != nil && t.IsPtr() { 1136 t = t.Elem() 1137 } 1138 if t.IsArray() { 1139 n.Bounded = bounded(r, t.NumElem()) 1140 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1141 Warn("index bounds check elided") 1142 } 1143 if smallintconst(n.Right) && !n.Bounded { 1144 yyerror("index out of bounds") 1145 } 1146 } else if Isconst(n.Left, CTSTR) { 1147 n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string)))) 1148 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1149 Warn("index bounds check elided") 1150 } 1151 if smallintconst(n.Right) && !n.Bounded { 1152 yyerror("index out of bounds") 1153 } 1154 } 1155 1156 if Isconst(n.Right, CTINT) { 1157 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1158 yyerror("index out of bounds") 1159 } 1160 } 1161 1162 case OINDEXMAP: 1163 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1164 n.Left = walkexpr(n.Left, init) 1165 n.Right = walkexpr(n.Right, init) 1166 map_ := n.Left 1167 key := n.Right 1168 t := map_.Type 1169 if n.Etype == 1 { 1170 // This m[k] expression is on the left-hand side of an assignment. 1171 // orderexpr made sure key is addressable. 1172 key = nod(OADDR, key, nil) 1173 n = mkcall1(mapfn("mapassign", t), nil, init, typename(t), map_, key) 1174 } else { 1175 // m[k] is not the target of an assignment. 1176 p := "" 1177 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 
1178 switch algtype(t.Key()) { 1179 case AMEM32: 1180 p = "mapaccess1_fast32" 1181 case AMEM64: 1182 p = "mapaccess1_fast64" 1183 case ASTRING: 1184 p = "mapaccess1_faststr" 1185 } 1186 } 1187 1188 if p == "" { 1189 // standard version takes key by reference. 1190 // orderexpr made sure key is addressable. 1191 key = nod(OADDR, key, nil) 1192 p = "mapaccess1" 1193 } 1194 1195 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1196 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key) 1197 } else { 1198 p = "mapaccess1_fat" 1199 z := zeroaddr(w) 1200 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key, z) 1201 } 1202 } 1203 n.Type = ptrto(t.Val()) 1204 n.NonNil = true // mapaccess1* and mapassign always return non-nil pointers. 1205 n = nod(OIND, n, nil) 1206 n.Type = t.Val() 1207 n.Typecheck = 1 1208 1209 case ORECV: 1210 Fatalf("walkexpr ORECV") // should see inside OAS only 1211 1212 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1213 n.Left = walkexpr(n.Left, init) 1214 low, high, max := n.SliceBounds() 1215 low = walkexpr(low, init) 1216 if low != nil && iszero(low) { 1217 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1218 low = nil 1219 } 1220 high = walkexpr(high, init) 1221 max = walkexpr(max, init) 1222 n.SetSliceBounds(low, high, max) 1223 if n.Op.IsSlice3() { 1224 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1225 // Reduce x[i:j:cap(x)] to x[i:j]. 1226 if n.Op == OSLICE3 { 1227 n.Op = OSLICE 1228 } else { 1229 n.Op = OSLICEARR 1230 } 1231 n = reduceSlice(n) 1232 } 1233 } else { 1234 n = reduceSlice(n) 1235 } 1236 1237 case OADDR: 1238 n.Left = walkexpr(n.Left, init) 1239 1240 case ONEW: 1241 if n.Esc == EscNone { 1242 if n.Type.Elem().Width >= 1<<16 { 1243 Fatalf("large ONEW with EscNone: %v", n) 1244 } 1245 r := temp(n.Type.Elem()) 1246 r = nod(OAS, r, nil) // zero temp 1247 r = typecheck(r, Etop) 1248 init.Append(r) 1249 r = nod(OADDR, r.Left, nil) 1250 r = typecheck(r, Erv) 1251 n = r 1252 } else { 1253 n = callnew(n.Type.Elem()) 1254 } 1255 1256 case OCMPSTR: 1257 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1258 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1259 // TODO(marvin): Fix Node.EType type union. 1260 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1261 r = typecheck(r, Erv) 1262 r = walkexpr(r, init) 1263 r.Type = n.Type 1264 n = r 1265 break 1266 } 1267 1268 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1269 var cs, ncs *Node // const string, non-const string 1270 switch { 1271 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1272 // ignore; will be constant evaluated 1273 case Isconst(n.Left, CTSTR): 1274 cs = n.Left 1275 ncs = n.Right 1276 case Isconst(n.Right, CTSTR): 1277 cs = n.Right 1278 ncs = n.Left 1279 } 1280 if cs != nil { 1281 cmp := Op(n.Etype) 1282 // maxRewriteLen was chosen empirically. 1283 // It is the value that minimizes cmd/go file size 1284 // across most architectures. 1285 // See the commit description for CL 26758 for details. 1286 maxRewriteLen := 6 1287 var and Op 1288 switch cmp { 1289 case OEQ: 1290 and = OANDAND 1291 case ONE: 1292 and = OOROR 1293 default: 1294 // Don't do byte-wise comparisons for <, <=, etc. 1295 // They're fairly complicated. 
1296 // Length-only checks are ok, though. 1297 maxRewriteLen = 0 1298 } 1299 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1300 if len(s) > 0 { 1301 ncs = safeexpr(ncs, init) 1302 } 1303 // TODO(marvin): Fix Node.EType type union. 1304 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1305 for i := 0; i < len(s); i++ { 1306 cb := nodintconst(int64(s[i])) 1307 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1308 r = nod(and, r, nod(cmp, ncb, cb)) 1309 } 1310 r = typecheck(r, Erv) 1311 r = walkexpr(r, init) 1312 r.Type = n.Type 1313 n = r 1314 break 1315 } 1316 } 1317 1318 var r *Node 1319 // TODO(marvin): Fix Node.EType type union. 1320 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1321 // prepare for rewrite below 1322 n.Left = cheapexpr(n.Left, init) 1323 n.Right = cheapexpr(n.Right, init) 1324 1325 r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1326 1327 // quick check of len before full compare for == or != 1328 // eqstring assumes that the lengths are equal 1329 // TODO(marvin): Fix Node.EType type union. 1330 if Op(n.Etype) == OEQ { 1331 // len(left) == len(right) && eqstring(left, right) 1332 r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1333 } else { 1334 // len(left) != len(right) || !eqstring(left, right) 1335 r = nod(ONOT, r, nil) 1336 r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1337 } 1338 1339 r = typecheck(r, Erv) 1340 r = walkexpr(r, nil) 1341 } else { 1342 // sys_cmpstring(s1, s2) :: 0 1343 r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1344 // TODO(marvin): Fix Node.EType type union. 1345 r = nod(Op(n.Etype), r, nodintconst(0)) 1346 } 1347 1348 r = typecheck(r, Erv) 1349 if !n.Type.IsBoolean() { 1350 Fatalf("cmp %v", n.Type) 1351 } 1352 r.Type = n.Type 1353 n = r 1354 1355 case OADDSTR: 1356 n = addstr(n, init) 1357 1358 case OAPPEND: 1359 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 1360 Fatalf("append outside assignment") 1361 1362 case OCOPY: 1363 n = copyany(n, init, instrumenting && !compiling_runtime) 1364 1365 // cannot use chanfn - closechan takes any, not chan any 1366 case OCLOSE: 1367 fn := syslook("closechan") 1368 1369 fn = substArgTypes(fn, n.Left.Type) 1370 n = mkcall1(fn, nil, init, n.Left) 1371 1372 case OMAKECHAN: 1373 n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64])) 1374 1375 case OMAKEMAP: 1376 t := n.Type 1377 1378 a := nodnil() // hmap buffer 1379 r := nodnil() // bucket buffer 1380 if n.Esc == EscNone { 1381 // Allocate hmap buffer on stack. 1382 var_ := temp(hmap(t)) 1383 1384 a = nod(OAS, var_, nil) // zero temp 1385 a = typecheck(a, Etop) 1386 init.Append(a) 1387 a = nod(OADDR, var_, nil) 1388 1389 // Allocate one bucket on stack. 1390 // Maximum key/value size is 128 bytes, larger objects 1391 // are stored with an indirection. So max bucket size is 2048+eps. 
1392 var_ = temp(mapbucket(t)) 1393 1394 r = nod(OAS, var_, nil) // zero temp 1395 r = typecheck(r, Etop) 1396 init.Append(r) 1397 r = nod(OADDR, var_, nil) 1398 } 1399 1400 fn := syslook("makemap") 1401 fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val()) 1402 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r) 1403 1404 case OMAKESLICE: 1405 l := n.Left 1406 r := n.Right 1407 if r == nil { 1408 r = safeexpr(l, init) 1409 l = r 1410 } 1411 t := n.Type 1412 if n.Esc == EscNone { 1413 if !isSmallMakeSlice(n) { 1414 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1415 } 1416 // var arr [r]T 1417 // n = arr[:l] 1418 t = typArray(t.Elem(), nonnegintconst(r)) // [r]T 1419 var_ := temp(t) 1420 a := nod(OAS, var_, nil) // zero temp 1421 a = typecheck(a, Etop) 1422 init.Append(a) 1423 r := nod(OSLICE, var_, nil) // arr[:l] 1424 r.SetSliceBounds(nil, l, nil) 1425 r = conv(r, n.Type) // in case n.Type is named. 1426 r = typecheck(r, Erv) 1427 r = walkexpr(r, init) 1428 n = r 1429 } else { 1430 // n escapes; set up a call to makeslice. 1431 // When len and cap can fit into int, use makeslice instead of 1432 // makeslice64, which is faster and shorter on 32 bit platforms. 1433 1434 if t.Elem().NotInHeap { 1435 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1436 } 1437 1438 len, cap := l, r 1439 1440 fnname := "makeslice64" 1441 argtype := Types[TINT64] 1442 1443 // typechecking guarantees that TIDEAL len/cap are positive and fit in an int. 1444 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1445 // will be handled by the negative range checks in makeslice during runtime. 1446 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1447 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1448 fnname = "makeslice" 1449 argtype = Types[TINT] 1450 } 1451 1452 fn := syslook(fnname) 1453 fn = substArgTypes(fn, t.Elem()) // any-1 1454 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1455 } 1456 1457 case ORUNESTR: 1458 a := nodnil() 1459 if n.Esc == EscNone { 1460 t := typArray(Types[TUINT8], 4) 1461 var_ := temp(t) 1462 a = nod(OADDR, var_, nil) 1463 } 1464 1465 // intstring(*[4]byte, rune) 1466 n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64])) 1467 1468 case OARRAYBYTESTR: 1469 a := nodnil() 1470 if n.Esc == EscNone { 1471 // Create temporary buffer for string on stack. 1472 t := typArray(Types[TUINT8], tmpstringbufsize) 1473 1474 a = nod(OADDR, temp(t), nil) 1475 } 1476 1477 // slicebytetostring(*[32]byte, []byte) string; 1478 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1479 1480 // slicebytetostringtmp([]byte) string; 1481 case OARRAYBYTESTRTMP: 1482 n.Left = walkexpr(n.Left, init) 1483 1484 if !instrumenting { 1485 // Let the backend handle OARRAYBYTESTRTMP directly 1486 // to avoid a function call to slicebytetostringtmp. 1487 break 1488 } 1489 1490 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1491 1492 // slicerunetostring(*[32]byte, []rune) string; 1493 case OARRAYRUNESTR: 1494 a := nodnil() 1495 1496 if n.Esc == EscNone { 1497 // Create temporary buffer for string on stack. 
1498 t := typArray(Types[TUINT8], tmpstringbufsize) 1499 1500 a = nod(OADDR, temp(t), nil) 1501 } 1502 1503 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1504 1505 // stringtoslicebyte(*32[byte], string) []byte; 1506 case OSTRARRAYBYTE: 1507 a := nodnil() 1508 1509 if n.Esc == EscNone { 1510 // Create temporary buffer for slice on stack. 1511 t := typArray(Types[TUINT8], tmpstringbufsize) 1512 1513 a = nod(OADDR, temp(t), nil) 1514 } 1515 1516 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING])) 1517 1518 case OSTRARRAYBYTETMP: 1519 // []byte(string) conversion that creates a slice 1520 // referring to the actual string bytes. 1521 // This conversion is handled later by the backend and 1522 // is only for use by internal compiler optimizations 1523 // that know that the slice won't be mutated. 1524 // The only such case today is: 1525 // for i, c := range []byte(string) 1526 n.Left = walkexpr(n.Left, init) 1527 1528 // stringtoslicerune(*[32]rune, string) []rune 1529 case OSTRARRAYRUNE: 1530 a := nodnil() 1531 1532 if n.Esc == EscNone { 1533 // Create temporary buffer for slice on stack. 1534 t := typArray(Types[TINT32], tmpstringbufsize) 1535 1536 a = nod(OADDR, temp(t), nil) 1537 } 1538 1539 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1540 1541 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1542 case OCMPIFACE: 1543 if !eqtype(n.Left.Type, n.Right.Type) { 1544 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1545 } 1546 var fn *Node 1547 if n.Left.Type.IsEmptyInterface() { 1548 fn = syslook("efaceeq") 1549 } else { 1550 fn = syslook("ifaceeq") 1551 } 1552 1553 n.Right = cheapexpr(n.Right, init) 1554 n.Left = cheapexpr(n.Left, init) 1555 fn = substArgTypes(fn, n.Right.Type, n.Left.Type) 1556 r := mkcall1(fn, n.Type, init, n.Left, n.Right) 1557 // TODO(marvin): Fix Node.EType type union. 1558 if Op(n.Etype) == ONE { 1559 r = nod(ONOT, r, nil) 1560 } 1561 1562 // check itable/type before full compare. 1563 // TODO(marvin): Fix Node.EType type union. 1564 if Op(n.Etype) == OEQ { 1565 r = nod(OANDAND, nod(OEQ, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1566 } else { 1567 r = nod(OOROR, nod(ONE, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1568 } 1569 r = typecheck(r, Erv) 1570 r = walkexpr(r, init) 1571 r.Type = n.Type 1572 n = r 1573 1574 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1575 if n.Op == OSTRUCTLIT && iszero(n) && !instrumenting { // TODO: SSA doesn't yet handle ARRAYLIT with length > 1 1576 break 1577 } 1578 if isStaticCompositeLiteral(n) { 1579 // n can be directly represented in the read-only data section. 1580 // Make direct reference to the static data. See issue 12841. 1581 vstat := staticname(n.Type) 1582 vstat.Name.Readonly = true 1583 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1584 n = vstat 1585 n = typecheck(n, Erv) 1586 break 1587 } 1588 var_ := temp(n.Type) 1589 anylit(n, var_, init) 1590 n = var_ 1591 1592 case OSEND: 1593 n1 := n.Right 1594 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1595 n1 = walkexpr(n1, init) 1596 n1 = nod(OADDR, n1, nil) 1597 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1) 1598 1599 case OCLOSURE: 1600 n = walkclosure(n, init) 1601 1602 case OCALLPART: 1603 n = walkpartialcall(n, init) 1604 } 1605 1606 // Expressions that are constant at run time but not 1607 // considered const by the language spec are not turned into 1608 // constants until walk. 
For example, if n is y%1 == 0, the 1609 // walk of y%1 may have replaced it by 0. 1610 // Check whether n with its updated args is itself now a constant. 1611 t := n.Type 1612 1613 evconst(n) 1614 n.Type = t 1615 if n.Op == OLITERAL { 1616 n = typecheck(n, Erv) 1617 } 1618 1619 ullmancalc(n) 1620 1621 if Debug['w'] != 0 && n != nil { 1622 Dump("walk", n) 1623 } 1624 1625 lineno = lno 1626 return n 1627 } 1628 1629 // TODO(josharian): combine this with its caller and simplify 1630 func reduceSlice(n *Node) *Node { 1631 low, high, max := n.SliceBounds() 1632 if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) { 1633 // Reduce x[i:len(x)] to x[i:]. 1634 high = nil 1635 } 1636 n.SetSliceBounds(low, high, max) 1637 if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { 1638 // Reduce x[:] to x. 1639 if Debug_slice > 0 { 1640 Warn("slice: omit slice operation") 1641 } 1642 return n.Left 1643 } 1644 return n 1645 } 1646 1647 func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node { 1648 // convas will turn map assigns into function calls, 1649 // making it impossible for reorder3 to work. 1650 n := nod(OAS, l, r) 1651 1652 if l.Op == OINDEXMAP { 1653 return n 1654 } 1655 1656 return convas(n, init) 1657 } 1658 1659 func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node { 1660 // check assign expression list to 1661 // a expression list. called in 1662 // expr-list = expr-list 1663 1664 // ensure order of evaluation for function calls 1665 for i := range nl { 1666 nl[i] = safeexpr(nl[i], init) 1667 } 1668 for i1 := range nr { 1669 nr[i1] = safeexpr(nr[i1], init) 1670 } 1671 1672 var nn []*Node 1673 i := 0 1674 for ; i < len(nl); i++ { 1675 if i >= len(nr) { 1676 break 1677 } 1678 // Do not generate 'x = x' during return. See issue 4014. 1679 if op == ORETURN && samesafeexpr(nl[i], nr[i]) { 1680 continue 1681 } 1682 nn = append(nn, ascompatee1(op, nl[i], nr[i], init)) 1683 } 1684 1685 // cannot happen: caller checked that lists had same length 1686 if i < len(nl) || i < len(nr) { 1687 var nln, nrn Nodes 1688 nln.Set(nl) 1689 nrn.Set(nr) 1690 yyerror("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.Func.Nname.Sym.Name) 1691 } 1692 return nn 1693 } 1694 1695 // l is an lv and rt is the type of an rv 1696 // return 1 if this implies a function call 1697 // evaluating the lv or a function call 1698 // in the conversion of the types 1699 func fncall(l *Node, rt *Type) bool { 1700 if l.Ullman >= UINF || l.Op == OINDEXMAP { 1701 return true 1702 } 1703 var r Node 1704 if needwritebarrier(l, &r) { 1705 return true 1706 } 1707 if eqtype(l.Type, rt) { 1708 return false 1709 } 1710 return true 1711 } 1712 1713 // check assign type list to 1714 // a expression list. 
called in 1715 // expr-list = func() 1716 func ascompatet(op Op, nl Nodes, nr *Type) []*Node { 1717 r, saver := iterFields(nr) 1718 1719 var nn, mm Nodes 1720 var ullmanOverflow bool 1721 var i int 1722 for i = 0; i < nl.Len(); i++ { 1723 if r == nil { 1724 break 1725 } 1726 l := nl.Index(i) 1727 if isblank(l) { 1728 r = saver.Next() 1729 continue 1730 } 1731 1732 // any lv that causes a fn call must be 1733 // deferred until all the return arguments 1734 // have been pulled from the output arguments 1735 if fncall(l, r.Type) { 1736 tmp := temp(r.Type) 1737 tmp = typecheck(tmp, Erv) 1738 a := nod(OAS, l, tmp) 1739 a = convas(a, &mm) 1740 mm.Append(a) 1741 l = tmp 1742 } 1743 1744 a := nod(OAS, l, nodarg(r, 0)) 1745 a = convas(a, &nn) 1746 ullmancalc(a) 1747 if a.Ullman >= UINF { 1748 Dump("ascompatet ucount", a) 1749 ullmanOverflow = true 1750 } 1751 1752 nn.Append(a) 1753 r = saver.Next() 1754 } 1755 1756 if i < nl.Len() || r != nil { 1757 yyerror("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) 1758 } 1759 1760 if ullmanOverflow { 1761 Fatalf("ascompatet: too many function calls evaluating parameters") 1762 } 1763 return append(nn.Slice(), mm.Slice()...) 1764 } 1765 1766 // package all the arguments that match a ... T parameter into a []T. 1767 func mkdotargslice(lr0, nn []*Node, l *Field, fp int, init *Nodes, ddd *Node) []*Node { 1768 esc := uint16(EscUnknown) 1769 if ddd != nil { 1770 esc = ddd.Esc 1771 } 1772 1773 tslice := typSlice(l.Type.Elem()) 1774 1775 var n *Node 1776 if len(lr0) == 0 { 1777 n = nodnil() 1778 n.Type = tslice 1779 } else { 1780 n = nod(OCOMPLIT, nil, typenod(tslice)) 1781 if ddd != nil && prealloc[ddd] != nil { 1782 prealloc[n] = prealloc[ddd] // temporary to use 1783 } 1784 n.List.Set(lr0) 1785 n.Esc = esc 1786 n = typecheck(n, Erv) 1787 if n.Type == nil { 1788 Fatalf("mkdotargslice: typecheck failed") 1789 } 1790 n = walkexpr(n, init) 1791 } 1792 1793 a := nod(OAS, nodarg(l, fp), n) 1794 nn = append(nn, convas(a, init)) 1795 return nn 1796 } 1797 1798 // helpers for shape errors 1799 func dumptypes(nl *Type, what string) string { 1800 s := "" 1801 for _, l := range nl.Fields().Slice() { 1802 if s != "" { 1803 s += ", " 1804 } 1805 s += fldconv(l, 0) 1806 } 1807 if s == "" { 1808 s = fmt.Sprintf("[no arguments %s]", what) 1809 } 1810 return s 1811 } 1812 1813 func dumpnodetypes(l []*Node, what string) string { 1814 s := "" 1815 for _, r := range l { 1816 if s != "" { 1817 s += ", " 1818 } 1819 s += r.Type.String() 1820 } 1821 if s == "" { 1822 s = fmt.Sprintf("[no arguments %s]", what) 1823 } 1824 return s 1825 } 1826 1827 // check assign expression list to 1828 // a type list. called in 1829 // return expr-list 1830 // func(expr-list) 1831 func ascompatte(op Op, call *Node, isddd bool, nl *Type, lr []*Node, fp int, init *Nodes) []*Node { 1832 lr0 := lr 1833 l, savel := iterFields(nl) 1834 var r *Node 1835 if len(lr) > 0 { 1836 r = lr[0] 1837 } 1838 var nn []*Node 1839 1840 // f(g()) where g has multiple return values 1841 if r != nil && len(lr) <= 1 && r.Type.IsFuncArgStruct() { 1842 // optimization - can do block copy 1843 if eqtypenoname(r.Type, nl) { 1844 arg := nodarg(nl, fp) 1845 r = nod(OCONVNOP, r, nil) 1846 r.Type = arg.Type 1847 nn = []*Node{convas(nod(OAS, arg, r), init)} 1848 goto ret 1849 } 1850 1851 // conversions involved. 1852 // copy into temporaries. 
1853 var alist []*Node 1854 1855 for _, l := range r.Type.Fields().Slice() { 1856 tmp := temp(l.Type) 1857 alist = append(alist, tmp) 1858 } 1859 1860 a := nod(OAS2, nil, nil) 1861 a.List.Set(alist) 1862 a.Rlist.Set(lr) 1863 a = typecheck(a, Etop) 1864 a = walkstmt(a) 1865 init.Append(a) 1866 lr = alist 1867 r = lr[0] 1868 l, savel = iterFields(nl) 1869 } 1870 1871 for { 1872 if l != nil && l.Isddd { 1873 // the ddd parameter must be last 1874 ll := savel.Next() 1875 1876 if ll != nil { 1877 yyerror("... must be last argument") 1878 } 1879 1880 // special case -- 1881 // only if we are assigning a single ddd 1882 // argument to a ddd parameter then it is 1883 // passed through unencapsulated 1884 if r != nil && len(lr) <= 1 && isddd && eqtype(l.Type, r.Type) { 1885 a := nod(OAS, nodarg(l, fp), r) 1886 a = convas(a, init) 1887 nn = append(nn, a) 1888 break 1889 } 1890 1891 // normal case -- make a slice of all 1892 // remaining arguments and pass it to 1893 // the ddd parameter. 1894 nn = mkdotargslice(lr, nn, l, fp, init, call.Right) 1895 1896 break 1897 } 1898 1899 if l == nil || r == nil { 1900 if l != nil || r != nil { 1901 l1 := dumptypes(nl, "expected") 1902 l2 := dumpnodetypes(lr0, "given") 1903 if l != nil { 1904 yyerror("not enough arguments to %v\n\t%s\n\t%s", op, l1, l2) 1905 } else { 1906 yyerror("too many arguments to %v\n\t%s\n\t%s", op, l1, l2) 1907 } 1908 } 1909 1910 break 1911 } 1912 1913 a := nod(OAS, nodarg(l, fp), r) 1914 a = convas(a, init) 1915 nn = append(nn, a) 1916 1917 l = savel.Next() 1918 r = nil 1919 lr = lr[1:] 1920 if len(lr) > 0 { 1921 r = lr[0] 1922 } 1923 } 1924 1925 ret: 1926 for _, n := range nn { 1927 n.Typecheck = 1 1928 } 1929 return nn 1930 } 1931 1932 // generate code for print 1933 func walkprint(nn *Node, init *Nodes) *Node { 1934 var r *Node 1935 var n *Node 1936 var on *Node 1937 var t *Type 1938 var et EType 1939 1940 op := nn.Op 1941 all := nn.List 1942 var calls []*Node 1943 notfirst := false 1944 1945 // Hoist all the argument evaluation up before the lock. 
1946 walkexprlistcheap(all.Slice(), init) 1947 1948 calls = append(calls, mkcall("printlock", nil, init)) 1949 for i1, n1 := range all.Slice() { 1950 if notfirst { 1951 calls = append(calls, mkcall("printsp", nil, init)) 1952 } 1953 1954 notfirst = op == OPRINTN 1955 1956 n = n1 1957 if n.Op == OLITERAL { 1958 switch n.Val().Ctype() { 1959 case CTRUNE: 1960 n = defaultlit(n, runetype) 1961 1962 case CTINT: 1963 n = defaultlit(n, Types[TINT64]) 1964 1965 case CTFLT: 1966 n = defaultlit(n, Types[TFLOAT64]) 1967 } 1968 } 1969 1970 if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL { 1971 n = defaultlit(n, Types[TINT64]) 1972 } 1973 n = defaultlit(n, nil) 1974 all.SetIndex(i1, n) 1975 if n.Type == nil || n.Type.Etype == TFORW { 1976 continue 1977 } 1978 1979 t = n.Type 1980 et = n.Type.Etype 1981 if n.Type.IsInterface() { 1982 if n.Type.IsEmptyInterface() { 1983 on = syslook("printeface") 1984 } else { 1985 on = syslook("printiface") 1986 } 1987 on = substArgTypes(on, n.Type) // any-1 1988 } else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR { 1989 on = syslook("printpointer") 1990 on = substArgTypes(on, n.Type) // any-1 1991 } else if n.Type.IsSlice() { 1992 on = syslook("printslice") 1993 on = substArgTypes(on, n.Type) // any-1 1994 } else if isInt[et] { 1995 if et == TUINT64 { 1996 if (t.Sym.Pkg == Runtimepkg || compiling_runtime) && t.Sym.Name == "hex" { 1997 on = syslook("printhex") 1998 } else { 1999 on = syslook("printuint") 2000 } 2001 } else { 2002 on = syslook("printint") 2003 } 2004 } else if isFloat[et] { 2005 on = syslook("printfloat") 2006 } else if isComplex[et] { 2007 on = syslook("printcomplex") 2008 } else if et == TBOOL { 2009 on = syslook("printbool") 2010 } else if et == TSTRING { 2011 on = syslook("printstring") 2012 } else { 2013 badtype(OPRINT, n.Type, nil) 2014 continue 2015 } 2016 2017 t = on.Type.Params().Field(0).Type 2018 2019 if !eqtype(t, n.Type) { 2020 n = nod(OCONV, n, nil) 2021 n.Type = t 2022 } 2023 2024 r = nod(OCALL, on, nil) 2025 r.List.Append(n) 2026 calls = append(calls, r) 2027 } 2028 2029 if op == OPRINTN { 2030 calls = append(calls, mkcall("printnl", nil, nil)) 2031 } 2032 2033 calls = append(calls, mkcall("printunlock", nil, init)) 2034 2035 typecheckslice(calls, Etop) 2036 walkexprlist(calls, init) 2037 2038 r = nod(OEMPTY, nil, nil) 2039 r = typecheck(r, Etop) 2040 r = walkexpr(r, init) 2041 r.Ninit.Set(calls) 2042 return r 2043 } 2044 2045 func callnew(t *Type) *Node { 2046 if t.NotInHeap { 2047 yyerror("%v is go:notinheap; heap allocation disallowed", t) 2048 } 2049 dowidth(t) 2050 fn := syslook("newobject") 2051 fn = substArgTypes(fn, t) 2052 v := mkcall1(fn, ptrto(t), nil, typename(t)) 2053 v.NonNil = true 2054 return v 2055 } 2056 2057 func iscallret(n *Node) bool { 2058 n = outervalue(n) 2059 return n.Op == OINDREGSP 2060 } 2061 2062 func isstack(n *Node) bool { 2063 n = outervalue(n) 2064 2065 // If n is *autotmp and autotmp = &foo, replace n with foo. 2066 // We introduce such temps when initializing struct literals. 2067 if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() { 2068 defn := n.Left.Name.Defn 2069 if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR { 2070 n = defn.Right.Left 2071 } 2072 } 2073 2074 switch n.Op { 2075 case OINDREGSP: 2076 return true 2077 2078 case ONAME: 2079 switch n.Class { 2080 case PAUTO, PPARAM, PPARAMOUT: 2081 return true 2082 } 2083 } 2084 2085 return false 2086 } 2087 2088 // Do we need a write barrier for the assignment l = r? 
2089 func needwritebarrier(l *Node, r *Node) bool { 2090 if !use_writebarrier { 2091 return false 2092 } 2093 2094 if l == nil || isblank(l) { 2095 return false 2096 } 2097 2098 // No write barrier for write of non-pointers. 2099 dowidth(l.Type) 2100 2101 if !haspointers(l.Type) { 2102 return false 2103 } 2104 2105 // No write barrier for write to stack. 2106 if isstack(l) { 2107 return false 2108 } 2109 2110 // No write barrier if this is a pointer to a go:notinheap 2111 // type, since the write barrier's inheap(ptr) check will fail. 2112 if l.Type.IsPtr() && l.Type.Elem().NotInHeap { 2113 return false 2114 } 2115 2116 // Implicit zeroing is still zeroing, so it needs write 2117 // barriers. In practice, these are all to stack variables 2118 // (even if isstack isn't smart enough to figure that out), so 2119 // they'll be eliminated by the backend. 2120 if r == nil { 2121 return true 2122 } 2123 2124 // Ignore no-op conversions when making decision. 2125 // Ensures that xp = unsafe.Pointer(&x) is treated 2126 // the same as xp = &x. 2127 for r.Op == OCONVNOP { 2128 r = r.Left 2129 } 2130 2131 // TODO: We can eliminate write barriers if we know *both* the 2132 // current and new content of the slot must already be shaded. 2133 // We know a pointer is shaded if it's nil, or points to 2134 // static data, a global (variable or function), or the stack. 2135 // The nil optimization could be particularly useful for 2136 // writes to just-allocated objects. Unfortunately, knowing 2137 // the "current" value of the slot requires flow analysis. 2138 2139 // No write barrier for storing address of stack values, 2140 // which are guaranteed only to be written to the stack. 2141 if r.Op == OADDR && isstack(r.Left) { 2142 return false 2143 } 2144 2145 // Otherwise, be conservative and use write barrier. 2146 return true 2147 } 2148 2149 // TODO(rsc): Perhaps componentgen should run before this. 2150 2151 func applywritebarrier(n *Node) *Node { 2152 if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) { 2153 if Debug_wb > 1 { 2154 Warnl(n.Pos, "marking %v for barrier", n.Left) 2155 } 2156 n.Op = OASWB 2157 return n 2158 } 2159 return n 2160 } 2161 2162 func convas(n *Node, init *Nodes) *Node { 2163 if n.Op != OAS { 2164 Fatalf("convas: not OAS %v", n.Op) 2165 } 2166 2167 n.Typecheck = 1 2168 2169 var lt *Type 2170 var rt *Type 2171 if n.Left == nil || n.Right == nil { 2172 goto out 2173 } 2174 2175 lt = n.Left.Type 2176 rt = n.Right.Type 2177 if lt == nil || rt == nil { 2178 goto out 2179 } 2180 2181 if isblank(n.Left) { 2182 n.Right = defaultlit(n.Right, nil) 2183 goto out 2184 } 2185 2186 if !eqtype(lt, rt) { 2187 n.Right = assignconv(n.Right, lt, "assignment") 2188 n.Right = walkexpr(n.Right, init) 2189 } 2190 2191 out: 2192 ullmancalc(n) 2193 return n 2194 } 2195 2196 // from ascompat[te] 2197 // evaluating actual function arguments. 2198 // f(a,b) 2199 // if there is exactly one function expr, 2200 // then it is done first. 
otherwise must 2201 // make temp variables 2202 func reorder1(all []*Node) []*Node { 2203 c := 0 // function calls 2204 t := 0 // total parameters 2205 2206 for _, n := range all { 2207 t++ 2208 ullmancalc(n) 2209 if n.Ullman >= UINF { 2210 c++ 2211 } 2212 } 2213 2214 if c == 0 || t == 1 { 2215 return all 2216 } 2217 2218 var g []*Node // fncalls assigned to tempnames 2219 var f *Node // last fncall assigned to stack 2220 var r []*Node // non fncalls and tempnames assigned to stack 2221 d := 0 2222 var a *Node 2223 for _, n := range all { 2224 if n.Ullman < UINF { 2225 r = append(r, n) 2226 continue 2227 } 2228 2229 d++ 2230 if d == c { 2231 f = n 2232 continue 2233 } 2234 2235 // make assignment of fncall to tempname 2236 a = temp(n.Right.Type) 2237 2238 a = nod(OAS, a, n.Right) 2239 g = append(g, a) 2240 2241 // put normal arg assignment on list 2242 // with fncall replaced by tempname 2243 n.Right = a.Left 2244 2245 r = append(r, n) 2246 } 2247 2248 if f != nil { 2249 g = append(g, f) 2250 } 2251 return append(g, r...) 2252 } 2253 2254 // from ascompat[ee] 2255 // a,b = c,d 2256 // simultaneous assignment. there cannot 2257 // be later use of an earlier lvalue. 2258 // 2259 // function calls have been removed. 2260 func reorder3(all []*Node) []*Node { 2261 var l *Node 2262 2263 // If a needed expression may be affected by an 2264 // earlier assignment, make an early copy of that 2265 // expression and use the copy instead. 2266 var early []*Node 2267 2268 var mapinit Nodes 2269 for i, n := range all { 2270 l = n.Left 2271 2272 // Save subexpressions needed on left side. 2273 // Drill through non-dereferences. 2274 for { 2275 if l.Op == ODOT || l.Op == OPAREN { 2276 l = l.Left 2277 continue 2278 } 2279 2280 if l.Op == OINDEX && l.Left.Type.IsArray() { 2281 l.Right = reorder3save(l.Right, all, i, &early) 2282 l = l.Left 2283 continue 2284 } 2285 2286 break 2287 } 2288 2289 switch l.Op { 2290 default: 2291 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2292 2293 case ONAME: 2294 break 2295 2296 case OINDEX, OINDEXMAP: 2297 l.Left = reorder3save(l.Left, all, i, &early) 2298 l.Right = reorder3save(l.Right, all, i, &early) 2299 if l.Op == OINDEXMAP { 2300 all[i] = convas(all[i], &mapinit) 2301 } 2302 2303 case OIND, ODOTPTR: 2304 l.Left = reorder3save(l.Left, all, i, &early) 2305 } 2306 2307 // Save expression on right side. 2308 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2309 } 2310 2311 early = append(mapinit.Slice(), early...) 2312 return append(early, all...) 2313 } 2314 2315 // if the evaluation of *np would be affected by the 2316 // assignments in all up to but not including the ith assignment, 2317 // copy into a temporary during *early and 2318 // replace *np with that temp. 2319 // The result of reorder3save MUST be assigned back to n, e.g. 2320 // n.Left = reorder3save(n.Left, all, i, early) 2321 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2322 if !aliased(n, all, i) { 2323 return n 2324 } 2325 2326 q := temp(n.Type) 2327 q = nod(OAS, q, n) 2328 q = typecheck(q, Etop) 2329 *early = append(*early, q) 2330 return q.Left 2331 } 2332 2333 // what's the outer value that a write to n affects? 2334 // outer value means containing struct or array. 
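// For example (illustrative): for writes such as x.f = v, or a[i] = v with a an
// array, the affected outer value is x or a as a whole; writes through a pointer
// or into a slice element, p.f = v or s[i] = v, are not widened, and the pointer
// dereference or slice-index expression itself is returned.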
2335 func outervalue(n *Node) *Node { 2336 for { 2337 if n.Op == OXDOT { 2338 Fatalf("OXDOT in walk") 2339 } 2340 if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP { 2341 n = n.Left 2342 continue 2343 } 2344 2345 if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() { 2346 n = n.Left 2347 continue 2348 } 2349 2350 break 2351 } 2352 2353 return n 2354 } 2355 2356 // Is it possible that the computation of n might be 2357 // affected by writes in as up to but not including the ith element? 2358 func aliased(n *Node, all []*Node, i int) bool { 2359 if n == nil { 2360 return false 2361 } 2362 2363 // Treat all fields of a struct as referring to the whole struct. 2364 // We could do better but we would have to keep track of the fields. 2365 for n.Op == ODOT { 2366 n = n.Left 2367 } 2368 2369 // Look for obvious aliasing: a variable being assigned 2370 // during the all list and appearing in n. 2371 // Also record whether there are any writes to main memory. 2372 // Also record whether there are any writes to variables 2373 // whose addresses have been taken. 2374 memwrite := 0 2375 2376 varwrite := 0 2377 var a *Node 2378 for _, an := range all[:i] { 2379 a = outervalue(an.Left) 2380 2381 for a.Op == ODOT { 2382 a = a.Left 2383 } 2384 2385 if a.Op != ONAME { 2386 memwrite = 1 2387 continue 2388 } 2389 2390 switch n.Class { 2391 default: 2392 varwrite = 1 2393 continue 2394 2395 case PAUTO, PPARAM, PPARAMOUT: 2396 if n.Addrtaken { 2397 varwrite = 1 2398 continue 2399 } 2400 2401 if vmatch2(a, n) { 2402 // Direct hit. 2403 return true 2404 } 2405 } 2406 } 2407 2408 // The variables being written do not appear in n. 2409 // However, n might refer to computed addresses 2410 // that are being written. 2411 2412 // If no computed addresses are affected by the writes, no aliasing. 2413 if memwrite == 0 && varwrite == 0 { 2414 return false 2415 } 2416 2417 // If n does not refer to computed addresses 2418 // (that is, if n only refers to variables whose addresses 2419 // have not been taken), no aliasing. 2420 if varexpr(n) { 2421 return false 2422 } 2423 2424 // Otherwise, both the writes and n refer to computed memory addresses. 2425 // Assume that they might conflict. 2426 return true 2427 } 2428 2429 // does the evaluation of n only refer to variables 2430 // whose addresses have not been taken? 2431 // (and no other memory) 2432 func varexpr(n *Node) bool { 2433 if n == nil { 2434 return true 2435 } 2436 2437 switch n.Op { 2438 case OLITERAL: 2439 return true 2440 2441 case ONAME: 2442 switch n.Class { 2443 case PAUTO, PPARAM, PPARAMOUT: 2444 if !n.Addrtaken { 2445 return true 2446 } 2447 } 2448 2449 return false 2450 2451 case OADD, 2452 OSUB, 2453 OOR, 2454 OXOR, 2455 OMUL, 2456 ODIV, 2457 OMOD, 2458 OLSH, 2459 ORSH, 2460 OAND, 2461 OANDNOT, 2462 OPLUS, 2463 OMINUS, 2464 OCOM, 2465 OPAREN, 2466 OANDAND, 2467 OOROR, 2468 OCONV, 2469 OCONVNOP, 2470 OCONVIFACE, 2471 ODOTTYPE: 2472 return varexpr(n.Left) && varexpr(n.Right) 2473 2474 case ODOT: // but not ODOTPTR 2475 // Should have been handled in aliased. 2476 Fatalf("varexpr unexpected ODOT") 2477 } 2478 2479 // Be conservative. 2480 return false 2481 } 2482 2483 // is the name l mentioned in r? 
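// For example (illustrative): if l is the node for a variable x, vmatch2
// reports true for expressions such as x+y or f(x), and false for y*z.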
2484 func vmatch2(l *Node, r *Node) bool { 2485 if r == nil { 2486 return false 2487 } 2488 switch r.Op { 2489 // match each right given left 2490 case ONAME: 2491 return l == r 2492 2493 case OLITERAL: 2494 return false 2495 } 2496 2497 if vmatch2(l, r.Left) { 2498 return true 2499 } 2500 if vmatch2(l, r.Right) { 2501 return true 2502 } 2503 for _, n := range r.List.Slice() { 2504 if vmatch2(l, n) { 2505 return true 2506 } 2507 } 2508 return false 2509 } 2510 2511 // is any name mentioned in l also mentioned in r? 2512 // called by sinit.go 2513 func vmatch1(l *Node, r *Node) bool { 2514 // isolate all left sides 2515 if l == nil || r == nil { 2516 return false 2517 } 2518 switch l.Op { 2519 case ONAME: 2520 switch l.Class { 2521 case PPARAM, PAUTO: 2522 break 2523 2524 // assignment to non-stack variable 2525 // must be delayed if right has function calls. 2526 default: 2527 if r.Ullman >= UINF { 2528 return true 2529 } 2530 } 2531 2532 return vmatch2(l, r) 2533 2534 case OLITERAL: 2535 return false 2536 } 2537 2538 if vmatch1(l.Left, r) { 2539 return true 2540 } 2541 if vmatch1(l.Right, r) { 2542 return true 2543 } 2544 for _, n := range l.List.Slice() { 2545 if vmatch1(n, r) { 2546 return true 2547 } 2548 } 2549 return false 2550 } 2551 2552 // paramstoheap returns code to allocate memory for heap-escaped parameters 2553 // and to copy non-result prameters' values from the stack. 2554 // If out is true, then code is also produced to zero-initialize their 2555 // stack memory addresses. 2556 func paramstoheap(params *Type) []*Node { 2557 var nn []*Node 2558 for _, t := range params.Fields().Slice() { 2559 // For precise stacks, the garbage collector assumes results 2560 // are always live, so zero them always. 2561 if params.StructType().Funarg == FunargResults { 2562 // Defer might stop a panic and show the 2563 // return values as they exist at the time of panic. 2564 // Make sure to zero them on entry to the function. 2565 nn = append(nn, nod(OAS, nodarg(t, 1), nil)) 2566 } 2567 2568 v := t.Nname 2569 if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result 2570 v = nil 2571 } 2572 if v == nil { 2573 continue 2574 } 2575 2576 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil { 2577 nn = append(nn, walkstmt(nod(ODCL, v, nil))) 2578 if stackcopy.Class == PPARAM { 2579 nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop))) 2580 } 2581 } 2582 } 2583 2584 return nn 2585 } 2586 2587 // returnsfromheap returns code to copy values for heap-escaped parameters 2588 // back to the stack. 2589 func returnsfromheap(params *Type) []*Node { 2590 var nn []*Node 2591 for _, t := range params.Fields().Slice() { 2592 v := t.Nname 2593 if v == nil { 2594 continue 2595 } 2596 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class == PPARAMOUT { 2597 nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop))) 2598 } 2599 } 2600 2601 return nn 2602 } 2603 2604 // heapmoves generates code to handle migrating heap-escaped parameters 2605 // between the stack and the heap. The generated code is added to Curfn's 2606 // Enter and Exit lists. 2607 func heapmoves() { 2608 lno := lineno 2609 lineno = Curfn.Pos 2610 nn := paramstoheap(Curfn.Type.Recvs()) 2611 nn = append(nn, paramstoheap(Curfn.Type.Params())...) 2612 nn = append(nn, paramstoheap(Curfn.Type.Results())...) 2613 Curfn.Func.Enter.Append(nn...) 2614 lineno = Curfn.Func.Endlineno 2615 Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...) 
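// At this point Enter copies the receiver and parameters from their stack
// slots into their heap homes (and zeroes the result slots, as described in
// paramstoheap), while Exit copies heap-escaped results back to their stack
// slots just before the function returns.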
2616 lineno = lno 2617 } 2618 2619 func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node { 2620 if fn.Type == nil || fn.Type.Etype != TFUNC { 2621 Fatalf("mkcall %v %v", fn, fn.Type) 2622 } 2623 2624 n := fn.Type.Params().NumFields() 2625 2626 r := nod(OCALL, fn, nil) 2627 r.List.Set(va[:n]) 2628 if fn.Type.Results().NumFields() > 0 { 2629 r = typecheck(r, Erv|Efnstruct) 2630 } else { 2631 r = typecheck(r, Etop) 2632 } 2633 r = walkexpr(r, init) 2634 r.Type = t 2635 return r 2636 } 2637 2638 func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node { 2639 return vmkcall(syslook(name), t, init, args) 2640 } 2641 2642 func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node { 2643 return vmkcall(fn, t, init, args) 2644 } 2645 2646 func conv(n *Node, t *Type) *Node { 2647 if eqtype(n.Type, t) { 2648 return n 2649 } 2650 n = nod(OCONV, n, nil) 2651 n.Type = t 2652 n = typecheck(n, Erv) 2653 return n 2654 } 2655 2656 // byteindex converts n, which is byte-sized, to a uint8. 2657 // We cannot use conv, because we allow converting bool to uint8 here, 2658 // which is forbidden in user code. 2659 func byteindex(n *Node) *Node { 2660 if eqtype(n.Type, Types[TUINT8]) { 2661 return n 2662 } 2663 n = nod(OCONV, n, nil) 2664 n.Type = Types[TUINT8] 2665 n.Typecheck = 1 2666 return n 2667 } 2668 2669 func chanfn(name string, n int, t *Type) *Node { 2670 if !t.IsChan() { 2671 Fatalf("chanfn %v", t) 2672 } 2673 fn := syslook(name) 2674 switch n { 2675 default: 2676 Fatalf("chanfn %d", n) 2677 case 1: 2678 fn = substArgTypes(fn, t.Elem()) 2679 case 2: 2680 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2681 } 2682 return fn 2683 } 2684 2685 func mapfn(name string, t *Type) *Node { 2686 if !t.IsMap() { 2687 Fatalf("mapfn %v", t) 2688 } 2689 fn := syslook(name) 2690 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2691 return fn 2692 } 2693 2694 func mapfndel(name string, t *Type) *Node { 2695 if !t.IsMap() { 2696 Fatalf("mapfn %v", t) 2697 } 2698 fn := syslook(name) 2699 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2700 return fn 2701 } 2702 2703 func writebarrierfn(name string, l *Type, r *Type) *Node { 2704 fn := syslook(name) 2705 fn = substArgTypes(fn, l, r) 2706 return fn 2707 } 2708 2709 func addstr(n *Node, init *Nodes) *Node { 2710 // orderexpr rewrote OADDSTR to have a list of strings. 2711 c := n.List.Len() 2712 2713 if c < 2 { 2714 yyerror("addstr count %d too small", c) 2715 } 2716 2717 buf := nodnil() 2718 if n.Esc == EscNone { 2719 sz := int64(0) 2720 for _, n1 := range n.List.Slice() { 2721 if n1.Op == OLITERAL { 2722 sz += int64(len(n1.Val().U.(string))) 2723 } 2724 } 2725 2726 // Don't allocate the buffer if the result won't fit. 2727 if sz < tmpstringbufsize { 2728 // Create temporary buffer for result string on stack. 2729 t := typArray(Types[TUINT8], tmpstringbufsize) 2730 2731 buf = nod(OADDR, temp(t), nil) 2732 } 2733 } 2734 2735 // build list of string arguments 2736 args := []*Node{buf} 2737 for _, n2 := range n.List.Slice() { 2738 args = append(args, conv(n2, Types[TSTRING])) 2739 } 2740 2741 var fn string 2742 if c <= 5 { 2743 // small numbers of strings use direct runtime helpers. 2744 // note: orderexpr knows this cutoff too. 2745 fn = fmt.Sprintf("concatstring%d", c) 2746 } else { 2747 // large numbers of strings are passed to the runtime as a slice. 
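// For example (illustrative): a+b+c calls concatstring3(buf, a, b, c)
// directly, while a six-operand concatenation ends up here as
// concatstrings(buf, []string{...}).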
2748 fn = "concatstrings" 2749 2750 t := typSlice(Types[TSTRING]) 2751 slice := nod(OCOMPLIT, nil, typenod(t)) 2752 if prealloc[n] != nil { 2753 prealloc[slice] = prealloc[n] 2754 } 2755 slice.List.Set(args[1:]) // skip buf arg 2756 args = []*Node{buf, slice} 2757 slice.Esc = EscNone 2758 } 2759 2760 cat := syslook(fn) 2761 r := nod(OCALL, cat, nil) 2762 r.List.Set(args) 2763 r = typecheck(r, Erv) 2764 r = walkexpr(r, init) 2765 r.Type = n.Type 2766 2767 return r 2768 } 2769 2770 // expand append(l1, l2...) to 2771 // init { 2772 // s := l1 2773 // n := len(s) + len(l2) 2774 // // Compare as uint so growslice can panic on overflow. 2775 // if uint(n) > uint(cap(s)) { 2776 // s = growslice(s, n) 2777 // } 2778 // s = s[:n] 2779 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2780 // } 2781 // s 2782 // 2783 // l2 is allowed to be a string. 2784 func appendslice(n *Node, init *Nodes) *Node { 2785 walkexprlistsafe(n.List.Slice(), init) 2786 2787 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2788 // and n are name or literal, but those may index the slice we're 2789 // modifying here. Fix explicitly. 2790 ls := n.List.Slice() 2791 for i1, n1 := range ls { 2792 ls[i1] = cheapexpr(n1, init) 2793 } 2794 2795 l1 := n.List.First() 2796 l2 := n.List.Second() 2797 2798 var l []*Node 2799 2800 // var s []T 2801 s := temp(l1.Type) 2802 l = append(l, nod(OAS, s, l1)) // s = l1 2803 2804 // n := len(s) + len(l2) 2805 nn := temp(Types[TINT]) 2806 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2807 2808 // if uint(n) > uint(cap(s)) 2809 nif := nod(OIF, nil, nil) 2810 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2811 nif.Left.Left.Type = Types[TUINT] 2812 nif.Left.Right.Type = Types[TUINT] 2813 2814 // instantiate growslice(Type*, []any, int) []any 2815 fn := syslook("growslice") 2816 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2817 2818 // s = growslice(T, s, n) 2819 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2820 l = append(l, nif) 2821 2822 // s = s[:n] 2823 nt := nod(OSLICE, s, nil) 2824 nt.SetSliceBounds(nil, nn, nil) 2825 nt.Etype = 1 2826 l = append(l, nod(OAS, s, nt)) 2827 2828 if haspointers(l1.Type.Elem()) { 2829 // copy(s[len(l1):], l2) 2830 nptr1 := nod(OSLICE, s, nil) 2831 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2832 nptr1.Etype = 1 2833 nptr2 := l2 2834 fn := syslook("typedslicecopy") 2835 fn = substArgTypes(fn, l1.Type, l2.Type) 2836 var ln Nodes 2837 ln.Set(l) 2838 nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 2839 l = append(ln.Slice(), nt) 2840 } else if instrumenting && !compiling_runtime { 2841 // rely on runtime to instrument copy. 
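// (Going through slicecopy/slicestringcopy keeps the race and msan
// instrumentation able to observe the copied memory, which the raw memmove
// call in the branch below would bypass.)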
2842 // copy(s[len(l1):], l2) 2843 nptr1 := nod(OSLICE, s, nil) 2844 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2845 nptr1.Etype = 1 2846 nptr2 := l2 2847 var fn *Node 2848 if l2.Type.IsString() { 2849 fn = syslook("slicestringcopy") 2850 } else { 2851 fn = syslook("slicecopy") 2852 } 2853 fn = substArgTypes(fn, l1.Type, l2.Type) 2854 var ln Nodes 2855 ln.Set(l) 2856 nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 2857 l = append(ln.Slice(), nt) 2858 } else { 2859 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2860 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 2861 nptr1.Bounded = true 2862 2863 nptr1 = nod(OADDR, nptr1, nil) 2864 2865 nptr2 := nod(OSPTR, l2, nil) 2866 2867 fn := syslook("memmove") 2868 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2869 2870 var ln Nodes 2871 ln.Set(l) 2872 nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln) 2873 2874 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 2875 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 2876 l = append(ln.Slice(), nt) 2877 } 2878 2879 typecheckslice(l, Etop) 2880 walkstmtlist(l) 2881 init.Append(l...) 2882 return s 2883 } 2884 2885 // Rewrite append(src, x, y, z) so that any side effects in 2886 // x, y, z (including runtime panics) are evaluated in 2887 // initialization statements before the append. 2888 // For normal code generation, stop there and leave the 2889 // rest to cgen_append. 2890 // 2891 // For race detector, expand append(src, a [, b]* ) to 2892 // 2893 // init { 2894 // s := src 2895 // const argc = len(args) - 1 2896 // if cap(s) - len(s) < argc { 2897 // s = growslice(s, len(s)+argc) 2898 // } 2899 // n := len(s) 2900 // s = s[:n+argc] 2901 // s[n] = a 2902 // s[n+1] = b 2903 // ... 2904 // } 2905 // s 2906 func walkappend(n *Node, init *Nodes, dst *Node) *Node { 2907 if !samesafeexpr(dst, n.List.First()) { 2908 n.List.SetIndex(0, safeexpr(n.List.Index(0), init)) 2909 n.List.SetIndex(0, walkexpr(n.List.Index(0), init)) 2910 } 2911 walkexprlistsafe(n.List.Slice()[1:], init) 2912 2913 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2914 // and n are name or literal, but those may index the slice we're 2915 // modifying here. Fix explicitly. 2916 // Using cheapexpr also makes sure that the evaluation 2917 // of all arguments (and especially any panics) happen 2918 // before we begin to modify the slice in a visible way. 2919 ls := n.List.Slice()[1:] 2920 for i, n := range ls { 2921 ls[i] = cheapexpr(n, init) 2922 } 2923 2924 nsrc := n.List.First() 2925 2926 argc := n.List.Len() - 1 2927 if argc < 1 { 2928 return nsrc 2929 } 2930 2931 // General case, with no function calls left as arguments. 2932 // Leave for gen, except that instrumentation requires old form. 
2933 if !instrumenting || compiling_runtime { 2934 return n 2935 } 2936 2937 var l []*Node 2938 2939 ns := temp(nsrc.Type) 2940 l = append(l, nod(OAS, ns, nsrc)) // s = src 2941 2942 na := nodintconst(int64(argc)) // const argc 2943 nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc 2944 nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na) 2945 2946 fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T) 2947 fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem()) 2948 2949 nx.Nbody.Set1(nod(OAS, ns, 2950 mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns, 2951 nod(OADD, nod(OLEN, ns, nil), na)))) 2952 2953 l = append(l, nx) 2954 2955 nn := temp(Types[TINT]) 2956 l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s) 2957 2958 nx = nod(OSLICE, ns, nil) // ...s[:n+argc] 2959 nx.SetSliceBounds(nil, nod(OADD, nn, na), nil) 2960 nx.Etype = 1 2961 l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc] 2962 2963 ls = n.List.Slice()[1:] 2964 for i, n := range ls { 2965 nx = nod(OINDEX, ns, nn) // s[n] ... 2966 nx.Bounded = true 2967 l = append(l, nod(OAS, nx, n)) // s[n] = arg 2968 if i+1 < len(ls) { 2969 l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1 2970 } 2971 } 2972 2973 typecheckslice(l, Etop) 2974 walkstmtlist(l) 2975 init.Append(l...) 2976 return ns 2977 } 2978 2979 // Lower copy(a, b) to a memmove call or a runtime call. 2980 // 2981 // init { 2982 // n := len(a) 2983 // if n > len(b) { n = len(b) } 2984 // memmove(a.ptr, b.ptr, n*sizeof(elem(a))) 2985 // } 2986 // n; 2987 // 2988 // Also works if b is a string. 2989 // 2990 func copyany(n *Node, init *Nodes, runtimecall bool) *Node { 2991 if haspointers(n.Left.Type.Elem()) { 2992 fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type) 2993 return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right) 2994 } 2995 2996 if runtimecall { 2997 var fn *Node 2998 if n.Right.Type.IsString() { 2999 fn = syslook("slicestringcopy") 3000 } else { 3001 fn = syslook("slicecopy") 3002 } 3003 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 3004 return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width)) 3005 } 3006 3007 n.Left = walkexpr(n.Left, init) 3008 n.Right = walkexpr(n.Right, init) 3009 nl := temp(n.Left.Type) 3010 nr := temp(n.Right.Type) 3011 var l []*Node 3012 l = append(l, nod(OAS, nl, n.Left)) 3013 l = append(l, nod(OAS, nr, n.Right)) 3014 3015 nfrm := nod(OSPTR, nr, nil) 3016 nto := nod(OSPTR, nl, nil) 3017 3018 nlen := temp(Types[TINT]) 3019 3020 // n = len(to) 3021 l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil))) 3022 3023 // if n > len(frm) { n = len(frm) } 3024 nif := nod(OIF, nil, nil) 3025 3026 nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil)) 3027 nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil))) 3028 l = append(l, nif) 3029 3030 // Call memmove. 3031 fn := syslook("memmove") 3032 3033 fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem()) 3034 nwid := temp(Types[TUINTPTR]) 3035 l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR]))) 3036 nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width)) 3037 l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid)) 3038 3039 typecheckslice(l, Etop) 3040 walkstmtlist(l) 3041 init.Append(l...) 3042 return nlen 3043 } 3044 3045 func eqfor(t *Type, needsize *int) *Node { 3046 // Should only arrive here with large memory or 3047 // a struct/array containing a non-memory field/element. 
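// (For example, illustratively: a [64]byte operand uses memequal with an
// explicit size argument, while a five-field struct containing a string gets
// its generated .eq function and no size argument.)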
3048 // Small memory is handled inline, and single non-memory 3049 // is handled during type check (OCMPSTR etc). 3050 switch a, _ := algtype1(t); a { 3051 case AMEM: 3052 n := syslook("memequal") 3053 n = substArgTypes(n, t, t) 3054 *needsize = 1 3055 return n 3056 case ASPECIAL: 3057 sym := typesymprefix(".eq", t) 3058 n := newname(sym) 3059 n.Class = PFUNC 3060 ntype := nod(OTFUNC, nil, nil) 3061 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3062 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3063 ntype.Rlist.Append(nod(ODCLFIELD, nil, typenod(Types[TBOOL]))) 3064 ntype = typecheck(ntype, Etype) 3065 n.Type = ntype.Type 3066 *needsize = 0 3067 return n 3068 } 3069 Fatalf("eqfor %v", t) 3070 return nil 3071 } 3072 3073 // The result of walkcompare MUST be assigned back to n, e.g. 3074 // n.Left = walkcompare(n.Left, init) 3075 func walkcompare(n *Node, init *Nodes) *Node { 3076 // Given interface value l and concrete value r, rewrite 3077 // l == r 3078 // into types-equal && data-equal. 3079 // This is efficient, avoids allocations, and avoids runtime calls. 3080 var l, r *Node 3081 if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() { 3082 l = n.Left 3083 r = n.Right 3084 } else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() { 3085 l = n.Right 3086 r = n.Left 3087 } 3088 3089 if l != nil { 3090 // Handle both == and !=. 3091 eq := n.Op 3092 var andor Op 3093 if eq == OEQ { 3094 andor = OANDAND 3095 } else { 3096 andor = OOROR 3097 } 3098 // Check for types equal. 3099 // For empty interface, this is: 3100 // l.tab == type(r) 3101 // For non-empty interface, this is: 3102 // l.tab != nil && l.tab._type == type(r) 3103 var eqtype *Node 3104 tab := nod(OITAB, l, nil) 3105 rtyp := typename(r.Type) 3106 if l.Type.IsEmptyInterface() { 3107 tab.Type = ptrto(Types[TUINT8]) 3108 tab.Typecheck = 1 3109 eqtype = nod(eq, tab, rtyp) 3110 } else { 3111 nonnil := nod(brcom(eq), nodnil(), tab) 3112 match := nod(eq, itabType(tab), rtyp) 3113 eqtype = nod(andor, nonnil, match) 3114 } 3115 // Check for data equal. 3116 eqdata := nod(eq, ifaceData(l, r.Type), r) 3117 // Put it all together. 3118 expr := nod(andor, eqtype, eqdata) 3119 n = finishcompare(n, expr, init) 3120 return n 3121 } 3122 3123 // Must be comparison of array or struct. 3124 // Otherwise back end handles it. 3125 // While we're here, decide whether to 3126 // inline or call an eq alg. 3127 t := n.Left.Type 3128 var inline bool 3129 switch t.Etype { 3130 default: 3131 return n 3132 case TARRAY: 3133 inline = t.NumElem() <= 1 || (t.NumElem() <= 4 && issimple[t.Elem().Etype]) 3134 case TSTRUCT: 3135 inline = t.NumFields() <= 4 3136 } 3137 3138 cmpl := n.Left 3139 for cmpl != nil && cmpl.Op == OCONVNOP { 3140 cmpl = cmpl.Left 3141 } 3142 cmpr := n.Right 3143 for cmpr != nil && cmpr.Op == OCONVNOP { 3144 cmpr = cmpr.Left 3145 } 3146 3147 // Chose not to inline. Call equality function directly. 
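// For example (illustrative): comparing two [16]byte or two five-field struct
// values takes this call path, while a [2]int or a two-field struct is
// expanded further below into element-by-element or field-by-field ==.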
3148 if !inline { 3149 if isvaluelit(cmpl) { 3150 var_ := temp(cmpl.Type) 3151 anylit(cmpl, var_, init) 3152 cmpl = var_ 3153 } 3154 if isvaluelit(cmpr) { 3155 var_ := temp(cmpr.Type) 3156 anylit(cmpr, var_, init) 3157 cmpr = var_ 3158 } 3159 if !islvalue(cmpl) || !islvalue(cmpr) { 3160 Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) 3161 } 3162 3163 // eq algs take pointers 3164 pl := temp(ptrto(t)) 3165 al := nod(OAS, pl, nod(OADDR, cmpl, nil)) 3166 al.Right.Etype = 1 // addr does not escape 3167 al = typecheck(al, Etop) 3168 init.Append(al) 3169 3170 pr := temp(ptrto(t)) 3171 ar := nod(OAS, pr, nod(OADDR, cmpr, nil)) 3172 ar.Right.Etype = 1 // addr does not escape 3173 ar = typecheck(ar, Etop) 3174 init.Append(ar) 3175 3176 var needsize int 3177 call := nod(OCALL, eqfor(t, &needsize), nil) 3178 call.List.Append(pl) 3179 call.List.Append(pr) 3180 if needsize != 0 { 3181 call.List.Append(nodintconst(t.Width)) 3182 } 3183 res := call 3184 if n.Op != OEQ { 3185 res = nod(ONOT, res, nil) 3186 } 3187 n = finishcompare(n, res, init) 3188 return n 3189 } 3190 3191 // inline: build boolean expression comparing element by element 3192 andor := OANDAND 3193 if n.Op == ONE { 3194 andor = OOROR 3195 } 3196 var expr *Node 3197 compare := func(el, er *Node) { 3198 a := nod(n.Op, el, er) 3199 if expr == nil { 3200 expr = a 3201 } else { 3202 expr = nod(andor, expr, a) 3203 } 3204 } 3205 cmpl = safeexpr(cmpl, init) 3206 cmpr = safeexpr(cmpr, init) 3207 if t.IsStruct() { 3208 for _, f := range t.Fields().Slice() { 3209 sym := f.Sym 3210 if isblanksym(sym) { 3211 continue 3212 } 3213 compare( 3214 nodSym(OXDOT, cmpl, sym), 3215 nodSym(OXDOT, cmpr, sym), 3216 ) 3217 } 3218 } else { 3219 for i := 0; int64(i) < t.NumElem(); i++ { 3220 compare( 3221 nod(OINDEX, cmpl, nodintconst(int64(i))), 3222 nod(OINDEX, cmpr, nodintconst(int64(i))), 3223 ) 3224 } 3225 } 3226 if expr == nil { 3227 expr = nodbool(n.Op == OEQ) 3228 } 3229 n = finishcompare(n, expr, init) 3230 return n 3231 } 3232 3233 // The result of finishcompare MUST be assigned back to n, e.g. 3234 // n.Left = finishcompare(n.Left, x, r, init) 3235 func finishcompare(n, r *Node, init *Nodes) *Node { 3236 // Use nn here to avoid passing r to typecheck. 3237 nn := r 3238 nn = typecheck(nn, Erv) 3239 nn = walkexpr(nn, init) 3240 r = nn 3241 if r.Type != n.Type { 3242 r = nod(OCONVNOP, r, nil) 3243 r.Type = n.Type 3244 r.Typecheck = 1 3245 nn = r 3246 } 3247 return nn 3248 } 3249 3250 // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers. 3251 func (n *Node) isIntOrdering() bool { 3252 switch n.Op { 3253 case OLE, OLT, OGE, OGT: 3254 default: 3255 return false 3256 } 3257 return n.Left.Type.IsInteger() && n.Right.Type.IsInteger() 3258 } 3259 3260 // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10. 3261 // n must be an OANDAND or OOROR node. 3262 // The result of walkinrange MUST be assigned back to n, e.g. 3263 // n.Left = walkinrange(n.Left) 3264 func walkinrange(n *Node, init *Nodes) *Node { 3265 // We are looking for something equivalent to a opl b OP b opr c, where: 3266 // * a, b, and c have integer type 3267 // * b is side-effect-free 3268 // * opl and opr are each < or ≤ 3269 // * OP is && 3270 l := n.Left 3271 r := n.Right 3272 if !l.isIntOrdering() || !r.isIntOrdering() { 3273 return n 3274 } 3275 3276 // Find b, if it exists, and rename appropriately. 
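// For example (illustrative of the overall rewrite): 4 <= x && x < 10 becomes
// the equivalent of uint(x-4) < 6, and x < 4 || x >= 10 becomes
// uint(x-4) >= 6 via de Morgan below; b is x in both cases.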
3277 // Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right 3278 // Output is: a opl b(==x) ANDAND/OROR b(==x) opr c 3279 a, opl, b := l.Left, l.Op, l.Right 3280 x, opr, c := r.Left, r.Op, r.Right 3281 for i := 0; ; i++ { 3282 if samesafeexpr(b, x) { 3283 break 3284 } 3285 if i == 3 { 3286 // Tried all permutations and couldn't find an appropriate b == x. 3287 return n 3288 } 3289 if i&1 == 0 { 3290 a, opl, b = b, brrev(opl), a 3291 } else { 3292 x, opr, c = c, brrev(opr), x 3293 } 3294 } 3295 3296 // If n.Op is ||, apply de Morgan. 3297 // Negate the internal ops now; we'll negate the top level op at the end. 3298 // Henceforth assume &&. 3299 negateResult := n.Op == OOROR 3300 if negateResult { 3301 opl = brcom(opl) 3302 opr = brcom(opr) 3303 } 3304 3305 cmpdir := func(o Op) int { 3306 switch o { 3307 case OLE, OLT: 3308 return -1 3309 case OGE, OGT: 3310 return +1 3311 } 3312 Fatalf("walkinrange cmpdir %v", o) 3313 return 0 3314 } 3315 if cmpdir(opl) != cmpdir(opr) { 3316 // Not a range check; something like b < a && b < c. 3317 return n 3318 } 3319 3320 switch opl { 3321 case OGE, OGT: 3322 // We have something like a > b && b ≥ c. 3323 // Switch and reverse ops and rename constants, 3324 // to make it look like a ≤ b && b < c. 3325 a, c = c, a 3326 opl, opr = brrev(opr), brrev(opl) 3327 } 3328 3329 // We must ensure that c-a is non-negative. 3330 // For now, require a and c to be constants. 3331 // In the future, we could also support a == 0 and c == len/cap(...). 3332 // Unfortunately, by this point, most len/cap expressions have been 3333 // stored into temporary variables. 3334 if !Isconst(a, CTINT) || !Isconst(c, CTINT) { 3335 return n 3336 } 3337 3338 if opl == OLT { 3339 // We have a < b && ... 3340 // We need a ≤ b && ... to safely use unsigned comparison tricks. 3341 // If a is not the maximum constant for b's type, 3342 // we can increment a and switch to ≤. 3343 if a.Int64() >= maxintval[b.Type.Etype].Int64() { 3344 return n 3345 } 3346 a = nodintconst(a.Int64() + 1) 3347 opl = OLE 3348 } 3349 3350 bound := c.Int64() - a.Int64() 3351 if bound < 0 { 3352 // Bad news. Something like 5 <= x && x < 3. 3353 // Rare in practice, and we still need to generate side-effects, 3354 // so just leave it alone. 3355 return n 3356 } 3357 3358 // We have a ≤ b && b < c (or a ≤ b && b ≤ c). 3359 // This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a), 3360 // which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a), 3361 // which is equivalent to uint(b-a) < uint(c-a). 3362 ut := b.Type.toUnsigned() 3363 lhs := conv(nod(OSUB, b, a), ut) 3364 rhs := nodintconst(bound) 3365 if negateResult { 3366 // Negate top level. 3367 opr = brcom(opr) 3368 } 3369 cmp := nod(opr, lhs, rhs) 3370 cmp.Pos = n.Pos 3371 cmp = addinit(cmp, l.Ninit.Slice()) 3372 cmp = addinit(cmp, r.Ninit.Slice()) 3373 // Typecheck the AST rooted at cmp... 3374 cmp = typecheck(cmp, Erv) 3375 // ...but then reset cmp's type to match n's type. 3376 cmp.Type = n.Type 3377 cmp = walkexpr(cmp, init) 3378 return cmp 3379 } 3380 3381 // walkdiv rewrites division by a constant as less expensive 3382 // operations. 3383 // The result of walkdiv MUST be assigned back to n, e.g. 3384 // n.Left = walkdiv(n.Left, init) 3385 func walkdiv(n *Node, init *Nodes) *Node { 3386 // if >= 0, nr is 1<<pow // 1 if nr is negative. 3387 3388 if n.Right.Op != OLITERAL { 3389 return n 3390 } 3391 3392 // nr is a constant. 
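// For example (illustrative), with x a uint32:
//	x / 8  becomes  x >> 3
//	x % 8  becomes  x & 7
//	x / 10 becomes a high-multiply by a magic constant followed by a shift
//	       (Hacker's Delight, chapter 10), and x % 10 is then rebuilt from it
//	       as x - (x/10)*10.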
3393 nl := cheapexpr(n.Left, init) 3394 3395 nr := n.Right 3396 3397 // special cases of mod/div 3398 // by a constant 3399 w := int(nl.Type.Width * 8) 3400 3401 s := 0 // 1 if nr is negative. 3402 pow := powtwo(nr) // if >= 0, nr is 1<<pow 3403 if pow >= 1000 { 3404 // negative power of 2 3405 s = 1 3406 3407 pow -= 1000 3408 } 3409 3410 if pow+1 >= w { 3411 // divisor too large. 3412 return n 3413 } 3414 3415 if pow < 0 { 3416 // try to do division by multiply by (2^w)/d 3417 // see hacker's delight chapter 10 3418 // TODO: support 64-bit magic multiply here. 3419 var m Magic 3420 m.W = w 3421 3422 if nl.Type.IsSigned() { 3423 m.Sd = nr.Int64() 3424 smagic(&m) 3425 } else { 3426 m.Ud = uint64(nr.Int64()) 3427 umagic(&m) 3428 } 3429 3430 if m.Bad != 0 { 3431 return n 3432 } 3433 3434 // We have a quick division method so use it 3435 // for modulo too. 3436 if n.Op == OMOD { 3437 // rewrite as A%B = A - (A/B*B). 3438 n1 := nod(ODIV, nl, nr) 3439 3440 n2 := nod(OMUL, n1, nr) 3441 n = nod(OSUB, nl, n2) 3442 goto ret 3443 } 3444 3445 switch simtype[nl.Type.Etype] { 3446 default: 3447 return n 3448 3449 // n1 = nl * magic >> w (HMUL) 3450 case TUINT8, TUINT16, TUINT32: 3451 var nc Node 3452 3453 Nodconst(&nc, nl.Type, int64(m.Um)) 3454 n1 := nod(OHMUL, nl, &nc) 3455 n1 = typecheck(n1, Erv) 3456 if m.Ua != 0 { 3457 // Select a Go type with (at least) twice the width. 3458 var twide *Type 3459 switch simtype[nl.Type.Etype] { 3460 default: 3461 return n 3462 3463 case TUINT8, TUINT16: 3464 twide = Types[TUINT32] 3465 3466 case TUINT32: 3467 twide = Types[TUINT64] 3468 3469 case TINT8, TINT16: 3470 twide = Types[TINT32] 3471 3472 case TINT32: 3473 twide = Types[TINT64] 3474 } 3475 3476 // add numerator (might overflow). 3477 // n2 = (n1 + nl) 3478 n2 := nod(OADD, conv(n1, twide), conv(nl, twide)) 3479 3480 // shift by m.s 3481 var nc Node 3482 3483 Nodconst(&nc, Types[TUINT], int64(m.S)) 3484 n = conv(nod(ORSH, n2, &nc), nl.Type) 3485 } else { 3486 // n = n1 >> m.s 3487 var nc Node 3488 3489 Nodconst(&nc, Types[TUINT], int64(m.S)) 3490 n = nod(ORSH, n1, &nc) 3491 } 3492 3493 // n1 = nl * magic >> w 3494 case TINT8, TINT16, TINT32: 3495 var nc Node 3496 3497 Nodconst(&nc, nl.Type, m.Sm) 3498 n1 := nod(OHMUL, nl, &nc) 3499 n1 = typecheck(n1, Erv) 3500 if m.Sm < 0 { 3501 // add the numerator. 3502 n1 = nod(OADD, n1, nl) 3503 } 3504 3505 // shift by m.s 3506 var ns Node 3507 3508 Nodconst(&ns, Types[TUINT], int64(m.S)) 3509 n2 := conv(nod(ORSH, n1, &ns), nl.Type) 3510 3511 // add 1 iff n1 is negative. 3512 var nneg Node 3513 3514 Nodconst(&nneg, Types[TUINT], int64(w)-1) 3515 n3 := nod(ORSH, nl, &nneg) // n4 = -1 iff n1 is negative. 3516 n = nod(OSUB, n2, n3) 3517 3518 // apply sign. 3519 if m.Sd < 0 { 3520 n = nod(OMINUS, n, nil) 3521 } 3522 } 3523 3524 goto ret 3525 } 3526 3527 switch pow { 3528 case 0: 3529 if n.Op == OMOD { 3530 // nl % 1 is zero. 3531 Nodconst(n, n.Type, 0) 3532 } else if s != 0 { 3533 // divide by -1 3534 n.Op = OMINUS 3535 3536 n.Right = nil 3537 } else { 3538 // divide by 1 3539 n = nl 3540 } 3541 3542 default: 3543 if n.Type.IsSigned() { 3544 if n.Op == OMOD { 3545 // signed modulo 2^pow is like ANDing 3546 // with the last pow bits, but if nl < 0, 3547 // nl & (2^pow-1) is (nl+1)%2^pow - 1. 3548 var nc Node 3549 3550 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1) 3551 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0. 3552 if pow == 1 { 3553 n1 = typecheck(n1, Erv) 3554 n1 = cheapexpr(n1, init) 3555 3556 // n = (nl+ε)&1 -ε where ε=1 iff nl<0. 
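// (Illustrative check: for nl = -3, ε = 1, so (-3+1)&1 - 1 = -1, matching
// Go's -3 % 2 == -1, where a plain mask would give +1.)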
3557 n2 := nod(OSUB, nl, n1) 3558 3559 var nc Node 3560 Nodconst(&nc, nl.Type, 1) 3561 n3 := nod(OAND, n2, &nc) 3562 n = nod(OADD, n3, n1) 3563 } else { 3564 // n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0. 3565 var nc Node 3566 3567 Nodconst(&nc, nl.Type, (1<<uint(pow))-1) 3568 n2 := nod(OAND, n1, &nc) // n2 = 2^pow-1 iff nl<0. 3569 n2 = typecheck(n2, Erv) 3570 n2 = cheapexpr(n2, init) 3571 3572 n3 := nod(OADD, nl, n2) 3573 n4 := nod(OAND, n3, &nc) 3574 n = nod(OSUB, n4, n2) 3575 } 3576 3577 break 3578 } else { 3579 // arithmetic right shift does not give the correct rounding. 3580 // if nl >= 0, nl >> n == nl / nr 3581 // if nl < 0, we want to add 2^n-1 first. 3582 var nc Node 3583 3584 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1) 3585 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0. 3586 if pow == 1 { 3587 // nl+1 is nl-(-1) 3588 n.Left = nod(OSUB, nl, n1) 3589 } else { 3590 // Do a logical right right on -1 to keep pow bits. 3591 var nc Node 3592 3593 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-int64(pow)) 3594 n2 := nod(ORSH, conv(n1, nl.Type.toUnsigned()), &nc) 3595 n.Left = nod(OADD, nl, conv(n2, nl.Type)) 3596 } 3597 3598 // n = (nl + 2^pow-1) >> pow 3599 n.Op = ORSH 3600 3601 var n2 Node 3602 Nodconst(&n2, Types[simtype[TUINT]], int64(pow)) 3603 n.Right = &n2 3604 n.Typecheck = 0 3605 } 3606 3607 if s != 0 { 3608 n = nod(OMINUS, n, nil) 3609 } 3610 break 3611 } 3612 3613 var nc Node 3614 if n.Op == OMOD { 3615 // n = nl & (nr-1) 3616 n.Op = OAND 3617 3618 Nodconst(&nc, nl.Type, nr.Int64()-1) 3619 } else { 3620 // n = nl >> pow 3621 n.Op = ORSH 3622 3623 Nodconst(&nc, Types[simtype[TUINT]], int64(pow)) 3624 } 3625 3626 n.Typecheck = 0 3627 n.Right = &nc 3628 } 3629 3630 goto ret 3631 3632 ret: 3633 n = typecheck(n, Erv) 3634 n = walkexpr(n, init) 3635 return n 3636 } 3637 3638 // return 1 if integer n must be in range [0, max), 0 otherwise 3639 func bounded(n *Node, max int64) bool { 3640 if n.Type == nil || !n.Type.IsInteger() { 3641 return false 3642 } 3643 3644 sign := n.Type.IsSigned() 3645 bits := int32(8 * n.Type.Width) 3646 3647 if smallintconst(n) { 3648 v := n.Int64() 3649 return 0 <= v && v < max 3650 } 3651 3652 switch n.Op { 3653 case OAND: 3654 v := int64(-1) 3655 if smallintconst(n.Left) { 3656 v = n.Left.Int64() 3657 } else if smallintconst(n.Right) { 3658 v = n.Right.Int64() 3659 } 3660 3661 if 0 <= v && v < max { 3662 return true 3663 } 3664 3665 case OMOD: 3666 if !sign && smallintconst(n.Right) { 3667 v := n.Right.Int64() 3668 if 0 <= v && v <= max { 3669 return true 3670 } 3671 } 3672 3673 case ODIV: 3674 if !sign && smallintconst(n.Right) { 3675 v := n.Right.Int64() 3676 for bits > 0 && v >= 2 { 3677 bits-- 3678 v >>= 1 3679 } 3680 } 3681 3682 case ORSH: 3683 if !sign && smallintconst(n.Right) { 3684 v := n.Right.Int64() 3685 if v > int64(bits) { 3686 return true 3687 } 3688 bits -= int32(v) 3689 } 3690 } 3691 3692 if !sign && bits <= 62 && 1<<uint(bits) <= max { 3693 return true 3694 } 3695 3696 return false 3697 } 3698 3699 // usemethod check interface method calls for uses of reflect.Type.Method. 3700 func usemethod(n *Node) { 3701 t := n.Left.Type 3702 3703 // Looking for either of: 3704 // Method(int) reflect.Method 3705 // MethodByName(string) (reflect.Method, bool) 3706 // 3707 // TODO(crawshaw): improve precision of match by working out 3708 // how to check the method name. 
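// The shape checks below therefore accept exactly one parameter (int or
// string) and either a single reflect.Method result or a
// (reflect.Method, bool) pair.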
3709 if n := t.Params().NumFields(); n != 1 { 3710 return 3711 } 3712 if n := t.Results().NumFields(); n != 1 && n != 2 { 3713 return 3714 } 3715 p0 := t.Params().Field(0) 3716 res0 := t.Results().Field(0) 3717 var res1 *Field 3718 if t.Results().NumFields() == 2 { 3719 res1 = t.Results().Field(1) 3720 } 3721 3722 if res1 == nil { 3723 if p0.Type.Etype != TINT { 3724 return 3725 } 3726 } else { 3727 if !p0.Type.IsString() { 3728 return 3729 } 3730 if !res1.Type.IsBoolean() { 3731 return 3732 } 3733 } 3734 if res0.Type.String() != "reflect.Method" { 3735 return 3736 } 3737 3738 Curfn.Func.ReflectMethod = true 3739 } 3740 3741 func usefield(n *Node) { 3742 if obj.Fieldtrack_enabled == 0 { 3743 return 3744 } 3745 3746 switch n.Op { 3747 default: 3748 Fatalf("usefield %v", n.Op) 3749 3750 case ODOT, ODOTPTR: 3751 break 3752 } 3753 if n.Sym == nil { 3754 // No field name. This DOTPTR was built by the compiler for access 3755 // to runtime data structures. Ignore. 3756 return 3757 } 3758 3759 t := n.Left.Type 3760 if t.IsPtr() { 3761 t = t.Elem() 3762 } 3763 field := dotField[typeSym{t.Orig, n.Sym}] 3764 if field == nil { 3765 Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) 3766 } 3767 if !strings.Contains(field.Note, "go:\"track\"") { 3768 return 3769 } 3770 3771 outer := n.Left.Type 3772 if outer.IsPtr() { 3773 outer = outer.Elem() 3774 } 3775 if outer.Sym == nil { 3776 yyerror("tracked field must be in named struct type") 3777 } 3778 if !exportname(field.Sym.Name) { 3779 yyerror("tracked field must be exported (upper case)") 3780 } 3781 3782 sym := tracksym(outer, field) 3783 if Curfn.Func.FieldTrack == nil { 3784 Curfn.Func.FieldTrack = make(map[*Sym]struct{}) 3785 } 3786 Curfn.Func.FieldTrack[sym] = struct{}{} 3787 } 3788 3789 func candiscardlist(l Nodes) bool { 3790 for _, n := range l.Slice() { 3791 if !candiscard(n) { 3792 return false 3793 } 3794 } 3795 return true 3796 } 3797 3798 func candiscard(n *Node) bool { 3799 if n == nil { 3800 return true 3801 } 3802 3803 switch n.Op { 3804 default: 3805 return false 3806 3807 // Discardable as long as the subpieces are. 3808 case ONAME, 3809 ONONAME, 3810 OTYPE, 3811 OPACK, 3812 OLITERAL, 3813 OADD, 3814 OSUB, 3815 OOR, 3816 OXOR, 3817 OADDSTR, 3818 OADDR, 3819 OANDAND, 3820 OARRAYBYTESTR, 3821 OARRAYRUNESTR, 3822 OSTRARRAYBYTE, 3823 OSTRARRAYRUNE, 3824 OCAP, 3825 OCMPIFACE, 3826 OCMPSTR, 3827 OCOMPLIT, 3828 OMAPLIT, 3829 OSTRUCTLIT, 3830 OARRAYLIT, 3831 OSLICELIT, 3832 OPTRLIT, 3833 OCONV, 3834 OCONVIFACE, 3835 OCONVNOP, 3836 ODOT, 3837 OEQ, 3838 ONE, 3839 OLT, 3840 OLE, 3841 OGT, 3842 OGE, 3843 OKEY, 3844 OSTRUCTKEY, 3845 OLEN, 3846 OMUL, 3847 OLSH, 3848 ORSH, 3849 OAND, 3850 OANDNOT, 3851 ONEW, 3852 ONOT, 3853 OCOM, 3854 OPLUS, 3855 OMINUS, 3856 OOROR, 3857 OPAREN, 3858 ORUNESTR, 3859 OREAL, 3860 OIMAG, 3861 OCOMPLEX: 3862 break 3863 3864 // Discardable as long as we know it's not division by zero. 3865 case ODIV, OMOD: 3866 if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 { 3867 break 3868 } 3869 if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 { 3870 break 3871 } 3872 return false 3873 3874 // Discardable as long as we know it won't fail because of a bad size. 3875 case OMAKECHAN, OMAKEMAP: 3876 if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 { 3877 break 3878 } 3879 return false 3880 3881 // Difficult to tell what sizes are okay. 
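// (make([]T, n) can panic for a negative or huge n, and n is usually not a
// constant here, so it is conservatively never discarded.)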
3882 case OMAKESLICE: 3883 return false 3884 } 3885 3886 if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) { 3887 return false 3888 } 3889 3890 return true 3891 } 3892 3893 // rewrite 3894 // print(x, y, z) 3895 // into 3896 // func(a1, a2, a3) { 3897 // print(a1, a2, a3) 3898 // }(x, y, z) 3899 // and same for println. 3900 3901 var walkprintfunc_prgen int 3902 3903 // The result of walkprintfunc MUST be assigned back to n, e.g. 3904 // n.Left = walkprintfunc(n.Left, init) 3905 func walkprintfunc(n *Node, init *Nodes) *Node { 3906 if n.Ninit.Len() != 0 { 3907 walkstmtlist(n.Ninit.Slice()) 3908 init.AppendNodes(&n.Ninit) 3909 } 3910 3911 t := nod(OTFUNC, nil, nil) 3912 num := 0 3913 var printargs []*Node 3914 var a *Node 3915 var buf string 3916 for _, n1 := range n.List.Slice() { 3917 buf = fmt.Sprintf("a%d", num) 3918 num++ 3919 a = nod(ODCLFIELD, newname(lookup(buf)), typenod(n1.Type)) 3920 t.List.Append(a) 3921 printargs = append(printargs, a.Left) 3922 } 3923 3924 fn := nod(ODCLFUNC, nil, nil) 3925 walkprintfunc_prgen++ 3926 buf = fmt.Sprintf("print·%d", walkprintfunc_prgen) 3927 fn.Func.Nname = newname(lookup(buf)) 3928 fn.Func.Nname.Name.Defn = fn 3929 fn.Func.Nname.Name.Param.Ntype = t 3930 declare(fn.Func.Nname, PFUNC) 3931 3932 oldfn := Curfn 3933 Curfn = nil 3934 funchdr(fn) 3935 3936 a = nod(n.Op, nil, nil) 3937 a.List.Set(printargs) 3938 a = typecheck(a, Etop) 3939 a = walkstmt(a) 3940 3941 fn.Nbody.Set1(a) 3942 3943 funcbody(fn) 3944 3945 fn = typecheck(fn, Etop) 3946 typecheckslice(fn.Nbody.Slice(), Etop) 3947 xtop = append(xtop, fn) 3948 Curfn = oldfn 3949 3950 a = nod(OCALL, nil, nil) 3951 a.Left = fn.Func.Nname 3952 a.List.Set(n.List.Slice()) 3953 a = typecheck(a, Etop) 3954 a = walkexpr(a, init) 3955 return a 3956 }
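// For example (illustrative): a statement such as
//	defer println(x, y)
// is rewritten into the equivalent of
//	defer print·1(x, y)
// where print·1 is a generated function whose body is just println(a0, a1),
// giving the deferred call an ordinary function to invoke.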