github.com/panjjo/go@v0.0.0-20161104043856-d62b31386338/src/cmd/compile/internal/gc/walk.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const (
	tmpstringbufsize = 32
)

func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used {
			ln.Name.Defn.Left.Used = true
		}
	}

	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class != PAUTO && ln.Class != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Used {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Used {
				continue
			}
			lineno = defn.Left.Lineno
			yyerror("%v declared and not used", ln.Sym)
			defn.Left.Used = true // suppress repeats
		} else {
			lineno = ln.Lineno
			yyerror("%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	var arg *Node
	var lhs *Node

	callfunc := n.Left
	for _, arg = range callfunc.List.Slice() {
		if arg.Op != OAS {
			yyerror("call arg not assignment")
		}
		lhs = arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			yyerror("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//      yyerror("call arg assign not indreg(SP)");
		lhs.Xoffset += int64(adjust)
	}
}
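
// For illustration (a rough sketch, not tied to a particular ABI): for a
// statement like `defer f(x)`, the deferred-call wrapper needs room for its
// own size and fn arguments at the start of the frame, so each OINDREGSP
// store the walked call left at, say, 0(SP) is shifted by 2*Widthptr,
// e.g. to 16(SP) on a 64-bit target.
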
// The result of walkstmt MUST be assigned back to n, e.g.
// 	n.Left = walkstmt(n.Left)
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}
	if n.IsStatic { // don't walk, generated by anylit.
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		break

	case ODCL:
		v := n.Left
		if v.Class == PAUTOHEAP {
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			nn := nod(OAS, v.Name.Heapaddr, prealloc[v])
			nn.Colas = true
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		hasdefer = true
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR:
		if n.Left != nil {
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			var cl Class
			for _, ln := range Curfn.Func.Dcl {
				cl = ln.Class
				if cl == PAUTO || cl == PAUTOHEAP {
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			ls := n.List.Slice()
			for i, n := range ls {
				ls[i] = applywritebarrier(n)
			}
			break
		}

		ll := ascompatte(n.Op, nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		walkrange(n)

	case OXFALL:
		yyerror("fallthrough statement out of place")
		n.Op = OFALL
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

func isSmallMakeSlice(n *Node) bool {
	if n.Op != OMAKESLICE {
		return false
	}
	l := n.Left
	r := n.Right
	if r == nil {
		r = l
	}
	t := n.Type

	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types of expressions are calculated.
// compile-time constants are evaluated.
// complex side effects like statements are appended to init
func walkexprlist(s []*Node, init *Nodes) {
	for i := range s {
		s[i] = walkexpr(s[i], init)
	}
}

func walkexprlistsafe(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = safeexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

func walkexprlistcheap(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = cheapexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
func convFuncName(from, to *Type) string {
	tkind := to.iet()
	switch from.iet() {
	case 'I':
		switch tkind {
		case 'I':
			return "convI2I"
		}
	case 'T':
		switch tkind {
		case 'E':
			return "convT2E"
		case 'I':
			return "convT2I"
		}
	}
	Fatalf("unknown conv func %c2%c", from.iet(), to.iet())
	panic("unreachable")
}
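
// Illustrative reading of the table above: converting a concrete
// (non-interface) value to a non-empty interface is lowered through
// convT2I, converting it to interface{} goes through convT2E, and an
// interface-to-interface conversion goes through convI2I.
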
// Build name of function: assertI2E etc.
// If with2suffix is true, the form ending in "2" is returned.
func assertFuncName(from, to *Type, with2suffix bool) string {
	l := len("assertX2X2")
	if !with2suffix {
		l--
	}
	tkind := to.iet()
	switch from.iet() {
	case 'E':
		switch tkind {
		case 'I':
			return "assertE2I2"[:l]
		case 'E':
			return "assertE2E2"[:l]
		case 'T':
			return "assertE2T2"[:l]
		}
	case 'I':
		switch tkind {
		case 'I':
			return "assertI2I2"[:l]
		case 'E':
			return "assertI2E2"[:l]
		case 'T':
			return "assertI2T2"[:l]
		}
	}
	Fatalf("unknown assert func %c2%c", from.iet(), to.iet())
	panic("unreachable")
}

// The result of walkexpr MUST be assigned back to n, e.g.
// 	n.Left = walkexpr(n.Left, init)
func walkexpr(n *Node, init *Nodes) *Node {
	if n == nil {
		return n
	}

	if init == &n.Ninit {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		Fatalf("walkexpr init == &n->ninit")
	}

	if n.Ninit.Len() != 0 {
		walkstmtlist(n.Ninit.Slice())
		init.AppendNodes(&n.Ninit)
	}

	lno := setlineno(n)

	if Debug['w'] > 1 {
		Dump("walk-before", n)
	}

	if n.Typecheck != 1 {
		Fatalf("missed typecheck: %+v", n)
	}

	if n.Op == ONAME && n.Class == PAUTOHEAP {
		nn := nod(OIND, n.Name.Heapaddr, nil)
		nn = typecheck(nn, Erv)
		nn = walkexpr(nn, init)
		nn.Left.NonNil = true
		return nn
	}

opswitch:
	switch n.Op {
	default:
		Dump("walk", n)
		Fatalf("walkexpr: switch 1 unknown op %+S", n)

	case OTYPE,
		ONONAME,
		OINDREGSP,
		OEMPTY,
		OGETG:

	case ONOT,
		OMINUS,
		OPLUS,
		OCOM,
		OREAL,
		OIMAG,
		ODOTMETH,
		ODOTINTER:
		n.Left = walkexpr(n.Left, init)

	case OIND:
		n.Left = walkexpr(n.Left, init)

	case ODOT:
		usefield(n)
		n.Left = walkexpr(n.Left, init)

	case ODOTPTR:
		usefield(n)
		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
			// No actual copy will be generated, so emit an explicit nil check.
			n.Left = cheapexpr(n.Left, init)

			checknil(n.Left, init)
		}

		n.Left = walkexpr(n.Left, init)

	case OEFACE:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)

	case OSPTR, OITAB, OIDATA:
		n.Left = walkexpr(n.Left, init)

	case OLEN, OCAP:
		n.Left = walkexpr(n.Left, init)

		// replace len(*[10]int) with 10.
		// delayed until now to preserve side effects.
		t := n.Left.Type

		if t.IsPtr() {
			t = t.Elem()
		}
		if t.IsArray() {
			safeexpr(n.Left, init)
			Nodconst(n, n.Type, t.NumElem())
			n.Typecheck = 1
		}

	case OLSH, ORSH:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)
		t := n.Left.Type
		n.Bounded = bounded(n.Right, 8*t.Width)
		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
			Warn("shift bounds check elided")
		}

	// Use results from call expression as arguments for complex.
	case OAND,
		OSUB,
		OHMUL,
		OLT,
		OLE,
		OGE,
		OGT,
		OADD,
		OCOMPLEX,
		OLROT:
		if n.Op == OCOMPLEX && n.Left == nil && n.Right == nil {
			n.Left = n.List.First()
			n.Right = n.List.Second()
		}

		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)

	case OOR, OXOR:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)
		n = walkrotate(n)

	case OEQ, ONE:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)

		// Disable safemode while compiling this code: the code we
		// generate internally can refer to unsafe.Pointer.
		// In this case it can happen if we need to generate an ==
		// for a struct containing a reflect.Value, which itself has
		// an unexported field of type unsafe.Pointer.
		old_safemode := safemode
		safemode = false
		n = walkcompare(n, init)
		safemode = old_safemode

	case OANDAND, OOROR:
		n.Left = walkexpr(n.Left, init)

		// cannot put side effects from n.Right on init,
		// because they cannot run before n.Left is checked.
		// save elsewhere and store on the eventual n.Right.
		var ll Nodes

		n.Right = walkexpr(n.Right, &ll)
		n.Right = addinit(n.Right, ll.Slice())
		n = walkinrange(n, init)

	case OPRINT, OPRINTN:
		walkexprlist(n.List.Slice(), init)
		n = walkprint(n, init)

	case OPANIC:
		n = mkcall("gopanic", nil, init, n.Left)

	case ORECOVER:
		n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))

	case OLITERAL:
		n.Addable = true

	case OCLOSUREVAR, OCFUNC:
		n.Addable = true

	case ONAME:
		n.Addable = true

	case OCALLINTER:
		usemethod(n)
		t := n.Left.Type
		if n.List.Len() != 0 && n.List.First().Op == OAS {
			break
		}
		n.Left = walkexpr(n.Left, init)
		walkexprlist(n.List.Slice(), init)
		ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init)
		n.List.Set(reorder1(ll))

	case OCALLFUNC:
		if n.Left.Op == OCLOSURE {
			// Transform direct call of a closure to call of a normal function.
			// transformclosure already did all preparation work.

			// Prepend captured variables to argument list.
			n.List.Prepend(n.Left.Func.Enter.Slice()...)

			n.Left.Func.Enter.Set(nil)

			// Replace OCLOSURE with ONAME/PFUNC.
			n.Left = n.Left.Func.Closure.Func.Nname

			// Update type of OCALLFUNC node.
			// Output arguments had not changed, but their offsets could.
			if n.Left.Type.Results().NumFields() == 1 {
				n.Type = n.Left.Type.Results().Field(0).Type
			} else {
				n.Type = n.Left.Type.Results()
			}
		}

		t := n.Left.Type
		if n.List.Len() != 0 && n.List.First().Op == OAS {
			break
		}

		n.Left = walkexpr(n.Left, init)
		walkexprlist(n.List.Slice(), init)

		if n.Left.Op == ONAME && n.Left.Sym.Name == "Sqrt" &&
			(n.Left.Sym.Pkg.Path == "math" || n.Left.Sym.Pkg == localpkg && myimportpath == "math") {
			if Thearch.LinkArch.InFamily(sys.AMD64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) {
				n.Op = OSQRT
				n.Left = n.List.First()
				n.List.Set(nil)
				break opswitch
			}
		}

		ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init)
		n.List.Set(reorder1(ll))

	case OCALLMETH:
		t := n.Left.Type
		if n.List.Len() != 0 && n.List.First().Op == OAS {
			break
		}
		n.Left = walkexpr(n.Left, init)
		walkexprlist(n.List.Slice(), init)
		ll := ascompatte(n.Op, n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init)
		lr := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init)
		ll = append(ll, lr...)
		n.Left.Left = nil
		ullmancalc(n.Left)
		n.List.Set(reorder1(ll))

	case OAS:
		init.AppendNodes(&n.Ninit)

		n.Left = walkexpr(n.Left, init)
		n.Left = safeexpr(n.Left, init)

		if oaslit(n, init) {
			break
		}

		if n.Right == nil {
			// TODO(austin): Check all "implicit zeroing"
			break
		}

		switch n.Right.Op {
		default:
			n.Right = walkexpr(n.Right, init)

		case ORECV:
			// x = <-c; n.Left is x, n.Right.Left is c.
			// orderstmt made sure x is addressable.
			n.Right.Left = walkexpr(n.Right.Left, init)

			n1 := nod(OADDR, n.Left, nil)
			r := n.Right.Left // the channel
			n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1)
			n = walkexpr(n, init)
			break opswitch

		case OAPPEND:
			// x = append(...)
			r := n.Right
			if r.Type.Elem().NotInHeap {
				yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
			}
			if r.Isddd {
				r = appendslice(r, init) // also works for append(slice, string).
			} else {
				r = walkappend(r, init, n)
			}
			n.Right = r
			if r.Op == OAPPEND {
				// Left in place for back end.
				// Do not add a new write barrier.
				break opswitch
			}
			// Otherwise, lowered for race detector.
			// Treat as ordinary assignment.
		}

		if n.Left != nil && n.Right != nil {
			static := n.IsStatic
			n = convas(n, init)
			n.IsStatic = static
			n = applywritebarrier(n)
		}

	case OAS2:
		init.AppendNodes(&n.Ninit)
		walkexprlistsafe(n.List.Slice(), init)
		walkexprlistsafe(n.Rlist.Slice(), init)
		ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
		ll = reorder3(ll)
		for i, n := range ll {
			ll[i] = applywritebarrier(n)
		}
		n = liststmt(ll)

	// a,b,... = fn()
	case OAS2FUNC:
		init.AppendNodes(&n.Ninit)

		r := n.Rlist.First()
		walkexprlistsafe(n.List.Slice(), init)
		r = walkexpr(r, init)

		if isIntrinsicCall(r) {
			n.Rlist.Set1(r)
			break
		}
		init.Append(r)

		ll := ascompatet(n.Op, n.List, r.Type)
		for i, n := range ll {
			ll[i] = applywritebarrier(n)
		}
		n = liststmt(ll)

	// x, y = <-c
	// orderstmt made sure x is addressable.
	case OAS2RECV:
		init.AppendNodes(&n.Ninit)

		r := n.Rlist.First()
		walkexprlistsafe(n.List.Slice(), init)
		r.Left = walkexpr(r.Left, init)
		var n1 *Node
		if isblank(n.List.First()) {
			n1 = nodnil()
		} else {
			n1 = nod(OADDR, n.List.First(), nil)
		}
		n1.Etype = 1 // addr does not escape
		fn := chanfn("chanrecv2", 2, r.Left.Type)
		ok := n.List.Second()
		call := mkcall1(fn, ok.Type, init, typename(r.Left.Type), r.Left, n1)
		n = nod(OAS, ok, call)
		n = typecheck(n, Etop)

	// a,b = m[i];
	case OAS2MAPR:
		init.AppendNodes(&n.Ninit)

		r := n.Rlist.First()
		walkexprlistsafe(n.List.Slice(), init)
		r.Left = walkexpr(r.Left, init)
		r.Right = walkexpr(r.Right, init)
		t := r.Left.Type
		p := ""
		if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing.
			switch algtype(t.Key()) {
			case AMEM32:
				p = "mapaccess2_fast32"
			case AMEM64:
				p = "mapaccess2_fast64"
			case ASTRING:
				p = "mapaccess2_faststr"
			}
		}

		var key *Node
		if p != "" {
			// fast versions take key by value
			key = r.Right
		} else {
			// standard version takes key by reference
			// orderexpr made sure key is addressable.
			key = nod(OADDR, r.Right, nil)

			p = "mapaccess2"
		}

		// from:
		//   a,b = m[i]
		// to:
		//   var,b = mapaccess2*(t, m, i)
		//   a = *var
		a := n.List.First()

		if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
			fn := mapfn(p, t)
			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
		} else {
			fn := mapfn("mapaccess2_fat", t)
			z := zeroaddr(w)
			r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
		}

		// mapaccess2* returns a typed bool, but due to spec changes,
		// the boolean result of i.(T) is now untyped so we make it the
		// same type as the variable on the lhs.
		if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() {
			r.Type.Field(1).Type = ok.Type
		}
		n.Rlist.Set1(r)
		n.Op = OAS2FUNC

		// don't generate a = *var if a is _
		if !isblank(a) {
			var_ := temp(ptrto(t.Val()))
			var_.Typecheck = 1
			var_.NonNil = true // mapaccess always returns a non-nil pointer
			n.List.SetIndex(0, var_)
			n = walkexpr(n, init)
			init.Append(n)
			n = nod(OAS, a, nod(OIND, var_, nil))
		}

		n = typecheck(n, Etop)
		n = walkexpr(n, init)

	case ODELETE:
		init.AppendNodes(&n.Ninit)
		map_ := n.List.First()
		key := n.List.Second()
		map_ = walkexpr(map_, init)
		key = walkexpr(key, init)

		// orderstmt made sure key is addressable.
		key = nod(OADDR, key, nil)

		t := map_.Type
		n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key)

	case OAS2DOTTYPE:
		walkexprlistsafe(n.List.Slice(), init)
		e := n.Rlist.First() // i.(T)
		e.Left = walkexpr(e.Left, init)

	case ODOTTYPE, ODOTTYPE2:
		n.Left = walkexpr(n.Left, init)

	case OCONVIFACE:
		n.Left = walkexpr(n.Left, init)

		// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
		if isdirectiface(n.Left.Type) {
			var t *Node
			if n.Type.IsEmptyInterface() {
				t = typename(n.Left.Type)
			} else {
				t = itabname(n.Left.Type, n.Type)
			}
			l := nod(OEFACE, t, n.Left)
			l.Type = n.Type
			l.Typecheck = n.Typecheck
			n = l
			break
		}
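
		// Rough sketch of the rewrite above: when T is pointer-shaped
		// (for example a pointer type), a conversion like `var i I = p`
		// becomes just the two-word value {itab or type descriptor, p},
		// built inline with no runtime call.
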
		// Optimize convT2{E,I} when T is not pointer-shaped.
		// We make the interface by initializing a stack temporary to
		// the value we want to put in the interface, then using the address of
		// that stack temporary for the interface data word.
		if !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024 {
			tmp := temp(n.Left.Type)
			init.Append(typecheck(nod(OAS, tmp, n.Left), Etop))
			var t *Node
			if n.Type.IsEmptyInterface() {
				t = typename(n.Left.Type)
			} else {
				t = itabname(n.Left.Type, n.Type)
			}
			l := nod(OEFACE, t, typecheck(nod(OADDR, tmp, nil), Erv))
			l.Type = n.Type
			l.Typecheck = n.Typecheck
			n = l
			break
		}

		// Implement interface to empty interface conversion.
		// tmp = i.itab
		// if tmp != nil {
		//    tmp = tmp.type
		// }
		// e = iface{tmp, i.data}
		if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
			// Evaluate the input interface.
			c := temp(n.Left.Type)
			init.Append(nod(OAS, c, n.Left))

			// Get the itab out of the interface.
			tmp := temp(ptrto(Types[TUINT8]))
			init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))

			// Get the type out of the itab.
			nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil)
			nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
			init.Append(nif)

			// Build the result.
			e := nod(OEFACE, tmp, ifaceData(c, ptrto(Types[TUINT8])))
			e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
			e.Typecheck = 1
			n = e
			break
		}

		var ll []*Node
		if n.Type.IsEmptyInterface() {
			if !n.Left.Type.IsInterface() {
				ll = append(ll, typename(n.Left.Type))
			}
		} else {
			if n.Left.Type.IsInterface() {
				ll = append(ll, typename(n.Type))
			} else {
				ll = append(ll, itabname(n.Left.Type, n.Type))
			}
		}

		if n.Left.Type.IsInterface() {
			ll = append(ll, n.Left)
		} else {
			// regular types are passed by reference to avoid C vararg calls
			// orderexpr arranged for n.Left to be a temporary for all
			// the conversions it could see. comparison of an interface
			// with a non-interface, especially in a switch on interface value
			// with non-interface cases, is not visible to orderstmt, so we
			// have to fall back on allocating a temp here.
			if islvalue(n.Left) {
				ll = append(ll, nod(OADDR, n.Left, nil))
			} else {
				ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil))
			}
			dowidth(n.Left.Type)
		}

		fn := syslook(convFuncName(n.Left.Type, n.Type))
		fn = substArgTypes(fn, n.Left.Type, n.Type)
		dowidth(fn.Type)
		n = nod(OCALL, fn, nil)
		n.List.Set(ll)
		n = typecheck(n, Erv)
		n = walkexpr(n, init)

	case OCONV, OCONVNOP:
		if Thearch.LinkArch.Family == sys.ARM {
			if n.Left.Type.IsFloat() {
				if n.Type.Etype == TINT64 {
					n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
					break
				}

				if n.Type.Etype == TUINT64 {
					n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
					break
				}
			}

			if n.Type.IsFloat() {
				if n.Left.Type.Etype == TINT64 {
					n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type)
					break
				}

				if n.Left.Type.Etype == TUINT64 {
					n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type)
					break
				}
			}
		}

		if Thearch.LinkArch.Family == sys.I386 {
			if n.Left.Type.IsFloat() {
				if n.Type.Etype == TINT64 {
					n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
					break
				}

				if n.Type.Etype == TUINT64 {
					n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
					break
				}
				if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
					n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64]))
					break
				}
			}
			if n.Type.IsFloat() {
				if n.Left.Type.Etype == TINT64 {
					n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type)
					break
				}

				if n.Left.Type.Etype == TUINT64 {
					n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type)
					break
				}
				if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
					n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type)
					break
				}
			}
		}

		n.Left = walkexpr(n.Left, init)

	case OANDNOT:
		n.Left = walkexpr(n.Left, init)
		n.Op = OAND
		n.Right = nod(OCOM, n.Right, nil)
		n.Right = typecheck(n.Right, Erv)
		n.Right = walkexpr(n.Right, init)

	case OMUL:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)
		n = walkmul(n, init)

	case ODIV, OMOD:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)

		// rewrite complex div into function call.
		et := n.Left.Type.Etype

		if isComplex[et] && n.Op == ODIV {
			t := n.Type
			n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128]))
			n = conv(n, t)
			break
		}

		// Nothing to do for float divisions.
		if isFloat[et] {
			break
		}

		// Try rewriting as shifts or magic multiplies.
		n = walkdiv(n, init)

		// rewrite 64-bit div and mod into function calls
		// on 32-bit architectures.
		switch n.Op {
		case OMOD, ODIV:
			if Widthreg >= 8 || (et != TUINT64 && et != TINT64) {
				break opswitch
			}
			var fn string
			if et == TINT64 {
				fn = "int64"
			} else {
				fn = "uint64"
			}
			if n.Op == ODIV {
				fn += "div"
			} else {
				fn += "mod"
			}
			n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et]))
		}

	case OINDEX:
		n.Left = walkexpr(n.Left, init)

		// save the original node for bounds checking elision.
		// If it was a ODIV/OMOD walk might rewrite it.
		r := n.Right

		n.Right = walkexpr(n.Right, init)

		// if range of type cannot exceed static array bound,
		// disable bounds check.
		if n.Bounded {
			break
		}
		t := n.Left.Type
		if t != nil && t.IsPtr() {
			t = t.Elem()
		}
		if t.IsArray() {
			n.Bounded = bounded(r, t.NumElem())
			if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) {
				Warn("index bounds check elided")
			}
			if smallintconst(n.Right) && !n.Bounded {
				yyerror("index out of bounds")
			}
		} else if Isconst(n.Left, CTSTR) {
			n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string))))
			if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) {
				Warn("index bounds check elided")
			}
			if smallintconst(n.Right) && !n.Bounded {
				yyerror("index out of bounds")
			}
		}

		if Isconst(n.Right, CTINT) {
			if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
				yyerror("index out of bounds")
			}
		}

	case OINDEXMAP:
		// Replace m[k] with *map{access1,assign}(maptype, m, &k)
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)
		map_ := n.Left
		key := n.Right
		t := map_.Type
		if n.Etype == 1 {
			// This m[k] expression is on the left-hand side of an assignment.
			// orderexpr made sure key is addressable.
			key = nod(OADDR, key, nil)
			n = mkcall1(mapfn("mapassign", t), nil, init, typename(t), map_, key)
		} else {
			// m[k] is not the target of an assignment.
			p := ""
			if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing.
				switch algtype(t.Key()) {
				case AMEM32:
					p = "mapaccess1_fast32"
				case AMEM64:
					p = "mapaccess1_fast64"
				case ASTRING:
					p = "mapaccess1_faststr"
				}
			}

			if p == "" {
				// standard version takes key by reference.
				// orderexpr made sure key is addressable.
				key = nod(OADDR, key, nil)
				p = "mapaccess1"
			}

			if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
				n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key)
			} else {
				p = "mapaccess1_fat"
				z := zeroaddr(w)
				n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key, z)
			}
		}
		n.Type = ptrto(t.Val())
		n.NonNil = true // mapaccess1* and mapassign always return non-nil pointers.
		n = nod(OIND, n, nil)
		n.Type = t.Val()
		n.Typecheck = 1

	case ORECV:
		Fatalf("walkexpr ORECV") // should see inside OAS only

	case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
		n.Left = walkexpr(n.Left, init)
		low, high, max := n.SliceBounds()
		low = walkexpr(low, init)
		if low != nil && iszero(low) {
			// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
			low = nil
		}
		high = walkexpr(high, init)
		max = walkexpr(max, init)
		n.SetSliceBounds(low, high, max)
		if n.Op.IsSlice3() {
			if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
				// Reduce x[i:j:cap(x)] to x[i:j].
				if n.Op == OSLICE3 {
					n.Op = OSLICE
				} else {
					n.Op = OSLICEARR
				}
				n = reduceSlice(n)
			}
		} else {
			n = reduceSlice(n)
		}

	case OADDR:
		n.Left = walkexpr(n.Left, init)

	case ONEW:
		if n.Esc == EscNone {
			if n.Type.Elem().Width >= 1<<16 {
				Fatalf("large ONEW with EscNone: %v", n)
			}
			r := temp(n.Type.Elem())
			r = nod(OAS, r, nil) // zero temp
			r = typecheck(r, Etop)
			init.Append(r)
			r = nod(OADDR, r.Left, nil)
			r = typecheck(r, Erv)
			n = r
		} else {
			n = callnew(n.Type.Elem())
		}

	case OCMPSTR:
		// s + "badgerbadgerbadger" == "badgerbadgerbadger"
		if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) {
			// TODO(marvin): Fix Node.EType type union.
			r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0))
			r = typecheck(r, Erv)
			r = walkexpr(r, init)
			r.Type = n.Type
			n = r
			break
		}

		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
		var cs, ncs *Node // const string, non-const string
		switch {
		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
			// ignore; will be constant evaluated
		case Isconst(n.Left, CTSTR):
			cs = n.Left
			ncs = n.Right
		case Isconst(n.Right, CTSTR):
			cs = n.Right
			ncs = n.Left
		}
		if cs != nil {
			cmp := Op(n.Etype)
			// maxRewriteLen was chosen empirically.
			// It is the value that minimizes cmd/go file size
			// across most architectures.
			// See the commit description for CL 26758 for details.
			maxRewriteLen := 6
			var and Op
			switch cmp {
			case OEQ:
				and = OANDAND
			case ONE:
				and = OOROR
			default:
				// Don't do byte-wise comparisons for <, <=, etc.
				// They're fairly complicated.
				// Length-only checks are ok, though.
				maxRewriteLen = 0
			}
			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
				if len(s) > 0 {
					ncs = safeexpr(ncs, init)
				}
				// TODO(marvin): Fix Node.EType type union.
				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
				for i := 0; i < len(s); i++ {
					cb := nodintconst(int64(s[i]))
					ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
					r = nod(and, r, nod(cmp, ncb, cb))
				}
				r = typecheck(r, Erv)
				r = walkexpr(r, init)
				r.Type = n.Type
				n = r
				break
			}
		}
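
		// For example (a rough sketch of the rewrite above): with
		// maxRewriteLen = 6, a comparison like s == "ab" becomes
		// len(s) == 2 && s[0] == 'a' && s[1] == 'b', and s != "ab"
		// becomes len(s) != 2 || s[0] != 'a' || s[1] != 'b'.
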
		var r *Node
		// TODO(marvin): Fix Node.EType type union.
		if Op(n.Etype) == OEQ || Op(n.Etype) == ONE {
			// prepare for rewrite below
			n.Left = cheapexpr(n.Left, init)
			n.Right = cheapexpr(n.Right, init)

			r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))

			// quick check of len before full compare for == or !=
			// eqstring assumes that the lengths are equal
			// TODO(marvin): Fix Node.EType type union.
			if Op(n.Etype) == OEQ {
				// len(left) == len(right) && eqstring(left, right)
				r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
			} else {
				// len(left) != len(right) || !eqstring(left, right)
				r = nod(ONOT, r, nil)
				r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r)
			}

			r = typecheck(r, Erv)
			r = walkexpr(r, nil)
		} else {
			// sys_cmpstring(s1, s2) :: 0
			r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
			// TODO(marvin): Fix Node.EType type union.
			r = nod(Op(n.Etype), r, nodintconst(0))
		}

		r = typecheck(r, Erv)
		if !n.Type.IsBoolean() {
			Fatalf("cmp %v", n.Type)
		}
		r.Type = n.Type
		n = r

	case OADDSTR:
		n = addstr(n, init)

	case OAPPEND:
		// order should make sure we only see OAS(node, OAPPEND), which we handle above.
		Fatalf("append outside assignment")

	case OCOPY:
		n = copyany(n, init, instrumenting && !compiling_runtime)

	// cannot use chanfn - closechan takes any, not chan any
	case OCLOSE:
		fn := syslook("closechan")

		fn = substArgTypes(fn, n.Left.Type)
		n = mkcall1(fn, nil, init, n.Left)

	case OMAKECHAN:
		n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]))

	case OMAKEMAP:
		t := n.Type

		a := nodnil() // hmap buffer
		r := nodnil() // bucket buffer
		if n.Esc == EscNone {
			// Allocate hmap buffer on stack.
			var_ := temp(hmap(t))

			a = nod(OAS, var_, nil) // zero temp
			a = typecheck(a, Etop)
			init.Append(a)
			a = nod(OADDR, var_, nil)

			// Allocate one bucket on stack.
			// Maximum key/value size is 128 bytes, larger objects
			// are stored with an indirection. So max bucket size is 2048+eps.
			var_ = temp(mapbucket(t))

			r = nod(OAS, var_, nil) // zero temp
			r = typecheck(r, Etop)
			init.Append(r)
			r = nod(OADDR, var_, nil)
		}

		fn := syslook("makemap")
		fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val())
		n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r)

	case OMAKESLICE:
		l := n.Left
		r := n.Right
		if r == nil {
			r = safeexpr(l, init)
			l = r
		}
		t := n.Type
		if n.Esc == EscNone {
			if !isSmallMakeSlice(n) {
				Fatalf("non-small OMAKESLICE with EscNone: %v", n)
			}
			// var arr [r]T
			// n = arr[:l]
			t = typArray(t.Elem(), nonnegintconst(r)) // [r]T
			var_ := temp(t)
			a := nod(OAS, var_, nil) // zero temp
			a = typecheck(a, Etop)
			init.Append(a)
			r := nod(OSLICE, var_, nil) // arr[:l]
			r.SetSliceBounds(nil, l, nil)
			r = conv(r, n.Type) // in case n.Type is named.
			r = typecheck(r, Erv)
			r = walkexpr(r, init)
			n = r
		} else {
			// n escapes; set up a call to makeslice.
			// When len and cap can fit into int, use makeslice instead of
			// makeslice64, which is faster and shorter on 32 bit platforms.

			if t.Elem().NotInHeap {
				yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
			}

			len, cap := l, r

			fnname := "makeslice64"
			argtype := Types[TINT64]

			// typechecking guarantees that TIDEAL len/cap are positive and fit in an int.
			// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
			// will be handled by the negative range checks in makeslice during runtime.
			if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
				(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
				fnname = "makeslice"
				argtype = Types[TINT]
			}

			fn := syslook(fnname)
			fn = substArgTypes(fn, t.Elem()) // any-1
			n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
		}

	case ORUNESTR:
		a := nodnil()
		if n.Esc == EscNone {
			t := typArray(Types[TUINT8], 4)
			var_ := temp(t)
			a = nod(OADDR, var_, nil)
		}

		// intstring(*[4]byte, rune)
		n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64]))

	case OARRAYBYTESTR:
		a := nodnil()
		if n.Esc == EscNone {
			// Create temporary buffer for string on stack.
			t := typArray(Types[TUINT8], tmpstringbufsize)

			a = nod(OADDR, temp(t), nil)
		}

		// slicebytetostring(*[32]byte, []byte) string;
		n = mkcall("slicebytetostring", n.Type, init, a, n.Left)

	// slicebytetostringtmp([]byte) string;
	case OARRAYBYTESTRTMP:
		n.Left = walkexpr(n.Left, init)

		if !instrumenting {
			// Let the backend handle OARRAYBYTESTRTMP directly
			// to avoid a function call to slicebytetostringtmp.
			break
		}

		n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)

	// slicerunetostring(*[32]byte, []rune) string;
	case OARRAYRUNESTR:
		a := nodnil()

		if n.Esc == EscNone {
			// Create temporary buffer for string on stack.
			t := typArray(Types[TUINT8], tmpstringbufsize)

			a = nod(OADDR, temp(t), nil)
		}

		n = mkcall("slicerunetostring", n.Type, init, a, n.Left)

	// stringtoslicebyte(*32[byte], string) []byte;
	case OSTRARRAYBYTE:
		a := nodnil()

		if n.Esc == EscNone {
			// Create temporary buffer for slice on stack.
			t := typArray(Types[TUINT8], tmpstringbufsize)

			a = nod(OADDR, temp(t), nil)
		}

		n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING]))

	case OSTRARRAYBYTETMP:
		// []byte(string) conversion that creates a slice
		// referring to the actual string bytes.
		// This conversion is handled later by the backend and
		// is only for use by internal compiler optimizations
		// that know that the slice won't be mutated.
		// The only such case today is:
		// for i, c := range []byte(string)
		n.Left = walkexpr(n.Left, init)

	// stringtoslicerune(*[32]rune, string) []rune
	case OSTRARRAYRUNE:
		a := nodnil()

		if n.Esc == EscNone {
			// Create temporary buffer for slice on stack.
			t := typArray(Types[TINT32], tmpstringbufsize)

			a = nod(OADDR, temp(t), nil)
		}

		n = mkcall("stringtoslicerune", n.Type, init, a, n.Left)

	// ifaceeq(i1 any-1, i2 any-2) (ret bool);
	case OCMPIFACE:
		if !eqtype(n.Left.Type, n.Right.Type) {
			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
		}
		var fn *Node
		if n.Left.Type.IsEmptyInterface() {
			fn = syslook("efaceeq")
		} else {
			fn = syslook("ifaceeq")
		}

		n.Right = cheapexpr(n.Right, init)
		n.Left = cheapexpr(n.Left, init)
		fn = substArgTypes(fn, n.Right.Type, n.Left.Type)
		r := mkcall1(fn, n.Type, init, n.Left, n.Right)
		// TODO(marvin): Fix Node.EType type union.
		if Op(n.Etype) == ONE {
			r = nod(ONOT, r, nil)
		}

		// check itable/type before full compare.
		// TODO(marvin): Fix Node.EType type union.
		if Op(n.Etype) == OEQ {
			r = nod(OANDAND, nod(OEQ, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r)
		} else {
			r = nod(OOROR, nod(ONE, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r)
		}
		r = typecheck(r, Erv)
		r = walkexpr(r, init)
		r.Type = n.Type
		n = r

	case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
		if isStaticCompositeLiteral(n) {
			// n can be directly represented in the read-only data section.
			// Make direct reference to the static data. See issue 12841.
			vstat := staticname(n.Type)
			vstat.Name.Readonly = true
			fixedlit(inInitFunction, initKindStatic, n, vstat, init)
			n = vstat
			n = typecheck(n, Erv)
			break
		}
		var_ := temp(n.Type)
		anylit(n, var_, init)
		n = var_

	case OSEND:
		n1 := n.Right
		n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
		n1 = walkexpr(n1, init)
		n1 = nod(OADDR, n1, nil)
		n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1)

	case OCLOSURE:
		n = walkclosure(n, init)

	case OCALLPART:
		n = walkpartialcall(n, init)
	}

	// Expressions that are constant at run time but not
	// considered const by the language spec are not turned into
	// constants until walk. For example, if n is y%1 == 0, the
	// walk of y%1 may have replaced it by 0.
	// Check whether n with its updated args is itself now a constant.
	t := n.Type

	evconst(n)
	n.Type = t
	if n.Op == OLITERAL {
		n = typecheck(n, Erv)
	}

	ullmancalc(n)

	if Debug['w'] != 0 && n != nil {
		Dump("walk", n)
	}

	lineno = lno
	return n
}

// TODO(josharian): combine this with its caller and simplify
func reduceSlice(n *Node) *Node {
	low, high, max := n.SliceBounds()
	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
		// Reduce x[i:len(x)] to x[i:].
		high = nil
	}
	n.SetSliceBounds(low, high, max)
	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
		// Reduce x[:] to x.
		if Debug_slice > 0 {
			Warn("slice: omit slice operation")
		}
		return n.Left
	}
	return n
}
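
// Roughly, after the zero low bound is dropped in the slice case above and
// the len(x) high bound is dropped here, an expression like x[0:len(x)]
// reduces to x[:] and then to plain x, so no new slice header is built
// (an illustrative sketch of the two reductions).
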
func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node {
	// convas will turn map assigns into function calls,
	// making it impossible for reorder3 to work.
	n := nod(OAS, l, r)

	if l.Op == OINDEXMAP {
		return n
	}

	return convas(n, init)
}

func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
	// check assign expression list to
	// an expression list. called in
	//	expr-list = expr-list

	// ensure order of evaluation for function calls
	for i := range nl {
		nl[i] = safeexpr(nl[i], init)
	}
	for i1 := range nr {
		nr[i1] = safeexpr(nr[i1], init)
	}

	var nn []*Node
	i := 0
	for ; i < len(nl); i++ {
		if i >= len(nr) {
			break
		}
		// Do not generate 'x = x' during return. See issue 4014.
		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
			continue
		}
		nn = append(nn, ascompatee1(op, nl[i], nr[i], init))
	}

	// cannot happen: caller checked that lists had same length
	if i < len(nl) || i < len(nr) {
		var nln, nrn Nodes
		nln.Set(nl)
		nrn.Set(nr)
		yyerror("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.Func.Nname.Sym.Name)
	}
	return nn
}

// l is an lv and rt is the type of an rv
// return 1 if this implies a function call
// evaluating the lv or a function call
// in the conversion of the types
func fncall(l *Node, rt *Type) bool {
	if l.Ullman >= UINF || l.Op == OINDEXMAP {
		return true
	}
	var r Node
	if needwritebarrier(l, &r) {
		return true
	}
	if eqtype(l.Type, rt) {
		return false
	}
	return true
}

// check assign type list to
// an expression list. called in
//	expr-list = func()
func ascompatet(op Op, nl Nodes, nr *Type) []*Node {
	r, saver := iterFields(nr)

	var nn, mm Nodes
	var ullmanOverflow bool
	var i int
	for i = 0; i < nl.Len(); i++ {
		if r == nil {
			break
		}
		l := nl.Index(i)
		if isblank(l) {
			r = saver.Next()
			continue
		}

		// any lv that causes a fn call must be
		// deferred until all the return arguments
		// have been pulled from the output arguments
		if fncall(l, r.Type) {
			tmp := temp(r.Type)
			tmp = typecheck(tmp, Erv)
			a := nod(OAS, l, tmp)
			a = convas(a, &mm)
			mm.Append(a)
			l = tmp
		}

		a := nod(OAS, l, nodarg(r, 0))
		a = convas(a, &nn)
		ullmancalc(a)
		if a.Ullman >= UINF {
			Dump("ascompatet ucount", a)
			ullmanOverflow = true
		}

		nn.Append(a)
		r = saver.Next()
	}

	if i < nl.Len() || r != nil {
		yyerror("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
	}

	if ullmanOverflow {
		Fatalf("ascompatet: too many function calls evaluating parameters")
	}
	return append(nn.Slice(), mm.Slice()...)
}

// package all the arguments that match a ... T parameter into a []T.
func mkdotargslice(lr0, nn []*Node, l *Field, fp int, init *Nodes, ddd *Node) []*Node {
	esc := uint16(EscUnknown)
	if ddd != nil {
		esc = ddd.Esc
	}

	tslice := typSlice(l.Type.Elem())

	var n *Node
	if len(lr0) == 0 {
		n = nodnil()
		n.Type = tslice
	} else {
		n = nod(OCOMPLIT, nil, typenod(tslice))
		if ddd != nil && prealloc[ddd] != nil {
			prealloc[n] = prealloc[ddd] // temporary to use
		}
		n.List.Set(lr0)
		n.Esc = esc
		n = typecheck(n, Erv)
		if n.Type == nil {
			Fatalf("mkdotargslice: typecheck failed")
		}
		n = walkexpr(n, init)
	}

	a := nod(OAS, nodarg(l, fp), n)
	nn = append(nn, convas(a, init))
	return nn
}

// helpers for shape errors
func dumptypes(nl *Type, what string) string {
	s := ""
	for _, l := range nl.Fields().Slice() {
		if s != "" {
			s += ", "
		}
		s += fldconv(l, 0)
	}
	if s == "" {
		s = fmt.Sprintf("[no arguments %s]", what)
	}
	return s
}

func dumpnodetypes(l []*Node, what string) string {
	s := ""
	for _, r := range l {
		if s != "" {
			s += ", "
		}
		s += r.Type.String()
	}
	if s == "" {
		s = fmt.Sprintf("[no arguments %s]", what)
	}
	return s
}

// check assign expression list to
// a type list. called in
//	return expr-list
//	func(expr-list)
func ascompatte(op Op, call *Node, isddd bool, nl *Type, lr []*Node, fp int, init *Nodes) []*Node {
	lr0 := lr
	l, savel := iterFields(nl)
	var r *Node
	if len(lr) > 0 {
		r = lr[0]
	}
	var nn []*Node

	// f(g()) where g has multiple return values
	if r != nil && len(lr) <= 1 && r.Type.IsFuncArgStruct() {
		// optimization - can do block copy
		if eqtypenoname(r.Type, nl) {
			arg := nodarg(nl, fp)
			r = nod(OCONVNOP, r, nil)
			r.Type = arg.Type
			nn = []*Node{convas(nod(OAS, arg, r), init)}
			goto ret
		}

		// conversions involved.
		// copy into temporaries.
		var alist []*Node

		for _, l := range r.Type.Fields().Slice() {
			tmp := temp(l.Type)
			alist = append(alist, tmp)
		}

		a := nod(OAS2, nil, nil)
		a.List.Set(alist)
		a.Rlist.Set(lr)
		a = typecheck(a, Etop)
		a = walkstmt(a)
		init.Append(a)
		lr = alist
		r = lr[0]
		l, savel = iterFields(nl)
	}

	for {
		if l != nil && l.Isddd {
			// the ddd parameter must be last
			ll := savel.Next()

			if ll != nil {
				yyerror("... must be last argument")
			}

			// special case --
			// only if we are assigning a single ddd
			// argument to a ddd parameter then it is
			// passed through unencapsulated
			if r != nil && len(lr) <= 1 && isddd && eqtype(l.Type, r.Type) {
				a := nod(OAS, nodarg(l, fp), r)
				a = convas(a, init)
				nn = append(nn, a)
				break
			}

			// normal case -- make a slice of all
			// remaining arguments and pass it to
			// the ddd parameter.
			nn = mkdotargslice(lr, nn, l, fp, init, call.Right)

			break
		}

		if l == nil || r == nil {
			if l != nil || r != nil {
				l1 := dumptypes(nl, "expected")
				l2 := dumpnodetypes(lr0, "given")
				if l != nil {
					yyerror("not enough arguments to %v\n\t%s\n\t%s", op, l1, l2)
				} else {
					yyerror("too many arguments to %v\n\t%s\n\t%s", op, l1, l2)
				}
			}

			break
		}

		a := nod(OAS, nodarg(l, fp), r)
		a = convas(a, init)
		nn = append(nn, a)

		l = savel.Next()
		r = nil
		lr = lr[1:]
		if len(lr) > 0 {
			r = lr[0]
		}
	}

ret:
	for _, n := range nn {
		n.Typecheck = 1
	}
	return nn
}

// generate code for print
func walkprint(nn *Node, init *Nodes) *Node {
	var r *Node
	var n *Node
	var on *Node
	var t *Type
	var et EType

	op := nn.Op
	all := nn.List
	var calls []*Node
	notfirst := false

	// Hoist all the argument evaluation up before the lock.
	walkexprlistcheap(all.Slice(), init)

	calls = append(calls, mkcall("printlock", nil, init))
	for i1, n1 := range all.Slice() {
		if notfirst {
			calls = append(calls, mkcall("printsp", nil, init))
		}

		notfirst = op == OPRINTN

		n = n1
		if n.Op == OLITERAL {
			switch n.Val().Ctype() {
			case CTRUNE:
				n = defaultlit(n, runetype)

			case CTINT:
				n = defaultlit(n, Types[TINT64])

			case CTFLT:
				n = defaultlit(n, Types[TFLOAT64])
			}
		}

		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
			n = defaultlit(n, Types[TINT64])
		}
		n = defaultlit(n, nil)
		all.SetIndex(i1, n)
		if n.Type == nil || n.Type.Etype == TFORW {
			continue
		}

		t = n.Type
		et = n.Type.Etype
		if n.Type.IsInterface() {
			if n.Type.IsEmptyInterface() {
				on = syslook("printeface")
			} else {
				on = syslook("printiface")
			}
			on = substArgTypes(on, n.Type) // any-1
		} else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR {
			on = syslook("printpointer")
			on = substArgTypes(on, n.Type) // any-1
		} else if n.Type.IsSlice() {
			on = syslook("printslice")
			on = substArgTypes(on, n.Type) // any-1
		} else if isInt[et] {
			if et == TUINT64 {
				if (t.Sym.Pkg == Runtimepkg || compiling_runtime) && t.Sym.Name == "hex" {
					on = syslook("printhex")
				} else {
					on = syslook("printuint")
				}
			} else {
				on = syslook("printint")
			}
		} else if isFloat[et] {
			on = syslook("printfloat")
		} else if isComplex[et] {
			on = syslook("printcomplex")
		} else if et == TBOOL {
			on = syslook("printbool")
		} else if et == TSTRING {
			on = syslook("printstring")
		} else {
			badtype(OPRINT, n.Type, nil)
			continue
		}

		t = on.Type.Params().Field(0).Type

		if !eqtype(t, n.Type) {
			n = nod(OCONV, n, nil)
			n.Type = t
		}

		r = nod(OCALL, on, nil)
		r.List.Append(n)
		calls = append(calls, r)
	}

	if op == OPRINTN {
		calls = append(calls, mkcall("printnl", nil, nil))
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheckslice(calls, Etop)
	walkexprlist(calls, init)

	r = nod(OEMPTY, nil, nil)
	r = typecheck(r, Etop)
	r = walkexpr(r, init)
	r.Ninit.Set(calls)
	return r
}
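
// For illustration, a statement like print("x=", 1) is lowered roughly to
//	printlock(); printstring("x="); printint(1); printunlock()
// with println additionally emitting printsp between operands and a final
// printnl (a sketch of the call sequence built above).
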
func callnew(t *Type) *Node {
	if t.NotInHeap {
		yyerror("%v is go:notinheap; heap allocation disallowed", t)
	}
	dowidth(t)
	fn := syslook("newobject")
	fn = substArgTypes(fn, t)
	v := mkcall1(fn, ptrto(t), nil, typename(t))
	v.NonNil = true
	return v
}

func iscallret(n *Node) bool {
	n = outervalue(n)
	return n.Op == OINDREGSP
}

func isstack(n *Node) bool {
	n = outervalue(n)

	// If n is *autotmp and autotmp = &foo, replace n with foo.
	// We introduce such temps when initializing struct literals.
	if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
		defn := n.Left.Name.Defn
		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
			n = defn.Right.Left
		}
	}

	switch n.Op {
	case OINDREGSP:
		return true

	case ONAME:
		switch n.Class {
		case PAUTO, PPARAM, PPARAMOUT:
			return true
		}
	}

	return false
}

func (n *Node) isGlobal() bool {
	n = outervalue(n)
	return n.Op == ONAME && n.Class == PEXTERN
}

// Do we need a write barrier for the assignment l = r?
func needwritebarrier(l *Node, r *Node) bool {
	if !use_writebarrier {
		return false
	}

	if l == nil || isblank(l) {
		return false
	}

	// No write barrier for write of non-pointers.
	dowidth(l.Type)

	if !haspointers(l.Type) {
		return false
	}

	// No write barrier for write to stack.
	if isstack(l) {
		return false
	}

	// No write barrier if this is a pointer to a go:notinheap
	// type, since the write barrier's inheap(ptr) check will fail.
	if l.Type.IsPtr() && l.Type.Elem().NotInHeap {
		return false
	}

	// Implicit zeroing is still zeroing, so it needs write
	// barriers. In practice, these are all to stack variables
	// (even if isstack isn't smart enough to figure that out), so
	// they'll be eliminated by the backend.
	if r == nil {
		return true
	}

	// Ignore no-op conversions when making decision.
	// Ensures that xp = unsafe.Pointer(&x) is treated
	// the same as xp = &x.
	for r.Op == OCONVNOP {
		r = r.Left
	}

	// TODO: We can eliminate write barriers if we know *both* the
	// current and new content of the slot must already be shaded.
	// We know a pointer is shaded if it's nil, or points to
	// static data, a global (variable or function), or the stack.
	// The nil optimization could be particularly useful for
	// writes to just-allocated objects. Unfortunately, knowing
	// the "current" value of the slot requires flow analysis.

	// No write barrier for storing address of stack values,
	// which are guaranteed only to be written to the stack.
	if r.Op == OADDR && isstack(r.Left) {
		return false
	}

	// Otherwise, be conservative and use write barrier.
	return true
}
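
// Illustrative reading of the rules above (a rough sketch): an assignment
// to a stack-local pointer such as `var p *T; p = q` needs no barrier, an
// assignment of a non-pointer such as `n = 1` needs none, while a store of
// a heap pointer into a global or other heap-resident slot is
// conservatively given one.
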
2169 2170 func applywritebarrier(n *Node) *Node { 2171 if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) { 2172 if Debug_wb > 1 { 2173 Warnl(n.Lineno, "marking %v for barrier", n.Left) 2174 } 2175 n.Op = OASWB 2176 return n 2177 } 2178 return n 2179 } 2180 2181 func convas(n *Node, init *Nodes) *Node { 2182 if n.Op != OAS { 2183 Fatalf("convas: not OAS %v", n.Op) 2184 } 2185 2186 n.Typecheck = 1 2187 2188 var lt *Type 2189 var rt *Type 2190 if n.Left == nil || n.Right == nil { 2191 goto out 2192 } 2193 2194 lt = n.Left.Type 2195 rt = n.Right.Type 2196 if lt == nil || rt == nil { 2197 goto out 2198 } 2199 2200 if isblank(n.Left) { 2201 n.Right = defaultlit(n.Right, nil) 2202 goto out 2203 } 2204 2205 if !eqtype(lt, rt) { 2206 n.Right = assignconv(n.Right, lt, "assignment") 2207 n.Right = walkexpr(n.Right, init) 2208 } 2209 2210 out: 2211 ullmancalc(n) 2212 return n 2213 } 2214 2215 // from ascompat[te] 2216 // evaluating actual function arguments. 2217 // f(a,b) 2218 // if there is exactly one function expr, 2219 // then it is done first. otherwise must 2220 // make temp variables 2221 func reorder1(all []*Node) []*Node { 2222 c := 0 // function calls 2223 t := 0 // total parameters 2224 2225 for _, n := range all { 2226 t++ 2227 ullmancalc(n) 2228 if n.Ullman >= UINF { 2229 c++ 2230 } 2231 } 2232 2233 if c == 0 || t == 1 { 2234 return all 2235 } 2236 2237 var g []*Node // fncalls assigned to tempnames 2238 var f *Node // last fncall assigned to stack 2239 var r []*Node // non fncalls and tempnames assigned to stack 2240 d := 0 2241 var a *Node 2242 for _, n := range all { 2243 if n.Ullman < UINF { 2244 r = append(r, n) 2245 continue 2246 } 2247 2248 d++ 2249 if d == c { 2250 f = n 2251 continue 2252 } 2253 2254 // make assignment of fncall to tempname 2255 a = temp(n.Right.Type) 2256 2257 a = nod(OAS, a, n.Right) 2258 g = append(g, a) 2259 2260 // put normal arg assignment on list 2261 // with fncall replaced by tempname 2262 n.Right = a.Left 2263 2264 r = append(r, n) 2265 } 2266 2267 if f != nil { 2268 g = append(g, f) 2269 } 2270 return append(g, r...) 2271 } 2272 2273 // from ascompat[ee] 2274 // a,b = c,d 2275 // simultaneous assignment. there cannot 2276 // be later use of an earlier lvalue. 2277 // 2278 // function calls have been removed. 2279 func reorder3(all []*Node) []*Node { 2280 var l *Node 2281 2282 // If a needed expression may be affected by an 2283 // earlier assignment, make an early copy of that 2284 // expression and use the copy instead. 2285 var early []*Node 2286 2287 var mapinit Nodes 2288 for i, n := range all { 2289 l = n.Left 2290 2291 // Save subexpressions needed on left side. 2292 // Drill through non-dereferences. 2293 for { 2294 if l.Op == ODOT || l.Op == OPAREN { 2295 l = l.Left 2296 continue 2297 } 2298 2299 if l.Op == OINDEX && l.Left.Type.IsArray() { 2300 l.Right = reorder3save(l.Right, all, i, &early) 2301 l = l.Left 2302 continue 2303 } 2304 2305 break 2306 } 2307 2308 switch l.Op { 2309 default: 2310 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2311 2312 case ONAME: 2313 break 2314 2315 case OINDEX, OINDEXMAP: 2316 l.Left = reorder3save(l.Left, all, i, &early) 2317 l.Right = reorder3save(l.Right, all, i, &early) 2318 if l.Op == OINDEXMAP { 2319 all[i] = convas(all[i], &mapinit) 2320 } 2321 2322 case OIND, ODOTPTR: 2323 l.Left = reorder3save(l.Left, all, i, &early) 2324 } 2325 2326 // Save expression on right side. 
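        // For example, in the simultaneous assignment
        //	x, y = y, x
        // the right side of the second assignment (x) is overwritten by the
        // first one, so reorder3save copies it into a temporary up front,
        // yielding roughly:
        //	tmp := x
        //	x = y
        //	y = tmp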
2327 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2328 } 2329 2330 early = append(mapinit.Slice(), early...) 2331 return append(early, all...) 2332 } 2333 2334 // if the evaluation of *np would be affected by the 2335 // assignments in all up to but not including the ith assignment, 2336 // copy into a temporary during *early and 2337 // replace *np with that temp. 2338 // The result of reorder3save MUST be assigned back to n, e.g. 2339 // n.Left = reorder3save(n.Left, all, i, early) 2340 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2341 if !aliased(n, all, i) { 2342 return n 2343 } 2344 2345 q := temp(n.Type) 2346 q = nod(OAS, q, n) 2347 q = typecheck(q, Etop) 2348 *early = append(*early, q) 2349 return q.Left 2350 } 2351 2352 // what's the outer value that a write to n affects? 2353 // outer value means containing struct or array. 2354 func outervalue(n *Node) *Node { 2355 for { 2356 if n.Op == OXDOT { 2357 Fatalf("OXDOT in walk") 2358 } 2359 if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP { 2360 n = n.Left 2361 continue 2362 } 2363 2364 if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() { 2365 n = n.Left 2366 continue 2367 } 2368 2369 break 2370 } 2371 2372 return n 2373 } 2374 2375 // Is it possible that the computation of n might be 2376 // affected by writes in as up to but not including the ith element? 2377 func aliased(n *Node, all []*Node, i int) bool { 2378 if n == nil { 2379 return false 2380 } 2381 2382 // Treat all fields of a struct as referring to the whole struct. 2383 // We could do better but we would have to keep track of the fields. 2384 for n.Op == ODOT { 2385 n = n.Left 2386 } 2387 2388 // Look for obvious aliasing: a variable being assigned 2389 // during the all list and appearing in n. 2390 // Also record whether there are any writes to main memory. 2391 // Also record whether there are any writes to variables 2392 // whose addresses have been taken. 2393 memwrite := 0 2394 2395 varwrite := 0 2396 var a *Node 2397 for _, an := range all[:i] { 2398 a = outervalue(an.Left) 2399 2400 for a.Op == ODOT { 2401 a = a.Left 2402 } 2403 2404 if a.Op != ONAME { 2405 memwrite = 1 2406 continue 2407 } 2408 2409 switch n.Class { 2410 default: 2411 varwrite = 1 2412 continue 2413 2414 case PAUTO, PPARAM, PPARAMOUT: 2415 if n.Addrtaken { 2416 varwrite = 1 2417 continue 2418 } 2419 2420 if vmatch2(a, n) { 2421 // Direct hit. 2422 return true 2423 } 2424 } 2425 } 2426 2427 // The variables being written do not appear in n. 2428 // However, n might refer to computed addresses 2429 // that are being written. 2430 2431 // If no computed addresses are affected by the writes, no aliasing. 2432 if memwrite == 0 && varwrite == 0 { 2433 return false 2434 } 2435 2436 // If n does not refer to computed addresses 2437 // (that is, if n only refers to variables whose addresses 2438 // have not been taken), no aliasing. 2439 if varexpr(n) { 2440 return false 2441 } 2442 2443 // Otherwise, both the writes and n refer to computed memory addresses. 2444 // Assume that they might conflict. 2445 return true 2446 } 2447 2448 // does the evaluation of n only refer to variables 2449 // whose addresses have not been taken? 
2450 // (and no other memory)
2451 func varexpr(n *Node) bool {
2452     if n == nil {
2453         return true
2454     }
2455
2456     switch n.Op {
2457     case OLITERAL:
2458         return true
2459
2460     case ONAME:
2461         switch n.Class {
2462         case PAUTO, PPARAM, PPARAMOUT:
2463             if !n.Addrtaken {
2464                 return true
2465             }
2466         }
2467
2468         return false
2469
2470     case OADD,
2471         OSUB,
2472         OOR,
2473         OXOR,
2474         OMUL,
2475         ODIV,
2476         OMOD,
2477         OLSH,
2478         ORSH,
2479         OAND,
2480         OANDNOT,
2481         OPLUS,
2482         OMINUS,
2483         OCOM,
2484         OPAREN,
2485         OANDAND,
2486         OOROR,
2487         OCONV,
2488         OCONVNOP,
2489         OCONVIFACE,
2490         ODOTTYPE:
2491         return varexpr(n.Left) && varexpr(n.Right)
2492
2493     case ODOT: // but not ODOTPTR
2494         // Should have been handled in aliased.
2495         Fatalf("varexpr unexpected ODOT")
2496     }
2497
2498     // Be conservative.
2499     return false
2500 }
2501
2502 // is the name l mentioned in r?
2503 func vmatch2(l *Node, r *Node) bool {
2504     if r == nil {
2505         return false
2506     }
2507     switch r.Op {
2508     // match each right given left
2509     case ONAME:
2510         return l == r
2511
2512     case OLITERAL:
2513         return false
2514     }
2515
2516     if vmatch2(l, r.Left) {
2517         return true
2518     }
2519     if vmatch2(l, r.Right) {
2520         return true
2521     }
2522     for _, n := range r.List.Slice() {
2523         if vmatch2(l, n) {
2524             return true
2525         }
2526     }
2527     return false
2528 }
2529
2530 // is any name mentioned in l also mentioned in r?
2531 // called by sinit.go
2532 func vmatch1(l *Node, r *Node) bool {
2533     // isolate all left sides
2534     if l == nil || r == nil {
2535         return false
2536     }
2537     switch l.Op {
2538     case ONAME:
2539         switch l.Class {
2540         case PPARAM, PAUTO:
2541             break
2542
2543         // assignment to non-stack variable
2544         // must be delayed if right has function calls.
2545         default:
2546             if r.Ullman >= UINF {
2547                 return true
2548             }
2549         }
2550
2551         return vmatch2(l, r)
2552
2553     case OLITERAL:
2554         return false
2555     }
2556
2557     if vmatch1(l.Left, r) {
2558         return true
2559     }
2560     if vmatch1(l.Right, r) {
2561         return true
2562     }
2563     for _, n := range l.List.Slice() {
2564         if vmatch1(n, r) {
2565             return true
2566         }
2567     }
2568     return false
2569 }
2570
2571 // paramstoheap returns code to allocate memory for heap-escaped parameters
2572 // and to copy non-result parameters' values from the stack.
2573 // If params are the function's results, code is also produced to zero
2574 // their stack slots, since the garbage collector assumes results are always live.
2575 func paramstoheap(params *Type) []*Node {
2576     var nn []*Node
2577     for _, t := range params.Fields().Slice() {
2578         // For precise stacks, the garbage collector assumes results
2579         // are always live, so zero them always.
2580         if params.StructType().Funarg == FunargResults {
2581             // Defer might stop a panic and show the
2582             // return values as they exist at the time of panic.
2583             // Make sure to zero them on entry to the function.
2584             nn = append(nn, nod(OAS, nodarg(t, 1), nil))
2585         }
2586
2587         v := t.Nname
2588         if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
2589             v = nil
2590         }
2591         if v == nil {
2592             continue
2593         }
2594
2595         if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
2596             nn = append(nn, walkstmt(nod(ODCL, v, nil)))
2597             if stackcopy.Class == PPARAM {
2598                 nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
2599             }
2600         }
2601     }
2602
2603     return nn
2604 }
2605
2606 // returnsfromheap returns code to copy values for heap-escaped parameters
2607 // back to the stack.
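// For example, for
//	func f() (r int)
// where r escapes to the heap, r is manipulated through its heap copy in the
// function body, and the code produced here copies the final value back into
// the ordinary stack result slot; heapmoves below appends it to the
// function's Exit list.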
2608 func returnsfromheap(params *Type) []*Node { 2609 var nn []*Node 2610 for _, t := range params.Fields().Slice() { 2611 v := t.Nname 2612 if v == nil { 2613 continue 2614 } 2615 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class == PPARAMOUT { 2616 nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop))) 2617 } 2618 } 2619 2620 return nn 2621 } 2622 2623 // heapmoves generates code to handle migrating heap-escaped parameters 2624 // between the stack and the heap. The generated code is added to Curfn's 2625 // Enter and Exit lists. 2626 func heapmoves() { 2627 lno := lineno 2628 lineno = Curfn.Lineno 2629 nn := paramstoheap(Curfn.Type.Recvs()) 2630 nn = append(nn, paramstoheap(Curfn.Type.Params())...) 2631 nn = append(nn, paramstoheap(Curfn.Type.Results())...) 2632 Curfn.Func.Enter.Append(nn...) 2633 lineno = Curfn.Func.Endlineno 2634 Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...) 2635 lineno = lno 2636 } 2637 2638 func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node { 2639 if fn.Type == nil || fn.Type.Etype != TFUNC { 2640 Fatalf("mkcall %v %v", fn, fn.Type) 2641 } 2642 2643 n := fn.Type.Params().NumFields() 2644 2645 r := nod(OCALL, fn, nil) 2646 r.List.Set(va[:n]) 2647 if fn.Type.Results().NumFields() > 0 { 2648 r = typecheck(r, Erv|Efnstruct) 2649 } else { 2650 r = typecheck(r, Etop) 2651 } 2652 r = walkexpr(r, init) 2653 r.Type = t 2654 return r 2655 } 2656 2657 func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node { 2658 return vmkcall(syslook(name), t, init, args) 2659 } 2660 2661 func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node { 2662 return vmkcall(fn, t, init, args) 2663 } 2664 2665 func conv(n *Node, t *Type) *Node { 2666 if eqtype(n.Type, t) { 2667 return n 2668 } 2669 n = nod(OCONV, n, nil) 2670 n.Type = t 2671 n = typecheck(n, Erv) 2672 return n 2673 } 2674 2675 func chanfn(name string, n int, t *Type) *Node { 2676 if !t.IsChan() { 2677 Fatalf("chanfn %v", t) 2678 } 2679 fn := syslook(name) 2680 switch n { 2681 default: 2682 Fatalf("chanfn %d", n) 2683 case 1: 2684 fn = substArgTypes(fn, t.Elem()) 2685 case 2: 2686 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2687 } 2688 return fn 2689 } 2690 2691 func mapfn(name string, t *Type) *Node { 2692 if !t.IsMap() { 2693 Fatalf("mapfn %v", t) 2694 } 2695 fn := syslook(name) 2696 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2697 return fn 2698 } 2699 2700 func mapfndel(name string, t *Type) *Node { 2701 if !t.IsMap() { 2702 Fatalf("mapfn %v", t) 2703 } 2704 fn := syslook(name) 2705 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2706 return fn 2707 } 2708 2709 func writebarrierfn(name string, l *Type, r *Type) *Node { 2710 fn := syslook(name) 2711 fn = substArgTypes(fn, l, r) 2712 return fn 2713 } 2714 2715 func addstr(n *Node, init *Nodes) *Node { 2716 // orderexpr rewrote OADDSTR to have a list of strings. 2717 c := n.List.Len() 2718 2719 if c < 2 { 2720 yyerror("addstr count %d too small", c) 2721 } 2722 2723 buf := nodnil() 2724 if n.Esc == EscNone { 2725 sz := int64(0) 2726 for _, n1 := range n.List.Slice() { 2727 if n1.Op == OLITERAL { 2728 sz += int64(len(n1.Val().U.(string))) 2729 } 2730 } 2731 2732 // Don't allocate the buffer if the result won't fit. 2733 if sz < tmpstringbufsize { 2734 // Create temporary buffer for result string on stack. 
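        // For example, for s := x + "-" + y where the result does not
        // escape and the constant parts are under 32 bytes, the call built
        // below becomes roughly
        //	concatstring3(&buf, x, "-", y)
        // with buf a [32]byte stack temporary, so no heap allocation is
        // needed when the result fits in buf.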
2735 t := typArray(Types[TUINT8], tmpstringbufsize) 2736 2737 buf = nod(OADDR, temp(t), nil) 2738 } 2739 } 2740 2741 // build list of string arguments 2742 args := []*Node{buf} 2743 for _, n2 := range n.List.Slice() { 2744 args = append(args, conv(n2, Types[TSTRING])) 2745 } 2746 2747 var fn string 2748 if c <= 5 { 2749 // small numbers of strings use direct runtime helpers. 2750 // note: orderexpr knows this cutoff too. 2751 fn = fmt.Sprintf("concatstring%d", c) 2752 } else { 2753 // large numbers of strings are passed to the runtime as a slice. 2754 fn = "concatstrings" 2755 2756 t := typSlice(Types[TSTRING]) 2757 slice := nod(OCOMPLIT, nil, typenod(t)) 2758 if prealloc[n] != nil { 2759 prealloc[slice] = prealloc[n] 2760 } 2761 slice.List.Set(args[1:]) // skip buf arg 2762 args = []*Node{buf, slice} 2763 slice.Esc = EscNone 2764 } 2765 2766 cat := syslook(fn) 2767 r := nod(OCALL, cat, nil) 2768 r.List.Set(args) 2769 r = typecheck(r, Erv) 2770 r = walkexpr(r, init) 2771 r.Type = n.Type 2772 2773 return r 2774 } 2775 2776 // expand append(l1, l2...) to 2777 // init { 2778 // s := l1 2779 // n := len(s) + len(l2) 2780 // // Compare as uint so growslice can panic on overflow. 2781 // if uint(n) > uint(cap(s)) { 2782 // s = growslice(s, n) 2783 // } 2784 // s = s[:n] 2785 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2786 // } 2787 // s 2788 // 2789 // l2 is allowed to be a string. 2790 func appendslice(n *Node, init *Nodes) *Node { 2791 walkexprlistsafe(n.List.Slice(), init) 2792 2793 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2794 // and n are name or literal, but those may index the slice we're 2795 // modifying here. Fix explicitly. 2796 ls := n.List.Slice() 2797 for i1, n1 := range ls { 2798 ls[i1] = cheapexpr(n1, init) 2799 } 2800 2801 l1 := n.List.First() 2802 l2 := n.List.Second() 2803 2804 var l []*Node 2805 2806 // var s []T 2807 s := temp(l1.Type) 2808 l = append(l, nod(OAS, s, l1)) // s = l1 2809 2810 // n := len(s) + len(l2) 2811 nn := temp(Types[TINT]) 2812 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2813 2814 // if uint(n) > uint(cap(s)) 2815 nif := nod(OIF, nil, nil) 2816 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2817 nif.Left.Left.Type = Types[TUINT] 2818 nif.Left.Right.Type = Types[TUINT] 2819 2820 // instantiate growslice(Type*, []any, int) []any 2821 fn := syslook("growslice") 2822 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2823 2824 // s = growslice(T, s, n) 2825 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2826 l = append(l, nif) 2827 2828 // s = s[:n] 2829 nt := nod(OSLICE, s, nil) 2830 nt.SetSliceBounds(nil, nn, nil) 2831 nt.Etype = 1 2832 l = append(l, nod(OAS, s, nt)) 2833 2834 if haspointers(l1.Type.Elem()) { 2835 // copy(s[len(l1):], l2) 2836 nptr1 := nod(OSLICE, s, nil) 2837 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2838 nptr1.Etype = 1 2839 nptr2 := l2 2840 fn := syslook("typedslicecopy") 2841 fn = substArgTypes(fn, l1.Type, l2.Type) 2842 var ln Nodes 2843 ln.Set(l) 2844 nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 2845 l = append(ln.Slice(), nt) 2846 } else if instrumenting && !compiling_runtime { 2847 // rely on runtime to instrument copy. 
2848 // copy(s[len(l1):], l2) 2849 nptr1 := nod(OSLICE, s, nil) 2850 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2851 nptr1.Etype = 1 2852 nptr2 := l2 2853 var fn *Node 2854 if l2.Type.IsString() { 2855 fn = syslook("slicestringcopy") 2856 } else { 2857 fn = syslook("slicecopy") 2858 } 2859 fn = substArgTypes(fn, l1.Type, l2.Type) 2860 var ln Nodes 2861 ln.Set(l) 2862 nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 2863 l = append(ln.Slice(), nt) 2864 } else { 2865 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2866 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 2867 nptr1.Bounded = true 2868 2869 nptr1 = nod(OADDR, nptr1, nil) 2870 2871 nptr2 := nod(OSPTR, l2, nil) 2872 2873 fn := syslook("memmove") 2874 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2875 2876 var ln Nodes 2877 ln.Set(l) 2878 nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln) 2879 2880 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 2881 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 2882 l = append(ln.Slice(), nt) 2883 } 2884 2885 typecheckslice(l, Etop) 2886 walkstmtlist(l) 2887 init.Append(l...) 2888 return s 2889 } 2890 2891 // Rewrite append(src, x, y, z) so that any side effects in 2892 // x, y, z (including runtime panics) are evaluated in 2893 // initialization statements before the append. 2894 // For normal code generation, stop there and leave the 2895 // rest to cgen_append. 2896 // 2897 // For race detector, expand append(src, a [, b]* ) to 2898 // 2899 // init { 2900 // s := src 2901 // const argc = len(args) - 1 2902 // if cap(s) - len(s) < argc { 2903 // s = growslice(s, len(s)+argc) 2904 // } 2905 // n := len(s) 2906 // s = s[:n+argc] 2907 // s[n] = a 2908 // s[n+1] = b 2909 // ... 2910 // } 2911 // s 2912 func walkappend(n *Node, init *Nodes, dst *Node) *Node { 2913 if !samesafeexpr(dst, n.List.First()) { 2914 n.List.SetIndex(0, safeexpr(n.List.Index(0), init)) 2915 n.List.SetIndex(0, walkexpr(n.List.Index(0), init)) 2916 } 2917 walkexprlistsafe(n.List.Slice()[1:], init) 2918 2919 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2920 // and n are name or literal, but those may index the slice we're 2921 // modifying here. Fix explicitly. 2922 // Using cheapexpr also makes sure that the evaluation 2923 // of all arguments (and especially any panics) happen 2924 // before we begin to modify the slice in a visible way. 2925 ls := n.List.Slice()[1:] 2926 for i, n := range ls { 2927 ls[i] = cheapexpr(n, init) 2928 } 2929 2930 nsrc := n.List.First() 2931 2932 argc := n.List.Len() - 1 2933 if argc < 1 { 2934 return nsrc 2935 } 2936 2937 // General case, with no function calls left as arguments. 2938 // Leave for gen, except that instrumentation requires old form. 
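    // For example, with instrumentation enabled, append(s, x, y) becomes
    // roughly:
    //
    //	a := s
    //	if cap(a)-len(a) < 2 {
    //		a = growslice(a, len(a)+2)
    //	}
    //	n := len(a)
    //	a = a[:n+2]
    //	a[n] = x
    //	a[n+1] = y
    //
    // with a as the result.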
2939 if !instrumenting || compiling_runtime { 2940 return n 2941 } 2942 2943 var l []*Node 2944 2945 ns := temp(nsrc.Type) 2946 l = append(l, nod(OAS, ns, nsrc)) // s = src 2947 2948 na := nodintconst(int64(argc)) // const argc 2949 nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc 2950 nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na) 2951 2952 fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T) 2953 fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem()) 2954 2955 nx.Nbody.Set1(nod(OAS, ns, 2956 mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns, 2957 nod(OADD, nod(OLEN, ns, nil), na)))) 2958 2959 l = append(l, nx) 2960 2961 nn := temp(Types[TINT]) 2962 l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s) 2963 2964 nx = nod(OSLICE, ns, nil) // ...s[:n+argc] 2965 nx.SetSliceBounds(nil, nod(OADD, nn, na), nil) 2966 nx.Etype = 1 2967 l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc] 2968 2969 ls = n.List.Slice()[1:] 2970 for i, n := range ls { 2971 nx = nod(OINDEX, ns, nn) // s[n] ... 2972 nx.Bounded = true 2973 l = append(l, nod(OAS, nx, n)) // s[n] = arg 2974 if i+1 < len(ls) { 2975 l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1 2976 } 2977 } 2978 2979 typecheckslice(l, Etop) 2980 walkstmtlist(l) 2981 init.Append(l...) 2982 return ns 2983 } 2984 2985 // Lower copy(a, b) to a memmove call or a runtime call. 2986 // 2987 // init { 2988 // n := len(a) 2989 // if n > len(b) { n = len(b) } 2990 // memmove(a.ptr, b.ptr, n*sizeof(elem(a))) 2991 // } 2992 // n; 2993 // 2994 // Also works if b is a string. 2995 // 2996 func copyany(n *Node, init *Nodes, runtimecall bool) *Node { 2997 if haspointers(n.Left.Type.Elem()) { 2998 fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type) 2999 return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right) 3000 } 3001 3002 if runtimecall { 3003 var fn *Node 3004 if n.Right.Type.IsString() { 3005 fn = syslook("slicestringcopy") 3006 } else { 3007 fn = syslook("slicecopy") 3008 } 3009 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 3010 return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width)) 3011 } 3012 3013 n.Left = walkexpr(n.Left, init) 3014 n.Right = walkexpr(n.Right, init) 3015 nl := temp(n.Left.Type) 3016 nr := temp(n.Right.Type) 3017 var l []*Node 3018 l = append(l, nod(OAS, nl, n.Left)) 3019 l = append(l, nod(OAS, nr, n.Right)) 3020 3021 nfrm := nod(OSPTR, nr, nil) 3022 nto := nod(OSPTR, nl, nil) 3023 3024 nlen := temp(Types[TINT]) 3025 3026 // n = len(to) 3027 l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil))) 3028 3029 // if n > len(frm) { n = len(frm) } 3030 nif := nod(OIF, nil, nil) 3031 3032 nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil)) 3033 nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil))) 3034 l = append(l, nif) 3035 3036 // Call memmove. 3037 fn := syslook("memmove") 3038 3039 fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem()) 3040 nwid := temp(Types[TUINTPTR]) 3041 l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR]))) 3042 nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width)) 3043 l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid)) 3044 3045 typecheckslice(l, Etop) 3046 walkstmtlist(l) 3047 init.Append(l...) 3048 return nlen 3049 } 3050 3051 func eqfor(t *Type, needsize *int) *Node { 3052 // Should only arrive here with large memory or 3053 // a struct/array containing a non-memory field/element. 
3054 // Small memory is handled inline, and single non-memory 3055 // is handled during type check (OCMPSTR etc). 3056 switch a, _ := algtype1(t); a { 3057 case AMEM: 3058 n := syslook("memequal") 3059 n = substArgTypes(n, t, t) 3060 *needsize = 1 3061 return n 3062 case ASPECIAL: 3063 sym := typesymprefix(".eq", t) 3064 n := newname(sym) 3065 n.Class = PFUNC 3066 ntype := nod(OTFUNC, nil, nil) 3067 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3068 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3069 ntype.Rlist.Append(nod(ODCLFIELD, nil, typenod(Types[TBOOL]))) 3070 ntype = typecheck(ntype, Etype) 3071 n.Type = ntype.Type 3072 *needsize = 0 3073 return n 3074 } 3075 Fatalf("eqfor %v", t) 3076 return nil 3077 } 3078 3079 // The result of walkcompare MUST be assigned back to n, e.g. 3080 // n.Left = walkcompare(n.Left, init) 3081 func walkcompare(n *Node, init *Nodes) *Node { 3082 // Given interface value l and concrete value r, rewrite 3083 // l == r 3084 // into types-equal && data-equal. 3085 // This is efficient, avoids allocations, and avoids runtime calls. 3086 var l, r *Node 3087 if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() { 3088 l = n.Left 3089 r = n.Right 3090 } else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() { 3091 l = n.Right 3092 r = n.Left 3093 } 3094 3095 if l != nil { 3096 // Handle both == and !=. 3097 eq := n.Op 3098 var andor Op 3099 if eq == OEQ { 3100 andor = OANDAND 3101 } else { 3102 andor = OOROR 3103 } 3104 // Check for types equal. 3105 // For empty interface, this is: 3106 // l.tab == type(r) 3107 // For non-empty interface, this is: 3108 // l.tab != nil && l.tab._type == type(r) 3109 var eqtype *Node 3110 tab := nod(OITAB, l, nil) 3111 rtyp := typename(r.Type) 3112 if l.Type.IsEmptyInterface() { 3113 tab.Type = ptrto(Types[TUINT8]) 3114 tab.Typecheck = 1 3115 eqtype = nod(eq, tab, rtyp) 3116 } else { 3117 nonnil := nod(brcom(eq), nodnil(), tab) 3118 match := nod(eq, itabType(tab), rtyp) 3119 eqtype = nod(andor, nonnil, match) 3120 } 3121 // Check for data equal. 3122 eqdata := nod(eq, ifaceData(l, r.Type), r) 3123 // Put it all together. 3124 expr := nod(andor, eqtype, eqdata) 3125 n = finishcompare(n, expr, init) 3126 return n 3127 } 3128 3129 // Must be comparison of array or struct. 3130 // Otherwise back end handles it. 3131 // While we're here, decide whether to 3132 // inline or call an eq alg. 3133 t := n.Left.Type 3134 var inline bool 3135 switch t.Etype { 3136 default: 3137 return n 3138 case TARRAY: 3139 inline = t.NumElem() <= 1 || (t.NumElem() <= 4 && issimple[t.Elem().Etype]) 3140 case TSTRUCT: 3141 inline = t.NumFields() <= 4 3142 } 3143 3144 cmpl := n.Left 3145 for cmpl != nil && cmpl.Op == OCONVNOP { 3146 cmpl = cmpl.Left 3147 } 3148 cmpr := n.Right 3149 for cmpr != nil && cmpr.Op == OCONVNOP { 3150 cmpr = cmpr.Left 3151 } 3152 3153 if !islvalue(cmpl) || !islvalue(cmpr) { 3154 Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) 3155 } 3156 3157 // Chose not to inline. Call equality function directly. 
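    // For example (informally): comparing two [16]byte values becomes a call
    // like memequal(&a, &b, 16), and a struct type that needs a generated
    // equality routine is compared by calling its .eq function; for != the
    // result is additionally wrapped in ONOT below.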
3158 if !inline { 3159 // eq algs take pointers 3160 pl := temp(ptrto(t)) 3161 al := nod(OAS, pl, nod(OADDR, cmpl, nil)) 3162 al.Right.Etype = 1 // addr does not escape 3163 al = typecheck(al, Etop) 3164 init.Append(al) 3165 3166 pr := temp(ptrto(t)) 3167 ar := nod(OAS, pr, nod(OADDR, cmpr, nil)) 3168 ar.Right.Etype = 1 // addr does not escape 3169 ar = typecheck(ar, Etop) 3170 init.Append(ar) 3171 3172 var needsize int 3173 call := nod(OCALL, eqfor(t, &needsize), nil) 3174 call.List.Append(pl) 3175 call.List.Append(pr) 3176 if needsize != 0 { 3177 call.List.Append(nodintconst(t.Width)) 3178 } 3179 res := call 3180 if n.Op != OEQ { 3181 res = nod(ONOT, res, nil) 3182 } 3183 n = finishcompare(n, res, init) 3184 return n 3185 } 3186 3187 // inline: build boolean expression comparing element by element 3188 andor := OANDAND 3189 if n.Op == ONE { 3190 andor = OOROR 3191 } 3192 var expr *Node 3193 compare := func(el, er *Node) { 3194 a := nod(n.Op, el, er) 3195 if expr == nil { 3196 expr = a 3197 } else { 3198 expr = nod(andor, expr, a) 3199 } 3200 } 3201 cmpl = safeexpr(cmpl, init) 3202 cmpr = safeexpr(cmpr, init) 3203 if t.IsStruct() { 3204 for _, f := range t.Fields().Slice() { 3205 sym := f.Sym 3206 if isblanksym(sym) { 3207 continue 3208 } 3209 compare( 3210 nodSym(OXDOT, cmpl, sym), 3211 nodSym(OXDOT, cmpr, sym), 3212 ) 3213 } 3214 } else { 3215 for i := 0; int64(i) < t.NumElem(); i++ { 3216 compare( 3217 nod(OINDEX, cmpl, nodintconst(int64(i))), 3218 nod(OINDEX, cmpr, nodintconst(int64(i))), 3219 ) 3220 } 3221 } 3222 if expr == nil { 3223 expr = nodbool(n.Op == OEQ) 3224 } 3225 n = finishcompare(n, expr, init) 3226 return n 3227 } 3228 3229 // The result of finishcompare MUST be assigned back to n, e.g. 3230 // n.Left = finishcompare(n.Left, x, r, init) 3231 func finishcompare(n, r *Node, init *Nodes) *Node { 3232 // Use nn here to avoid passing r to typecheck. 3233 nn := r 3234 nn = typecheck(nn, Erv) 3235 nn = walkexpr(nn, init) 3236 r = nn 3237 if r.Type != n.Type { 3238 r = nod(OCONVNOP, r, nil) 3239 r.Type = n.Type 3240 r.Typecheck = 1 3241 nn = r 3242 } 3243 return nn 3244 } 3245 3246 func samecheap(a *Node, b *Node) bool { 3247 var ar *Node 3248 var br *Node 3249 for a != nil && b != nil && a.Op == b.Op { 3250 switch a.Op { 3251 default: 3252 return false 3253 3254 case ONAME: 3255 return a == b 3256 3257 case ODOT, ODOTPTR: 3258 if a.Sym != b.Sym { 3259 return false 3260 } 3261 3262 case OINDEX: 3263 ar = a.Right 3264 br = b.Right 3265 if !Isconst(ar, CTINT) || !Isconst(br, CTINT) || ar.Val().U.(*Mpint).Cmp(br.Val().U.(*Mpint)) != 0 { 3266 return false 3267 } 3268 } 3269 3270 a = a.Left 3271 b = b.Left 3272 } 3273 3274 return false 3275 } 3276 3277 // The result of walkrotate MUST be assigned back to n, e.g. 3278 // n.Left = walkrotate(n.Left) 3279 func walkrotate(n *Node) *Node { 3280 if Thearch.LinkArch.InFamily(sys.MIPS64, sys.PPC64) { 3281 return n 3282 } 3283 3284 // Want << | >> or >> | << or << ^ >> or >> ^ << on unsigned value. 3285 l := n.Left 3286 3287 r := n.Right 3288 if (n.Op != OOR && n.Op != OXOR) || (l.Op != OLSH && l.Op != ORSH) || (r.Op != OLSH && r.Op != ORSH) || n.Type == nil || n.Type.IsSigned() || l.Op == r.Op { 3289 return n 3290 } 3291 3292 // Want same, side effect-free expression on lhs of both shifts. 3293 if !samecheap(l.Left, r.Left) { 3294 return n 3295 } 3296 3297 // Constants adding to width? 
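    // For example, for a uint32 x the expression
    //	x<<3 | x>>29
    // has shift counts summing to 32, so it is rewritten below into a single
    // left rotate by 3 (OLROT); a rotate by 0 or by the full width reduces to
    // x itself.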
3298 w := int(l.Type.Width * 8) 3299 3300 if Thearch.LinkArch.Family == sys.S390X && w != 32 && w != 64 { 3301 // only supports 32-bit and 64-bit rotates 3302 return n 3303 } 3304 3305 if smallintconst(l.Right) && smallintconst(r.Right) { 3306 sl := int(l.Right.Int64()) 3307 if sl >= 0 { 3308 sr := int(r.Right.Int64()) 3309 if sr >= 0 && sl+sr == w { 3310 // Rewrite left shift half to left rotate. 3311 if l.Op == OLSH { 3312 n = l 3313 } else { 3314 n = r 3315 } 3316 n.Op = OLROT 3317 3318 // Remove rotate 0 and rotate w. 3319 s := int(n.Right.Int64()) 3320 3321 if s == 0 || s == w { 3322 n = n.Left 3323 } 3324 return n 3325 } 3326 } 3327 return n 3328 } 3329 3330 // TODO: Could allow s and 32-s if s is bounded (maybe s&31 and 32-s&31). 3331 return n 3332 } 3333 3334 // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers. 3335 func (n *Node) isIntOrdering() bool { 3336 switch n.Op { 3337 case OLE, OLT, OGE, OGT: 3338 default: 3339 return false 3340 } 3341 return n.Left.Type.IsInteger() && n.Right.Type.IsInteger() 3342 } 3343 3344 // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10. 3345 // n must be an OANDAND or OOROR node. 3346 // The result of walkinrange MUST be assigned back to n, e.g. 3347 // n.Left = walkinrange(n.Left) 3348 func walkinrange(n *Node, init *Nodes) *Node { 3349 // We are looking for something equivalent to a opl b OP b opr c, where: 3350 // * a, b, and c have integer type 3351 // * b is side-effect-free 3352 // * opl and opr are each < or ≤ 3353 // * OP is && 3354 l := n.Left 3355 r := n.Right 3356 if !l.isIntOrdering() || !r.isIntOrdering() { 3357 return n 3358 } 3359 3360 // Find b, if it exists, and rename appropriately. 3361 // Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right 3362 // Output is: a opl b(==x) ANDAND/OROR b(==x) opr c 3363 a, opl, b := l.Left, l.Op, l.Right 3364 x, opr, c := r.Left, r.Op, r.Right 3365 for i := 0; ; i++ { 3366 if samesafeexpr(b, x) { 3367 break 3368 } 3369 if i == 3 { 3370 // Tried all permutations and couldn't find an appropriate b == x. 3371 return n 3372 } 3373 if i&1 == 0 { 3374 a, opl, b = b, brrev(opl), a 3375 } else { 3376 x, opr, c = c, brrev(opr), x 3377 } 3378 } 3379 3380 // If n.Op is ||, apply de Morgan. 3381 // Negate the internal ops now; we'll negate the top level op at the end. 3382 // Henceforth assume &&. 3383 negateResult := n.Op == OOROR 3384 if negateResult { 3385 opl = brcom(opl) 3386 opr = brcom(opr) 3387 } 3388 3389 cmpdir := func(o Op) int { 3390 switch o { 3391 case OLE, OLT: 3392 return -1 3393 case OGE, OGT: 3394 return +1 3395 } 3396 Fatalf("walkinrange cmpdir %v", o) 3397 return 0 3398 } 3399 if cmpdir(opl) != cmpdir(opr) { 3400 // Not a range check; something like b < a && b < c. 3401 return n 3402 } 3403 3404 switch opl { 3405 case OGE, OGT: 3406 // We have something like a > b && b ≥ c. 3407 // Switch and reverse ops and rename constants, 3408 // to make it look like a ≤ b && b < c. 3409 a, c = c, a 3410 opl, opr = brrev(opr), brrev(opl) 3411 } 3412 3413 // We must ensure that c-a is non-negative. 3414 // For now, require a and c to be constants. 3415 // In the future, we could also support a == 0 and c == len/cap(...). 3416 // Unfortunately, by this point, most len/cap expressions have been 3417 // stored into temporary variables. 3418 if !Isconst(a, CTINT) || !Isconst(c, CTINT) { 3419 return n 3420 } 3421 3422 if opl == OLT { 3423 // We have a < b && ... 3424 // We need a ≤ b && ... to safely use unsigned comparison tricks. 
3425 // If a is not the maximum constant for b's type, 3426 // we can increment a and switch to ≤. 3427 if a.Int64() >= maxintval[b.Type.Etype].Int64() { 3428 return n 3429 } 3430 a = nodintconst(a.Int64() + 1) 3431 opl = OLE 3432 } 3433 3434 bound := c.Int64() - a.Int64() 3435 if bound < 0 { 3436 // Bad news. Something like 5 <= x && x < 3. 3437 // Rare in practice, and we still need to generate side-effects, 3438 // so just leave it alone. 3439 return n 3440 } 3441 3442 // We have a ≤ b && b < c (or a ≤ b && b ≤ c). 3443 // This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a), 3444 // which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a), 3445 // which is equivalent to uint(b-a) < uint(c-a). 3446 ut := b.Type.toUnsigned() 3447 lhs := conv(nod(OSUB, b, a), ut) 3448 rhs := nodintconst(bound) 3449 if negateResult { 3450 // Negate top level. 3451 opr = brcom(opr) 3452 } 3453 cmp := nod(opr, lhs, rhs) 3454 cmp.Lineno = n.Lineno 3455 cmp = addinit(cmp, l.Ninit.Slice()) 3456 cmp = addinit(cmp, r.Ninit.Slice()) 3457 // Typecheck the AST rooted at cmp... 3458 cmp = typecheck(cmp, Erv) 3459 // ...but then reset cmp's type to match n's type. 3460 cmp.Type = n.Type 3461 cmp = walkexpr(cmp, init) 3462 return cmp 3463 } 3464 3465 // walkmul rewrites integer multiplication by powers of two as shifts. 3466 // The result of walkmul MUST be assigned back to n, e.g. 3467 // n.Left = walkmul(n.Left, init) 3468 func walkmul(n *Node, init *Nodes) *Node { 3469 if !n.Type.IsInteger() { 3470 return n 3471 } 3472 3473 var nr *Node 3474 var nl *Node 3475 if n.Right.Op == OLITERAL { 3476 nl = n.Left 3477 nr = n.Right 3478 } else if n.Left.Op == OLITERAL { 3479 nl = n.Right 3480 nr = n.Left 3481 } else { 3482 return n 3483 } 3484 3485 neg := 0 3486 3487 // x*0 is 0 (and side effects of x). 3488 var pow int 3489 var w int 3490 if nr.Int64() == 0 { 3491 cheapexpr(nl, init) 3492 Nodconst(n, n.Type, 0) 3493 goto ret 3494 } 3495 3496 // nr is a constant. 3497 pow = powtwo(nr) 3498 3499 if pow < 0 { 3500 return n 3501 } 3502 if pow >= 1000 { 3503 // negative power of 2, like -16 3504 neg = 1 3505 3506 pow -= 1000 3507 } 3508 3509 w = int(nl.Type.Width * 8) 3510 if pow+1 >= w { // too big, shouldn't happen 3511 return n 3512 } 3513 3514 nl = cheapexpr(nl, init) 3515 3516 if pow == 0 { 3517 // x*1 is x 3518 n = nl 3519 3520 goto ret 3521 } 3522 3523 n = nod(OLSH, nl, nodintconst(int64(pow))) 3524 3525 ret: 3526 if neg != 0 { 3527 n = nod(OMINUS, n, nil) 3528 } 3529 3530 n = typecheck(n, Erv) 3531 n = walkexpr(n, init) 3532 return n 3533 } 3534 3535 // walkdiv rewrites division by a constant as less expensive 3536 // operations. 3537 // The result of walkdiv MUST be assigned back to n, e.g. 3538 // n.Left = walkdiv(n.Left, init) 3539 func walkdiv(n *Node, init *Nodes) *Node { 3540 // if >= 0, nr is 1<<pow // 1 if nr is negative. 3541 3542 if n.Right.Op != OLITERAL { 3543 return n 3544 } 3545 3546 // nr is a constant. 3547 nl := cheapexpr(n.Left, init) 3548 3549 nr := n.Right 3550 3551 // special cases of mod/div 3552 // by a constant 3553 w := int(nl.Type.Width * 8) 3554 3555 s := 0 // 1 if nr is negative. 3556 pow := powtwo(nr) // if >= 0, nr is 1<<pow 3557 if pow >= 1000 { 3558 // negative power of 2 3559 s = 1 3560 3561 pow -= 1000 3562 } 3563 3564 if pow+1 >= w { 3565 // divisor too large. 3566 return n 3567 } 3568 3569 if pow < 0 { 3570 // try to do division by multiply by (2^w)/d 3571 // see hacker's delight chapter 10 3572 // TODO: support 64-bit magic multiply here. 
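        // For example, for a uint32 x the division x / 3 is lowered
        // roughly to
        //	hi32(x * 0xAAAAAAAB) >> 1
        // using OHMUL for the high 32 bits of the product, so no divide
        // instruction is needed.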
3573 var m Magic 3574 m.W = w 3575 3576 if nl.Type.IsSigned() { 3577 m.Sd = nr.Int64() 3578 smagic(&m) 3579 } else { 3580 m.Ud = uint64(nr.Int64()) 3581 umagic(&m) 3582 } 3583 3584 if m.Bad != 0 { 3585 return n 3586 } 3587 3588 // We have a quick division method so use it 3589 // for modulo too. 3590 if n.Op == OMOD { 3591 // rewrite as A%B = A - (A/B*B). 3592 n1 := nod(ODIV, nl, nr) 3593 3594 n2 := nod(OMUL, n1, nr) 3595 n = nod(OSUB, nl, n2) 3596 goto ret 3597 } 3598 3599 switch simtype[nl.Type.Etype] { 3600 default: 3601 return n 3602 3603 // n1 = nl * magic >> w (HMUL) 3604 case TUINT8, TUINT16, TUINT32: 3605 var nc Node 3606 3607 Nodconst(&nc, nl.Type, int64(m.Um)) 3608 n1 := nod(OHMUL, nl, &nc) 3609 n1 = typecheck(n1, Erv) 3610 if m.Ua != 0 { 3611 // Select a Go type with (at least) twice the width. 3612 var twide *Type 3613 switch simtype[nl.Type.Etype] { 3614 default: 3615 return n 3616 3617 case TUINT8, TUINT16: 3618 twide = Types[TUINT32] 3619 3620 case TUINT32: 3621 twide = Types[TUINT64] 3622 3623 case TINT8, TINT16: 3624 twide = Types[TINT32] 3625 3626 case TINT32: 3627 twide = Types[TINT64] 3628 } 3629 3630 // add numerator (might overflow). 3631 // n2 = (n1 + nl) 3632 n2 := nod(OADD, conv(n1, twide), conv(nl, twide)) 3633 3634 // shift by m.s 3635 var nc Node 3636 3637 Nodconst(&nc, Types[TUINT], int64(m.S)) 3638 n = conv(nod(ORSH, n2, &nc), nl.Type) 3639 } else { 3640 // n = n1 >> m.s 3641 var nc Node 3642 3643 Nodconst(&nc, Types[TUINT], int64(m.S)) 3644 n = nod(ORSH, n1, &nc) 3645 } 3646 3647 // n1 = nl * magic >> w 3648 case TINT8, TINT16, TINT32: 3649 var nc Node 3650 3651 Nodconst(&nc, nl.Type, m.Sm) 3652 n1 := nod(OHMUL, nl, &nc) 3653 n1 = typecheck(n1, Erv) 3654 if m.Sm < 0 { 3655 // add the numerator. 3656 n1 = nod(OADD, n1, nl) 3657 } 3658 3659 // shift by m.s 3660 var ns Node 3661 3662 Nodconst(&ns, Types[TUINT], int64(m.S)) 3663 n2 := conv(nod(ORSH, n1, &ns), nl.Type) 3664 3665 // add 1 iff n1 is negative. 3666 var nneg Node 3667 3668 Nodconst(&nneg, Types[TUINT], int64(w)-1) 3669 n3 := nod(ORSH, nl, &nneg) // n4 = -1 iff n1 is negative. 3670 n = nod(OSUB, n2, n3) 3671 3672 // apply sign. 3673 if m.Sd < 0 { 3674 n = nod(OMINUS, n, nil) 3675 } 3676 } 3677 3678 goto ret 3679 } 3680 3681 switch pow { 3682 case 0: 3683 if n.Op == OMOD { 3684 // nl % 1 is zero. 3685 Nodconst(n, n.Type, 0) 3686 } else if s != 0 { 3687 // divide by -1 3688 n.Op = OMINUS 3689 3690 n.Right = nil 3691 } else { 3692 // divide by 1 3693 n = nl 3694 } 3695 3696 default: 3697 if n.Type.IsSigned() { 3698 if n.Op == OMOD { 3699 // signed modulo 2^pow is like ANDing 3700 // with the last pow bits, but if nl < 0, 3701 // nl & (2^pow-1) is (nl+1)%2^pow - 1. 3702 var nc Node 3703 3704 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1) 3705 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0. 3706 if pow == 1 { 3707 n1 = typecheck(n1, Erv) 3708 n1 = cheapexpr(n1, init) 3709 3710 // n = (nl+ε)&1 -ε where ε=1 iff nl<0. 3711 n2 := nod(OSUB, nl, n1) 3712 3713 var nc Node 3714 Nodconst(&nc, nl.Type, 1) 3715 n3 := nod(OAND, n2, &nc) 3716 n = nod(OADD, n3, n1) 3717 } else { 3718 // n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0. 3719 var nc Node 3720 3721 Nodconst(&nc, nl.Type, (1<<uint(pow))-1) 3722 n2 := nod(OAND, n1, &nc) // n2 = 2^pow-1 iff nl<0. 3723 n2 = typecheck(n2, Erv) 3724 n2 = cheapexpr(n2, init) 3725 3726 n3 := nod(OADD, nl, n2) 3727 n4 := nod(OAND, n3, &nc) 3728 n = nod(OSUB, n4, n2) 3729 } 3730 3731 break 3732 } else { 3733 // arithmetic right shift does not give the correct rounding. 
3734                 // if nl >= 0, nl >> n == nl / nr
3735                 // if nl < 0, we want to add 2^n-1 first.
3736                 var nc Node
3737
3738                 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
3739                 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
3740                 if pow == 1 {
3741                     // nl+1 is nl-(-1)
3742                     n.Left = nod(OSUB, nl, n1)
3743                 } else {
3744                     // Do a logical right shift on -1 to keep pow bits.
3745                     var nc Node
3746
3747                     Nodconst(&nc, Types[simtype[TUINT]], int64(w)-int64(pow))
3748                     n2 := nod(ORSH, conv(n1, nl.Type.toUnsigned()), &nc)
3749                     n.Left = nod(OADD, nl, conv(n2, nl.Type))
3750                 }
3751
3752                 // n = (nl + 2^pow-1) >> pow
3753                 n.Op = ORSH
3754
3755                 var n2 Node
3756                 Nodconst(&n2, Types[simtype[TUINT]], int64(pow))
3757                 n.Right = &n2
3758                 n.Typecheck = 0
3759             }
3760
3761             if s != 0 {
3762                 n = nod(OMINUS, n, nil)
3763             }
3764             break
3765         }
3766
3767         var nc Node
3768         if n.Op == OMOD {
3769             // n = nl & (nr-1)
3770             n.Op = OAND
3771
3772             Nodconst(&nc, nl.Type, nr.Int64()-1)
3773         } else {
3774             // n = nl >> pow
3775             n.Op = ORSH
3776
3777             Nodconst(&nc, Types[simtype[TUINT]], int64(pow))
3778         }
3779
3780         n.Typecheck = 0
3781         n.Right = &nc
3782     }
3783
3784     goto ret
3785
3786 ret:
3787     n = typecheck(n, Erv)
3788     n = walkexpr(n, init)
3789     return n
3790 }
3791
3792 // bounded reports whether the integer n must be in the range [0, max).
3793 func bounded(n *Node, max int64) bool {
3794     if n.Type == nil || !n.Type.IsInteger() {
3795         return false
3796     }
3797
3798     sign := n.Type.IsSigned()
3799     bits := int32(8 * n.Type.Width)
3800
3801     if smallintconst(n) {
3802         v := n.Int64()
3803         return 0 <= v && v < max
3804     }
3805
3806     switch n.Op {
3807     case OAND:
3808         v := int64(-1)
3809         if smallintconst(n.Left) {
3810             v = n.Left.Int64()
3811         } else if smallintconst(n.Right) {
3812             v = n.Right.Int64()
3813         }
3814
3815         if 0 <= v && v < max {
3816             return true
3817         }
3818
3819     case OMOD:
3820         if !sign && smallintconst(n.Right) {
3821             v := n.Right.Int64()
3822             if 0 <= v && v <= max {
3823                 return true
3824             }
3825         }
3826
3827     case ODIV:
3828         if !sign && smallintconst(n.Right) {
3829             v := n.Right.Int64()
3830             for bits > 0 && v >= 2 {
3831                 bits--
3832                 v >>= 1
3833             }
3834         }
3835
3836     case ORSH:
3837         if !sign && smallintconst(n.Right) {
3838             v := n.Right.Int64()
3839             if v > int64(bits) {
3840                 return true
3841             }
3842             bits -= int32(v)
3843         }
3844     }
3845
3846     if !sign && bits <= 62 && 1<<uint(bits) <= max {
3847         return true
3848     }
3849
3850     return false
3851 }
3852
3853 // usemethod checks interface method calls for uses of reflect.Type.Method.
3854 func usemethod(n *Node) {
3855     t := n.Left.Type
3856
3857     // Looking for either of:
3858     //	Method(int) reflect.Method
3859     //	MethodByName(string) (reflect.Method, bool)
3860     //
3861     // TODO(crawshaw): improve precision of match by working out
3862     //                 how to check the method name.
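    // For example, a call such as
    //	m := reflect.TypeOf(v).Method(0)
    // matches the first signature and sets ReflectMethod on the enclosing
    // function below; the match is by signature only (hence the TODO), and
    // the flag lets the linker be conservative about discarding method
    // metadata that reflection might need.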
3863 if n := t.Params().NumFields(); n != 1 { 3864 return 3865 } 3866 if n := t.Results().NumFields(); n != 1 && n != 2 { 3867 return 3868 } 3869 p0 := t.Params().Field(0) 3870 res0 := t.Results().Field(0) 3871 var res1 *Field 3872 if t.Results().NumFields() == 2 { 3873 res1 = t.Results().Field(1) 3874 } 3875 3876 if res1 == nil { 3877 if p0.Type.Etype != TINT { 3878 return 3879 } 3880 } else { 3881 if !p0.Type.IsString() { 3882 return 3883 } 3884 if !res1.Type.IsBoolean() { 3885 return 3886 } 3887 } 3888 if res0.Type.String() != "reflect.Method" { 3889 return 3890 } 3891 3892 Curfn.Func.ReflectMethod = true 3893 } 3894 3895 func usefield(n *Node) { 3896 if obj.Fieldtrack_enabled == 0 { 3897 return 3898 } 3899 3900 switch n.Op { 3901 default: 3902 Fatalf("usefield %v", n.Op) 3903 3904 case ODOT, ODOTPTR: 3905 break 3906 } 3907 if n.Sym == nil { 3908 // No field name. This DOTPTR was built by the compiler for access 3909 // to runtime data structures. Ignore. 3910 return 3911 } 3912 3913 t := n.Left.Type 3914 if t.IsPtr() { 3915 t = t.Elem() 3916 } 3917 field := dotField[typeSym{t.Orig, n.Sym}] 3918 if field == nil { 3919 Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) 3920 } 3921 if !strings.Contains(field.Note, "go:\"track\"") { 3922 return 3923 } 3924 3925 outer := n.Left.Type 3926 if outer.IsPtr() { 3927 outer = outer.Elem() 3928 } 3929 if outer.Sym == nil { 3930 yyerror("tracked field must be in named struct type") 3931 } 3932 if !exportname(field.Sym.Name) { 3933 yyerror("tracked field must be exported (upper case)") 3934 } 3935 3936 sym := tracksym(outer, field) 3937 if Curfn.Func.FieldTrack == nil { 3938 Curfn.Func.FieldTrack = make(map[*Sym]struct{}) 3939 } 3940 Curfn.Func.FieldTrack[sym] = struct{}{} 3941 } 3942 3943 func candiscardlist(l Nodes) bool { 3944 for _, n := range l.Slice() { 3945 if !candiscard(n) { 3946 return false 3947 } 3948 } 3949 return true 3950 } 3951 3952 func candiscard(n *Node) bool { 3953 if n == nil { 3954 return true 3955 } 3956 3957 switch n.Op { 3958 default: 3959 return false 3960 3961 // Discardable as long as the subpieces are. 3962 case ONAME, 3963 ONONAME, 3964 OTYPE, 3965 OPACK, 3966 OLITERAL, 3967 OADD, 3968 OSUB, 3969 OOR, 3970 OXOR, 3971 OADDSTR, 3972 OADDR, 3973 OANDAND, 3974 OARRAYBYTESTR, 3975 OARRAYRUNESTR, 3976 OSTRARRAYBYTE, 3977 OSTRARRAYRUNE, 3978 OCAP, 3979 OCMPIFACE, 3980 OCMPSTR, 3981 OCOMPLIT, 3982 OMAPLIT, 3983 OSTRUCTLIT, 3984 OARRAYLIT, 3985 OSLICELIT, 3986 OPTRLIT, 3987 OCONV, 3988 OCONVIFACE, 3989 OCONVNOP, 3990 ODOT, 3991 OEQ, 3992 ONE, 3993 OLT, 3994 OLE, 3995 OGT, 3996 OGE, 3997 OKEY, 3998 OSTRUCTKEY, 3999 OLEN, 4000 OMUL, 4001 OLSH, 4002 ORSH, 4003 OAND, 4004 OANDNOT, 4005 ONEW, 4006 ONOT, 4007 OCOM, 4008 OPLUS, 4009 OMINUS, 4010 OOROR, 4011 OPAREN, 4012 ORUNESTR, 4013 OREAL, 4014 OIMAG, 4015 OCOMPLEX: 4016 break 4017 4018 // Discardable as long as we know it's not division by zero. 4019 case ODIV, OMOD: 4020 if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 { 4021 break 4022 } 4023 if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 { 4024 break 4025 } 4026 return false 4027 4028 // Discardable as long as we know it won't fail because of a bad size. 4029 case OMAKECHAN, OMAKEMAP: 4030 if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 { 4031 break 4032 } 4033 return false 4034 4035 // Difficult to tell what sizes are okay. 
4036 case OMAKESLICE: 4037 return false 4038 } 4039 4040 if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) { 4041 return false 4042 } 4043 4044 return true 4045 } 4046 4047 // rewrite 4048 // print(x, y, z) 4049 // into 4050 // func(a1, a2, a3) { 4051 // print(a1, a2, a3) 4052 // }(x, y, z) 4053 // and same for println. 4054 4055 var walkprintfunc_prgen int 4056 4057 // The result of walkprintfunc MUST be assigned back to n, e.g. 4058 // n.Left = walkprintfunc(n.Left, init) 4059 func walkprintfunc(n *Node, init *Nodes) *Node { 4060 if n.Ninit.Len() != 0 { 4061 walkstmtlist(n.Ninit.Slice()) 4062 init.AppendNodes(&n.Ninit) 4063 } 4064 4065 t := nod(OTFUNC, nil, nil) 4066 num := 0 4067 var printargs []*Node 4068 var a *Node 4069 var buf string 4070 for _, n1 := range n.List.Slice() { 4071 buf = fmt.Sprintf("a%d", num) 4072 num++ 4073 a = nod(ODCLFIELD, newname(lookup(buf)), typenod(n1.Type)) 4074 t.List.Append(a) 4075 printargs = append(printargs, a.Left) 4076 } 4077 4078 fn := nod(ODCLFUNC, nil, nil) 4079 walkprintfunc_prgen++ 4080 buf = fmt.Sprintf("print·%d", walkprintfunc_prgen) 4081 fn.Func.Nname = newname(lookup(buf)) 4082 fn.Func.Nname.Name.Defn = fn 4083 fn.Func.Nname.Name.Param.Ntype = t 4084 declare(fn.Func.Nname, PFUNC) 4085 4086 oldfn := Curfn 4087 Curfn = nil 4088 funchdr(fn) 4089 4090 a = nod(n.Op, nil, nil) 4091 a.List.Set(printargs) 4092 a = typecheck(a, Etop) 4093 a = walkstmt(a) 4094 4095 fn.Nbody.Set1(a) 4096 4097 funcbody(fn) 4098 4099 fn = typecheck(fn, Etop) 4100 typecheckslice(fn.Nbody.Slice(), Etop) 4101 xtop = append(xtop, fn) 4102 Curfn = oldfn 4103 4104 a = nod(OCALL, nil, nil) 4105 a.Left = fn.Func.Nname 4106 a.List.Set(n.List.Slice()) 4107 a = typecheck(a, Etop) 4108 a = walkexpr(a, init) 4109 return a 4110 }
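// For example,
//	print(x, y)
// becomes roughly
//	print·N(x, y)
// where the compiler also generates
//	func print·N(a0 T0, a1 T1) { print(a0, a1) }
// (and likewise for println), so the print runs inside an ordinary function
// that can be called from a go or defer statement.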