github.com/dannin/go@v0.0.0-20161031215817-d35dfd405eaa/src/cmd/compile/internal/gc/walk.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const (
	tmpstringbufsize = 32
)

func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class == PAUTO || ln.Class == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Used {
			ln.Name.Defn.Left.Used = true
		}
	}

	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class != PAUTO && ln.Class != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Used {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Used {
				continue
			}
			lineno = defn.Left.Lineno
			yyerror("%v declared and not used", ln.Sym)
			defn.Left.Used = true // suppress repeats
		} else {
			lineno = ln.Lineno
			yyerror("%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	var arg *Node
	var lhs *Node

	callfunc := n.Left
	for _, arg = range callfunc.List.Slice() {
		if arg.Op != OAS {
			yyerror("call arg not assignment")
		}
		lhs = arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			yyerror("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	yyerror("call arg assign not indreg(SP)");
		lhs.Xoffset += int64(adjust)
	}
}
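// Added illustrative note (not part of the original file): adjustargs is
// called below by walkstmt's ODEFER and OPROC cases. After a defer/go call
// has been walked, its arguments are OINDREGSP stores relative to SP, and
// shifting every store's Xoffset by 2*Widthptr leaves room in front of them
// for the two extra words the runtime entry point takes (the argument size
// and the function value), roughly:
//
//	defer f(x)
//	// after walking:     (SP+0) = x
//	// after adjustargs:  (SP+2*Widthptr) = x, freeing the first two words
//	//                    for the "size & fn" arguments of the runtime call.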
// The result of walkstmt MUST be assigned back to n, e.g.
//	n.Left = walkstmt(n.Left)
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}
	if n.IsStatic { // don't walk, generated by anylit.
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, typename(n.Left.Type), n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		break

	case ODCL:
		v := n.Left
		if v.Class == PAUTOHEAP {
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			nn := nod(OAS, v.Name.Heapaddr, prealloc[v])
			nn.Colas = true
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		hasdefer = true
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR:
		if n.Left != nil {
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			var cl Class
			for _, ln := range Curfn.Func.Dcl {
				cl = ln.Class
				if cl == PAUTO || cl == PAUTOHEAP {
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			ls := n.List.Slice()
			for i, n := range ls {
				ls[i] = applywritebarrier(n)
			}
			break
		}

		ll := ascompatte(n.Op, nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		walkrange(n)

	case OXFALL:
		yyerror("fallthrough statement out of place")
		n.Op = OFALL
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

func isSmallMakeSlice(n *Node) bool {
	if n.Op != OMAKESLICE {
		return false
	}
	l := n.Left
	r := n.Right
	if r == nil {
		r = l
	}
	t := n.Type

	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types expressions are calculated.
// compile-time constants are evaluated.
// complex side effects like statements are appended to init
func walkexprlist(s []*Node, init *Nodes) {
	for i := range s {
		s[i] = walkexpr(s[i], init)
	}
}

func walkexprlistsafe(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = safeexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

func walkexprlistcheap(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = cheapexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
func convFuncName(from, to *Type) string {
	tkind := to.iet()
	switch from.iet() {
	case 'I':
		switch tkind {
		case 'I':
			return "convI2I"
		}
	case 'T':
		switch tkind {
		case 'E':
			return "convT2E"
		case 'I':
			return "convT2I"
		}
	}
	Fatalf("unknown conv func %c2%c", from.iet(), to.iet())
	panic("unreachable")
}
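// Added illustrative note (not part of the original file): convFuncName above
// selects the runtime helper that materializes an interface value, keyed by
// the 'T' (concrete), 'I' (non-empty interface), and 'E' (empty interface)
// kinds reported by iet(). Roughly, for operands that are not pointer-shaped
// (pointer-shaped values take the OEFACE fast path in walkexpr instead):
//
//	var e interface{} = 42                // T -> E: convT2E
//	var s fmt.Stringer = time.Duration(1) // T -> I: convT2I
//	var r io.Reader = rw                  // I -> I (rw is an io.ReadWriter): convI2I
//
// Conversions out of an interface (type assertions) go through the assert*
// helpers named by assertFuncName below rather than the conv* helpers.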
// Build name of function: assertI2E etc.
// If with2suffix is true, the form ending in "2" is returned.
func assertFuncName(from, to *Type, with2suffix bool) string {
	l := len("assertX2X2")
	if !with2suffix {
		l--
	}
	tkind := to.iet()
	switch from.iet() {
	case 'E':
		switch tkind {
		case 'I':
			return "assertE2I2"[:l]
		case 'E':
			return "assertE2E2"[:l]
		case 'T':
			return "assertE2T2"[:l]
		}
	case 'I':
		switch tkind {
		case 'I':
			return "assertI2I2"[:l]
		case 'E':
			return "assertI2E2"[:l]
		case 'T':
			return "assertI2T2"[:l]
		}
	}
	Fatalf("unknown assert func %c2%c", from.iet(), to.iet())
	panic("unreachable")
}

// The result of walkexpr MUST be assigned back to n, e.g.
//	n.Left = walkexpr(n.Left, init)
func walkexpr(n *Node, init *Nodes) *Node {
	if n == nil {
		return n
	}

	if init == &n.Ninit {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
		Fatalf("walkexpr init == &n->ninit")
	}

	if n.Ninit.Len() != 0 {
		walkstmtlist(n.Ninit.Slice())
		init.AppendNodes(&n.Ninit)
	}

	lno := setlineno(n)

	if Debug['w'] > 1 {
		Dump("walk-before", n)
	}

	if n.Typecheck != 1 {
		Fatalf("missed typecheck: %+v", n)
	}

	if n.Op == ONAME && n.Class == PAUTOHEAP {
		nn := nod(OIND, n.Name.Heapaddr, nil)
		nn = typecheck(nn, Erv)
		nn = walkexpr(nn, init)
		nn.Left.NonNil = true
		return nn
	}

opswitch:
	switch n.Op {
	default:
		Dump("walk", n)
		Fatalf("walkexpr: switch 1 unknown op %+S", n)

	case OTYPE,
		ONONAME,
		OINDREGSP,
		OEMPTY,
		OGETG:

	case ONOT,
		OMINUS,
		OPLUS,
		OCOM,
		OREAL,
		OIMAG,
		ODOTMETH,
		ODOTINTER:
		n.Left = walkexpr(n.Left, init)

	case OIND:
		n.Left = walkexpr(n.Left, init)

	case ODOT:
		usefield(n)
		n.Left = walkexpr(n.Left, init)

	case ODOTPTR:
		usefield(n)
		if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 {
			// No actual copy will be generated, so emit an explicit nil check.
			n.Left = cheapexpr(n.Left, init)

			checknil(n.Left, init)
		}

		n.Left = walkexpr(n.Left, init)

	case OEFACE:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)

	case OSPTR, OITAB, OIDATA:
		n.Left = walkexpr(n.Left, init)

	case OLEN, OCAP:
		n.Left = walkexpr(n.Left, init)

		// replace len(*[10]int) with 10.
		// delayed until now to preserve side effects.
		t := n.Left.Type

		if t.IsPtr() {
			t = t.Elem()
		}
		if t.IsArray() {
			safeexpr(n.Left, init)
			Nodconst(n, n.Type, t.NumElem())
			n.Typecheck = 1
		}

	case OLSH, ORSH:
		n.Left = walkexpr(n.Left, init)
		n.Right = walkexpr(n.Right, init)
		t := n.Left.Type
		n.Bounded = bounded(n.Right, 8*t.Width)
		if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) {
			Warn("shift bounds check elided")
		}

	// Use results from call expression as arguments for complex.
578 case OAND, 579 OSUB, 580 OHMUL, 581 OLT, 582 OLE, 583 OGE, 584 OGT, 585 OADD, 586 OCOMPLEX, 587 OLROT: 588 if n.Op == OCOMPLEX && n.Left == nil && n.Right == nil { 589 n.Left = n.List.First() 590 n.Right = n.List.Second() 591 } 592 593 n.Left = walkexpr(n.Left, init) 594 n.Right = walkexpr(n.Right, init) 595 596 case OOR, OXOR: 597 n.Left = walkexpr(n.Left, init) 598 n.Right = walkexpr(n.Right, init) 599 n = walkrotate(n) 600 601 case OEQ, ONE: 602 n.Left = walkexpr(n.Left, init) 603 n.Right = walkexpr(n.Right, init) 604 605 // Disable safemode while compiling this code: the code we 606 // generate internally can refer to unsafe.Pointer. 607 // In this case it can happen if we need to generate an == 608 // for a struct containing a reflect.Value, which itself has 609 // an unexported field of type unsafe.Pointer. 610 old_safemode := safemode 611 safemode = false 612 n = walkcompare(n, init) 613 safemode = old_safemode 614 615 case OANDAND, OOROR: 616 n.Left = walkexpr(n.Left, init) 617 618 // cannot put side effects from n.Right on init, 619 // because they cannot run before n.Left is checked. 620 // save elsewhere and store on the eventual n.Right. 621 var ll Nodes 622 623 n.Right = walkexpr(n.Right, &ll) 624 n.Right = addinit(n.Right, ll.Slice()) 625 n = walkinrange(n, init) 626 627 case OPRINT, OPRINTN: 628 walkexprlist(n.List.Slice(), init) 629 n = walkprint(n, init) 630 631 case OPANIC: 632 n = mkcall("gopanic", nil, init, n.Left) 633 634 case ORECOVER: 635 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 636 637 case OLITERAL: 638 n.Addable = true 639 640 case OCLOSUREVAR, OCFUNC: 641 n.Addable = true 642 643 case ONAME: 644 n.Addable = true 645 646 case OCALLINTER: 647 usemethod(n) 648 t := n.Left.Type 649 if n.List.Len() != 0 && n.List.First().Op == OAS { 650 break 651 } 652 n.Left = walkexpr(n.Left, init) 653 walkexprlist(n.List.Slice(), init) 654 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 655 n.List.Set(reorder1(ll)) 656 657 case OCALLFUNC: 658 if n.Left.Op == OCLOSURE { 659 // Transform direct call of a closure to call of a normal function. 660 // transformclosure already did all preparation work. 661 662 // Prepend captured variables to argument list. 663 n.List.Prepend(n.Left.Func.Enter.Slice()...) 664 665 n.Left.Func.Enter.Set(nil) 666 667 // Replace OCLOSURE with ONAME/PFUNC. 668 n.Left = n.Left.Func.Closure.Func.Nname 669 670 // Update type of OCALLFUNC node. 671 // Output arguments had not changed, but their offsets could. 
672 if n.Left.Type.Results().NumFields() == 1 { 673 n.Type = n.Left.Type.Results().Field(0).Type 674 } else { 675 n.Type = n.Left.Type.Results() 676 } 677 } 678 679 t := n.Left.Type 680 if n.List.Len() != 0 && n.List.First().Op == OAS { 681 break 682 } 683 684 n.Left = walkexpr(n.Left, init) 685 walkexprlist(n.List.Slice(), init) 686 687 if n.Left.Op == ONAME && n.Left.Sym.Name == "Sqrt" && 688 (n.Left.Sym.Pkg.Path == "math" || n.Left.Sym.Pkg == localpkg && myimportpath == "math") { 689 if Thearch.LinkArch.InFamily(sys.AMD64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) { 690 n.Op = OSQRT 691 n.Left = n.List.First() 692 n.List.Set(nil) 693 break opswitch 694 } 695 } 696 697 ll := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 698 n.List.Set(reorder1(ll)) 699 700 case OCALLMETH: 701 t := n.Left.Type 702 if n.List.Len() != 0 && n.List.First().Op == OAS { 703 break 704 } 705 n.Left = walkexpr(n.Left, init) 706 walkexprlist(n.List.Slice(), init) 707 ll := ascompatte(n.Op, n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 708 lr := ascompatte(n.Op, n, n.Isddd, t.Params(), n.List.Slice(), 0, init) 709 ll = append(ll, lr...) 710 n.Left.Left = nil 711 ullmancalc(n.Left) 712 n.List.Set(reorder1(ll)) 713 714 case OAS: 715 init.AppendNodes(&n.Ninit) 716 717 n.Left = walkexpr(n.Left, init) 718 n.Left = safeexpr(n.Left, init) 719 720 if oaslit(n, init) { 721 break 722 } 723 724 if n.Right == nil { 725 // TODO(austin): Check all "implicit zeroing" 726 break 727 } 728 729 switch n.Right.Op { 730 default: 731 n.Right = walkexpr(n.Right, init) 732 733 case ODOTTYPE: 734 // TODO(rsc): The isfat is for consistency with componentgen and orderexpr. 735 // It needs to be removed in all three places. 736 // That would allow inlining x.(struct{*int}) the same as x.(*int). 737 if isdirectiface(n.Right.Type) && !isfat(n.Right.Type) && !instrumenting { 738 // handled directly during cgen 739 n.Right = walkexpr(n.Right, init) 740 break 741 } 742 743 // x = i.(T); n.Left is x, n.Right.Left is i. 744 // orderstmt made sure x is addressable. 745 n.Right.Left = walkexpr(n.Right.Left, init) 746 747 n1 := nod(OADDR, n.Left, nil) 748 r := n.Right // i.(T) 749 750 if Debug_typeassert > 0 { 751 Warn("type assertion not inlined") 752 } 753 754 fn := syslook(assertFuncName(r.Left.Type, r.Type, false)) 755 fn = substArgTypes(fn, r.Left.Type, r.Type) 756 757 n = mkcall1(fn, nil, init, typename(r.Type), r.Left, n1) 758 n = walkexpr(n, init) 759 break opswitch 760 761 case ORECV: 762 // x = <-c; n.Left is x, n.Right.Left is c. 763 // orderstmt made sure x is addressable. 764 n.Right.Left = walkexpr(n.Right.Left, init) 765 766 n1 := nod(OADDR, n.Left, nil) 767 r := n.Right.Left // the channel 768 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, typename(r.Type), r, n1) 769 n = walkexpr(n, init) 770 break opswitch 771 772 case OAPPEND: 773 // x = append(...) 774 r := n.Right 775 if r.Type.Elem().NotInHeap { 776 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 777 } 778 if r.Isddd { 779 r = appendslice(r, init) // also works for append(slice, string). 780 } else { 781 r = walkappend(r, init, n) 782 } 783 n.Right = r 784 if r.Op == OAPPEND { 785 // Left in place for back end. 786 // Do not add a new write barrier. 787 break opswitch 788 } 789 // Otherwise, lowered for race detector. 790 // Treat as ordinary assignment. 
791 } 792 793 if n.Left != nil && n.Right != nil { 794 static := n.IsStatic 795 n = convas(n, init) 796 n.IsStatic = static 797 n = applywritebarrier(n) 798 } 799 800 case OAS2: 801 init.AppendNodes(&n.Ninit) 802 walkexprlistsafe(n.List.Slice(), init) 803 walkexprlistsafe(n.Rlist.Slice(), init) 804 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 805 ll = reorder3(ll) 806 for i, n := range ll { 807 ll[i] = applywritebarrier(n) 808 } 809 n = liststmt(ll) 810 811 // a,b,... = fn() 812 case OAS2FUNC: 813 init.AppendNodes(&n.Ninit) 814 815 r := n.Rlist.First() 816 walkexprlistsafe(n.List.Slice(), init) 817 r = walkexpr(r, init) 818 819 if isIntrinsicCall(r) { 820 n.Rlist.Set1(r) 821 break 822 } 823 init.Append(r) 824 825 ll := ascompatet(n.Op, n.List, r.Type) 826 for i, n := range ll { 827 ll[i] = applywritebarrier(n) 828 } 829 n = liststmt(ll) 830 831 // x, y = <-c 832 // orderstmt made sure x is addressable. 833 case OAS2RECV: 834 init.AppendNodes(&n.Ninit) 835 836 r := n.Rlist.First() 837 walkexprlistsafe(n.List.Slice(), init) 838 r.Left = walkexpr(r.Left, init) 839 var n1 *Node 840 if isblank(n.List.First()) { 841 n1 = nodnil() 842 } else { 843 n1 = nod(OADDR, n.List.First(), nil) 844 } 845 n1.Etype = 1 // addr does not escape 846 fn := chanfn("chanrecv2", 2, r.Left.Type) 847 ok := n.List.Second() 848 call := mkcall1(fn, ok.Type, init, typename(r.Left.Type), r.Left, n1) 849 n = nod(OAS, ok, call) 850 n = typecheck(n, Etop) 851 852 // a,b = m[i]; 853 case OAS2MAPR: 854 init.AppendNodes(&n.Ninit) 855 856 r := n.Rlist.First() 857 walkexprlistsafe(n.List.Slice(), init) 858 r.Left = walkexpr(r.Left, init) 859 r.Right = walkexpr(r.Right, init) 860 t := r.Left.Type 861 p := "" 862 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 863 switch algtype(t.Key()) { 864 case AMEM32: 865 p = "mapaccess2_fast32" 866 case AMEM64: 867 p = "mapaccess2_fast64" 868 case ASTRING: 869 p = "mapaccess2_faststr" 870 } 871 } 872 873 var key *Node 874 if p != "" { 875 // fast versions take key by value 876 key = r.Right 877 } else { 878 // standard version takes key by reference 879 // orderexpr made sure key is addressable. 880 key = nod(OADDR, r.Right, nil) 881 882 p = "mapaccess2" 883 } 884 885 // from: 886 // a,b = m[i] 887 // to: 888 // var,b = mapaccess2*(t, m, i) 889 // a = *var 890 a := n.List.First() 891 892 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 893 fn := mapfn(p, t) 894 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 895 } else { 896 fn := mapfn("mapaccess2_fat", t) 897 z := zeroaddr(w) 898 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 899 } 900 901 // mapaccess2* returns a typed bool, but due to spec changes, 902 // the boolean result of i.(T) is now untyped so we make it the 903 // same type as the variable on the lhs. 
904 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 905 r.Type.Field(1).Type = ok.Type 906 } 907 n.Rlist.Set1(r) 908 n.Op = OAS2FUNC 909 910 // don't generate a = *var if a is _ 911 if !isblank(a) { 912 var_ := temp(ptrto(t.Val())) 913 var_.Typecheck = 1 914 var_.NonNil = true // mapaccess always returns a non-nil pointer 915 n.List.SetIndex(0, var_) 916 n = walkexpr(n, init) 917 init.Append(n) 918 n = nod(OAS, a, nod(OIND, var_, nil)) 919 } 920 921 n = typecheck(n, Etop) 922 n = walkexpr(n, init) 923 924 case ODELETE: 925 init.AppendNodes(&n.Ninit) 926 map_ := n.List.First() 927 key := n.List.Second() 928 map_ = walkexpr(map_, init) 929 key = walkexpr(key, init) 930 931 // orderstmt made sure key is addressable. 932 key = nod(OADDR, key, nil) 933 934 t := map_.Type 935 n = mkcall1(mapfndel("mapdelete", t), nil, init, typename(t), map_, key) 936 937 case OAS2DOTTYPE: 938 e := n.Rlist.First() // i.(T) 939 940 // TODO(rsc): The isfat is for consistency with componentgen and orderexpr. 941 // It needs to be removed in all three places. 942 // That would allow inlining x.(struct{*int}) the same as x.(*int). 943 if isdirectiface(e.Type) && !isfat(e.Type) && !instrumenting { 944 // handled directly during gen. 945 walkexprlistsafe(n.List.Slice(), init) 946 e.Left = walkexpr(e.Left, init) 947 break 948 } 949 950 // res, ok = i.(T) 951 // orderstmt made sure a is addressable. 952 init.AppendNodes(&n.Ninit) 953 954 walkexprlistsafe(n.List.Slice(), init) 955 e.Left = walkexpr(e.Left, init) 956 t := e.Type // T 957 from := e.Left // i 958 959 oktype := Types[TBOOL] 960 ok := n.List.Second() 961 if !isblank(ok) { 962 oktype = ok.Type 963 } 964 if !oktype.IsBoolean() { 965 Fatalf("orderstmt broken: got %L, want boolean", oktype) 966 } 967 968 fromKind := from.Type.iet() 969 toKind := t.iet() 970 971 res := n.List.First() 972 scalar := !haspointers(res.Type) 973 974 // Avoid runtime calls in a few cases of the form _, ok := i.(T). 975 // This is faster and shorter and allows the corresponding assertX2X2 976 // routines to skip nil checks on their last argument. 977 // Also avoid runtime calls for converting interfaces to scalar concrete types. 
978 if isblank(res) || (scalar && toKind == 'T') { 979 var fast *Node 980 switch toKind { 981 case 'T': 982 tab := nod(OITAB, from, nil) 983 if fromKind == 'E' { 984 typ := nod(OCONVNOP, typename(t), nil) 985 typ.Type = ptrto(Types[TUINTPTR]) 986 fast = nod(OEQ, tab, typ) 987 break 988 } 989 fast = nod(OANDAND, 990 nod(ONE, nodnil(), tab), 991 nod(OEQ, itabType(tab), typename(t)), 992 ) 993 case 'E': 994 tab := nod(OITAB, from, nil) 995 fast = nod(ONE, nodnil(), tab) 996 } 997 if fast != nil { 998 if isblank(res) { 999 if Debug_typeassert > 0 { 1000 Warn("type assertion (ok only) inlined") 1001 } 1002 n = nod(OAS, ok, fast) 1003 n = typecheck(n, Etop) 1004 } else { 1005 if Debug_typeassert > 0 { 1006 Warn("type assertion (scalar result) inlined") 1007 } 1008 n = nod(OIF, ok, nil) 1009 n.Likely = 1 1010 if isblank(ok) { 1011 n.Left = fast 1012 } else { 1013 n.Ninit.Set1(nod(OAS, ok, fast)) 1014 } 1015 n.Nbody.Set1(nod(OAS, res, ifaceData(from, res.Type))) 1016 n.Rlist.Set1(nod(OAS, res, nil)) 1017 n = typecheck(n, Etop) 1018 } 1019 break 1020 } 1021 } 1022 1023 var resptr *Node // &res 1024 if isblank(res) { 1025 resptr = nodnil() 1026 } else { 1027 resptr = nod(OADDR, res, nil) 1028 } 1029 resptr.Etype = 1 // addr does not escape 1030 1031 if Debug_typeassert > 0 { 1032 Warn("type assertion not inlined") 1033 } 1034 fn := syslook(assertFuncName(from.Type, t, true)) 1035 fn = substArgTypes(fn, from.Type, t) 1036 call := mkcall1(fn, oktype, init, typename(t), from, resptr) 1037 n = nod(OAS, ok, call) 1038 n = typecheck(n, Etop) 1039 1040 case ODOTTYPE, ODOTTYPE2: 1041 if !isdirectiface(n.Type) || isfat(n.Type) { 1042 Fatalf("walkexpr ODOTTYPE") // should see inside OAS only 1043 } 1044 n.Left = walkexpr(n.Left, init) 1045 1046 case OCONVIFACE: 1047 n.Left = walkexpr(n.Left, init) 1048 1049 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 1050 if isdirectiface(n.Left.Type) { 1051 var t *Node 1052 if n.Type.IsEmptyInterface() { 1053 t = typename(n.Left.Type) 1054 } else { 1055 t = itabname(n.Left.Type, n.Type) 1056 } 1057 l := nod(OEFACE, t, n.Left) 1058 l.Type = n.Type 1059 l.Typecheck = n.Typecheck 1060 n = l 1061 break 1062 } 1063 // Optimize convT2{E,I} when T is not pointer-shaped. 1064 // We make the interface by initializing a stack temporary to 1065 // the value we want to put in the interface, then using the address of 1066 // that stack temporary for the interface data word. 1067 if !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024 { 1068 tmp := temp(n.Left.Type) 1069 init.Append(typecheck(nod(OAS, tmp, n.Left), Etop)) 1070 var t *Node 1071 if n.Type.IsEmptyInterface() { 1072 t = typename(n.Left.Type) 1073 } else { 1074 t = itabname(n.Left.Type, n.Type) 1075 } 1076 l := nod(OEFACE, t, typecheck(nod(OADDR, tmp, nil), Erv)) 1077 l.Type = n.Type 1078 l.Typecheck = n.Typecheck 1079 n = l 1080 break 1081 } 1082 1083 // Implement interface to empty interface conversion. 1084 // tmp = i.itab 1085 // if tmp != nil { 1086 // tmp = tmp.type 1087 // } 1088 // e = iface{tmp, i.data} 1089 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 1090 // Evaluate the input interface. 1091 c := temp(n.Left.Type) 1092 init.Append(nod(OAS, c, n.Left)) 1093 1094 // Get the itab out of the interface. 1095 tmp := temp(ptrto(Types[TUINT8])) 1096 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 1097 1098 // Get the type out of the itab. 
1099 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 1100 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 1101 init.Append(nif) 1102 1103 // Build the result. 1104 e := nod(OEFACE, tmp, ifaceData(c, ptrto(Types[TUINT8]))) 1105 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 1106 e.Typecheck = 1 1107 n = e 1108 break 1109 } 1110 1111 var ll []*Node 1112 if n.Type.IsEmptyInterface() { 1113 if !n.Left.Type.IsInterface() { 1114 ll = append(ll, typename(n.Left.Type)) 1115 } 1116 } else { 1117 if n.Left.Type.IsInterface() { 1118 ll = append(ll, typename(n.Type)) 1119 } else { 1120 ll = append(ll, itabname(n.Left.Type, n.Type)) 1121 } 1122 } 1123 1124 if n.Left.Type.IsInterface() { 1125 ll = append(ll, n.Left) 1126 } else { 1127 // regular types are passed by reference to avoid C vararg calls 1128 // orderexpr arranged for n.Left to be a temporary for all 1129 // the conversions it could see. comparison of an interface 1130 // with a non-interface, especially in a switch on interface value 1131 // with non-interface cases, is not visible to orderstmt, so we 1132 // have to fall back on allocating a temp here. 1133 if islvalue(n.Left) { 1134 ll = append(ll, nod(OADDR, n.Left, nil)) 1135 } else { 1136 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 1137 } 1138 dowidth(n.Left.Type) 1139 } 1140 1141 fn := syslook(convFuncName(n.Left.Type, n.Type)) 1142 fn = substArgTypes(fn, n.Left.Type, n.Type) 1143 dowidth(fn.Type) 1144 n = nod(OCALL, fn, nil) 1145 n.List.Set(ll) 1146 n = typecheck(n, Erv) 1147 n = walkexpr(n, init) 1148 1149 case OCONV, OCONVNOP: 1150 if Thearch.LinkArch.Family == sys.ARM { 1151 if n.Left.Type.IsFloat() { 1152 if n.Type.Etype == TINT64 { 1153 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1154 break 1155 } 1156 1157 if n.Type.Etype == TUINT64 { 1158 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1159 break 1160 } 1161 } 1162 1163 if n.Type.IsFloat() { 1164 if n.Left.Type.Etype == TINT64 { 1165 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1166 break 1167 } 1168 1169 if n.Left.Type.Etype == TUINT64 { 1170 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1171 break 1172 } 1173 } 1174 } 1175 1176 if Thearch.LinkArch.Family == sys.I386 { 1177 if n.Left.Type.IsFloat() { 1178 if n.Type.Etype == TINT64 { 1179 n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1180 break 1181 } 1182 1183 if n.Type.Etype == TUINT64 { 1184 n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1185 break 1186 } 1187 if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR { 1188 n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64])) 1189 break 1190 } 1191 } 1192 if n.Type.IsFloat() { 1193 if n.Left.Type.Etype == TINT64 { 1194 n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type) 1195 break 1196 } 1197 1198 if n.Left.Type.Etype == TUINT64 { 1199 n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type) 1200 break 1201 } 1202 if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR { 1203 n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type) 1204 break 1205 } 1206 } 1207 } 1208 1209 n.Left = walkexpr(n.Left, init) 1210 1211 case OANDNOT: 1212 n.Left = 
walkexpr(n.Left, init) 1213 n.Op = OAND 1214 n.Right = nod(OCOM, n.Right, nil) 1215 n.Right = typecheck(n.Right, Erv) 1216 n.Right = walkexpr(n.Right, init) 1217 1218 case OMUL: 1219 n.Left = walkexpr(n.Left, init) 1220 n.Right = walkexpr(n.Right, init) 1221 n = walkmul(n, init) 1222 1223 case ODIV, OMOD: 1224 n.Left = walkexpr(n.Left, init) 1225 n.Right = walkexpr(n.Right, init) 1226 1227 // rewrite complex div into function call. 1228 et := n.Left.Type.Etype 1229 1230 if isComplex[et] && n.Op == ODIV { 1231 t := n.Type 1232 n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128])) 1233 n = conv(n, t) 1234 break 1235 } 1236 1237 // Nothing to do for float divisions. 1238 if isFloat[et] { 1239 break 1240 } 1241 1242 // Try rewriting as shifts or magic multiplies. 1243 n = walkdiv(n, init) 1244 1245 // rewrite 64-bit div and mod into function calls 1246 // on 32-bit architectures. 1247 switch n.Op { 1248 case OMOD, ODIV: 1249 if Widthreg >= 8 || (et != TUINT64 && et != TINT64) { 1250 break opswitch 1251 } 1252 var fn string 1253 if et == TINT64 { 1254 fn = "int64" 1255 } else { 1256 fn = "uint64" 1257 } 1258 if n.Op == ODIV { 1259 fn += "div" 1260 } else { 1261 fn += "mod" 1262 } 1263 n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et])) 1264 } 1265 1266 case OINDEX: 1267 n.Left = walkexpr(n.Left, init) 1268 1269 // save the original node for bounds checking elision. 1270 // If it was a ODIV/OMOD walk might rewrite it. 1271 r := n.Right 1272 1273 n.Right = walkexpr(n.Right, init) 1274 1275 // if range of type cannot exceed static array bound, 1276 // disable bounds check. 1277 if n.Bounded { 1278 break 1279 } 1280 t := n.Left.Type 1281 if t != nil && t.IsPtr() { 1282 t = t.Elem() 1283 } 1284 if t.IsArray() { 1285 n.Bounded = bounded(r, t.NumElem()) 1286 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1287 Warn("index bounds check elided") 1288 } 1289 if smallintconst(n.Right) && !n.Bounded { 1290 yyerror("index out of bounds") 1291 } 1292 } else if Isconst(n.Left, CTSTR) { 1293 n.Bounded = bounded(r, int64(len(n.Left.Val().U.(string)))) 1294 if Debug['m'] != 0 && n.Bounded && !Isconst(n.Right, CTINT) { 1295 Warn("index bounds check elided") 1296 } 1297 if smallintconst(n.Right) && !n.Bounded { 1298 yyerror("index out of bounds") 1299 } 1300 } 1301 1302 if Isconst(n.Right, CTINT) { 1303 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1304 yyerror("index out of bounds") 1305 } 1306 } 1307 1308 case OINDEXMAP: 1309 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1310 n.Left = walkexpr(n.Left, init) 1311 n.Right = walkexpr(n.Right, init) 1312 map_ := n.Left 1313 key := n.Right 1314 t := map_.Type 1315 if n.Etype == 1 { 1316 // This m[k] expression is on the left-hand side of an assignment. 1317 // orderexpr made sure key is addressable. 1318 key = nod(OADDR, key, nil) 1319 n = mkcall1(mapfn("mapassign", t), nil, init, typename(t), map_, key) 1320 } else { 1321 // m[k] is not the target of an assignment. 1322 p := "" 1323 if t.Val().Width <= 128 { // Check ../../runtime/hashmap.go:maxValueSize before changing. 1324 switch algtype(t.Key()) { 1325 case AMEM32: 1326 p = "mapaccess1_fast32" 1327 case AMEM64: 1328 p = "mapaccess1_fast64" 1329 case ASTRING: 1330 p = "mapaccess1_faststr" 1331 } 1332 } 1333 1334 if p == "" { 1335 // standard version takes key by reference. 1336 // orderexpr made sure key is addressable. 
1337 key = nod(OADDR, key, nil) 1338 p = "mapaccess1" 1339 } 1340 1341 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1342 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key) 1343 } else { 1344 p = "mapaccess1_fat" 1345 z := zeroaddr(w) 1346 n = mkcall1(mapfn(p, t), ptrto(t.Val()), init, typename(t), map_, key, z) 1347 } 1348 } 1349 n.Type = ptrto(t.Val()) 1350 n.NonNil = true // mapaccess1* and mapassign always return non-nil pointers. 1351 n = nod(OIND, n, nil) 1352 n.Type = t.Val() 1353 n.Typecheck = 1 1354 1355 case ORECV: 1356 Fatalf("walkexpr ORECV") // should see inside OAS only 1357 1358 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1359 n.Left = walkexpr(n.Left, init) 1360 low, high, max := n.SliceBounds() 1361 low = walkexpr(low, init) 1362 if low != nil && iszero(low) { 1363 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1364 low = nil 1365 } 1366 high = walkexpr(high, init) 1367 max = walkexpr(max, init) 1368 n.SetSliceBounds(low, high, max) 1369 if n.Op.IsSlice3() { 1370 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1371 // Reduce x[i:j:cap(x)] to x[i:j]. 1372 if n.Op == OSLICE3 { 1373 n.Op = OSLICE 1374 } else { 1375 n.Op = OSLICEARR 1376 } 1377 n = reduceSlice(n) 1378 } 1379 } else { 1380 n = reduceSlice(n) 1381 } 1382 1383 case OADDR: 1384 n.Left = walkexpr(n.Left, init) 1385 1386 case ONEW: 1387 if n.Esc == EscNone { 1388 if n.Type.Elem().Width >= 1<<16 { 1389 Fatalf("large ONEW with EscNone: %v", n) 1390 } 1391 r := temp(n.Type.Elem()) 1392 r = nod(OAS, r, nil) // zero temp 1393 r = typecheck(r, Etop) 1394 init.Append(r) 1395 r = nod(OADDR, r.Left, nil) 1396 r = typecheck(r, Erv) 1397 n = r 1398 } else { 1399 n = callnew(n.Type.Elem()) 1400 } 1401 1402 case OCMPSTR: 1403 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1404 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1405 // TODO(marvin): Fix Node.EType type union. 1406 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1407 r = typecheck(r, Erv) 1408 r = walkexpr(r, init) 1409 r.Type = n.Type 1410 n = r 1411 break 1412 } 1413 1414 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1415 var cs, ncs *Node // const string, non-const string 1416 switch { 1417 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1418 // ignore; will be constant evaluated 1419 case Isconst(n.Left, CTSTR): 1420 cs = n.Left 1421 ncs = n.Right 1422 case Isconst(n.Right, CTSTR): 1423 cs = n.Right 1424 ncs = n.Left 1425 } 1426 if cs != nil { 1427 cmp := Op(n.Etype) 1428 // maxRewriteLen was chosen empirically. 1429 // It is the value that minimizes cmd/go file size 1430 // across most architectures. 1431 // See the commit description for CL 26758 for details. 1432 maxRewriteLen := 6 1433 var and Op 1434 switch cmp { 1435 case OEQ: 1436 and = OANDAND 1437 case ONE: 1438 and = OOROR 1439 default: 1440 // Don't do byte-wise comparisons for <, <=, etc. 1441 // They're fairly complicated. 1442 // Length-only checks are ok, though. 1443 maxRewriteLen = 0 1444 } 1445 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1446 if len(s) > 0 { 1447 ncs = safeexpr(ncs, init) 1448 } 1449 // TODO(marvin): Fix Node.EType type union. 
1450 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1451 for i := 0; i < len(s); i++ { 1452 cb := nodintconst(int64(s[i])) 1453 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1454 r = nod(and, r, nod(cmp, ncb, cb)) 1455 } 1456 r = typecheck(r, Erv) 1457 r = walkexpr(r, init) 1458 r.Type = n.Type 1459 n = r 1460 break 1461 } 1462 } 1463 1464 var r *Node 1465 // TODO(marvin): Fix Node.EType type union. 1466 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1467 // prepare for rewrite below 1468 n.Left = cheapexpr(n.Left, init) 1469 n.Right = cheapexpr(n.Right, init) 1470 1471 r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1472 1473 // quick check of len before full compare for == or != 1474 // eqstring assumes that the lengths are equal 1475 // TODO(marvin): Fix Node.EType type union. 1476 if Op(n.Etype) == OEQ { 1477 // len(left) == len(right) && eqstring(left, right) 1478 r = nod(OANDAND, nod(OEQ, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1479 } else { 1480 // len(left) != len(right) || !eqstring(left, right) 1481 r = nod(ONOT, r, nil) 1482 r = nod(OOROR, nod(ONE, nod(OLEN, n.Left, nil), nod(OLEN, n.Right, nil)), r) 1483 } 1484 1485 r = typecheck(r, Erv) 1486 r = walkexpr(r, nil) 1487 } else { 1488 // sys_cmpstring(s1, s2) :: 0 1489 r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING])) 1490 // TODO(marvin): Fix Node.EType type union. 1491 r = nod(Op(n.Etype), r, nodintconst(0)) 1492 } 1493 1494 r = typecheck(r, Erv) 1495 if !n.Type.IsBoolean() { 1496 Fatalf("cmp %v", n.Type) 1497 } 1498 r.Type = n.Type 1499 n = r 1500 1501 case OADDSTR: 1502 n = addstr(n, init) 1503 1504 case OAPPEND: 1505 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 1506 Fatalf("append outside assignment") 1507 1508 case OCOPY: 1509 n = copyany(n, init, instrumenting && !compiling_runtime) 1510 1511 // cannot use chanfn - closechan takes any, not chan any 1512 case OCLOSE: 1513 fn := syslook("closechan") 1514 1515 fn = substArgTypes(fn, n.Left.Type) 1516 n = mkcall1(fn, nil, init, n.Left) 1517 1518 case OMAKECHAN: 1519 n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64])) 1520 1521 case OMAKEMAP: 1522 t := n.Type 1523 1524 a := nodnil() // hmap buffer 1525 r := nodnil() // bucket buffer 1526 if n.Esc == EscNone { 1527 // Allocate hmap buffer on stack. 1528 var_ := temp(hmap(t)) 1529 1530 a = nod(OAS, var_, nil) // zero temp 1531 a = typecheck(a, Etop) 1532 init.Append(a) 1533 a = nod(OADDR, var_, nil) 1534 1535 // Allocate one bucket on stack. 1536 // Maximum key/value size is 128 bytes, larger objects 1537 // are stored with an indirection. So max bucket size is 2048+eps. 
1538 var_ = temp(mapbucket(t)) 1539 1540 r = nod(OAS, var_, nil) // zero temp 1541 r = typecheck(r, Etop) 1542 init.Append(r) 1543 r = nod(OADDR, var_, nil) 1544 } 1545 1546 fn := syslook("makemap") 1547 fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val()) 1548 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r) 1549 1550 case OMAKESLICE: 1551 l := n.Left 1552 r := n.Right 1553 if r == nil { 1554 r = safeexpr(l, init) 1555 l = r 1556 } 1557 t := n.Type 1558 if n.Esc == EscNone { 1559 if !isSmallMakeSlice(n) { 1560 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1561 } 1562 // var arr [r]T 1563 // n = arr[:l] 1564 t = typArray(t.Elem(), nonnegintconst(r)) // [r]T 1565 var_ := temp(t) 1566 a := nod(OAS, var_, nil) // zero temp 1567 a = typecheck(a, Etop) 1568 init.Append(a) 1569 r := nod(OSLICE, var_, nil) // arr[:l] 1570 r.SetSliceBounds(nil, l, nil) 1571 r = conv(r, n.Type) // in case n.Type is named. 1572 r = typecheck(r, Erv) 1573 r = walkexpr(r, init) 1574 n = r 1575 } else { 1576 // n escapes; set up a call to makeslice. 1577 // When len and cap can fit into int, use makeslice instead of 1578 // makeslice64, which is faster and shorter on 32 bit platforms. 1579 1580 if t.Elem().NotInHeap { 1581 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1582 } 1583 1584 len, cap := l, r 1585 1586 fnname := "makeslice64" 1587 argtype := Types[TINT64] 1588 1589 // typechecking guarantees that TIDEAL len/cap are positive and fit in an int. 1590 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1591 // will be handled by the negative range checks in makeslice during runtime. 1592 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1593 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1594 fnname = "makeslice" 1595 argtype = Types[TINT] 1596 } 1597 1598 fn := syslook(fnname) 1599 fn = substArgTypes(fn, t.Elem()) // any-1 1600 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1601 } 1602 1603 case ORUNESTR: 1604 a := nodnil() 1605 if n.Esc == EscNone { 1606 t := typArray(Types[TUINT8], 4) 1607 var_ := temp(t) 1608 a = nod(OADDR, var_, nil) 1609 } 1610 1611 // intstring(*[4]byte, rune) 1612 n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64])) 1613 1614 case OARRAYBYTESTR: 1615 a := nodnil() 1616 if n.Esc == EscNone { 1617 // Create temporary buffer for string on stack. 1618 t := typArray(Types[TUINT8], tmpstringbufsize) 1619 1620 a = nod(OADDR, temp(t), nil) 1621 } 1622 1623 // slicebytetostring(*[32]byte, []byte) string; 1624 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1625 1626 // slicebytetostringtmp([]byte) string; 1627 case OARRAYBYTESTRTMP: 1628 n.Left = walkexpr(n.Left, init) 1629 1630 if !instrumenting { 1631 // Let the backend handle OARRAYBYTESTRTMP directly 1632 // to avoid a function call to slicebytetostringtmp. 1633 break 1634 } 1635 1636 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1637 1638 // slicerunetostring(*[32]byte, []rune) string; 1639 case OARRAYRUNESTR: 1640 a := nodnil() 1641 1642 if n.Esc == EscNone { 1643 // Create temporary buffer for string on stack. 
1644 t := typArray(Types[TUINT8], tmpstringbufsize) 1645 1646 a = nod(OADDR, temp(t), nil) 1647 } 1648 1649 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1650 1651 // stringtoslicebyte(*32[byte], string) []byte; 1652 case OSTRARRAYBYTE: 1653 a := nodnil() 1654 1655 if n.Esc == EscNone { 1656 // Create temporary buffer for slice on stack. 1657 t := typArray(Types[TUINT8], tmpstringbufsize) 1658 1659 a = nod(OADDR, temp(t), nil) 1660 } 1661 1662 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING])) 1663 1664 case OSTRARRAYBYTETMP: 1665 // []byte(string) conversion that creates a slice 1666 // referring to the actual string bytes. 1667 // This conversion is handled later by the backend and 1668 // is only for use by internal compiler optimizations 1669 // that know that the slice won't be mutated. 1670 // The only such case today is: 1671 // for i, c := range []byte(string) 1672 n.Left = walkexpr(n.Left, init) 1673 1674 // stringtoslicerune(*[32]rune, string) []rune 1675 case OSTRARRAYRUNE: 1676 a := nodnil() 1677 1678 if n.Esc == EscNone { 1679 // Create temporary buffer for slice on stack. 1680 t := typArray(Types[TINT32], tmpstringbufsize) 1681 1682 a = nod(OADDR, temp(t), nil) 1683 } 1684 1685 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1686 1687 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1688 case OCMPIFACE: 1689 if !eqtype(n.Left.Type, n.Right.Type) { 1690 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1691 } 1692 var fn *Node 1693 if n.Left.Type.IsEmptyInterface() { 1694 fn = syslook("efaceeq") 1695 } else { 1696 fn = syslook("ifaceeq") 1697 } 1698 1699 n.Right = cheapexpr(n.Right, init) 1700 n.Left = cheapexpr(n.Left, init) 1701 fn = substArgTypes(fn, n.Right.Type, n.Left.Type) 1702 r := mkcall1(fn, n.Type, init, n.Left, n.Right) 1703 // TODO(marvin): Fix Node.EType type union. 1704 if Op(n.Etype) == ONE { 1705 r = nod(ONOT, r, nil) 1706 } 1707 1708 // check itable/type before full compare. 1709 // TODO(marvin): Fix Node.EType type union. 1710 if Op(n.Etype) == OEQ { 1711 r = nod(OANDAND, nod(OEQ, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1712 } else { 1713 r = nod(OOROR, nod(ONE, nod(OITAB, n.Left, nil), nod(OITAB, n.Right, nil)), r) 1714 } 1715 r = typecheck(r, Erv) 1716 r = walkexpr(r, init) 1717 r.Type = n.Type 1718 n = r 1719 1720 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1721 if isStaticCompositeLiteral(n) { 1722 // n can be directly represented in the read-only data section. 1723 // Make direct reference to the static data. See issue 12841. 1724 vstat := staticname(n.Type) 1725 vstat.Name.Readonly = true 1726 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1727 n = vstat 1728 n = typecheck(n, Erv) 1729 break 1730 } 1731 var_ := temp(n.Type) 1732 anylit(n, var_, init) 1733 n = var_ 1734 1735 case OSEND: 1736 n1 := n.Right 1737 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1738 n1 = walkexpr(n1, init) 1739 n1 = nod(OADDR, n1, nil) 1740 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, typename(n.Left.Type), n.Left, n1) 1741 1742 case OCLOSURE: 1743 n = walkclosure(n, init) 1744 1745 case OCALLPART: 1746 n = walkpartialcall(n, init) 1747 } 1748 1749 // Expressions that are constant at run time but not 1750 // considered const by the language spec are not turned into 1751 // constants until walk. For example, if n is y%1 == 0, the 1752 // walk of y%1 may have replaced it by 0. 1753 // Check whether n with its updated args is itself now a constant. 
1754 t := n.Type 1755 1756 evconst(n) 1757 n.Type = t 1758 if n.Op == OLITERAL { 1759 n = typecheck(n, Erv) 1760 } 1761 1762 ullmancalc(n) 1763 1764 if Debug['w'] != 0 && n != nil { 1765 Dump("walk", n) 1766 } 1767 1768 lineno = lno 1769 return n 1770 } 1771 1772 // TODO(josharian): combine this with its caller and simplify 1773 func reduceSlice(n *Node) *Node { 1774 low, high, max := n.SliceBounds() 1775 if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) { 1776 // Reduce x[i:len(x)] to x[i:]. 1777 high = nil 1778 } 1779 n.SetSliceBounds(low, high, max) 1780 if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { 1781 // Reduce x[:] to x. 1782 if Debug_slice > 0 { 1783 Warn("slice: omit slice operation") 1784 } 1785 return n.Left 1786 } 1787 return n 1788 } 1789 1790 func ascompatee1(op Op, l *Node, r *Node, init *Nodes) *Node { 1791 // convas will turn map assigns into function calls, 1792 // making it impossible for reorder3 to work. 1793 n := nod(OAS, l, r) 1794 1795 if l.Op == OINDEXMAP { 1796 return n 1797 } 1798 1799 return convas(n, init) 1800 } 1801 1802 func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node { 1803 // check assign expression list to 1804 // a expression list. called in 1805 // expr-list = expr-list 1806 1807 // ensure order of evaluation for function calls 1808 for i := range nl { 1809 nl[i] = safeexpr(nl[i], init) 1810 } 1811 for i1 := range nr { 1812 nr[i1] = safeexpr(nr[i1], init) 1813 } 1814 1815 var nn []*Node 1816 i := 0 1817 for ; i < len(nl); i++ { 1818 if i >= len(nr) { 1819 break 1820 } 1821 // Do not generate 'x = x' during return. See issue 4014. 1822 if op == ORETURN && samesafeexpr(nl[i], nr[i]) { 1823 continue 1824 } 1825 nn = append(nn, ascompatee1(op, nl[i], nr[i], init)) 1826 } 1827 1828 // cannot happen: caller checked that lists had same length 1829 if i < len(nl) || i < len(nr) { 1830 var nln, nrn Nodes 1831 nln.Set(nl) 1832 nrn.Set(nr) 1833 yyerror("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.Func.Nname.Sym.Name) 1834 } 1835 return nn 1836 } 1837 1838 // l is an lv and rt is the type of an rv 1839 // return 1 if this implies a function call 1840 // evaluating the lv or a function call 1841 // in the conversion of the types 1842 func fncall(l *Node, rt *Type) bool { 1843 if l.Ullman >= UINF || l.Op == OINDEXMAP { 1844 return true 1845 } 1846 var r Node 1847 if needwritebarrier(l, &r) { 1848 return true 1849 } 1850 if eqtype(l.Type, rt) { 1851 return false 1852 } 1853 return true 1854 } 1855 1856 // check assign type list to 1857 // a expression list. 
called in 1858 // expr-list = func() 1859 func ascompatet(op Op, nl Nodes, nr *Type) []*Node { 1860 r, saver := iterFields(nr) 1861 1862 var nn, mm Nodes 1863 var ullmanOverflow bool 1864 var i int 1865 for i = 0; i < nl.Len(); i++ { 1866 if r == nil { 1867 break 1868 } 1869 l := nl.Index(i) 1870 if isblank(l) { 1871 r = saver.Next() 1872 continue 1873 } 1874 1875 // any lv that causes a fn call must be 1876 // deferred until all the return arguments 1877 // have been pulled from the output arguments 1878 if fncall(l, r.Type) { 1879 tmp := temp(r.Type) 1880 tmp = typecheck(tmp, Erv) 1881 a := nod(OAS, l, tmp) 1882 a = convas(a, &mm) 1883 mm.Append(a) 1884 l = tmp 1885 } 1886 1887 a := nod(OAS, l, nodarg(r, 0)) 1888 a = convas(a, &nn) 1889 ullmancalc(a) 1890 if a.Ullman >= UINF { 1891 Dump("ascompatet ucount", a) 1892 ullmanOverflow = true 1893 } 1894 1895 nn.Append(a) 1896 r = saver.Next() 1897 } 1898 1899 if i < nl.Len() || r != nil { 1900 yyerror("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) 1901 } 1902 1903 if ullmanOverflow { 1904 Fatalf("ascompatet: too many function calls evaluating parameters") 1905 } 1906 return append(nn.Slice(), mm.Slice()...) 1907 } 1908 1909 // package all the arguments that match a ... T parameter into a []T. 1910 func mkdotargslice(lr0, nn []*Node, l *Field, fp int, init *Nodes, ddd *Node) []*Node { 1911 esc := uint16(EscUnknown) 1912 if ddd != nil { 1913 esc = ddd.Esc 1914 } 1915 1916 tslice := typSlice(l.Type.Elem()) 1917 tslice.Noalg = true 1918 1919 var n *Node 1920 if len(lr0) == 0 { 1921 n = nodnil() 1922 n.Type = tslice 1923 } else { 1924 n = nod(OCOMPLIT, nil, typenod(tslice)) 1925 if ddd != nil && prealloc[ddd] != nil { 1926 prealloc[n] = prealloc[ddd] // temporary to use 1927 } 1928 n.List.Set(lr0) 1929 n.Esc = esc 1930 n = typecheck(n, Erv) 1931 if n.Type == nil { 1932 Fatalf("mkdotargslice: typecheck failed") 1933 } 1934 n = walkexpr(n, init) 1935 } 1936 1937 a := nod(OAS, nodarg(l, fp), n) 1938 nn = append(nn, convas(a, init)) 1939 return nn 1940 } 1941 1942 // helpers for shape errors 1943 func dumptypes(nl *Type, what string) string { 1944 s := "" 1945 for _, l := range nl.Fields().Slice() { 1946 if s != "" { 1947 s += ", " 1948 } 1949 s += fldconv(l, 0) 1950 } 1951 if s == "" { 1952 s = fmt.Sprintf("[no arguments %s]", what) 1953 } 1954 return s 1955 } 1956 1957 func dumpnodetypes(l []*Node, what string) string { 1958 s := "" 1959 for _, r := range l { 1960 if s != "" { 1961 s += ", " 1962 } 1963 s += r.Type.String() 1964 } 1965 if s == "" { 1966 s = fmt.Sprintf("[no arguments %s]", what) 1967 } 1968 return s 1969 } 1970 1971 // check assign expression list to 1972 // a type list. called in 1973 // return expr-list 1974 // func(expr-list) 1975 func ascompatte(op Op, call *Node, isddd bool, nl *Type, lr []*Node, fp int, init *Nodes) []*Node { 1976 lr0 := lr 1977 l, savel := iterFields(nl) 1978 var r *Node 1979 if len(lr) > 0 { 1980 r = lr[0] 1981 } 1982 var nn []*Node 1983 1984 // f(g()) where g has multiple return values 1985 if r != nil && len(lr) <= 1 && r.Type.IsFuncArgStruct() { 1986 // optimization - can do block copy 1987 if eqtypenoname(r.Type, nl) { 1988 arg := nodarg(nl, fp) 1989 r = nod(OCONVNOP, r, nil) 1990 r.Type = arg.Type 1991 nn = []*Node{convas(nod(OAS, arg, r), init)} 1992 goto ret 1993 } 1994 1995 // conversions involved. 1996 // copy into temporaries. 
1997 var alist []*Node 1998 1999 for _, l := range r.Type.Fields().Slice() { 2000 tmp := temp(l.Type) 2001 alist = append(alist, tmp) 2002 } 2003 2004 a := nod(OAS2, nil, nil) 2005 a.List.Set(alist) 2006 a.Rlist.Set(lr) 2007 a = typecheck(a, Etop) 2008 a = walkstmt(a) 2009 init.Append(a) 2010 lr = alist 2011 r = lr[0] 2012 l, savel = iterFields(nl) 2013 } 2014 2015 for { 2016 if l != nil && l.Isddd { 2017 // the ddd parameter must be last 2018 ll := savel.Next() 2019 2020 if ll != nil { 2021 yyerror("... must be last argument") 2022 } 2023 2024 // special case -- 2025 // only if we are assigning a single ddd 2026 // argument to a ddd parameter then it is 2027 // passed through unencapsulated 2028 if r != nil && len(lr) <= 1 && isddd && eqtype(l.Type, r.Type) { 2029 a := nod(OAS, nodarg(l, fp), r) 2030 a = convas(a, init) 2031 nn = append(nn, a) 2032 break 2033 } 2034 2035 // normal case -- make a slice of all 2036 // remaining arguments and pass it to 2037 // the ddd parameter. 2038 nn = mkdotargslice(lr, nn, l, fp, init, call.Right) 2039 2040 break 2041 } 2042 2043 if l == nil || r == nil { 2044 if l != nil || r != nil { 2045 l1 := dumptypes(nl, "expected") 2046 l2 := dumpnodetypes(lr0, "given") 2047 if l != nil { 2048 yyerror("not enough arguments to %v\n\t%s\n\t%s", op, l1, l2) 2049 } else { 2050 yyerror("too many arguments to %v\n\t%s\n\t%s", op, l1, l2) 2051 } 2052 } 2053 2054 break 2055 } 2056 2057 a := nod(OAS, nodarg(l, fp), r) 2058 a = convas(a, init) 2059 nn = append(nn, a) 2060 2061 l = savel.Next() 2062 r = nil 2063 lr = lr[1:] 2064 if len(lr) > 0 { 2065 r = lr[0] 2066 } 2067 } 2068 2069 ret: 2070 for _, n := range nn { 2071 n.Typecheck = 1 2072 } 2073 return nn 2074 } 2075 2076 // generate code for print 2077 func walkprint(nn *Node, init *Nodes) *Node { 2078 var r *Node 2079 var n *Node 2080 var on *Node 2081 var t *Type 2082 var et EType 2083 2084 op := nn.Op 2085 all := nn.List 2086 var calls []*Node 2087 notfirst := false 2088 2089 // Hoist all the argument evaluation up before the lock. 
2090 walkexprlistcheap(all.Slice(), init) 2091 2092 calls = append(calls, mkcall("printlock", nil, init)) 2093 for i1, n1 := range all.Slice() { 2094 if notfirst { 2095 calls = append(calls, mkcall("printsp", nil, init)) 2096 } 2097 2098 notfirst = op == OPRINTN 2099 2100 n = n1 2101 if n.Op == OLITERAL { 2102 switch n.Val().Ctype() { 2103 case CTRUNE: 2104 n = defaultlit(n, runetype) 2105 2106 case CTINT: 2107 n = defaultlit(n, Types[TINT64]) 2108 2109 case CTFLT: 2110 n = defaultlit(n, Types[TFLOAT64]) 2111 } 2112 } 2113 2114 if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL { 2115 n = defaultlit(n, Types[TINT64]) 2116 } 2117 n = defaultlit(n, nil) 2118 all.SetIndex(i1, n) 2119 if n.Type == nil || n.Type.Etype == TFORW { 2120 continue 2121 } 2122 2123 t = n.Type 2124 et = n.Type.Etype 2125 if n.Type.IsInterface() { 2126 if n.Type.IsEmptyInterface() { 2127 on = syslook("printeface") 2128 } else { 2129 on = syslook("printiface") 2130 } 2131 on = substArgTypes(on, n.Type) // any-1 2132 } else if n.Type.IsPtr() || et == TCHAN || et == TMAP || et == TFUNC || et == TUNSAFEPTR { 2133 on = syslook("printpointer") 2134 on = substArgTypes(on, n.Type) // any-1 2135 } else if n.Type.IsSlice() { 2136 on = syslook("printslice") 2137 on = substArgTypes(on, n.Type) // any-1 2138 } else if isInt[et] { 2139 if et == TUINT64 { 2140 if (t.Sym.Pkg == Runtimepkg || compiling_runtime) && t.Sym.Name == "hex" { 2141 on = syslook("printhex") 2142 } else { 2143 on = syslook("printuint") 2144 } 2145 } else { 2146 on = syslook("printint") 2147 } 2148 } else if isFloat[et] { 2149 on = syslook("printfloat") 2150 } else if isComplex[et] { 2151 on = syslook("printcomplex") 2152 } else if et == TBOOL { 2153 on = syslook("printbool") 2154 } else if et == TSTRING { 2155 on = syslook("printstring") 2156 } else { 2157 badtype(OPRINT, n.Type, nil) 2158 continue 2159 } 2160 2161 t = on.Type.Params().Field(0).Type 2162 2163 if !eqtype(t, n.Type) { 2164 n = nod(OCONV, n, nil) 2165 n.Type = t 2166 } 2167 2168 r = nod(OCALL, on, nil) 2169 r.List.Append(n) 2170 calls = append(calls, r) 2171 } 2172 2173 if op == OPRINTN { 2174 calls = append(calls, mkcall("printnl", nil, nil)) 2175 } 2176 2177 calls = append(calls, mkcall("printunlock", nil, init)) 2178 2179 typecheckslice(calls, Etop) 2180 walkexprlist(calls, init) 2181 2182 r = nod(OEMPTY, nil, nil) 2183 r = typecheck(r, Etop) 2184 r = walkexpr(r, init) 2185 r.Ninit.Set(calls) 2186 return r 2187 } 2188 2189 func callnew(t *Type) *Node { 2190 if t.NotInHeap { 2191 yyerror("%v is go:notinheap; heap allocation disallowed", t) 2192 } 2193 dowidth(t) 2194 fn := syslook("newobject") 2195 fn = substArgTypes(fn, t) 2196 v := mkcall1(fn, ptrto(t), nil, typename(t)) 2197 v.NonNil = true 2198 return v 2199 } 2200 2201 func iscallret(n *Node) bool { 2202 n = outervalue(n) 2203 return n.Op == OINDREGSP 2204 } 2205 2206 func isstack(n *Node) bool { 2207 n = outervalue(n) 2208 2209 // If n is *autotmp and autotmp = &foo, replace n with foo. 2210 // We introduce such temps when initializing struct literals. 
2211 if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() { 2212 defn := n.Left.Name.Defn 2213 if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR { 2214 n = defn.Right.Left 2215 } 2216 } 2217 2218 switch n.Op { 2219 case OINDREGSP: 2220 return true 2221 2222 case ONAME: 2223 switch n.Class { 2224 case PAUTO, PPARAM, PPARAMOUT: 2225 return true 2226 } 2227 } 2228 2229 return false 2230 } 2231 2232 func (n *Node) isGlobal() bool { 2233 n = outervalue(n) 2234 return n.Op == ONAME && n.Class == PEXTERN 2235 } 2236 2237 // Do we need a write barrier for the assignment l = r? 2238 func needwritebarrier(l *Node, r *Node) bool { 2239 if !use_writebarrier { 2240 return false 2241 } 2242 2243 if l == nil || isblank(l) { 2244 return false 2245 } 2246 2247 // No write barrier for write of non-pointers. 2248 dowidth(l.Type) 2249 2250 if !haspointers(l.Type) { 2251 return false 2252 } 2253 2254 // No write barrier for write to stack. 2255 if isstack(l) { 2256 return false 2257 } 2258 2259 // No write barrier if this is a pointer to a go:notinheap 2260 // type, since the write barrier's inheap(ptr) check will fail. 2261 if l.Type.IsPtr() && l.Type.Elem().NotInHeap { 2262 return false 2263 } 2264 2265 // Implicit zeroing is still zeroing, so it needs write 2266 // barriers. In practice, these are all to stack variables 2267 // (even if isstack isn't smart enough to figure that out), so 2268 // they'll be eliminated by the backend. 2269 if r == nil { 2270 return true 2271 } 2272 2273 // Ignore no-op conversions when making decision. 2274 // Ensures that xp = unsafe.Pointer(&x) is treated 2275 // the same as xp = &x. 2276 for r.Op == OCONVNOP { 2277 r = r.Left 2278 } 2279 2280 // TODO: We can eliminate write barriers if we know *both* the 2281 // current and new content of the slot must already be shaded. 2282 // We know a pointer is shaded if it's nil, or points to 2283 // static data, a global (variable or function), or the stack. 2284 // The nil optimization could be particularly useful for 2285 // writes to just-allocated objects. Unfortunately, knowing 2286 // the "current" value of the slot requires flow analysis. 2287 2288 // No write barrier for storing address of stack values, 2289 // which are guaranteed only to be written to the stack. 2290 if r.Op == OADDR && isstack(r.Left) { 2291 return false 2292 } 2293 2294 // Otherwise, be conservative and use write barrier. 2295 return true 2296 } 2297 2298 // TODO(rsc): Perhaps componentgen should run before this. 2299 2300 func applywritebarrier(n *Node) *Node { 2301 if n.Left != nil && n.Right != nil && needwritebarrier(n.Left, n.Right) { 2302 if Debug_wb > 1 { 2303 Warnl(n.Lineno, "marking %v for barrier", n.Left) 2304 } 2305 n.Op = OASWB 2306 return n 2307 } 2308 return n 2309 } 2310 2311 func convas(n *Node, init *Nodes) *Node { 2312 if n.Op != OAS { 2313 Fatalf("convas: not OAS %v", n.Op) 2314 } 2315 2316 n.Typecheck = 1 2317 2318 var lt *Type 2319 var rt *Type 2320 if n.Left == nil || n.Right == nil { 2321 goto out 2322 } 2323 2324 lt = n.Left.Type 2325 rt = n.Right.Type 2326 if lt == nil || rt == nil { 2327 goto out 2328 } 2329 2330 if isblank(n.Left) { 2331 n.Right = defaultlit(n.Right, nil) 2332 goto out 2333 } 2334 2335 if !eqtype(lt, rt) { 2336 n.Right = assignconv(n.Right, lt, "assignment") 2337 n.Right = walkexpr(n.Right, init) 2338 } 2339 2340 out: 2341 ullmancalc(n) 2342 return n 2343 } 2344 2345 // from ascompat[te] 2346 // evaluating actual function arguments. 
2347 // f(a,b) 2348 // if there is exactly one function expr, 2349 // then it is done first. otherwise must 2350 // make temp variables 2351 func reorder1(all []*Node) []*Node { 2352 c := 0 // function calls 2353 t := 0 // total parameters 2354 2355 for _, n := range all { 2356 t++ 2357 ullmancalc(n) 2358 if n.Ullman >= UINF { 2359 c++ 2360 } 2361 } 2362 2363 if c == 0 || t == 1 { 2364 return all 2365 } 2366 2367 var g []*Node // fncalls assigned to tempnames 2368 var f *Node // last fncall assigned to stack 2369 var r []*Node // non fncalls and tempnames assigned to stack 2370 d := 0 2371 var a *Node 2372 for _, n := range all { 2373 if n.Ullman < UINF { 2374 r = append(r, n) 2375 continue 2376 } 2377 2378 d++ 2379 if d == c { 2380 f = n 2381 continue 2382 } 2383 2384 // make assignment of fncall to tempname 2385 a = temp(n.Right.Type) 2386 2387 a = nod(OAS, a, n.Right) 2388 g = append(g, a) 2389 2390 // put normal arg assignment on list 2391 // with fncall replaced by tempname 2392 n.Right = a.Left 2393 2394 r = append(r, n) 2395 } 2396 2397 if f != nil { 2398 g = append(g, f) 2399 } 2400 return append(g, r...) 2401 } 2402 2403 // from ascompat[ee] 2404 // a,b = c,d 2405 // simultaneous assignment. there cannot 2406 // be later use of an earlier lvalue. 2407 // 2408 // function calls have been removed. 2409 func reorder3(all []*Node) []*Node { 2410 var l *Node 2411 2412 // If a needed expression may be affected by an 2413 // earlier assignment, make an early copy of that 2414 // expression and use the copy instead. 2415 var early []*Node 2416 2417 var mapinit Nodes 2418 for i, n := range all { 2419 l = n.Left 2420 2421 // Save subexpressions needed on left side. 2422 // Drill through non-dereferences. 2423 for { 2424 if l.Op == ODOT || l.Op == OPAREN { 2425 l = l.Left 2426 continue 2427 } 2428 2429 if l.Op == OINDEX && l.Left.Type.IsArray() { 2430 l.Right = reorder3save(l.Right, all, i, &early) 2431 l = l.Left 2432 continue 2433 } 2434 2435 break 2436 } 2437 2438 switch l.Op { 2439 default: 2440 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2441 2442 case ONAME: 2443 break 2444 2445 case OINDEX, OINDEXMAP: 2446 l.Left = reorder3save(l.Left, all, i, &early) 2447 l.Right = reorder3save(l.Right, all, i, &early) 2448 if l.Op == OINDEXMAP { 2449 all[i] = convas(all[i], &mapinit) 2450 } 2451 2452 case OIND, ODOTPTR: 2453 l.Left = reorder3save(l.Left, all, i, &early) 2454 } 2455 2456 // Save expression on right side. 2457 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2458 } 2459 2460 early = append(mapinit.Slice(), early...) 2461 return append(early, all...) 2462 } 2463 2464 // if the evaluation of *np would be affected by the 2465 // assignments in all up to but not including the ith assignment, 2466 // copy into a temporary during *early and 2467 // replace *np with that temp. 2468 // The result of reorder3save MUST be assigned back to n, e.g. 2469 // n.Left = reorder3save(n.Left, all, i, early) 2470 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2471 if !aliased(n, all, i) { 2472 return n 2473 } 2474 2475 q := temp(n.Type) 2476 q = nod(OAS, q, n) 2477 q = typecheck(q, Etop) 2478 *early = append(*early, q) 2479 return q.Left 2480 } 2481 2482 // what's the outer value that a write to n affects? 2483 // outer value means containing struct or array. 
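// For example (illustrative): for a write to x[2].f where x is an array,
// the outer value is x itself, since the assignment mutates part of x;
// for a write to p.f through a pointer p, the outer value is the p.f
// expression, because the write does not modify p.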
2484 func outervalue(n *Node) *Node { 2485 for { 2486 if n.Op == OXDOT { 2487 Fatalf("OXDOT in walk") 2488 } 2489 if n.Op == ODOT || n.Op == OPAREN || n.Op == OCONVNOP { 2490 n = n.Left 2491 continue 2492 } 2493 2494 if n.Op == OINDEX && n.Left.Type != nil && n.Left.Type.IsArray() { 2495 n = n.Left 2496 continue 2497 } 2498 2499 break 2500 } 2501 2502 return n 2503 } 2504 2505 // Is it possible that the computation of n might be 2506 // affected by writes in as up to but not including the ith element? 2507 func aliased(n *Node, all []*Node, i int) bool { 2508 if n == nil { 2509 return false 2510 } 2511 2512 // Treat all fields of a struct as referring to the whole struct. 2513 // We could do better but we would have to keep track of the fields. 2514 for n.Op == ODOT { 2515 n = n.Left 2516 } 2517 2518 // Look for obvious aliasing: a variable being assigned 2519 // during the all list and appearing in n. 2520 // Also record whether there are any writes to main memory. 2521 // Also record whether there are any writes to variables 2522 // whose addresses have been taken. 2523 memwrite := 0 2524 2525 varwrite := 0 2526 var a *Node 2527 for _, an := range all[:i] { 2528 a = outervalue(an.Left) 2529 2530 for a.Op == ODOT { 2531 a = a.Left 2532 } 2533 2534 if a.Op != ONAME { 2535 memwrite = 1 2536 continue 2537 } 2538 2539 switch n.Class { 2540 default: 2541 varwrite = 1 2542 continue 2543 2544 case PAUTO, PPARAM, PPARAMOUT: 2545 if n.Addrtaken { 2546 varwrite = 1 2547 continue 2548 } 2549 2550 if vmatch2(a, n) { 2551 // Direct hit. 2552 return true 2553 } 2554 } 2555 } 2556 2557 // The variables being written do not appear in n. 2558 // However, n might refer to computed addresses 2559 // that are being written. 2560 2561 // If no computed addresses are affected by the writes, no aliasing. 2562 if memwrite == 0 && varwrite == 0 { 2563 return false 2564 } 2565 2566 // If n does not refer to computed addresses 2567 // (that is, if n only refers to variables whose addresses 2568 // have not been taken), no aliasing. 2569 if varexpr(n) { 2570 return false 2571 } 2572 2573 // Otherwise, both the writes and n refer to computed memory addresses. 2574 // Assume that they might conflict. 2575 return true 2576 } 2577 2578 // does the evaluation of n only refer to variables 2579 // whose addresses have not been taken? 2580 // (and no other memory) 2581 func varexpr(n *Node) bool { 2582 if n == nil { 2583 return true 2584 } 2585 2586 switch n.Op { 2587 case OLITERAL: 2588 return true 2589 2590 case ONAME: 2591 switch n.Class { 2592 case PAUTO, PPARAM, PPARAMOUT: 2593 if !n.Addrtaken { 2594 return true 2595 } 2596 } 2597 2598 return false 2599 2600 case OADD, 2601 OSUB, 2602 OOR, 2603 OXOR, 2604 OMUL, 2605 ODIV, 2606 OMOD, 2607 OLSH, 2608 ORSH, 2609 OAND, 2610 OANDNOT, 2611 OPLUS, 2612 OMINUS, 2613 OCOM, 2614 OPAREN, 2615 OANDAND, 2616 OOROR, 2617 OCONV, 2618 OCONVNOP, 2619 OCONVIFACE, 2620 ODOTTYPE: 2621 return varexpr(n.Left) && varexpr(n.Right) 2622 2623 case ODOT: // but not ODOTPTR 2624 // Should have been handled in aliased. 2625 Fatalf("varexpr unexpected ODOT") 2626 } 2627 2628 // Be conservative. 2629 return false 2630 } 2631 2632 // is the name l mentioned in r? 
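// For example (illustrative): if l is the node for the variable x,
// vmatch2 reports true for r = y+x*2 and false for r = y+z.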
2633 func vmatch2(l *Node, r *Node) bool { 2634 if r == nil { 2635 return false 2636 } 2637 switch r.Op { 2638 // match each right given left 2639 case ONAME: 2640 return l == r 2641 2642 case OLITERAL: 2643 return false 2644 } 2645 2646 if vmatch2(l, r.Left) { 2647 return true 2648 } 2649 if vmatch2(l, r.Right) { 2650 return true 2651 } 2652 for _, n := range r.List.Slice() { 2653 if vmatch2(l, n) { 2654 return true 2655 } 2656 } 2657 return false 2658 } 2659 2660 // is any name mentioned in l also mentioned in r? 2661 // called by sinit.go 2662 func vmatch1(l *Node, r *Node) bool { 2663 // isolate all left sides 2664 if l == nil || r == nil { 2665 return false 2666 } 2667 switch l.Op { 2668 case ONAME: 2669 switch l.Class { 2670 case PPARAM, PAUTO: 2671 break 2672 2673 // assignment to non-stack variable 2674 // must be delayed if right has function calls. 2675 default: 2676 if r.Ullman >= UINF { 2677 return true 2678 } 2679 } 2680 2681 return vmatch2(l, r) 2682 2683 case OLITERAL: 2684 return false 2685 } 2686 2687 if vmatch1(l.Left, r) { 2688 return true 2689 } 2690 if vmatch1(l.Right, r) { 2691 return true 2692 } 2693 for _, n := range l.List.Slice() { 2694 if vmatch1(n, r) { 2695 return true 2696 } 2697 } 2698 return false 2699 } 2700 2701 // paramstoheap returns code to allocate memory for heap-escaped parameters 2702 // and to copy non-result prameters' values from the stack. 2703 // If out is true, then code is also produced to zero-initialize their 2704 // stack memory addresses. 2705 func paramstoheap(params *Type) []*Node { 2706 var nn []*Node 2707 for _, t := range params.Fields().Slice() { 2708 // For precise stacks, the garbage collector assumes results 2709 // are always live, so zero them always. 2710 if params.StructType().Funarg == FunargResults { 2711 // Defer might stop a panic and show the 2712 // return values as they exist at the time of panic. 2713 // Make sure to zero them on entry to the function. 2714 nn = append(nn, nod(OAS, nodarg(t, 1), nil)) 2715 } 2716 2717 v := t.Nname 2718 if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result 2719 v = nil 2720 } 2721 if v == nil { 2722 continue 2723 } 2724 2725 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil { 2726 nn = append(nn, walkstmt(nod(ODCL, v, nil))) 2727 if stackcopy.Class == PPARAM { 2728 nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop))) 2729 } 2730 } 2731 } 2732 2733 return nn 2734 } 2735 2736 // returnsfromheap returns code to copy values for heap-escaped parameters 2737 // back to the stack. 2738 func returnsfromheap(params *Type) []*Node { 2739 var nn []*Node 2740 for _, t := range params.Fields().Slice() { 2741 v := t.Nname 2742 if v == nil { 2743 continue 2744 } 2745 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class == PPARAMOUT { 2746 nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop))) 2747 } 2748 } 2749 2750 return nn 2751 } 2752 2753 // heapmoves generates code to handle migrating heap-escaped parameters 2754 // between the stack and the heap. The generated code is added to Curfn's 2755 // Enter and Exit lists. 2756 func heapmoves() { 2757 lno := lineno 2758 lineno = Curfn.Lineno 2759 nn := paramstoheap(Curfn.Type.Recvs()) 2760 nn = append(nn, paramstoheap(Curfn.Type.Params())...) 2761 nn = append(nn, paramstoheap(Curfn.Type.Results())...) 2762 Curfn.Func.Enter.Append(nn...) 2763 lineno = Curfn.Func.Endlineno 2764 Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...) 
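// (The Enter list is emitted at the start of the function and the Exit
// list just before returning, so escaped parameters are copied into their
// heap copies on entry and escaped results are copied back out on exit.)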
2765 lineno = lno 2766 } 2767 2768 func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node { 2769 if fn.Type == nil || fn.Type.Etype != TFUNC { 2770 Fatalf("mkcall %v %v", fn, fn.Type) 2771 } 2772 2773 n := fn.Type.Params().NumFields() 2774 2775 r := nod(OCALL, fn, nil) 2776 r.List.Set(va[:n]) 2777 if fn.Type.Results().NumFields() > 0 { 2778 r = typecheck(r, Erv|Efnstruct) 2779 } else { 2780 r = typecheck(r, Etop) 2781 } 2782 r = walkexpr(r, init) 2783 r.Type = t 2784 return r 2785 } 2786 2787 func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node { 2788 return vmkcall(syslook(name), t, init, args) 2789 } 2790 2791 func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node { 2792 return vmkcall(fn, t, init, args) 2793 } 2794 2795 func conv(n *Node, t *Type) *Node { 2796 if eqtype(n.Type, t) { 2797 return n 2798 } 2799 n = nod(OCONV, n, nil) 2800 n.Type = t 2801 n = typecheck(n, Erv) 2802 return n 2803 } 2804 2805 func chanfn(name string, n int, t *Type) *Node { 2806 if !t.IsChan() { 2807 Fatalf("chanfn %v", t) 2808 } 2809 fn := syslook(name) 2810 switch n { 2811 default: 2812 Fatalf("chanfn %d", n) 2813 case 1: 2814 fn = substArgTypes(fn, t.Elem()) 2815 case 2: 2816 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2817 } 2818 return fn 2819 } 2820 2821 func mapfn(name string, t *Type) *Node { 2822 if !t.IsMap() { 2823 Fatalf("mapfn %v", t) 2824 } 2825 fn := syslook(name) 2826 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2827 return fn 2828 } 2829 2830 func mapfndel(name string, t *Type) *Node { 2831 if !t.IsMap() { 2832 Fatalf("mapfn %v", t) 2833 } 2834 fn := syslook(name) 2835 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2836 return fn 2837 } 2838 2839 func writebarrierfn(name string, l *Type, r *Type) *Node { 2840 fn := syslook(name) 2841 fn = substArgTypes(fn, l, r) 2842 return fn 2843 } 2844 2845 func addstr(n *Node, init *Nodes) *Node { 2846 // orderexpr rewrote OADDSTR to have a list of strings. 2847 c := n.List.Len() 2848 2849 if c < 2 { 2850 yyerror("addstr count %d too small", c) 2851 } 2852 2853 buf := nodnil() 2854 if n.Esc == EscNone { 2855 sz := int64(0) 2856 for _, n1 := range n.List.Slice() { 2857 if n1.Op == OLITERAL { 2858 sz += int64(len(n1.Val().U.(string))) 2859 } 2860 } 2861 2862 // Don't allocate the buffer if the result won't fit. 2863 if sz < tmpstringbufsize { 2864 // Create temporary buffer for result string on stack. 2865 t := typArray(Types[TUINT8], tmpstringbufsize) 2866 2867 buf = nod(OADDR, temp(t), nil) 2868 } 2869 } 2870 2871 // build list of string arguments 2872 args := []*Node{buf} 2873 for _, n2 := range n.List.Slice() { 2874 args = append(args, conv(n2, Types[TSTRING])) 2875 } 2876 2877 var fn string 2878 if c <= 5 { 2879 // small numbers of strings use direct runtime helpers. 2880 // note: orderexpr knows this cutoff too. 2881 fn = fmt.Sprintf("concatstring%d", c) 2882 } else { 2883 // large numbers of strings are passed to the runtime as a slice. 2884 fn = "concatstrings" 2885 2886 t := typSlice(Types[TSTRING]) 2887 slice := nod(OCOMPLIT, nil, typenod(t)) 2888 if prealloc[n] != nil { 2889 prealloc[slice] = prealloc[n] 2890 } 2891 slice.List.Set(args[1:]) // skip buf arg 2892 args = []*Node{buf, slice} 2893 slice.Esc = EscNone 2894 } 2895 2896 cat := syslook(fn) 2897 r := nod(OCALL, cat, nil) 2898 r.List.Set(args) 2899 r = typecheck(r, Erv) 2900 r = walkexpr(r, init) 2901 r.Type = n.Type 2902 2903 return r 2904 } 2905 2906 // expand append(l1, l2...) 
to 2907 // init { 2908 // s := l1 2909 // n := len(s) + len(l2) 2910 // // Compare as uint so growslice can panic on overflow. 2911 // if uint(n) > uint(cap(s)) { 2912 // s = growslice(s, n) 2913 // } 2914 // s = s[:n] 2915 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2916 // } 2917 // s 2918 // 2919 // l2 is allowed to be a string. 2920 func appendslice(n *Node, init *Nodes) *Node { 2921 walkexprlistsafe(n.List.Slice(), init) 2922 2923 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2924 // and n are name or literal, but those may index the slice we're 2925 // modifying here. Fix explicitly. 2926 ls := n.List.Slice() 2927 for i1, n1 := range ls { 2928 ls[i1] = cheapexpr(n1, init) 2929 } 2930 2931 l1 := n.List.First() 2932 l2 := n.List.Second() 2933 2934 var l []*Node 2935 2936 // var s []T 2937 s := temp(l1.Type) 2938 l = append(l, nod(OAS, s, l1)) // s = l1 2939 2940 // n := len(s) + len(l2) 2941 nn := temp(Types[TINT]) 2942 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2943 2944 // if uint(n) > uint(cap(s)) 2945 nif := nod(OIF, nil, nil) 2946 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2947 nif.Left.Left.Type = Types[TUINT] 2948 nif.Left.Right.Type = Types[TUINT] 2949 2950 // instantiate growslice(Type*, []any, int) []any 2951 fn := syslook("growslice") 2952 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2953 2954 // s = growslice(T, s, n) 2955 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2956 l = append(l, nif) 2957 2958 // s = s[:n] 2959 nt := nod(OSLICE, s, nil) 2960 nt.SetSliceBounds(nil, nn, nil) 2961 nt.Etype = 1 2962 l = append(l, nod(OAS, s, nt)) 2963 2964 if haspointers(l1.Type.Elem()) { 2965 // copy(s[len(l1):], l2) 2966 nptr1 := nod(OSLICE, s, nil) 2967 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2968 nptr1.Etype = 1 2969 nptr2 := l2 2970 fn := syslook("typedslicecopy") 2971 fn = substArgTypes(fn, l1.Type, l2.Type) 2972 var ln Nodes 2973 ln.Set(l) 2974 nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 2975 l = append(ln.Slice(), nt) 2976 } else if instrumenting && !compiling_runtime { 2977 // rely on runtime to instrument copy. 2978 // copy(s[len(l1):], l2) 2979 nptr1 := nod(OSLICE, s, nil) 2980 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2981 nptr1.Etype = 1 2982 nptr2 := l2 2983 var fn *Node 2984 if l2.Type.IsString() { 2985 fn = syslook("slicestringcopy") 2986 } else { 2987 fn = syslook("slicecopy") 2988 } 2989 fn = substArgTypes(fn, l1.Type, l2.Type) 2990 var ln Nodes 2991 ln.Set(l) 2992 nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 2993 l = append(ln.Slice(), nt) 2994 } else { 2995 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2996 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 2997 nptr1.Bounded = true 2998 2999 nptr1 = nod(OADDR, nptr1, nil) 3000 3001 nptr2 := nod(OSPTR, l2, nil) 3002 3003 fn := syslook("memmove") 3004 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 3005 3006 var ln Nodes 3007 ln.Set(l) 3008 nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln) 3009 3010 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 3011 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 3012 l = append(ln.Slice(), nt) 3013 } 3014 3015 typecheckslice(l, Etop) 3016 walkstmtlist(l) 3017 init.Append(l...) 
3018 return s 3019 } 3020 3021 // Rewrite append(src, x, y, z) so that any side effects in 3022 // x, y, z (including runtime panics) are evaluated in 3023 // initialization statements before the append. 3024 // For normal code generation, stop there and leave the 3025 // rest to cgen_append. 3026 // 3027 // For race detector, expand append(src, a [, b]* ) to 3028 // 3029 // init { 3030 // s := src 3031 // const argc = len(args) - 1 3032 // if cap(s) - len(s) < argc { 3033 // s = growslice(s, len(s)+argc) 3034 // } 3035 // n := len(s) 3036 // s = s[:n+argc] 3037 // s[n] = a 3038 // s[n+1] = b 3039 // ... 3040 // } 3041 // s 3042 func walkappend(n *Node, init *Nodes, dst *Node) *Node { 3043 if !samesafeexpr(dst, n.List.First()) { 3044 n.List.SetIndex(0, safeexpr(n.List.Index(0), init)) 3045 n.List.SetIndex(0, walkexpr(n.List.Index(0), init)) 3046 } 3047 walkexprlistsafe(n.List.Slice()[1:], init) 3048 3049 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 3050 // and n are name or literal, but those may index the slice we're 3051 // modifying here. Fix explicitly. 3052 // Using cheapexpr also makes sure that the evaluation 3053 // of all arguments (and especially any panics) happen 3054 // before we begin to modify the slice in a visible way. 3055 ls := n.List.Slice()[1:] 3056 for i, n := range ls { 3057 ls[i] = cheapexpr(n, init) 3058 } 3059 3060 nsrc := n.List.First() 3061 3062 argc := n.List.Len() - 1 3063 if argc < 1 { 3064 return nsrc 3065 } 3066 3067 // General case, with no function calls left as arguments. 3068 // Leave for gen, except that instrumentation requires old form. 3069 if !instrumenting || compiling_runtime { 3070 return n 3071 } 3072 3073 var l []*Node 3074 3075 ns := temp(nsrc.Type) 3076 l = append(l, nod(OAS, ns, nsrc)) // s = src 3077 3078 na := nodintconst(int64(argc)) // const argc 3079 nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc 3080 nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na) 3081 3082 fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T) 3083 fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem()) 3084 3085 nx.Nbody.Set1(nod(OAS, ns, 3086 mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns, 3087 nod(OADD, nod(OLEN, ns, nil), na)))) 3088 3089 l = append(l, nx) 3090 3091 nn := temp(Types[TINT]) 3092 l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s) 3093 3094 nx = nod(OSLICE, ns, nil) // ...s[:n+argc] 3095 nx.SetSliceBounds(nil, nod(OADD, nn, na), nil) 3096 nx.Etype = 1 3097 l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc] 3098 3099 ls = n.List.Slice()[1:] 3100 for i, n := range ls { 3101 nx = nod(OINDEX, ns, nn) // s[n] ... 3102 nx.Bounded = true 3103 l = append(l, nod(OAS, nx, n)) // s[n] = arg 3104 if i+1 < len(ls) { 3105 l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1 3106 } 3107 } 3108 3109 typecheckslice(l, Etop) 3110 walkstmtlist(l) 3111 init.Append(l...) 3112 return ns 3113 } 3114 3115 // Lower copy(a, b) to a memmove call or a runtime call. 3116 // 3117 // init { 3118 // n := len(a) 3119 // if n > len(b) { n = len(b) } 3120 // memmove(a.ptr, b.ptr, n*sizeof(elem(a))) 3121 // } 3122 // n; 3123 // 3124 // Also works if b is a string. 
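// When the element type contains pointers, the call is instead lowered to
// the runtime's typedslicecopy so that write barriers run; with
// runtimecall set it becomes a slicecopy/slicestringcopy call. As an
// illustrative example, n := copy(dst, src) for dst, src []byte expands
// to roughly the init block above with an element size of 1.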
3125 // 3126 func copyany(n *Node, init *Nodes, runtimecall bool) *Node { 3127 if haspointers(n.Left.Type.Elem()) { 3128 fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type) 3129 return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right) 3130 } 3131 3132 if runtimecall { 3133 var fn *Node 3134 if n.Right.Type.IsString() { 3135 fn = syslook("slicestringcopy") 3136 } else { 3137 fn = syslook("slicecopy") 3138 } 3139 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 3140 return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width)) 3141 } 3142 3143 n.Left = walkexpr(n.Left, init) 3144 n.Right = walkexpr(n.Right, init) 3145 nl := temp(n.Left.Type) 3146 nr := temp(n.Right.Type) 3147 var l []*Node 3148 l = append(l, nod(OAS, nl, n.Left)) 3149 l = append(l, nod(OAS, nr, n.Right)) 3150 3151 nfrm := nod(OSPTR, nr, nil) 3152 nto := nod(OSPTR, nl, nil) 3153 3154 nlen := temp(Types[TINT]) 3155 3156 // n = len(to) 3157 l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil))) 3158 3159 // if n > len(frm) { n = len(frm) } 3160 nif := nod(OIF, nil, nil) 3161 3162 nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil)) 3163 nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil))) 3164 l = append(l, nif) 3165 3166 // Call memmove. 3167 fn := syslook("memmove") 3168 3169 fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem()) 3170 nwid := temp(Types[TUINTPTR]) 3171 l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR]))) 3172 nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width)) 3173 l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid)) 3174 3175 typecheckslice(l, Etop) 3176 walkstmtlist(l) 3177 init.Append(l...) 3178 return nlen 3179 } 3180 3181 func eqfor(t *Type, needsize *int) *Node { 3182 // Should only arrive here with large memory or 3183 // a struct/array containing a non-memory field/element. 3184 // Small memory is handled inline, and single non-memory 3185 // is handled during type check (OCMPSTR etc). 3186 switch a, _ := algtype1(t); a { 3187 case AMEM: 3188 n := syslook("memequal") 3189 n = substArgTypes(n, t, t) 3190 *needsize = 1 3191 return n 3192 case ASPECIAL: 3193 sym := typesymprefix(".eq", t) 3194 n := newname(sym) 3195 n.Class = PFUNC 3196 ntype := nod(OTFUNC, nil, nil) 3197 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3198 ntype.List.Append(nod(ODCLFIELD, nil, typenod(ptrto(t)))) 3199 ntype.Rlist.Append(nod(ODCLFIELD, nil, typenod(Types[TBOOL]))) 3200 ntype = typecheck(ntype, Etype) 3201 n.Type = ntype.Type 3202 *needsize = 0 3203 return n 3204 } 3205 Fatalf("eqfor %v", t) 3206 return nil 3207 } 3208 3209 // The result of walkcompare MUST be assigned back to n, e.g. 3210 // n.Left = walkcompare(n.Left, init) 3211 func walkcompare(n *Node, init *Nodes) *Node { 3212 // Given interface value l and concrete value r, rewrite 3213 // l == r 3214 // into types-equal && data-equal. 3215 // This is efficient, avoids allocations, and avoids runtime calls. 3216 var l, r *Node 3217 if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() { 3218 l = n.Left 3219 r = n.Right 3220 } else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() { 3221 l = n.Right 3222 r = n.Left 3223 } 3224 3225 if l != nil { 3226 // Handle both == and !=. 3227 eq := n.Op 3228 var andor Op 3229 if eq == OEQ { 3230 andor = OANDAND 3231 } else { 3232 andor = OOROR 3233 } 3234 // Check for types equal. 
3235 // For empty interface, this is: 3236 // l.tab == type(r) 3237 // For non-empty interface, this is: 3238 // l.tab != nil && l.tab._type == type(r) 3239 var eqtype *Node 3240 tab := nod(OITAB, l, nil) 3241 rtyp := typename(r.Type) 3242 if l.Type.IsEmptyInterface() { 3243 tab.Type = ptrto(Types[TUINT8]) 3244 tab.Typecheck = 1 3245 eqtype = nod(eq, tab, rtyp) 3246 } else { 3247 nonnil := nod(brcom(eq), nodnil(), tab) 3248 match := nod(eq, itabType(tab), rtyp) 3249 eqtype = nod(andor, nonnil, match) 3250 } 3251 // Check for data equal. 3252 eqdata := nod(eq, ifaceData(l, r.Type), r) 3253 // Put it all together. 3254 expr := nod(andor, eqtype, eqdata) 3255 n = finishcompare(n, expr, init) 3256 return n 3257 } 3258 3259 // Must be comparison of array or struct. 3260 // Otherwise back end handles it. 3261 // While we're here, decide whether to 3262 // inline or call an eq alg. 3263 t := n.Left.Type 3264 var inline bool 3265 switch t.Etype { 3266 default: 3267 return n 3268 case TARRAY: 3269 inline = t.NumElem() <= 1 || (t.NumElem() <= 4 && issimple[t.Elem().Etype]) 3270 case TSTRUCT: 3271 inline = t.NumFields() <= 4 3272 } 3273 3274 cmpl := n.Left 3275 for cmpl != nil && cmpl.Op == OCONVNOP { 3276 cmpl = cmpl.Left 3277 } 3278 cmpr := n.Right 3279 for cmpr != nil && cmpr.Op == OCONVNOP { 3280 cmpr = cmpr.Left 3281 } 3282 3283 if !islvalue(cmpl) || !islvalue(cmpr) { 3284 Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) 3285 } 3286 3287 // Chose not to inline. Call equality function directly. 3288 if !inline { 3289 // eq algs take pointers 3290 pl := temp(ptrto(t)) 3291 al := nod(OAS, pl, nod(OADDR, cmpl, nil)) 3292 al.Right.Etype = 1 // addr does not escape 3293 al = typecheck(al, Etop) 3294 init.Append(al) 3295 3296 pr := temp(ptrto(t)) 3297 ar := nod(OAS, pr, nod(OADDR, cmpr, nil)) 3298 ar.Right.Etype = 1 // addr does not escape 3299 ar = typecheck(ar, Etop) 3300 init.Append(ar) 3301 3302 var needsize int 3303 call := nod(OCALL, eqfor(t, &needsize), nil) 3304 call.List.Append(pl) 3305 call.List.Append(pr) 3306 if needsize != 0 { 3307 call.List.Append(nodintconst(t.Width)) 3308 } 3309 res := call 3310 if n.Op != OEQ { 3311 res = nod(ONOT, res, nil) 3312 } 3313 n = finishcompare(n, res, init) 3314 return n 3315 } 3316 3317 // inline: build boolean expression comparing element by element 3318 andor := OANDAND 3319 if n.Op == ONE { 3320 andor = OOROR 3321 } 3322 var expr *Node 3323 compare := func(el, er *Node) { 3324 a := nod(n.Op, el, er) 3325 if expr == nil { 3326 expr = a 3327 } else { 3328 expr = nod(andor, expr, a) 3329 } 3330 } 3331 cmpl = safeexpr(cmpl, init) 3332 cmpr = safeexpr(cmpr, init) 3333 if t.IsStruct() { 3334 for _, f := range t.Fields().Slice() { 3335 sym := f.Sym 3336 if isblanksym(sym) { 3337 continue 3338 } 3339 compare( 3340 nodSym(OXDOT, cmpl, sym), 3341 nodSym(OXDOT, cmpr, sym), 3342 ) 3343 } 3344 } else { 3345 for i := 0; int64(i) < t.NumElem(); i++ { 3346 compare( 3347 nod(OINDEX, cmpl, nodintconst(int64(i))), 3348 nod(OINDEX, cmpr, nodintconst(int64(i))), 3349 ) 3350 } 3351 } 3352 if expr == nil { 3353 expr = nodbool(n.Op == OEQ) 3354 } 3355 n = finishcompare(n, expr, init) 3356 return n 3357 } 3358 3359 // The result of finishcompare MUST be assigned back to n, e.g. 3360 // n.Left = finishcompare(n.Left, x, r, init) 3361 func finishcompare(n, r *Node, init *Nodes) *Node { 3362 // Use nn here to avoid passing r to typecheck. 
3363 nn := r 3364 nn = typecheck(nn, Erv) 3365 nn = walkexpr(nn, init) 3366 r = nn 3367 if r.Type != n.Type { 3368 r = nod(OCONVNOP, r, nil) 3369 r.Type = n.Type 3370 r.Typecheck = 1 3371 nn = r 3372 } 3373 return nn 3374 } 3375 3376 func samecheap(a *Node, b *Node) bool { 3377 var ar *Node 3378 var br *Node 3379 for a != nil && b != nil && a.Op == b.Op { 3380 switch a.Op { 3381 default: 3382 return false 3383 3384 case ONAME: 3385 return a == b 3386 3387 case ODOT, ODOTPTR: 3388 if a.Sym != b.Sym { 3389 return false 3390 } 3391 3392 case OINDEX: 3393 ar = a.Right 3394 br = b.Right 3395 if !Isconst(ar, CTINT) || !Isconst(br, CTINT) || ar.Val().U.(*Mpint).Cmp(br.Val().U.(*Mpint)) != 0 { 3396 return false 3397 } 3398 } 3399 3400 a = a.Left 3401 b = b.Left 3402 } 3403 3404 return false 3405 } 3406 3407 // The result of walkrotate MUST be assigned back to n, e.g. 3408 // n.Left = walkrotate(n.Left) 3409 func walkrotate(n *Node) *Node { 3410 if Thearch.LinkArch.InFamily(sys.MIPS64, sys.PPC64) { 3411 return n 3412 } 3413 3414 // Want << | >> or >> | << or << ^ >> or >> ^ << on unsigned value. 3415 l := n.Left 3416 3417 r := n.Right 3418 if (n.Op != OOR && n.Op != OXOR) || (l.Op != OLSH && l.Op != ORSH) || (r.Op != OLSH && r.Op != ORSH) || n.Type == nil || n.Type.IsSigned() || l.Op == r.Op { 3419 return n 3420 } 3421 3422 // Want same, side effect-free expression on lhs of both shifts. 3423 if !samecheap(l.Left, r.Left) { 3424 return n 3425 } 3426 3427 // Constants adding to width? 3428 w := int(l.Type.Width * 8) 3429 3430 if Thearch.LinkArch.Family == sys.S390X && w != 32 && w != 64 { 3431 // only supports 32-bit and 64-bit rotates 3432 return n 3433 } 3434 3435 if smallintconst(l.Right) && smallintconst(r.Right) { 3436 sl := int(l.Right.Int64()) 3437 if sl >= 0 { 3438 sr := int(r.Right.Int64()) 3439 if sr >= 0 && sl+sr == w { 3440 // Rewrite left shift half to left rotate. 3441 if l.Op == OLSH { 3442 n = l 3443 } else { 3444 n = r 3445 } 3446 n.Op = OLROT 3447 3448 // Remove rotate 0 and rotate w. 3449 s := int(n.Right.Int64()) 3450 3451 if s == 0 || s == w { 3452 n = n.Left 3453 } 3454 return n 3455 } 3456 } 3457 return n 3458 } 3459 3460 // TODO: Could allow s and 32-s if s is bounded (maybe s&31 and 32-s&31). 3461 return n 3462 } 3463 3464 // isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers. 3465 func (n *Node) isIntOrdering() bool { 3466 switch n.Op { 3467 case OLE, OLT, OGE, OGT: 3468 default: 3469 return false 3470 } 3471 return n.Left.Type.IsInteger() && n.Right.Type.IsInteger() 3472 } 3473 3474 // walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10. 3475 // n must be an OANDAND or OOROR node. 3476 // The result of walkinrange MUST be assigned back to n, e.g. 3477 // n.Left = walkinrange(n.Left) 3478 func walkinrange(n *Node, init *Nodes) *Node { 3479 // We are looking for something equivalent to a opl b OP b opr c, where: 3480 // * a, b, and c have integer type 3481 // * b is side-effect-free 3482 // * opl and opr are each < or ≤ 3483 // * OP is && 3484 l := n.Left 3485 r := n.Right 3486 if !l.isIntOrdering() || !r.isIntOrdering() { 3487 return n 3488 } 3489 3490 // Find b, if it exists, and rename appropriately. 
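// For example (illustrative): in 4 <= x && x < 10 the shared operand is x,
// so a=4, b=x, c=10, and the check is rewritten below into the single
// unsigned comparison uint(x-4) < 6.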
3491 // Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right 3492 // Output is: a opl b(==x) ANDAND/OROR b(==x) opr c 3493 a, opl, b := l.Left, l.Op, l.Right 3494 x, opr, c := r.Left, r.Op, r.Right 3495 for i := 0; ; i++ { 3496 if samesafeexpr(b, x) { 3497 break 3498 } 3499 if i == 3 { 3500 // Tried all permutations and couldn't find an appropriate b == x. 3501 return n 3502 } 3503 if i&1 == 0 { 3504 a, opl, b = b, brrev(opl), a 3505 } else { 3506 x, opr, c = c, brrev(opr), x 3507 } 3508 } 3509 3510 // If n.Op is ||, apply de Morgan. 3511 // Negate the internal ops now; we'll negate the top level op at the end. 3512 // Henceforth assume &&. 3513 negateResult := n.Op == OOROR 3514 if negateResult { 3515 opl = brcom(opl) 3516 opr = brcom(opr) 3517 } 3518 3519 cmpdir := func(o Op) int { 3520 switch o { 3521 case OLE, OLT: 3522 return -1 3523 case OGE, OGT: 3524 return +1 3525 } 3526 Fatalf("walkinrange cmpdir %v", o) 3527 return 0 3528 } 3529 if cmpdir(opl) != cmpdir(opr) { 3530 // Not a range check; something like b < a && b < c. 3531 return n 3532 } 3533 3534 switch opl { 3535 case OGE, OGT: 3536 // We have something like a > b && b ≥ c. 3537 // Switch and reverse ops and rename constants, 3538 // to make it look like a ≤ b && b < c. 3539 a, c = c, a 3540 opl, opr = brrev(opr), brrev(opl) 3541 } 3542 3543 // We must ensure that c-a is non-negative. 3544 // For now, require a and c to be constants. 3545 // In the future, we could also support a == 0 and c == len/cap(...). 3546 // Unfortunately, by this point, most len/cap expressions have been 3547 // stored into temporary variables. 3548 if !Isconst(a, CTINT) || !Isconst(c, CTINT) { 3549 return n 3550 } 3551 3552 if opl == OLT { 3553 // We have a < b && ... 3554 // We need a ≤ b && ... to safely use unsigned comparison tricks. 3555 // If a is not the maximum constant for b's type, 3556 // we can increment a and switch to ≤. 3557 if a.Int64() >= maxintval[b.Type.Etype].Int64() { 3558 return n 3559 } 3560 a = nodintconst(a.Int64() + 1) 3561 opl = OLE 3562 } 3563 3564 bound := c.Int64() - a.Int64() 3565 if bound < 0 { 3566 // Bad news. Something like 5 <= x && x < 3. 3567 // Rare in practice, and we still need to generate side-effects, 3568 // so just leave it alone. 3569 return n 3570 } 3571 3572 // We have a ≤ b && b < c (or a ≤ b && b ≤ c). 3573 // This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a), 3574 // which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a), 3575 // which is equivalent to uint(b-a) < uint(c-a). 3576 ut := b.Type.toUnsigned() 3577 lhs := conv(nod(OSUB, b, a), ut) 3578 rhs := nodintconst(bound) 3579 if negateResult { 3580 // Negate top level. 3581 opr = brcom(opr) 3582 } 3583 cmp := nod(opr, lhs, rhs) 3584 cmp.Lineno = n.Lineno 3585 cmp = addinit(cmp, l.Ninit.Slice()) 3586 cmp = addinit(cmp, r.Ninit.Slice()) 3587 // Typecheck the AST rooted at cmp... 3588 cmp = typecheck(cmp, Erv) 3589 // ...but then reset cmp's type to match n's type. 3590 cmp.Type = n.Type 3591 cmp = walkexpr(cmp, init) 3592 return cmp 3593 } 3594 3595 // walkmul rewrites integer multiplication by powers of two as shifts. 3596 // The result of walkmul MUST be assigned back to n, e.g. 
3597 // n.Left = walkmul(n.Left, init) 3598 func walkmul(n *Node, init *Nodes) *Node { 3599 if !n.Type.IsInteger() { 3600 return n 3601 } 3602 3603 var nr *Node 3604 var nl *Node 3605 if n.Right.Op == OLITERAL { 3606 nl = n.Left 3607 nr = n.Right 3608 } else if n.Left.Op == OLITERAL { 3609 nl = n.Right 3610 nr = n.Left 3611 } else { 3612 return n 3613 } 3614 3615 neg := 0 3616 3617 // x*0 is 0 (and side effects of x). 3618 var pow int 3619 var w int 3620 if nr.Int64() == 0 { 3621 cheapexpr(nl, init) 3622 Nodconst(n, n.Type, 0) 3623 goto ret 3624 } 3625 3626 // nr is a constant. 3627 pow = powtwo(nr) 3628 3629 if pow < 0 { 3630 return n 3631 } 3632 if pow >= 1000 { 3633 // negative power of 2, like -16 3634 neg = 1 3635 3636 pow -= 1000 3637 } 3638 3639 w = int(nl.Type.Width * 8) 3640 if pow+1 >= w { // too big, shouldn't happen 3641 return n 3642 } 3643 3644 nl = cheapexpr(nl, init) 3645 3646 if pow == 0 { 3647 // x*1 is x 3648 n = nl 3649 3650 goto ret 3651 } 3652 3653 n = nod(OLSH, nl, nodintconst(int64(pow))) 3654 3655 ret: 3656 if neg != 0 { 3657 n = nod(OMINUS, n, nil) 3658 } 3659 3660 n = typecheck(n, Erv) 3661 n = walkexpr(n, init) 3662 return n 3663 } 3664 3665 // walkdiv rewrites division by a constant as less expensive 3666 // operations. 3667 // The result of walkdiv MUST be assigned back to n, e.g. 3668 // n.Left = walkdiv(n.Left, init) 3669 func walkdiv(n *Node, init *Nodes) *Node { 3670 // if >= 0, nr is 1<<pow // 1 if nr is negative. 3671 3672 if n.Right.Op != OLITERAL { 3673 return n 3674 } 3675 3676 // nr is a constant. 3677 nl := cheapexpr(n.Left, init) 3678 3679 nr := n.Right 3680 3681 // special cases of mod/div 3682 // by a constant 3683 w := int(nl.Type.Width * 8) 3684 3685 s := 0 // 1 if nr is negative. 3686 pow := powtwo(nr) // if >= 0, nr is 1<<pow 3687 if pow >= 1000 { 3688 // negative power of 2 3689 s = 1 3690 3691 pow -= 1000 3692 } 3693 3694 if pow+1 >= w { 3695 // divisor too large. 3696 return n 3697 } 3698 3699 if pow < 0 { 3700 // try to do division by multiply by (2^w)/d 3701 // see hacker's delight chapter 10 3702 // TODO: support 64-bit magic multiply here. 3703 var m Magic 3704 m.W = w 3705 3706 if nl.Type.IsSigned() { 3707 m.Sd = nr.Int64() 3708 smagic(&m) 3709 } else { 3710 m.Ud = uint64(nr.Int64()) 3711 umagic(&m) 3712 } 3713 3714 if m.Bad != 0 { 3715 return n 3716 } 3717 3718 // We have a quick division method so use it 3719 // for modulo too. 3720 if n.Op == OMOD { 3721 // rewrite as A%B = A - (A/B*B). 3722 n1 := nod(ODIV, nl, nr) 3723 3724 n2 := nod(OMUL, n1, nr) 3725 n = nod(OSUB, nl, n2) 3726 goto ret 3727 } 3728 3729 switch simtype[nl.Type.Etype] { 3730 default: 3731 return n 3732 3733 // n1 = nl * magic >> w (HMUL) 3734 case TUINT8, TUINT16, TUINT32: 3735 var nc Node 3736 3737 Nodconst(&nc, nl.Type, int64(m.Um)) 3738 n1 := nod(OHMUL, nl, &nc) 3739 n1 = typecheck(n1, Erv) 3740 if m.Ua != 0 { 3741 // Select a Go type with (at least) twice the width. 3742 var twide *Type 3743 switch simtype[nl.Type.Etype] { 3744 default: 3745 return n 3746 3747 case TUINT8, TUINT16: 3748 twide = Types[TUINT32] 3749 3750 case TUINT32: 3751 twide = Types[TUINT64] 3752 3753 case TINT8, TINT16: 3754 twide = Types[TINT32] 3755 3756 case TINT32: 3757 twide = Types[TINT64] 3758 } 3759 3760 // add numerator (might overflow). 
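// (Sketch of the fixup, in terms of the names used here: when the magic
// constant needs the extra add, the quotient is (n1 + nl) >> m.S, but the
// sum can exceed the operand's width, so both operands are converted to
// the wider type twide before the add and shift.)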
3761 // n2 = (n1 + nl)
3762 n2 := nod(OADD, conv(n1, twide), conv(nl, twide))
3763
3764 // shift by m.s
3765 var nc Node
3766
3767 Nodconst(&nc, Types[TUINT], int64(m.S))
3768 n = conv(nod(ORSH, n2, &nc), nl.Type)
3769 } else {
3770 // n = n1 >> m.s
3771 var nc Node
3772
3773 Nodconst(&nc, Types[TUINT], int64(m.S))
3774 n = nod(ORSH, n1, &nc)
3775 }
3776
3777 // n1 = nl * magic >> w
3778 case TINT8, TINT16, TINT32:
3779 var nc Node
3780
3781 Nodconst(&nc, nl.Type, m.Sm)
3782 n1 := nod(OHMUL, nl, &nc)
3783 n1 = typecheck(n1, Erv)
3784 if m.Sm < 0 {
3785 // add the numerator.
3786 n1 = nod(OADD, n1, nl)
3787 }
3788
3789 // shift by m.s
3790 var ns Node
3791
3792 Nodconst(&ns, Types[TUINT], int64(m.S))
3793 n2 := conv(nod(ORSH, n1, &ns), nl.Type)
3794
3795 // add 1 iff n1 is negative.
3796 var nneg Node
3797
3798 Nodconst(&nneg, Types[TUINT], int64(w)-1)
3799 n3 := nod(ORSH, nl, &nneg) // n3 = -1 iff nl is negative.
3800 n = nod(OSUB, n2, n3)
3801
3802 // apply sign.
3803 if m.Sd < 0 {
3804 n = nod(OMINUS, n, nil)
3805 }
3806 }
3807
3808 goto ret
3809 }
3810
3811 switch pow {
3812 case 0:
3813 if n.Op == OMOD {
3814 // nl % 1 is zero.
3815 Nodconst(n, n.Type, 0)
3816 } else if s != 0 {
3817 // divide by -1
3818 n.Op = OMINUS
3819
3820 n.Right = nil
3821 } else {
3822 // divide by 1
3823 n = nl
3824 }
3825
3826 default:
3827 if n.Type.IsSigned() {
3828 if n.Op == OMOD {
3829 // signed modulo 2^pow is like ANDing
3830 // with the last pow bits, but if nl < 0,
3831 // nl & (2^pow-1) is (nl+1)%2^pow - 1.
3832 var nc Node
3833
3834 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
3835 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
3836 if pow == 1 {
3837 n1 = typecheck(n1, Erv)
3838 n1 = cheapexpr(n1, init)
3839
3840 // n = (nl+ε)&1 -ε where ε=1 iff nl<0.
3841 n2 := nod(OSUB, nl, n1)
3842
3843 var nc Node
3844 Nodconst(&nc, nl.Type, 1)
3845 n3 := nod(OAND, n2, &nc)
3846 n = nod(OADD, n3, n1)
3847 } else {
3848 // n = (nl+ε)&(nr-1) - ε where ε=2^pow-1 iff nl<0.
3849 var nc Node
3850
3851 Nodconst(&nc, nl.Type, (1<<uint(pow))-1)
3852 n2 := nod(OAND, n1, &nc) // n2 = 2^pow-1 iff nl<0.
3853 n2 = typecheck(n2, Erv)
3854 n2 = cheapexpr(n2, init)
3855
3856 n3 := nod(OADD, nl, n2)
3857 n4 := nod(OAND, n3, &nc)
3858 n = nod(OSUB, n4, n2)
3859 }
3860
3861 break
3862 } else {
3863 // arithmetic right shift does not give the correct rounding.
3864 // if nl >= 0, nl >> n == nl / nr
3865 // if nl < 0, we want to add 2^n-1 first.
3866 var nc Node
3867
3868 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-1)
3869 n1 := nod(ORSH, nl, &nc) // n1 = -1 iff nl < 0.
3870 if pow == 1 {
3871 // nl+1 is nl-(-1)
3872 n.Left = nod(OSUB, nl, n1)
3873 } else {
3874 // Do a logical right shift on -1 to keep pow bits.
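// Illustrative example (not from the original source): -7/4 must be -1
// because Go division truncates toward zero, but a plain arithmetic shift
// gives -7>>2 == -2. Adding 2^pow-1 == 3 to negative dividends first gives
// (-7+3)>>2 == -1, the correct result.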
3875 var nc Node 3876 3877 Nodconst(&nc, Types[simtype[TUINT]], int64(w)-int64(pow)) 3878 n2 := nod(ORSH, conv(n1, nl.Type.toUnsigned()), &nc) 3879 n.Left = nod(OADD, nl, conv(n2, nl.Type)) 3880 } 3881 3882 // n = (nl + 2^pow-1) >> pow 3883 n.Op = ORSH 3884 3885 var n2 Node 3886 Nodconst(&n2, Types[simtype[TUINT]], int64(pow)) 3887 n.Right = &n2 3888 n.Typecheck = 0 3889 } 3890 3891 if s != 0 { 3892 n = nod(OMINUS, n, nil) 3893 } 3894 break 3895 } 3896 3897 var nc Node 3898 if n.Op == OMOD { 3899 // n = nl & (nr-1) 3900 n.Op = OAND 3901 3902 Nodconst(&nc, nl.Type, nr.Int64()-1) 3903 } else { 3904 // n = nl >> pow 3905 n.Op = ORSH 3906 3907 Nodconst(&nc, Types[simtype[TUINT]], int64(pow)) 3908 } 3909 3910 n.Typecheck = 0 3911 n.Right = &nc 3912 } 3913 3914 goto ret 3915 3916 ret: 3917 n = typecheck(n, Erv) 3918 n = walkexpr(n, init) 3919 return n 3920 } 3921 3922 // return 1 if integer n must be in range [0, max), 0 otherwise 3923 func bounded(n *Node, max int64) bool { 3924 if n.Type == nil || !n.Type.IsInteger() { 3925 return false 3926 } 3927 3928 sign := n.Type.IsSigned() 3929 bits := int32(8 * n.Type.Width) 3930 3931 if smallintconst(n) { 3932 v := n.Int64() 3933 return 0 <= v && v < max 3934 } 3935 3936 switch n.Op { 3937 case OAND: 3938 v := int64(-1) 3939 if smallintconst(n.Left) { 3940 v = n.Left.Int64() 3941 } else if smallintconst(n.Right) { 3942 v = n.Right.Int64() 3943 } 3944 3945 if 0 <= v && v < max { 3946 return true 3947 } 3948 3949 case OMOD: 3950 if !sign && smallintconst(n.Right) { 3951 v := n.Right.Int64() 3952 if 0 <= v && v <= max { 3953 return true 3954 } 3955 } 3956 3957 case ODIV: 3958 if !sign && smallintconst(n.Right) { 3959 v := n.Right.Int64() 3960 for bits > 0 && v >= 2 { 3961 bits-- 3962 v >>= 1 3963 } 3964 } 3965 3966 case ORSH: 3967 if !sign && smallintconst(n.Right) { 3968 v := n.Right.Int64() 3969 if v > int64(bits) { 3970 return true 3971 } 3972 bits -= int32(v) 3973 } 3974 } 3975 3976 if !sign && bits <= 62 && 1<<uint(bits) <= max { 3977 return true 3978 } 3979 3980 return false 3981 } 3982 3983 // usemethod check interface method calls for uses of reflect.Type.Method. 3984 func usemethod(n *Node) { 3985 t := n.Left.Type 3986 3987 // Looking for either of: 3988 // Method(int) reflect.Method 3989 // MethodByName(string) (reflect.Method, bool) 3990 // 3991 // TODO(crawshaw): improve precision of match by working out 3992 // how to check the method name. 3993 if n := t.Params().NumFields(); n != 1 { 3994 return 3995 } 3996 if n := t.Results().NumFields(); n != 1 && n != 2 { 3997 return 3998 } 3999 p0 := t.Params().Field(0) 4000 res0 := t.Results().Field(0) 4001 var res1 *Field 4002 if t.Results().NumFields() == 2 { 4003 res1 = t.Results().Field(1) 4004 } 4005 4006 if res1 == nil { 4007 if p0.Type.Etype != TINT { 4008 return 4009 } 4010 } else { 4011 if !p0.Type.IsString() { 4012 return 4013 } 4014 if !res1.Type.IsBoolean() { 4015 return 4016 } 4017 } 4018 if res0.Type.String() != "reflect.Method" { 4019 return 4020 } 4021 4022 Curfn.Func.ReflectMethod = true 4023 } 4024 4025 func usefield(n *Node) { 4026 if obj.Fieldtrack_enabled == 0 { 4027 return 4028 } 4029 4030 switch n.Op { 4031 default: 4032 Fatalf("usefield %v", n.Op) 4033 4034 case ODOT, ODOTPTR: 4035 break 4036 } 4037 if n.Sym == nil { 4038 // No field name. This DOTPTR was built by the compiler for access 4039 // to runtime data structures. Ignore. 
4040 return 4041 } 4042 4043 t := n.Left.Type 4044 if t.IsPtr() { 4045 t = t.Elem() 4046 } 4047 field := dotField[typeSym{t.Orig, n.Sym}] 4048 if field == nil { 4049 Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) 4050 } 4051 if !strings.Contains(field.Note, "go:\"track\"") { 4052 return 4053 } 4054 4055 outer := n.Left.Type 4056 if outer.IsPtr() { 4057 outer = outer.Elem() 4058 } 4059 if outer.Sym == nil { 4060 yyerror("tracked field must be in named struct type") 4061 } 4062 if !exportname(field.Sym.Name) { 4063 yyerror("tracked field must be exported (upper case)") 4064 } 4065 4066 sym := tracksym(outer, field) 4067 if Curfn.Func.FieldTrack == nil { 4068 Curfn.Func.FieldTrack = make(map[*Sym]struct{}) 4069 } 4070 Curfn.Func.FieldTrack[sym] = struct{}{} 4071 } 4072 4073 func candiscardlist(l Nodes) bool { 4074 for _, n := range l.Slice() { 4075 if !candiscard(n) { 4076 return false 4077 } 4078 } 4079 return true 4080 } 4081 4082 func candiscard(n *Node) bool { 4083 if n == nil { 4084 return true 4085 } 4086 4087 switch n.Op { 4088 default: 4089 return false 4090 4091 // Discardable as long as the subpieces are. 4092 case ONAME, 4093 ONONAME, 4094 OTYPE, 4095 OPACK, 4096 OLITERAL, 4097 OADD, 4098 OSUB, 4099 OOR, 4100 OXOR, 4101 OADDSTR, 4102 OADDR, 4103 OANDAND, 4104 OARRAYBYTESTR, 4105 OARRAYRUNESTR, 4106 OSTRARRAYBYTE, 4107 OSTRARRAYRUNE, 4108 OCAP, 4109 OCMPIFACE, 4110 OCMPSTR, 4111 OCOMPLIT, 4112 OMAPLIT, 4113 OSTRUCTLIT, 4114 OARRAYLIT, 4115 OSLICELIT, 4116 OPTRLIT, 4117 OCONV, 4118 OCONVIFACE, 4119 OCONVNOP, 4120 ODOT, 4121 OEQ, 4122 ONE, 4123 OLT, 4124 OLE, 4125 OGT, 4126 OGE, 4127 OKEY, 4128 OSTRUCTKEY, 4129 OLEN, 4130 OMUL, 4131 OLSH, 4132 ORSH, 4133 OAND, 4134 OANDNOT, 4135 ONEW, 4136 ONOT, 4137 OCOM, 4138 OPLUS, 4139 OMINUS, 4140 OOROR, 4141 OPAREN, 4142 ORUNESTR, 4143 OREAL, 4144 OIMAG, 4145 OCOMPLEX: 4146 break 4147 4148 // Discardable as long as we know it's not division by zero. 4149 case ODIV, OMOD: 4150 if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 { 4151 break 4152 } 4153 if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 { 4154 break 4155 } 4156 return false 4157 4158 // Discardable as long as we know it won't fail because of a bad size. 4159 case OMAKECHAN, OMAKEMAP: 4160 if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 { 4161 break 4162 } 4163 return false 4164 4165 // Difficult to tell what sizes are okay. 4166 case OMAKESLICE: 4167 return false 4168 } 4169 4170 if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) { 4171 return false 4172 } 4173 4174 return true 4175 } 4176 4177 // rewrite 4178 // print(x, y, z) 4179 // into 4180 // func(a1, a2, a3) { 4181 // print(a1, a2, a3) 4182 // }(x, y, z) 4183 // and same for println. 4184 4185 var walkprintfunc_prgen int 4186 4187 // The result of walkprintfunc MUST be assigned back to n, e.g. 
4188 // n.Left = walkprintfunc(n.Left, init) 4189 func walkprintfunc(n *Node, init *Nodes) *Node { 4190 if n.Ninit.Len() != 0 { 4191 walkstmtlist(n.Ninit.Slice()) 4192 init.AppendNodes(&n.Ninit) 4193 } 4194 4195 t := nod(OTFUNC, nil, nil) 4196 num := 0 4197 var printargs []*Node 4198 var a *Node 4199 var buf string 4200 for _, n1 := range n.List.Slice() { 4201 buf = fmt.Sprintf("a%d", num) 4202 num++ 4203 a = nod(ODCLFIELD, newname(lookup(buf)), typenod(n1.Type)) 4204 t.List.Append(a) 4205 printargs = append(printargs, a.Left) 4206 } 4207 4208 fn := nod(ODCLFUNC, nil, nil) 4209 walkprintfunc_prgen++ 4210 buf = fmt.Sprintf("print·%d", walkprintfunc_prgen) 4211 fn.Func.Nname = newname(lookup(buf)) 4212 fn.Func.Nname.Name.Defn = fn 4213 fn.Func.Nname.Name.Param.Ntype = t 4214 declare(fn.Func.Nname, PFUNC) 4215 4216 oldfn := Curfn 4217 Curfn = nil 4218 funchdr(fn) 4219 4220 a = nod(n.Op, nil, nil) 4221 a.List.Set(printargs) 4222 a = typecheck(a, Etop) 4223 a = walkstmt(a) 4224 4225 fn.Nbody.Set1(a) 4226 4227 funcbody(fn) 4228 4229 fn = typecheck(fn, Etop) 4230 typecheckslice(fn.Nbody.Slice(), Etop) 4231 xtop = append(xtop, fn) 4232 Curfn = oldfn 4233 4234 a = nod(OCALL, nil, nil) 4235 a.Left = fn.Func.Nname 4236 a.List.Set(n.List.Slice()) 4237 a = typecheck(a, Etop) 4238 a = walkexpr(a, init) 4239 return a 4240 }
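// Illustrative example of the rewrite above (the generated name is made
// up, not literal compiler output): a statement such as
//
//	go println(x, y)
//
// becomes roughly
//
//	func print·N(a0, a1 T) { println(a0, a1) }
//	go print·N(x, y)
//
// so the arguments are evaluated at the go (or defer) statement itself,
// while the printing runs in the new goroutine or at defer time.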