// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"cmd/internal/objabi"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const tmpstringbufsize = 32

// walk runs the walk pass over the body of fn: it re-typechecks
// local declarations, reports "declared and not used" errors, and then
// lowers the statement tree via walkstmtlist. With Debug['W'] it dumps
// the body before and after walking.
func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
			ln.Name.Defn.Left.Name.SetUsed(true)
		}
	}

	// Report any remaining unused local variables.
	// Names starting with '&' are compiler-generated and are skipped.
	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			// For type-switch variables, report once at the switch,
			// not once per case clause.
			if defn.Left.Name.Used() {
				continue
			}
			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
			defn.Left.Name.SetUsed(true) // suppress repeats
		} else {
			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	zeroResults()
	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

// walkstmtlist walks each statement in s, storing the result back in place.
func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

// samelist reports whether a and b contain exactly the same nodes,
// compared pairwise by pointer identity.
func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

// paramoutheap reports whether fn has any result parameter that is
// a stack copy of a heap variable or has had its address taken.
// It relies on the Dcl ordering: parameters come before autos, so the
// scan can stop at the first PAUTO.
func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class() {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken() {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	callfunc := n.Left
	for _, arg := range callfunc.List.Slice() {
		if arg.Op != OAS {
			Fatalf("call arg not assignment")
		}
		lhs := arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			Fatalf("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	Fatalf("call arg assign not indreg(SP)")
		lhs.Xoffset += int64(adjust)
	}
}

// The result of walkstmt MUST be assigned back to n, e.g.
// 	n.Left = walkstmt(n.Left)
//
// walkstmt lowers a single statement node: it walks its init list,
// then dispatches on n.Op to rewrite the statement into the lower-level
// form expected by the back end (e.g. receives become runtime calls,
// heap-allocated declarations become explicit allocations).
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		// Expression statements: detach the init list so walkexpr
		// cannot lose it if it replaces n, then reattach afterwards.
		if n.Typecheck() == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck() == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		// <-c with the result discarded becomes chanrecv1(c, nil).
		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		// Nothing to lower for these.
		break

	case ODCL:
		v := n.Left
		if v.Class() == PAUTOHEAP {
			// A heap-moved local: replace the declaration with an
			// assignment of the (pre-ordered) allocation to Heapaddr.
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
			nn.SetColas(true)
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		// OXCASE should have been rewritten by now; recover by
		// treating it as a plain OCASE.
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		Curfn.Func.SetHasDefer(true)
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			// print/println cannot be deferred directly; wrap in a function.
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR, OFORUNTIL:
		if n.Left != nil {
			// Walk the loop condition, keeping its init list attached
			// to the condition rather than hoisting it out of the loop.
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		// go statement: same lowering as defer for the called expression.
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			for _, ln := range Curfn.Func.Dcl {
				cl := ln.Class()
				if cl == PAUTO || cl == PAUTOHEAP {
					// Dcl lists parameters before autos; stop here.
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						// Result lives on the heap; store through Heapaddr.
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			break
		}

		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		n = walkrange(n)
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

// isSmallMakeSlice reports whether n is a make([]T, len[, cap]) whose
// length and capacity are small integer constants, bounding the total
// allocation below 1<<16 bytes (the Width == 0 test avoids dividing by
// zero for zero-sized elements).
func isSmallMakeSlice(n *Node) bool {
	if n.Op != OMAKESLICE {
		return false
	}
	l := n.Left
	r := n.Right
	if r == nil {
		r = l
	}
	t := n.Type

	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types expressions are calculated.
// compile-time constants are evaluated.
// complex side effects like statements are appended to init
func walkexprlist(s []*Node, init *Nodes) {
	for i := range s {
		s[i] = walkexpr(s[i], init)
	}
}

// walkexprlistsafe is like walkexprlist but first makes each expression
// safe to evaluate more than once (via safeexpr).
func walkexprlistsafe(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = safeexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// walkexprlistcheap is like walkexprlist but first replaces each
// expression with a cheap copy (via cheapexpr).
func walkexprlistcheap(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = cheapexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
399 func convFuncName(from, to *types.Type) string { 400 tkind := to.Tie() 401 switch from.Tie() { 402 case 'I': 403 switch tkind { 404 case 'I': 405 return "convI2I" 406 } 407 case 'T': 408 switch tkind { 409 case 'E': 410 switch { 411 case from.Size() == 2 && from.Align == 2: 412 return "convT2E16" 413 case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from): 414 return "convT2E32" 415 case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from): 416 return "convT2E64" 417 case from.IsString(): 418 return "convT2Estring" 419 case from.IsSlice(): 420 return "convT2Eslice" 421 case !types.Haspointers(from): 422 return "convT2Enoptr" 423 } 424 return "convT2E" 425 case 'I': 426 switch { 427 case from.Size() == 2 && from.Align == 2: 428 return "convT2I16" 429 case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from): 430 return "convT2I32" 431 case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from): 432 return "convT2I64" 433 case from.IsString(): 434 return "convT2Istring" 435 case from.IsSlice(): 436 return "convT2Islice" 437 case !types.Haspointers(from): 438 return "convT2Inoptr" 439 } 440 return "convT2I" 441 } 442 } 443 Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie()) 444 panic("unreachable") 445 } 446 447 // The result of walkexpr MUST be assigned back to n, e.g. 448 // n.Left = walkexpr(n.Left, init) 449 func walkexpr(n *Node, init *Nodes) *Node { 450 if n == nil { 451 return n 452 } 453 454 // Eagerly checkwidth all expressions for the back end. 455 if n.Type != nil && !n.Type.WidthCalculated() { 456 switch n.Type.Etype { 457 case TBLANK, TNIL, TIDEAL: 458 default: 459 checkwidth(n.Type) 460 } 461 } 462 463 if init == &n.Ninit { 464 // not okay to use n->ninit when walking n, 465 // because we might replace n with some other node 466 // and would lose the init list. 
467 Fatalf("walkexpr init == &n->ninit") 468 } 469 470 if n.Ninit.Len() != 0 { 471 walkstmtlist(n.Ninit.Slice()) 472 init.AppendNodes(&n.Ninit) 473 } 474 475 lno := setlineno(n) 476 477 if Debug['w'] > 1 { 478 Dump("walk-before", n) 479 } 480 481 if n.Typecheck() != 1 { 482 Fatalf("missed typecheck: %+v", n) 483 } 484 485 if n.Op == ONAME && n.Class() == PAUTOHEAP { 486 nn := nod(OIND, n.Name.Param.Heapaddr, nil) 487 nn = typecheck(nn, Erv) 488 nn = walkexpr(nn, init) 489 nn.Left.SetNonNil(true) 490 return nn 491 } 492 493 opswitch: 494 switch n.Op { 495 default: 496 Dump("walk", n) 497 Fatalf("walkexpr: switch 1 unknown op %+S", n) 498 499 case ONONAME, OINDREGSP, OEMPTY, OGETG: 500 501 case OTYPE, ONAME, OLITERAL: 502 // TODO(mdempsky): Just return n; see discussion on CL 38655. 503 // Perhaps refactor to use Node.mayBeShared for these instead. 504 // If these return early, make sure to still call 505 // stringsym for constant strings. 506 507 case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER, 508 OIND, OSPTR, OITAB, OIDATA, OADDR: 509 n.Left = walkexpr(n.Left, init) 510 511 case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR: 512 n.Left = walkexpr(n.Left, init) 513 n.Right = walkexpr(n.Right, init) 514 515 case ODOT: 516 usefield(n) 517 n.Left = walkexpr(n.Left, init) 518 519 case ODOTTYPE, ODOTTYPE2: 520 n.Left = walkexpr(n.Left, init) 521 // Set up interface type addresses for back end. 522 n.Right = typename(n.Type) 523 if n.Op == ODOTTYPE { 524 n.Right.Right = typename(n.Left.Type) 525 } 526 if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 527 n.List.Set1(itabname(n.Type, n.Left.Type)) 528 } 529 530 case ODOTPTR: 531 usefield(n) 532 if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 { 533 // No actual copy will be generated, so emit an explicit nil check. 
534 n.Left = cheapexpr(n.Left, init) 535 536 checknil(n.Left, init) 537 } 538 539 n.Left = walkexpr(n.Left, init) 540 541 case OLEN, OCAP: 542 n.Left = walkexpr(n.Left, init) 543 544 // replace len(*[10]int) with 10. 545 // delayed until now to preserve side effects. 546 t := n.Left.Type 547 548 if t.IsPtr() { 549 t = t.Elem() 550 } 551 if t.IsArray() { 552 safeexpr(n.Left, init) 553 nodconst(n, n.Type, t.NumElem()) 554 n.SetTypecheck(1) 555 } 556 557 case OLSH, ORSH: 558 n.Left = walkexpr(n.Left, init) 559 n.Right = walkexpr(n.Right, init) 560 t := n.Left.Type 561 n.SetBounded(bounded(n.Right, 8*t.Width)) 562 if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { 563 Warn("shift bounds check elided") 564 } 565 566 case OCOMPLEX: 567 // Use results from call expression as arguments for complex. 568 if n.Left == nil && n.Right == nil { 569 n.Left = n.List.First() 570 n.Right = n.List.Second() 571 } 572 n.Left = walkexpr(n.Left, init) 573 n.Right = walkexpr(n.Right, init) 574 575 case OEQ, ONE: 576 n.Left = walkexpr(n.Left, init) 577 n.Right = walkexpr(n.Right, init) 578 579 // Disable safemode while compiling this code: the code we 580 // generate internally can refer to unsafe.Pointer. 581 // In this case it can happen if we need to generate an == 582 // for a struct containing a reflect.Value, which itself has 583 // an unexported field of type unsafe.Pointer. 584 old_safemode := safemode 585 safemode = false 586 n = walkcompare(n, init) 587 safemode = old_safemode 588 589 case OANDAND, OOROR: 590 n.Left = walkexpr(n.Left, init) 591 592 // cannot put side effects from n.Right on init, 593 // because they cannot run before n.Left is checked. 594 // save elsewhere and store on the eventual n.Right. 
595 var ll Nodes 596 597 n.Right = walkexpr(n.Right, &ll) 598 n.Right = addinit(n.Right, ll.Slice()) 599 n = walkinrange(n, init) 600 601 case OPRINT, OPRINTN: 602 walkexprlist(n.List.Slice(), init) 603 n = walkprint(n, init) 604 605 case OPANIC: 606 n = mkcall("gopanic", nil, init, n.Left) 607 608 case ORECOVER: 609 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 610 611 case OCLOSUREVAR, OCFUNC: 612 n.SetAddable(true) 613 614 case OCALLINTER: 615 usemethod(n) 616 t := n.Left.Type 617 if n.List.Len() != 0 && n.List.First().Op == OAS { 618 break 619 } 620 n.Left = walkexpr(n.Left, init) 621 walkexprlist(n.List.Slice(), init) 622 ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 623 n.List.Set(reorder1(ll)) 624 625 case OCALLFUNC: 626 if n.Left.Op == OCLOSURE { 627 // Transform direct call of a closure to call of a normal function. 628 // transformclosure already did all preparation work. 629 630 // Prepend captured variables to argument list. 631 n.List.Prepend(n.Left.Func.Enter.Slice()...) 632 633 n.Left.Func.Enter.Set(nil) 634 635 // Replace OCLOSURE with ONAME/PFUNC. 636 n.Left = n.Left.Func.Closure.Func.Nname 637 638 // Update type of OCALLFUNC node. 639 // Output arguments had not changed, but their offsets could. 
640 if n.Left.Type.NumResults() == 1 { 641 n.Type = n.Left.Type.Results().Field(0).Type 642 } else { 643 n.Type = n.Left.Type.Results() 644 } 645 } 646 647 t := n.Left.Type 648 if n.List.Len() != 0 && n.List.First().Op == OAS { 649 break 650 } 651 652 n.Left = walkexpr(n.Left, init) 653 walkexprlist(n.List.Slice(), init) 654 655 ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 656 n.List.Set(reorder1(ll)) 657 658 case OCALLMETH: 659 t := n.Left.Type 660 if n.List.Len() != 0 && n.List.First().Op == OAS { 661 break 662 } 663 n.Left = walkexpr(n.Left, init) 664 walkexprlist(n.List.Slice(), init) 665 ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 666 lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 667 ll = append(ll, lr...) 668 n.Left.Left = nil 669 updateHasCall(n.Left) 670 n.List.Set(reorder1(ll)) 671 672 case OAS: 673 init.AppendNodes(&n.Ninit) 674 675 n.Left = walkexpr(n.Left, init) 676 n.Left = safeexpr(n.Left, init) 677 678 if oaslit(n, init) { 679 break 680 } 681 682 if n.Right == nil { 683 // TODO(austin): Check all "implicit zeroing" 684 break 685 } 686 687 if !instrumenting && iszero(n.Right) { 688 break 689 } 690 691 switch n.Right.Op { 692 default: 693 n.Right = walkexpr(n.Right, init) 694 695 case ORECV: 696 // x = <-c; n.Left is x, n.Right.Left is c. 697 // orderstmt made sure x is addressable. 698 n.Right.Left = walkexpr(n.Right.Left, init) 699 700 n1 := nod(OADDR, n.Left, nil) 701 r := n.Right.Left // the channel 702 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1) 703 n = walkexpr(n, init) 704 break opswitch 705 706 case OAPPEND: 707 // x = append(...) 708 r := n.Right 709 if r.Type.Elem().NotInHeap() { 710 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 711 } 712 if r.Isddd() { 713 r = appendslice(r, init) // also works for append(slice, string). 
714 } else { 715 r = walkappend(r, init, n) 716 } 717 n.Right = r 718 if r.Op == OAPPEND { 719 // Left in place for back end. 720 // Do not add a new write barrier. 721 // Set up address of type for back end. 722 r.Left = typename(r.Type.Elem()) 723 break opswitch 724 } 725 // Otherwise, lowered for race detector. 726 // Treat as ordinary assignment. 727 } 728 729 if n.Left != nil && n.Right != nil { 730 n = convas(n, init) 731 } 732 733 case OAS2: 734 init.AppendNodes(&n.Ninit) 735 walkexprlistsafe(n.List.Slice(), init) 736 walkexprlistsafe(n.Rlist.Slice(), init) 737 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 738 ll = reorder3(ll) 739 n = liststmt(ll) 740 741 // a,b,... = fn() 742 case OAS2FUNC: 743 init.AppendNodes(&n.Ninit) 744 745 r := n.Rlist.First() 746 walkexprlistsafe(n.List.Slice(), init) 747 r = walkexpr(r, init) 748 749 if isIntrinsicCall(r) { 750 n.Rlist.Set1(r) 751 break 752 } 753 init.Append(r) 754 755 ll := ascompatet(n.List, r.Type) 756 n = liststmt(ll) 757 758 // x, y = <-c 759 // orderstmt made sure x is addressable. 
760 case OAS2RECV: 761 init.AppendNodes(&n.Ninit) 762 763 r := n.Rlist.First() 764 walkexprlistsafe(n.List.Slice(), init) 765 r.Left = walkexpr(r.Left, init) 766 var n1 *Node 767 if isblank(n.List.First()) { 768 n1 = nodnil() 769 } else { 770 n1 = nod(OADDR, n.List.First(), nil) 771 } 772 n1.Etype = 1 // addr does not escape 773 fn := chanfn("chanrecv2", 2, r.Left.Type) 774 ok := n.List.Second() 775 call := mkcall1(fn, ok.Type, init, r.Left, n1) 776 n = nod(OAS, ok, call) 777 n = typecheck(n, Etop) 778 779 // a,b = m[i] 780 case OAS2MAPR: 781 init.AppendNodes(&n.Ninit) 782 783 r := n.Rlist.First() 784 walkexprlistsafe(n.List.Slice(), init) 785 r.Left = walkexpr(r.Left, init) 786 r.Right = walkexpr(r.Right, init) 787 t := r.Left.Type 788 789 fast := mapfast(t) 790 var key *Node 791 if fast != mapslow { 792 // fast versions take key by value 793 key = r.Right 794 } else { 795 // standard version takes key by reference 796 // orderexpr made sure key is addressable. 797 key = nod(OADDR, r.Right, nil) 798 } 799 800 // from: 801 // a,b = m[i] 802 // to: 803 // var,b = mapaccess2*(t, m, i) 804 // a = *var 805 a := n.List.First() 806 807 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 808 fn := mapfn(mapaccess2[fast], t) 809 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 810 } else { 811 fn := mapfn("mapaccess2_fat", t) 812 z := zeroaddr(w) 813 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 814 } 815 816 // mapaccess2* returns a typed bool, but due to spec changes, 817 // the boolean result of i.(T) is now untyped so we make it the 818 // same type as the variable on the lhs. 
819 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 820 r.Type.Field(1).Type = ok.Type 821 } 822 n.Rlist.Set1(r) 823 n.Op = OAS2FUNC 824 825 // don't generate a = *var if a is _ 826 if !isblank(a) { 827 var_ := temp(types.NewPtr(t.Val())) 828 var_.SetTypecheck(1) 829 var_.SetNonNil(true) // mapaccess always returns a non-nil pointer 830 n.List.SetFirst(var_) 831 n = walkexpr(n, init) 832 init.Append(n) 833 n = nod(OAS, a, nod(OIND, var_, nil)) 834 } 835 836 n = typecheck(n, Etop) 837 n = walkexpr(n, init) 838 839 case ODELETE: 840 init.AppendNodes(&n.Ninit) 841 map_ := n.List.First() 842 key := n.List.Second() 843 map_ = walkexpr(map_, init) 844 key = walkexpr(key, init) 845 846 t := map_.Type 847 fast := mapfast(t) 848 if fast == mapslow { 849 // orderstmt made sure key is addressable. 850 key = nod(OADDR, key, nil) 851 } 852 n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key) 853 854 case OAS2DOTTYPE: 855 walkexprlistsafe(n.List.Slice(), init) 856 n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init)) 857 858 case OCONVIFACE: 859 n.Left = walkexpr(n.Left, init) 860 861 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 862 if isdirectiface(n.Left.Type) { 863 var t *Node 864 if n.Type.IsEmptyInterface() { 865 t = typename(n.Left.Type) 866 } else { 867 t = itabname(n.Left.Type, n.Type) 868 } 869 l := nod(OEFACE, t, n.Left) 870 l.Type = n.Type 871 l.SetTypecheck(n.Typecheck()) 872 n = l 873 break 874 } 875 876 if staticbytes == nil { 877 staticbytes = newname(Runtimepkg.Lookup("staticbytes")) 878 staticbytes.SetClass(PEXTERN) 879 staticbytes.Type = types.NewArray(types.Types[TUINT8], 256) 880 zerobase = newname(Runtimepkg.Lookup("zerobase")) 881 zerobase.SetClass(PEXTERN) 882 zerobase.Type = types.Types[TUINTPTR] 883 } 884 885 // Optimize convT2{E,I} for many cases in which T is not pointer-shaped, 886 // by using an existing addressable value identical to n.Left 887 // or creating one on the stack. 
888 var value *Node 889 switch { 890 case n.Left.Type.Size() == 0: 891 // n.Left is zero-sized. Use zerobase. 892 cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246. 893 value = zerobase 894 case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()): 895 // n.Left is a bool/byte. Use staticbytes[n.Left]. 896 n.Left = cheapexpr(n.Left, init) 897 value = nod(OINDEX, staticbytes, byteindex(n.Left)) 898 value.SetBounded(true) 899 case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly(): 900 // n.Left is a readonly global; use it directly. 901 value = n.Left 902 case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024: 903 // n.Left does not escape. Use a stack temporary initialized to n.Left. 904 value = temp(n.Left.Type) 905 init.Append(typecheck(nod(OAS, value, n.Left), Etop)) 906 } 907 908 if value != nil { 909 // Value is identical to n.Left. 910 // Construct the interface directly: {type/itab, &value}. 911 var t *Node 912 if n.Type.IsEmptyInterface() { 913 t = typename(n.Left.Type) 914 } else { 915 t = itabname(n.Left.Type, n.Type) 916 } 917 l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv)) 918 l.Type = n.Type 919 l.SetTypecheck(n.Typecheck()) 920 n = l 921 break 922 } 923 924 // Implement interface to empty interface conversion. 925 // tmp = i.itab 926 // if tmp != nil { 927 // tmp = tmp.type 928 // } 929 // e = iface{tmp, i.data} 930 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 931 // Evaluate the input interface. 932 c := temp(n.Left.Type) 933 init.Append(nod(OAS, c, n.Left)) 934 935 // Get the itab out of the interface. 936 tmp := temp(types.NewPtr(types.Types[TUINT8])) 937 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 938 939 // Get the type out of the itab. 
940 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 941 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 942 init.Append(nif) 943 944 // Build the result. 945 e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8]))) 946 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 947 e.SetTypecheck(1) 948 n = e 949 break 950 } 951 952 var ll []*Node 953 if n.Type.IsEmptyInterface() { 954 if !n.Left.Type.IsInterface() { 955 ll = append(ll, typename(n.Left.Type)) 956 } 957 } else { 958 if n.Left.Type.IsInterface() { 959 ll = append(ll, typename(n.Type)) 960 } else { 961 ll = append(ll, itabname(n.Left.Type, n.Type)) 962 } 963 } 964 965 if n.Left.Type.IsInterface() { 966 ll = append(ll, n.Left) 967 } else { 968 // regular types are passed by reference to avoid C vararg calls 969 // orderexpr arranged for n.Left to be a temporary for all 970 // the conversions it could see. comparison of an interface 971 // with a non-interface, especially in a switch on interface value 972 // with non-interface cases, is not visible to orderstmt, so we 973 // have to fall back on allocating a temp here. 974 if islvalue(n.Left) { 975 ll = append(ll, nod(OADDR, n.Left, nil)) 976 } else { 977 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 978 } 979 dowidth(n.Left.Type) 980 } 981 982 fn := syslook(convFuncName(n.Left.Type, n.Type)) 983 fn = substArgTypes(fn, n.Left.Type, n.Type) 984 dowidth(fn.Type) 985 n = nod(OCALL, fn, nil) 986 n.List.Set(ll) 987 n = typecheck(n, Erv) 988 n = walkexpr(n, init) 989 990 case OCONV, OCONVNOP: 991 if thearch.SoftFloat { 992 // For the soft-float case, ssa.go handles these conversions. 
993 goto oconv_walkexpr 994 } 995 switch thearch.LinkArch.Family { 996 case sys.ARM, sys.MIPS: 997 if n.Left.Type.IsFloat() { 998 switch n.Type.Etype { 999 case TINT64: 1000 n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1001 break opswitch 1002 case TUINT64: 1003 n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1004 break opswitch 1005 } 1006 } 1007 1008 if n.Type.IsFloat() { 1009 switch n.Left.Type.Etype { 1010 case TINT64: 1011 n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type) 1012 break opswitch 1013 case TUINT64: 1014 n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type) 1015 break opswitch 1016 } 1017 } 1018 1019 case sys.I386: 1020 if n.Left.Type.IsFloat() { 1021 switch n.Type.Etype { 1022 case TINT64: 1023 n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1024 break opswitch 1025 case TUINT64: 1026 n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1027 break opswitch 1028 case TUINT32, TUINT, TUINTPTR: 1029 n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1030 break opswitch 1031 } 1032 } 1033 if n.Type.IsFloat() { 1034 switch n.Left.Type.Etype { 1035 case TINT64: 1036 n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type) 1037 break opswitch 1038 case TUINT64: 1039 n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type) 1040 break opswitch 1041 case TUINT32, TUINT, TUINTPTR: 1042 n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type) 1043 break opswitch 1044 } 1045 } 1046 } 1047 1048 oconv_walkexpr: 1049 n.Left = walkexpr(n.Left, init) 1050 1051 case OANDNOT: 1052 n.Left = walkexpr(n.Left, init) 1053 n.Op = OAND 1054 n.Right = 
nod(OCOM, n.Right, nil) 1055 n.Right = typecheck(n.Right, Erv) 1056 n.Right = walkexpr(n.Right, init) 1057 1058 case ODIV, OMOD: 1059 n.Left = walkexpr(n.Left, init) 1060 n.Right = walkexpr(n.Right, init) 1061 1062 // rewrite complex div into function call. 1063 et := n.Left.Type.Etype 1064 1065 if isComplex[et] && n.Op == ODIV { 1066 t := n.Type 1067 n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128])) 1068 n = conv(n, t) 1069 break 1070 } 1071 1072 // Nothing to do for float divisions. 1073 if isFloat[et] { 1074 break 1075 } 1076 1077 // rewrite 64-bit div and mod on 32-bit architectures. 1078 // TODO: Remove this code once we can introduce 1079 // runtime calls late in SSA processing. 1080 if Widthreg < 8 && (et == TINT64 || et == TUINT64) { 1081 if n.Right.Op == OLITERAL { 1082 // Leave div/mod by constant powers of 2. 1083 // The SSA backend will handle those. 1084 switch et { 1085 case TINT64: 1086 c := n.Right.Int64() 1087 if c < 0 { 1088 c = -c 1089 } 1090 if c != 0 && c&(c-1) == 0 { 1091 break opswitch 1092 } 1093 case TUINT64: 1094 c := uint64(n.Right.Int64()) 1095 if c != 0 && c&(c-1) == 0 { 1096 break opswitch 1097 } 1098 } 1099 } 1100 var fn string 1101 if et == TINT64 { 1102 fn = "int64" 1103 } else { 1104 fn = "uint64" 1105 } 1106 if n.Op == ODIV { 1107 fn += "div" 1108 } else { 1109 fn += "mod" 1110 } 1111 n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et])) 1112 } 1113 1114 case OINDEX: 1115 n.Left = walkexpr(n.Left, init) 1116 1117 // save the original node for bounds checking elision. 1118 // If it was a ODIV/OMOD walk might rewrite it. 1119 r := n.Right 1120 1121 n.Right = walkexpr(n.Right, init) 1122 1123 // if range of type cannot exceed static array bound, 1124 // disable bounds check. 
1125 if n.Bounded() { 1126 break 1127 } 1128 t := n.Left.Type 1129 if t != nil && t.IsPtr() { 1130 t = t.Elem() 1131 } 1132 if t.IsArray() { 1133 n.SetBounded(bounded(r, t.NumElem())) 1134 if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) { 1135 Warn("index bounds check elided") 1136 } 1137 if smallintconst(n.Right) && !n.Bounded() { 1138 yyerror("index out of bounds") 1139 } 1140 } else if Isconst(n.Left, CTSTR) { 1141 n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string))))) 1142 if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) { 1143 Warn("index bounds check elided") 1144 } 1145 if smallintconst(n.Right) && !n.Bounded() { 1146 yyerror("index out of bounds") 1147 } 1148 } 1149 1150 if Isconst(n.Right, CTINT) { 1151 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1152 yyerror("index out of bounds") 1153 } 1154 } 1155 1156 case OINDEXMAP: 1157 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1158 n.Left = walkexpr(n.Left, init) 1159 n.Right = walkexpr(n.Right, init) 1160 map_ := n.Left 1161 key := n.Right 1162 t := map_.Type 1163 if n.Etype == 1 { 1164 // This m[k] expression is on the left-hand side of an assignment. 1165 fast := mapfast(t) 1166 if fast == mapslow { 1167 // standard version takes key by reference. 1168 // orderexpr made sure key is addressable. 1169 key = nod(OADDR, key, nil) 1170 } 1171 n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key) 1172 } else { 1173 // m[k] is not the target of an assignment. 1174 fast := mapfast(t) 1175 if fast == mapslow { 1176 // standard version takes key by reference. 1177 // orderexpr made sure key is addressable. 
1178 key = nod(OADDR, key, nil) 1179 } 1180 1181 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1182 n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key) 1183 } else { 1184 z := zeroaddr(w) 1185 n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z) 1186 } 1187 } 1188 n.Type = types.NewPtr(t.Val()) 1189 n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers. 1190 n = nod(OIND, n, nil) 1191 n.Type = t.Val() 1192 n.SetTypecheck(1) 1193 1194 case ORECV: 1195 Fatalf("walkexpr ORECV") // should see inside OAS only 1196 1197 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1198 n.Left = walkexpr(n.Left, init) 1199 low, high, max := n.SliceBounds() 1200 low = walkexpr(low, init) 1201 if low != nil && iszero(low) { 1202 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1203 low = nil 1204 } 1205 high = walkexpr(high, init) 1206 max = walkexpr(max, init) 1207 n.SetSliceBounds(low, high, max) 1208 if n.Op.IsSlice3() { 1209 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1210 // Reduce x[i:j:cap(x)] to x[i:j]. 
1211 if n.Op == OSLICE3 { 1212 n.Op = OSLICE 1213 } else { 1214 n.Op = OSLICEARR 1215 } 1216 n = reduceSlice(n) 1217 } 1218 } else { 1219 n = reduceSlice(n) 1220 } 1221 1222 case ONEW: 1223 if n.Esc == EscNone { 1224 if n.Type.Elem().Width >= 1<<16 { 1225 Fatalf("large ONEW with EscNone: %v", n) 1226 } 1227 r := temp(n.Type.Elem()) 1228 r = nod(OAS, r, nil) // zero temp 1229 r = typecheck(r, Etop) 1230 init.Append(r) 1231 r = nod(OADDR, r.Left, nil) 1232 r = typecheck(r, Erv) 1233 n = r 1234 } else { 1235 n = callnew(n.Type.Elem()) 1236 } 1237 1238 case OCMPSTR: 1239 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1240 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1241 // TODO(marvin): Fix Node.EType type union. 1242 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1243 r = typecheck(r, Erv) 1244 r = walkexpr(r, init) 1245 r.Type = n.Type 1246 n = r 1247 break 1248 } 1249 1250 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1251 var cs, ncs *Node // const string, non-const string 1252 switch { 1253 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1254 // ignore; will be constant evaluated 1255 case Isconst(n.Left, CTSTR): 1256 cs = n.Left 1257 ncs = n.Right 1258 case Isconst(n.Right, CTSTR): 1259 cs = n.Right 1260 ncs = n.Left 1261 } 1262 if cs != nil { 1263 cmp := Op(n.Etype) 1264 // Our comparison below assumes that the non-constant string 1265 // is on the left hand side, so rewrite "" cmp x to x cmp "". 1266 // See issue 24817. 1267 if Isconst(n.Left, CTSTR) { 1268 cmp = brrev(cmp) 1269 } 1270 1271 // maxRewriteLen was chosen empirically. 1272 // It is the value that minimizes cmd/go file size 1273 // across most architectures. 1274 // See the commit description for CL 26758 for details. 
1275 maxRewriteLen := 6 1276 // Some architectures can load unaligned byte sequence as 1 word. 1277 // So we can cover longer strings with the same amount of code. 1278 canCombineLoads := false 1279 combine64bit := false 1280 // TODO: does this improve performance on any other architectures? 1281 switch thearch.LinkArch.Family { 1282 case sys.AMD64: 1283 // Larger compare require longer instructions, so keep this reasonably low. 1284 // Data from CL 26758 shows that longer strings are rare. 1285 // If we really want we can do 16 byte SSE comparisons in the future. 1286 maxRewriteLen = 16 1287 canCombineLoads = true 1288 combine64bit = true 1289 case sys.I386: 1290 maxRewriteLen = 8 1291 canCombineLoads = true 1292 } 1293 var and Op 1294 switch cmp { 1295 case OEQ: 1296 and = OANDAND 1297 case ONE: 1298 and = OOROR 1299 default: 1300 // Don't do byte-wise comparisons for <, <=, etc. 1301 // They're fairly complicated. 1302 // Length-only checks are ok, though. 1303 maxRewriteLen = 0 1304 } 1305 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1306 if len(s) > 0 { 1307 ncs = safeexpr(ncs, init) 1308 } 1309 // TODO(marvin): Fix Node.EType type union. 
1310 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1311 remains := len(s) 1312 for i := 0; remains > 0; { 1313 if remains == 1 || !canCombineLoads { 1314 cb := nodintconst(int64(s[i])) 1315 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1316 r = nod(and, r, nod(cmp, ncb, cb)) 1317 remains-- 1318 i++ 1319 continue 1320 } 1321 var step int 1322 var convType *types.Type 1323 switch { 1324 case remains >= 8 && combine64bit: 1325 convType = types.Types[TINT64] 1326 step = 8 1327 case remains >= 4: 1328 convType = types.Types[TUINT32] 1329 step = 4 1330 case remains >= 2: 1331 convType = types.Types[TUINT16] 1332 step = 2 1333 } 1334 ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i))) 1335 ncsubstr = conv(ncsubstr, convType) 1336 csubstr := int64(s[i]) 1337 // Calculate large constant from bytes as sequence of shifts and ors. 1338 // Like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ... 1339 // ssa will combine this into a single large load. 1340 for offset := 1; offset < step; offset++ { 1341 b := nod(OINDEX, ncs, nodintconst(int64(i+offset))) 1342 b = conv(b, convType) 1343 b = nod(OLSH, b, nodintconst(int64(8*offset))) 1344 ncsubstr = nod(OOR, ncsubstr, b) 1345 csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset) 1346 } 1347 csubstrPart := nodintconst(csubstr) 1348 // Compare "step" bytes as once 1349 r = nod(and, r, nod(cmp, csubstrPart, ncsubstr)) 1350 remains -= step 1351 i += step 1352 } 1353 r = typecheck(r, Erv) 1354 r = walkexpr(r, init) 1355 r.Type = n.Type 1356 n = r 1357 break 1358 } 1359 } 1360 1361 var r *Node 1362 // TODO(marvin): Fix Node.EType type union. 
1363 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1364 // prepare for rewrite below 1365 n.Left = cheapexpr(n.Left, init) 1366 n.Right = cheapexpr(n.Right, init) 1367 1368 lstr := conv(n.Left, types.Types[TSTRING]) 1369 rstr := conv(n.Right, types.Types[TSTRING]) 1370 lptr := nod(OSPTR, lstr, nil) 1371 rptr := nod(OSPTR, rstr, nil) 1372 llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR]) 1373 rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR]) 1374 1375 fn := syslook("memequal") 1376 fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8]) 1377 r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen) 1378 1379 // quick check of len before full compare for == or !=. 1380 // memequal then tests equality up to length len. 1381 // TODO(marvin): Fix Node.EType type union. 1382 if Op(n.Etype) == OEQ { 1383 // len(left) == len(right) && memequal(left, right, len) 1384 r = nod(OANDAND, nod(OEQ, llen, rlen), r) 1385 } else { 1386 // len(left) != len(right) || !memequal(left, right, len) 1387 r = nod(ONOT, r, nil) 1388 r = nod(OOROR, nod(ONE, llen, rlen), r) 1389 } 1390 1391 r = typecheck(r, Erv) 1392 r = walkexpr(r, nil) 1393 } else { 1394 // sys_cmpstring(s1, s2) :: 0 1395 r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING])) 1396 // TODO(marvin): Fix Node.EType type union. 1397 r = nod(Op(n.Etype), r, nodintconst(0)) 1398 } 1399 1400 r = typecheck(r, Erv) 1401 if !n.Type.IsBoolean() { 1402 Fatalf("cmp %v", n.Type) 1403 } 1404 r.Type = n.Type 1405 n = r 1406 1407 case OADDSTR: 1408 n = addstr(n, init) 1409 1410 case OAPPEND: 1411 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 
1412 Fatalf("append outside assignment") 1413 1414 case OCOPY: 1415 n = copyany(n, init, instrumenting && !compiling_runtime) 1416 1417 // cannot use chanfn - closechan takes any, not chan any 1418 case OCLOSE: 1419 fn := syslook("closechan") 1420 1421 fn = substArgTypes(fn, n.Left.Type) 1422 n = mkcall1(fn, nil, init, n.Left) 1423 1424 case OMAKECHAN: 1425 // When size fits into int, use makechan instead of 1426 // makechan64, which is faster and shorter on 32 bit platforms. 1427 size := n.Left 1428 fnname := "makechan64" 1429 argtype := types.Types[TINT64] 1430 1431 // Type checking guarantees that TIDEAL size is positive and fits in an int. 1432 // The case of size overflow when converting TUINT or TUINTPTR to TINT 1433 // will be handled by the negative range checks in makechan during runtime. 1434 if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 { 1435 fnname = "makechan" 1436 argtype = types.Types[TINT] 1437 } 1438 1439 n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype)) 1440 1441 case OMAKEMAP: 1442 t := n.Type 1443 hmapType := hmap(t) 1444 hint := n.Left 1445 1446 // var h *hmap 1447 var h *Node 1448 if n.Esc == EscNone { 1449 // Allocate hmap on stack. 1450 1451 // var hv hmap 1452 hv := temp(hmapType) 1453 zero := nod(OAS, hv, nil) 1454 zero = typecheck(zero, Etop) 1455 init.Append(zero) 1456 // h = &hv 1457 h = nod(OADDR, hv, nil) 1458 1459 // Allocate one bucket pointed to by hmap.buckets on stack if hint 1460 // is not larger than BUCKETSIZE. In case hint is larger than 1461 // BUCKETSIZE runtime.makemap will allocate the buckets on the heap. 1462 // Maximum key and value size is 128 bytes, larger objects 1463 // are stored with an indirection. So max bucket size is 2048+eps. 
1464 if !Isconst(hint, CTINT) || 1465 !(hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) > 0) { 1466 // var bv bmap 1467 bv := temp(bmap(t)) 1468 1469 zero = nod(OAS, bv, nil) 1470 zero = typecheck(zero, Etop) 1471 init.Append(zero) 1472 1473 // b = &bv 1474 b := nod(OADDR, bv, nil) 1475 1476 // h.buckets = b 1477 bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap 1478 na := nod(OAS, nodSym(ODOT, h, bsym), b) 1479 na = typecheck(na, Etop) 1480 init.Append(na) 1481 } 1482 } 1483 1484 if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 { 1485 // Handling make(map[any]any) and 1486 // make(map[any]any, hint) where hint <= BUCKETSIZE 1487 // special allows for faster map initialization and 1488 // improves binary size by using calls with fewer arguments. 1489 // For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false 1490 // and no buckets will be allocated by makemap. Therefore, 1491 // no buckets need to be allocated in this code path. 1492 if n.Esc == EscNone { 1493 // Only need to initialize h.hash0 since 1494 // hmap h has been allocated on the stack already. 1495 // h.hash0 = fastrand() 1496 rand := mkcall("fastrand", types.Types[TUINT32], init) 1497 hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap 1498 a := nod(OAS, nodSym(ODOT, h, hashsym), rand) 1499 a = typecheck(a, Etop) 1500 a = walkexpr(a, init) 1501 init.Append(a) 1502 n = nod(OCONVNOP, h, nil) 1503 n.Type = t 1504 n = typecheck(n, Erv) 1505 } else { 1506 // Call runtime.makehmap to allocate an 1507 // hmap on the heap and initialize hmap's hash0 field. 1508 fn := syslook("makemap_small") 1509 fn = substArgTypes(fn, t.Key(), t.Val()) 1510 n = mkcall1(fn, n.Type, init) 1511 } 1512 } else { 1513 if n.Esc != EscNone { 1514 h = nodnil() 1515 } 1516 // Map initialization with a variable or large hint is 1517 // more complicated. We therefore generate a call to 1518 // runtime.makemap to intialize hmap and allocate the 1519 // map buckets. 
1520 1521 // When hint fits into int, use makemap instead of 1522 // makemap64, which is faster and shorter on 32 bit platforms. 1523 fnname := "makemap64" 1524 argtype := types.Types[TINT64] 1525 1526 // Type checking guarantees that TIDEAL hint is positive and fits in an int. 1527 // See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function. 1528 // The case of hint overflow when converting TUINT or TUINTPTR to TINT 1529 // will be handled by the negative range checks in makemap during runtime. 1530 if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 { 1531 fnname = "makemap" 1532 argtype = types.Types[TINT] 1533 } 1534 1535 fn := syslook(fnname) 1536 fn = substArgTypes(fn, hmapType, t.Key(), t.Val()) 1537 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h) 1538 } 1539 1540 case OMAKESLICE: 1541 l := n.Left 1542 r := n.Right 1543 if r == nil { 1544 r = safeexpr(l, init) 1545 l = r 1546 } 1547 t := n.Type 1548 if n.Esc == EscNone { 1549 if !isSmallMakeSlice(n) { 1550 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1551 } 1552 // var arr [r]T 1553 // n = arr[:l] 1554 t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T 1555 var_ := temp(t) 1556 a := nod(OAS, var_, nil) // zero temp 1557 a = typecheck(a, Etop) 1558 init.Append(a) 1559 r := nod(OSLICE, var_, nil) // arr[:l] 1560 r.SetSliceBounds(nil, l, nil) 1561 r = conv(r, n.Type) // in case n.Type is named. 1562 r = typecheck(r, Erv) 1563 r = walkexpr(r, init) 1564 n = r 1565 } else { 1566 // n escapes; set up a call to makeslice. 1567 // When len and cap can fit into int, use makeslice instead of 1568 // makeslice64, which is faster and shorter on 32 bit platforms. 
1569 1570 if t.Elem().NotInHeap() { 1571 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1572 } 1573 1574 len, cap := l, r 1575 1576 fnname := "makeslice64" 1577 argtype := types.Types[TINT64] 1578 1579 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int. 1580 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1581 // will be handled by the negative range checks in makeslice during runtime. 1582 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1583 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1584 fnname = "makeslice" 1585 argtype = types.Types[TINT] 1586 } 1587 1588 fn := syslook(fnname) 1589 fn = substArgTypes(fn, t.Elem()) // any-1 1590 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1591 } 1592 1593 case ORUNESTR: 1594 a := nodnil() 1595 if n.Esc == EscNone { 1596 t := types.NewArray(types.Types[TUINT8], 4) 1597 var_ := temp(t) 1598 a = nod(OADDR, var_, nil) 1599 } 1600 1601 // intstring(*[4]byte, rune) 1602 n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64])) 1603 1604 case OARRAYBYTESTR: 1605 a := nodnil() 1606 if n.Esc == EscNone { 1607 // Create temporary buffer for string on stack. 1608 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1609 1610 a = nod(OADDR, temp(t), nil) 1611 } 1612 1613 // slicebytetostring(*[32]byte, []byte) string; 1614 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1615 1616 // slicebytetostringtmp([]byte) string; 1617 case OARRAYBYTESTRTMP: 1618 n.Left = walkexpr(n.Left, init) 1619 1620 if !instrumenting { 1621 // Let the backend handle OARRAYBYTESTRTMP directly 1622 // to avoid a function call to slicebytetostringtmp. 
1623 break 1624 } 1625 1626 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1627 1628 // slicerunetostring(*[32]byte, []rune) string; 1629 case OARRAYRUNESTR: 1630 a := nodnil() 1631 1632 if n.Esc == EscNone { 1633 // Create temporary buffer for string on stack. 1634 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1635 1636 a = nod(OADDR, temp(t), nil) 1637 } 1638 1639 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1640 1641 // stringtoslicebyte(*32[byte], string) []byte; 1642 case OSTRARRAYBYTE: 1643 a := nodnil() 1644 1645 if n.Esc == EscNone { 1646 // Create temporary buffer for slice on stack. 1647 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1648 1649 a = nod(OADDR, temp(t), nil) 1650 } 1651 1652 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING])) 1653 1654 case OSTRARRAYBYTETMP: 1655 // []byte(string) conversion that creates a slice 1656 // referring to the actual string bytes. 1657 // This conversion is handled later by the backend and 1658 // is only for use by internal compiler optimizations 1659 // that know that the slice won't be mutated. 1660 // The only such case today is: 1661 // for i, c := range []byte(string) 1662 n.Left = walkexpr(n.Left, init) 1663 1664 // stringtoslicerune(*[32]rune, string) []rune 1665 case OSTRARRAYRUNE: 1666 a := nodnil() 1667 1668 if n.Esc == EscNone { 1669 // Create temporary buffer for slice on stack. 
1670 t := types.NewArray(types.Types[TINT32], tmpstringbufsize) 1671 1672 a = nod(OADDR, temp(t), nil) 1673 } 1674 1675 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1676 1677 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1678 case OCMPIFACE: 1679 if !eqtype(n.Left.Type, n.Right.Type) { 1680 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1681 } 1682 var fn *Node 1683 if n.Left.Type.IsEmptyInterface() { 1684 fn = syslook("efaceeq") 1685 } else { 1686 fn = syslook("ifaceeq") 1687 } 1688 1689 n.Right = cheapexpr(n.Right, init) 1690 n.Left = cheapexpr(n.Left, init) 1691 lt := nod(OITAB, n.Left, nil) 1692 rt := nod(OITAB, n.Right, nil) 1693 ld := nod(OIDATA, n.Left, nil) 1694 rd := nod(OIDATA, n.Right, nil) 1695 ld.Type = types.Types[TUNSAFEPTR] 1696 rd.Type = types.Types[TUNSAFEPTR] 1697 ld.SetTypecheck(1) 1698 rd.SetTypecheck(1) 1699 call := mkcall1(fn, n.Type, init, lt, ld, rd) 1700 1701 // Check itable/type before full compare. 1702 // Note: short-circuited because order matters. 1703 // TODO(marvin): Fix Node.EType type union. 1704 var cmp *Node 1705 if Op(n.Etype) == OEQ { 1706 cmp = nod(OANDAND, nod(OEQ, lt, rt), call) 1707 } else { 1708 cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil)) 1709 } 1710 cmp = typecheck(cmp, Erv) 1711 cmp = walkexpr(cmp, init) 1712 cmp.Type = n.Type 1713 n = cmp 1714 1715 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1716 if isStaticCompositeLiteral(n) && !canSSAType(n.Type) { 1717 // n can be directly represented in the read-only data section. 1718 // Make direct reference to the static data. See issue 12841. 
1719 vstat := staticname(n.Type) 1720 vstat.Name.SetReadonly(true) 1721 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1722 n = vstat 1723 n = typecheck(n, Erv) 1724 break 1725 } 1726 var_ := temp(n.Type) 1727 anylit(n, var_, init) 1728 n = var_ 1729 1730 case OSEND: 1731 n1 := n.Right 1732 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1733 n1 = walkexpr(n1, init) 1734 n1 = nod(OADDR, n1, nil) 1735 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1) 1736 1737 case OCLOSURE: 1738 n = walkclosure(n, init) 1739 1740 case OCALLPART: 1741 n = walkpartialcall(n, init) 1742 } 1743 1744 // Expressions that are constant at run time but not 1745 // considered const by the language spec are not turned into 1746 // constants until walk. For example, if n is y%1 == 0, the 1747 // walk of y%1 may have replaced it by 0. 1748 // Check whether n with its updated args is itself now a constant. 1749 t := n.Type 1750 evconst(n) 1751 if n.Type != t { 1752 Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type) 1753 } 1754 if n.Op == OLITERAL { 1755 n = typecheck(n, Erv) 1756 // Emit string symbol now to avoid emitting 1757 // any concurrently during the backend. 1758 if s, ok := n.Val().U.(string); ok { 1759 _ = stringsym(n.Pos, s) 1760 } 1761 } 1762 1763 updateHasCall(n) 1764 1765 if Debug['w'] != 0 && n != nil { 1766 Dump("walk", n) 1767 } 1768 1769 lineno = lno 1770 return n 1771 } 1772 1773 // TODO(josharian): combine this with its caller and simplify 1774 func reduceSlice(n *Node) *Node { 1775 low, high, max := n.SliceBounds() 1776 if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) { 1777 // Reduce x[i:len(x)] to x[i:]. 1778 high = nil 1779 } 1780 n.SetSliceBounds(low, high, max) 1781 if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { 1782 // Reduce x[:] to x. 
		// -d=slice debugging: report that the no-op slice was dropped.
		if Debug_slice > 0 {
			Warn("slice: omit slice operation")
		}
		return n.Left
	}
	return n
}

// ascompatee1 builds the single assignment l = r used by ascompatee.
func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
	// convas will turn map assigns into function calls,
	// making it impossible for reorder3 to work.
	n := nod(OAS, l, r)

	if l.Op == OINDEXMAP {
		// Return the raw OAS so reorder3 can still see the map index.
		return n
	}

	return convas(n, init)
}

// ascompatee checks an assignment of an expression list to an
// expression list, as in
//	expr-list = expr-list
// and returns the individual assignments to perform.
// op is the enclosing statement op (e.g. ORETURN), used to elide
// self-assignments generated by return statements.
func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
	// check assign expression list to
	// an expression list. called in
	//	expr-list = expr-list

	// ensure order of evaluation for function calls
	for i := range nl {
		nl[i] = safeexpr(nl[i], init)
	}
	for i1 := range nr {
		nr[i1] = safeexpr(nr[i1], init)
	}

	var nn []*Node
	i := 0
	for ; i < len(nl); i++ {
		if i >= len(nr) {
			break
		}
		// Do not generate 'x = x' during return. See issue 4014.
		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
			continue
		}
		nn = append(nn, ascompatee1(nl[i], nr[i], init))
	}

	// cannot happen: caller checked that lists had same length
	if i < len(nl) || i < len(nr) {
		var nln, nrn Nodes
		nln.Set(nl)
		nrn.Set(nr)
		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
	}
	return nn
}

// fncall reports whether assigning a value of type rt to the lvalue l
// implies a function call, either while evaluating l itself (or a map
// index store) or in the conversion between the two types.
// l is an lv and rt is the type of an rv.
func fncall(l *Node, rt *types.Type) bool {
	if l.HasCall() || l.Op == OINDEXMAP {
		return true
	}
	if eqtype(l.Type, rt) {
		return false
	}
	// Types differ, so a conversion is needed; treat it as a possible call.
	return true
}

// ascompatet checks assignment of the multi-value function result
// described by nr (a function result struct type) to the expression
// list nl. Called in
//	expr-list = func()
// Assignments whose left side itself requires a function call are
// deferred (via temporaries in mm) until all results have been
// retrieved from the output argument slots.
func ascompatet(nl Nodes, nr *types.Type) []*Node {
	if nl.Len() != nr.NumFields() {
		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
	}

	var nn, mm Nodes
	for i, l := range nl.Slice() {
		if isblank(l) {
			continue
		}
		r := nr.Field(i)

		// any lv that causes a fn call must be
		// deferred until all the return arguments
		// have been pulled from the output arguments
		if fncall(l, r.Type) {
			tmp := temp(r.Type)
			tmp = typecheck(tmp, Erv)
			a := nod(OAS, l, tmp)
			a = convas(a, &mm)
			mm.Append(a)
			l = tmp
		}

		a := nod(OAS, l, nodarg(r, 0))
		a = convas(a, &nn)
		updateHasCall(a)
		if a.HasCall() {
			Dump("ascompatet ucount", a)
			Fatalf("ascompatet: too many function calls evaluating parameters")
		}

		nn.Append(a)
	}
	return append(nn.Slice(), mm.Slice()...)
}

// nodarg returns a Node for the function argument denoted by t,
// which is either the entire function argument or result struct (t is a struct *types.Type)
// or a specific argument (t is a *types.Field within a struct *types.Type).
//
// If fp is 0, the node is for use by a caller invoking the given
// function, preparing the arguments before the call
// or retrieving the results after the call.
// In this case, the node will correspond to an outgoing argument
// slot like 8(SP).
//
// If fp is 1, the node is for use by the function itself
// (the callee), to retrieve its arguments or write its results.
// In this case the node will be an ONAME with an appropriate
// type and offset.
func nodarg(t interface{}, fp int) *Node {
	var n *Node

	var funarg types.Funarg
	switch t := t.(type) {
	default:
		Fatalf("bad nodarg %T(%v)", t, t)

	case *types.Type:
		// Entire argument struct, not just one arg
		if !t.IsFuncArgStruct() {
			Fatalf("nodarg: bad type %v", t)
		}
		funarg = t.StructType().Funarg

		// Build fake variable name for whole arg struct.
		n = newname(lookup(".args"))
		n.Type = t
		first := t.Field(0)
		if first == nil {
			Fatalf("nodarg: bad struct")
		}
		if first.Offset == BADWIDTH {
			Fatalf("nodarg: offset not computed for %v", t)
		}
		// The struct node's offset is that of its first field.
		n.Xoffset = first.Offset

	case *types.Field:
		funarg = t.Funarg
		if fp == 1 {
			// NOTE(rsc): This should be using t.Nname directly,
			// except in the case where t.Nname.Sym is the blank symbol and
			// so the assignment would be discarded during code generation.
			// In that case we need to make a new node, and there is no harm
			// in optimization passes to doing so. But otherwise we should
			// definitely be using the actual declaration and not a newly built node.
			// The extra Fatalf checks here are verifying that this is the case,
			// without changing the actual logic (at time of writing, it's getting
			// toward time for the Go 1.7 beta).
			// At some quieter time (assuming we've never seen these Fatalfs happen)
			// we could change this code to use "expect" directly.
			expect := asNode(t.Nname)
			if expect.isParamHeapCopy() {
				expect = expect.Name.Param.Stackcopy
			}

			// Look for the real declared parameter in the current
			// function's declaration list and return it if found.
			for _, n := range Curfn.Func.Dcl {
				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
					if n != expect {
						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
					}
					return n
				}
			}

			if !expect.Sym.IsBlank() {
				Fatalf("nodarg: did not find node in dcl list: %v", expect)
			}
		}

		// Build fake name for individual variable.
		// This is safe because if there was a real declared name
		// we'd have used it above.
		n = newname(lookup("__"))
		n.Type = t.Type
		if t.Offset == BADWIDTH {
			Fatalf("nodarg: offset not computed for %v", t)
		}
		n.Xoffset = t.Offset
		n.Orig = asNode(t.Nname)
	}

	// Rewrite argument named _ to __,
	// or else the assignment to _ will be
	// discarded during code generation.
	if isblank(n) {
		n.Sym = lookup("__")
	}

	switch fp {
	default:
		Fatalf("bad fp")

	case 0: // preparing arguments for call
		n.Op = OINDREGSP
		n.Xoffset += Ctxt.FixedFrameSize()

	case 1: // reading arguments inside call
		n.SetClass(PPARAM)
		if funarg == types.FunargResults {
			n.SetClass(PPARAMOUT)
		}
	}

	n.SetTypecheck(1)
	n.SetAddrtaken(true) // keep optimizers at bay
	return n
}

// package all the arguments that match a ... T parameter into a []T.
// mkdotargslice packages the arguments that match a ...T parameter
// into a []T slice literal. ddd, if non-nil, is the ODDDARG node
// carrying escape info and any preallocated backing store.
func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
	esc := uint16(EscUnknown)
	if ddd != nil {
		esc = ddd.Esc
	}

	if len(args) == 0 {
		// No variadic arguments: pass a nil slice of the right type.
		n := nodnil()
		n.Type = typ
		return n
	}

	n := nod(OCOMPLIT, nil, typenod(typ))
	if ddd != nil && prealloc[ddd] != nil {
		prealloc[n] = prealloc[ddd] // temporary to use
	}
	n.List.Set(args)
	n.Esc = esc
	n = typecheck(n, Erv)
	if n.Type == nil {
		Fatalf("mkdotargslice: typecheck failed")
	}
	n = walkexpr(n, init)
	return n
}

// ascompatte checks an assignment of an expression list (rhs) to a
// parameter type list (lhs). Called in
//	return expr-list
//	func(expr-list)
// fp is passed through to nodarg (0 = caller side, 1 = callee side).
func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
	// f(g()) where g has multiple return values
	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
		// optimization - can do block copy
		if eqtypenoname(rhs[0].Type, lhs) {
			nl := nodarg(lhs, fp)
			nr := nod(OCONVNOP, rhs[0], nil)
			nr.Type = nl.Type
			n := convas(nod(OAS, nl, nr), init)
			n.SetTypecheck(1)
			return []*Node{n}
		}

		// conversions involved.
		// copy into temporaries.
		var tmps []*Node
		for _, nr := range rhs[0].Type.FieldSlice() {
			tmps = append(tmps, temp(nr.Type))
		}

		a := nod(OAS2, nil, nil)
		a.List.Set(tmps)
		a.Rlist.Set(rhs)
		a = typecheck(a, Etop)
		a = walkstmt(a)
		init.Append(a)

		rhs = tmps
	}

	// For each parameter (LHS), assign its corresponding argument (RHS).
	// If there's a ... parameter (which is only valid as the final
	// parameter) and this is not a ... call expression,
	// then assign the remaining arguments as a slice.
	var nn []*Node
	for i, nl := range lhs.FieldSlice() {
		var nr *Node
		if nl.Isddd() && !isddd {
			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
		} else {
			nr = rhs[i]
		}

		a := nod(OAS, nodarg(nl, fp), nr)
		a = convas(a, init)
		a.SetTypecheck(1)
		nn = append(nn, a)
	}

	return nn
}

// walkprint generates code for print and println statements (nn),
// lowering them to a printlock/print*/printunlock call sequence.
func walkprint(nn *Node, init *Nodes) *Node {
	// Hoist all the argument evaluation up before the lock.
	walkexprlistcheap(nn.List.Slice(), init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op == OPRINTN {
		s := nn.List.Slice()
		t := make([]*Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, nodstr(" "))
			}
			t = append(t, n)
		}
		t = append(t, nodstr("\n"))
		nn.List.Set(t)
	}

	// Collapse runs of constant strings.
	s := nn.List.Slice()
	t := make([]*Node, 0, len(s))
	for i := 0; i < len(s); {
		var strs []string
		for i < len(s) && Isconst(s[i], CTSTR) {
			strs = append(strs, s[i].Val().U.(string))
			i++
		}
		if len(strs) > 0 {
			t = append(t, nodstr(strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.List.Set(t)

	calls := []*Node{mkcall("printlock", nil, init)}
	for i, n := range nn.List.Slice() {
		// Give untyped constants a concrete default type before
		// choosing a runtime print routine.
		if n.Op == OLITERAL {
			switch n.Val().Ctype() {
			case CTRUNE:
				n = defaultlit(n, types.Runetype)

			case CTINT:
				n = defaultlit(n, types.Types[TINT64])

			case CTFLT:
				n = defaultlit(n, types.Types[TFLOAT64])
			}
		}

		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
			n = defaultlit(n, types.Types[TINT64])
		}
		n = defaultlit(n, nil)
		nn.List.SetIndex(i, n)
		if n.Type == nil || n.Type.Etype == TFORW {
			continue
		}

		// Select the runtime print routine for this argument's type.
		var on *Node
		switch n.Type.Etype {
		case TINTER:
			if n.Type.IsEmptyInterface() {
				on = syslook("printeface")
			} else {
				on = syslook("printiface")
			}
			on = substArgTypes(on, n.Type) // any-1
		case TPTR32, TPTR64, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
			on = syslook("printpointer")
			on = substArgTypes(on, n.Type) // any-1
		case TSLICE:
			on = syslook("printslice")
			on = substArgTypes(on, n.Type) // any-1
		case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
			// runtime.hex prints unsigned values in hexadecimal.
			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
				on = syslook("printhex")
			} else {
				on = syslook("printuint")
			}
		case TINT, TINT8, TINT16, TINT32, TINT64:
			on = syslook("printint")
		case TFLOAT32, TFLOAT64:
			on = syslook("printfloat")
		case TCOMPLEX64, TCOMPLEX128:
			on = syslook("printcomplex")
		case TBOOL:
			on = syslook("printbool")
		case TSTRING:
			cs := ""
			if Isconst(n, CTSTR) {
				cs = n.Val().U.(string)
			}
			// Constant " " and "\n" get dedicated, shorter calls.
			switch cs {
			case " ":
				on = syslook("printsp")
			case "\n":
				on = syslook("printnl")
			default:
				on = syslook("printstring")
			}
		default:
			badtype(OPRINT, n.Type, nil)
			continue
		}

		r := nod(OCALL, on, nil)
		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
			t := params[0].Type
			if !eqtype(t, n.Type) {
				n = nod(OCONV, n, nil)
				n.Type = t
			}
			r.List.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheckslice(calls, Etop)
	walkexprlist(calls, init)

	// Hang the whole call sequence off an empty statement.
	r := nod(OEMPTY, nil, nil)
	r = typecheck(r, Etop)
	r = walkexpr(r, init)
	r.Ninit.Set(calls)
	return r
}

// callnew returns a call to runtime.newobject allocating a new
// zeroed value of type t and yielding a non-nil *t.
func callnew(t *types.Type) *Node {
	if t.NotInHeap() {
		yyerror("%v is go:notinheap; heap allocation disallowed", t)
	}
	dowidth(t)
	fn := syslook("newobject")
	fn = substArgTypes(fn, t)
	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
	v.SetNonNil(true)
	return v
}

// iscallret reports whether n refers to a function call's result slot
// (an OINDREGSP after peeling away containing expressions).
func iscallret(n *Node) bool {
	if n == nil {
		return false
	}
	n = outervalue(n)
	return n.Op == OINDREGSP
}

// isReflectHeaderDataField reports whether l is an expression p.Data
// where p has type reflect.SliceHeader or reflect.StringHeader.
func isReflectHeaderDataField(l *Node) bool {
	if l.Type != types.Types[TUINTPTR] {
		return false
	}

	var tsym *types.Sym
	switch l.Op {
	case ODOT:
		tsym = l.Left.Type.Sym
	case ODOTPTR:
		tsym = l.Left.Type.Elem().Sym
	default:
		return false
	}

	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
		return false
	}
	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
}

// convas walks and inserts any conversion needed on the right side of
// the OAS node n so that it matches the left side's type.
// Assignments to blank are handled by defaulting the right side only.
func convas(n *Node, init *Nodes) *Node {
	if n.Op != OAS {
		Fatalf("convas: not OAS %v", n.Op)
	}
	defer updateHasCall(n)

	n.SetTypecheck(1)

	if n.Left == nil || n.Right == nil {
		return n
	}

	lt := n.Left.Type
	rt := n.Right.Type
	if lt == nil || rt == nil {
		return n
	}

	if isblank(n.Left) {
		n.Right = defaultlit(n.Right, nil)
		return n
	}

	if !eqtype(lt, rt) {
		n.Right = assignconv(n.Right, lt, "assignment")
		n.Right = walkexpr(n.Right, init)
	}
	dowidth(n.Right.Type)

	return n
}

// from ascompat[te]
// evaluating actual function arguments.
//	f(a,b)
// if there is exactly one function expr,
// then it is done first.
// ... otherwise must
// make temp variables
func reorder1(all []*Node) []*Node {
	// A single argument needs no reordering.
	if len(all) == 1 {
		return all
	}

	// Count how many argument assignments contain function calls.
	funcCalls := 0
	for _, n := range all {
		updateHasCall(n)
		if n.HasCall() {
			funcCalls++
		}
	}
	if funcCalls == 0 {
		return all
	}

	var g []*Node // fncalls assigned to tempnames
	var f *Node   // last fncall assigned to stack
	var r []*Node // non fncalls and tempnames assigned to stack
	d := 0
	for _, n := range all {
		if !n.HasCall() {
			r = append(r, n)
			continue
		}

		d++
		if d == funcCalls {
			// The last call can be evaluated directly into its
			// argument slot; nothing evaluated after it can
			// clobber earlier results.
			f = n
			continue
		}

		// make assignment of fncall to tempname
		a := temp(n.Right.Type)

		a = nod(OAS, a, n.Right)
		g = append(g, a)

		// put normal arg assignment on list
		// with fncall replaced by tempname
		n.Right = a.Left

		r = append(r, n)
	}

	// Result order: temp assignments (g), then the last call (f),
	// then the call-free / temp-using argument stores (r).
	if f != nil {
		g = append(g, f)
	}
	return append(g, r...)
}

// from ascompat[ee]
//	a,b = c,d
// simultaneous assignment. there cannot
// be later use of an earlier lvalue.
//
// function calls have been removed.
func reorder3(all []*Node) []*Node {
	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early []*Node

	var mapinit Nodes
	for i, n := range all {
		l := n.Left

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			if l.Op == ODOT || l.Op == OPAREN {
				l = l.Left
				continue
			}

			if l.Op == OINDEX && l.Left.Type.IsArray() {
				// Array indexing writes into the array itself,
				// so only the index expression needs saving here.
				l.Right = reorder3save(l.Right, all, i, &early)
				l = l.Left
				continue
			}

			break
		}

		switch l.Op {
		default:
			Fatalf("reorder3 unexpected lvalue %#v", l.Op)

		case ONAME:
			break

		case OINDEX, OINDEXMAP:
			l.Left = reorder3save(l.Left, all, i, &early)
			l.Right = reorder3save(l.Right, all, i, &early)
			if l.Op == OINDEXMAP {
				// Map assignments may need conversion of the
				// stored value; collect any init statements.
				all[i] = convas(all[i], &mapinit)
			}

		case OIND, ODOTPTR:
			l.Left = reorder3save(l.Left, all, i, &early)
		}

		// Save expression on right side.
		all[i].Right = reorder3save(all[i].Right, all, i, &early)
	}

	// Emit map-conversion inits first, then the saved copies,
	// then the (rewritten) assignments themselves.
	early = append(mapinit.Slice(), early...)
	return append(early, all...)
}

// if the evaluation of *np would be affected by the
// assignments in all up to but not including the ith assignment,
// copy into a temporary during *early and
// replace *np with that temp.
// The result of reorder3save MUST be assigned back to n, e.g.
//	n.Left = reorder3save(n.Left, all, i, early)
func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
	if !aliased(n, all, i) {
		return n
	}

	q := temp(n.Type)
	q = nod(OAS, q, n)
	q = typecheck(q, Etop)
	*early = append(*early, q)
	return q.Left
}

// what's the outer value that a write to n affects?
// outer value means containing struct or array.
func outervalue(n *Node) *Node {
	for {
		switch n.Op {
		case OXDOT:
			// OXDOT should have been resolved before walk.
			Fatalf("OXDOT in walk")
		case ODOT, OPAREN, OCONVNOP:
			// Not dereferences; a write through these affects
			// the underlying value.
			n = n.Left
			continue
		case OINDEX:
			// Indexing an array (not a slice) writes into the
			// containing array value itself.
			if n.Left.Type != nil && n.Left.Type.IsArray() {
				n = n.Left
				continue
			}
		}

		return n
	}
}

// Is it possible that the computation of n might be
// affected by writes in as up to but not including the ith element?
func aliased(n *Node, all []*Node, i int) bool {
	if n == nil {
		return false
	}

	// Treat all fields of a struct as referring to the whole struct.
	// We could do better but we would have to keep track of the fields.
	for n.Op == ODOT {
		n = n.Left
	}

	// Look for obvious aliasing: a variable being assigned
	// during the all list and appearing in n.
	// Also record whether there are any writes to main memory.
	// Also record whether there are any writes to variables
	// whose addresses have been taken.
	memwrite := false
	varwrite := false
	for _, an := range all[:i] {
		a := outervalue(an.Left)

		for a.Op == ODOT {
			a = a.Left
		}

		if a.Op != ONAME {
			memwrite = true
			continue
		}

		// NOTE(review): this switch inspects n (the expression being
		// read), not a (the variable being written) — matches the
		// upstream go1.10 source; verify intent before changing.
		switch n.Class() {
		default:
			varwrite = true
			continue

		case PAUTO, PPARAM, PPARAMOUT:
			if n.Addrtaken() {
				varwrite = true
				continue
			}

			if vmatch2(a, n) {
				// Direct hit.
				return true
			}
		}
	}

	// The variables being written do not appear in n.
	// However, n might refer to computed addresses
	// that are being written.

	// If no computed addresses are affected by the writes, no aliasing.
	if !memwrite && !varwrite {
		return false
	}

	// If n does not refer to computed addresses
	// (that is, if n only refers to variables whose addresses
	// have not been taken), no aliasing.
	if varexpr(n) {
		return false
	}

	// Otherwise, both the writes and n refer to computed memory addresses.
	// Assume that they might conflict.
	return true
}

// does the evaluation of n only refer to variables
// whose addresses have not been taken?
// (and no other memory)
func varexpr(n *Node) bool {
	if n == nil {
		return true
	}

	switch n.Op {
	case OLITERAL:
		return true

	case ONAME:
		switch n.Class() {
		case PAUTO, PPARAM, PPARAMOUT:
			if !n.Addrtaken() {
				return true
			}
		}

		return false

	case OADD,
		OSUB,
		OOR,
		OXOR,
		OMUL,
		ODIV,
		OMOD,
		OLSH,
		ORSH,
		OAND,
		OANDNOT,
		OPLUS,
		OMINUS,
		OCOM,
		OPAREN,
		OANDAND,
		OOROR,
		OCONV,
		OCONVNOP,
		OCONVIFACE,
		ODOTTYPE:
		// Pure operators: safe iff both operands are.
		return varexpr(n.Left) && varexpr(n.Right)

	case ODOT: // but not ODOTPTR
		// Should have been handled in aliased.
		Fatalf("varexpr unexpected ODOT")
	}

	// Be conservative.
	return false
}

// is the name l mentioned in r?
func vmatch2(l *Node, r *Node) bool {
	if r == nil {
		return false
	}
	switch r.Op {
	// match each right given left
	case ONAME:
		return l == r

	case OLITERAL:
		return false
	}

	// Recurse into operands and child list.
	if vmatch2(l, r.Left) {
		return true
	}
	if vmatch2(l, r.Right) {
		return true
	}
	for _, n := range r.List.Slice() {
		if vmatch2(l, n) {
			return true
		}
	}
	return false
}

// is any name mentioned in l also mentioned in r?
// called by sinit.go
func vmatch1(l *Node, r *Node) bool {
	// isolate all left sides
	if l == nil || r == nil {
		return false
	}
	switch l.Op {
	case ONAME:
		switch l.Class() {
		case PPARAM, PAUTO:
			break

		default:
			// assignment to non-stack variable must be
			// delayed if right has function calls.
			if r.HasCall() {
				return true
			}
		}

		return vmatch2(l, r)

	case OLITERAL:
		return false
	}

	// Recurse into l's operands and child list, matching each
	// name found there against r.
	if vmatch1(l.Left, r) {
		return true
	}
	if vmatch1(l.Right, r) {
		return true
	}
	for _, n := range l.List.Slice() {
		if vmatch1(n, r) {
			return true
		}
	}
	return false
}

// paramstoheap returns code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack.
func paramstoheap(params *types.Type) []*Node {
	var nn []*Node
	for _, t := range params.Fields().Slice() {
		v := asNode(t.Nname)
		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
			v = nil
		}
		if v == nil {
			continue
		}

		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
			// Declare the heap copy, and for input parameters
			// copy the incoming stack value into it.
			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
			if stackcopy.Class() == PPARAM {
				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
			}
		}
	}

	return nn
}

// zeroResults zeros the return values at the start of the function.
// We need to do this very early in the function. Defer might stop a
// panic and show the return values as they exist at the time of
// panic. For precise stacks, the garbage collector assumes results
// are always live, so we need to zero them before any allocations,
// even allocations to move params/results to the heap.
// The generated code is added to Curfn's Enter list.
func zeroResults() {
	lno := lineno
	lineno = Curfn.Pos
	for _, f := range Curfn.Type.Results().Fields().Slice() {
		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
			// The local which points to the return value is the
			// thing that needs zeroing. This is already handled
			// by a Needzero annotation in plive.go:livenessepilogue.
			continue
		}
		// Zero the stack location containing f.
		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
	}
	lineno = lno
}

// returnsfromheap returns code to copy values for heap-escaped parameters
// back to the stack.
func returnsfromheap(params *types.Type) []*Node {
	var nn []*Node
	for _, t := range params.Fields().Slice() {
		v := asNode(t.Nname)
		if v == nil {
			continue
		}
		// Only result parameters with a stack copy need the
		// heap-to-stack store on exit.
		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
		}
	}

	return nn
}

// heapmoves generates code to handle migrating heap-escaped parameters
// between the stack and the heap. The generated code is added to Curfn's
// Enter and Exit lists.
func heapmoves() {
	lno := lineno
	lineno = Curfn.Pos
	nn := paramstoheap(Curfn.Type.Recvs())
	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
	Curfn.Func.Enter.Append(nn...)
	// Exit code is attributed to the closing brace of the function.
	lineno = Curfn.Func.Endlineno
	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
	lineno = lno
}

// vmkcall constructs, typechecks, and walks a call to fn with argument
// list va, forcing the resulting node's type to t.
func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
	if fn.Type == nil || fn.Type.Etype != TFUNC {
		Fatalf("mkcall %v %v", fn, fn.Type)
	}

	n := fn.Type.NumParams()
	if n != len(va) {
		Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
	}

	r := nod(OCALL, fn, nil)
	r.List.Set(va)
	// Calls with results typecheck as expressions; others as statements.
	if fn.Type.NumResults() > 0 {
		r = typecheck(r, Erv|Efnstruct)
	} else {
		r = typecheck(r, Etop)
	}
	r = walkexpr(r, init)
	r.Type = t
	return r
}

// mkcall builds a call to the named runtime function.
func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
	return vmkcall(syslook(name), t, init, args)
}

// mkcall1 is like mkcall but takes an already-looked-up function node.
func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
	return vmkcall(fn, t, init, args)
}

// conv wraps n in an OCONV to type t, unless n already has type t.
func conv(n *Node, t *types.Type) *Node {
	if eqtype(n.Type, t) {
		return n
	}
	n = nod(OCONV, n, nil)
	n.Type = t
	n = typecheck(n, Erv)
	return n
}

// byteindex converts n, which is byte-sized, to a uint8.
// We cannot use conv, because we allow converting bool to uint8 here,
// which is forbidden in user code.
func byteindex(n *Node) *Node {
	if eqtype(n.Type, types.Types[TUINT8]) {
		return n
	}
	// Mark typechecked by hand since this conversion would be
	// rejected by the normal typechecker (e.g. bool -> uint8).
	n = nod(OCONV, n, nil)
	n.Type = types.Types[TUINT8]
	n.SetTypecheck(1)
	return n
}

// chanfn returns the named channel runtime helper specialized for
// channel type t; n is the number of "any" type parameters to substitute.
func chanfn(name string, n int, t *types.Type) *Node {
	if !t.IsChan() {
		Fatalf("chanfn %v", t)
	}
	fn := syslook(name)
	switch n {
	default:
		Fatalf("chanfn %d", n)
	case 1:
		fn = substArgTypes(fn, t.Elem())
	case 2:
		fn = substArgTypes(fn, t.Elem(), t.Elem())
	}
	return fn
}

// mapfn returns the named map runtime helper specialized for map type t.
func mapfn(name string, t *types.Type) *Node {
	if !t.IsMap() {
		Fatalf("mapfn %v", t)
	}
	fn := syslook(name)
	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val())
	return fn
}

// mapfndel is like mapfn but for delete helpers, which take no value.
func mapfndel(name string, t *types.Type) *Node {
	if !t.IsMap() {
		// NOTE(review): message says "mapfn" rather than "mapfndel";
		// matches the upstream source.
		Fatalf("mapfn %v", t)
	}
	fn := syslook(name)
	fn = substArgTypes(fn, t.Key(), t.Val(), t.Key())
	return fn
}

// Fast-path variants of the map runtime helpers, indexed by key kind.
const (
	mapslow = iota
	mapfast32
	mapfast32ptr
	mapfast64
	mapfast64ptr
	mapfaststr
	nmapfast
)

type mapnames [nmapfast]string

// mkmapnames builds the helper-name table for one family of map
// operations (access/assign/delete).
func mkmapnames(base string, ptr string) mapnames {
	return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
}

var mapaccess1 = mkmapnames("mapaccess1", "")
var mapaccess2 = mkmapnames("mapaccess2", "")
var mapassign = mkmapnames("mapassign", "ptr")
var mapdelete = mkmapnames("mapdelete", "")

// mapfast selects which fast-path helper (if any) applies to map type t.
func mapfast(t *types.Type) int {
	// Check ../../runtime/hashmap.go:maxValueSize before changing.
	if t.Val().Width > 128 {
		return mapslow
	}
	switch algtype(t.Key()) {
	case AMEM32:
		if !t.Key().HasHeapPointer() {
			return mapfast32
		}
		if Widthptr == 4 {
			return mapfast32ptr
		}
		Fatalf("small pointer %v", t.Key())
	case AMEM64:
		if !t.Key().HasHeapPointer() {
			return mapfast64
		}
		if Widthptr == 8 {
			return mapfast64ptr
		}
		// Two-word object, at least one of which is a pointer.
		// Use the slow path.
	case ASTRING:
		return mapfaststr
	}
	return mapslow
}

// writebarrierfn returns the named write-barrier helper specialized
// for types l and r.
func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
	fn := syslook(name)
	fn = substArgTypes(fn, l, r)
	return fn
}

// addstr lowers an OADDSTR (string concatenation) to a call to one of
// the runtime concatstring helpers, optionally with a stack buffer.
func addstr(n *Node, init *Nodes) *Node {
	// orderexpr rewrote OADDSTR to have a list of strings.
	c := n.List.Len()

	if c < 2 {
		Fatalf("addstr count %d too small", c)
	}

	buf := nodnil()
	if n.Esc == EscNone {
		// Sum the sizes of the constant operands to estimate the result.
		sz := int64(0)
		for _, n1 := range n.List.Slice() {
			if n1.Op == OLITERAL {
				sz += int64(len(n1.Val().U.(string)))
			}
		}

		// Don't allocate the buffer if the result won't fit.
		if sz < tmpstringbufsize {
			// Create temporary buffer for result string on stack.
			t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)

			buf = nod(OADDR, temp(t), nil)
		}
	}

	// build list of string arguments
	args := []*Node{buf}
	for _, n2 := range n.List.Slice() {
		args = append(args, conv(n2, types.Types[TSTRING]))
	}

	var fn string
	if c <= 5 {
		// small numbers of strings use direct runtime helpers.
		// note: orderexpr knows this cutoff too.
		fn = fmt.Sprintf("concatstring%d", c)
	} else {
		// large numbers of strings are passed to the runtime as a slice.
		fn = "concatstrings"

		t := types.NewSlice(types.Types[TSTRING])
		slice := nod(OCOMPLIT, nil, typenod(t))
		if prealloc[n] != nil {
			prealloc[slice] = prealloc[n]
		}
		slice.List.Set(args[1:]) // skip buf arg
		args = []*Node{buf, slice}
		slice.Esc = EscNone
	}

	cat := syslook(fn)
	r := nod(OCALL, cat, nil)
	r.List.Set(args)
	r = typecheck(r, Erv)
	r = walkexpr(r, init)
	r.Type = n.Type

	return r
}

// expand append(l1, l2...) to
// init {
//   s := l1
//   n := len(s) + len(l2)
//   // Compare as uint so growslice can panic on overflow.
//   if uint(n) > uint(cap(s)) {
//     s = growslice(s, n)
//   }
//   s = s[:n]
//   memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
// }
// s
//
// l2 is allowed to be a string.
func appendslice(n *Node, init *Nodes) *Node {
	walkexprlistsafe(n.List.Slice(), init)

	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	ls := n.List.Slice()
	for i1, n1 := range ls {
		ls[i1] = cheapexpr(n1, init)
	}

	l1 := n.List.First()
	l2 := n.List.Second()

	var l []*Node

	// var s []T
	s := temp(l1.Type)
	l = append(l, nod(OAS, s, l1)) // s = l1

	// n := len(s) + len(l2)
	nn := temp(types.Types[TINT])
	l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))

	// if uint(n) > uint(cap(s))
	nif := nod(OIF, nil, nil)
	nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
	nif.Left.Left.Type = types.Types[TUINT]
	nif.Left.Right.Type = types.Types[TUINT]

	// instantiate growslice(Type*, []any, int) []any
	fn := syslook("growslice")
	fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())

	// s = growslice(T, s, n)
	nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn)))
	l = append(l, nif)

	// s = s[:n]
	nt := nod(OSLICE, s, nil)
	nt.SetSliceBounds(nil, nn, nil)
	nt.Etype = 1
	l = append(l, nod(OAS, s, nt))

	if l1.Type.Elem().HasHeapPointer() {
		// Element type has pointers: must go through the
		// write-barrier-aware copy helper.
		// copy(s[len(l1):], l2)
		nptr1 := nod(OSLICE, s, nil)
		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
		nptr1.Etype = 1
		nptr2 := l2
		Curfn.Func.setWBPos(n.Pos)
		fn := syslook("typedslicecopy")
		fn = substArgTypes(fn, l1.Type, l2.Type)
		var ln Nodes
		ln.Set(l)
		nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
		l = append(ln.Slice(), nt)
	} else if instrumenting && !compiling_runtime {
		// rely on runtime to instrument copy.
		// copy(s[len(l1):], l2)
		nptr1 := nod(OSLICE, s, nil)
		nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
		nptr1.Etype = 1
		nptr2 := l2

		var ln Nodes
		ln.Set(l)
		var nt *Node
		if l2.Type.IsString() {
			fn := syslook("slicestringcopy")
			fn = substArgTypes(fn, l1.Type, l2.Type)
			nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2)
		} else {
			fn := syslook("slicecopy")
			fn = substArgTypes(fn, l1.Type, l2.Type)
			nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
		}

		l = append(ln.Slice(), nt)
	} else {
		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
		nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
		nptr1.SetBounded(true)

		nptr1 = nod(OADDR, nptr1, nil)

		nptr2 := nod(OSPTR, l2, nil)

		fn := syslook("memmove")
		fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem())

		var ln Nodes
		ln.Set(l)
		nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)

		nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
		nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
		l = append(ln.Slice(), nt)
	}

	typecheckslice(l, Etop)
	walkstmtlist(l)
	init.Append(l...)
	return s
}

// Rewrite append(src, x, y, z) so that any side effects in
// x, y, z (including runtime panics) are evaluated in
// initialization statements before the append.
// For normal code generation, stop there and leave the
// rest to cgen_append.
//
// For race detector, expand append(src, a [, b]* ) to
//
//   init {
//     s := src
//     const argc = len(args) - 1
//     if cap(s) - len(s) < argc {
//	    s = growslice(s, len(s)+argc)
//     }
//     n := len(s)
//     s = s[:n+argc]
//     s[n] = a
//     s[n+1] = b
//     ...
//   }
//   s
func walkappend(n *Node, init *Nodes, dst *Node) *Node {
	if !samesafeexpr(dst, n.List.First()) {
		n.List.SetFirst(safeexpr(n.List.First(), init))
		n.List.SetFirst(walkexpr(n.List.First(), init))
	}
	walkexprlistsafe(n.List.Slice()[1:], init)

	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	// Using cheapexpr also makes sure that the evaluation
	// of all arguments (and especially any panics) happen
	// before we begin to modify the slice in a visible way.
	ls := n.List.Slice()[1:]
	for i, n := range ls {
		ls[i] = cheapexpr(n, init)
	}

	nsrc := n.List.First()

	argc := n.List.Len() - 1
	if argc < 1 {
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for gen, except that instrumentation requires old form.
	if !instrumenting || compiling_runtime {
		return n
	}

	var l []*Node

	ns := temp(nsrc.Type)
	l = append(l, nod(OAS, ns, nsrc)) // s = src

	na := nodintconst(int64(argc)) // const argc
	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)

	fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T)
	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())

	nx.Nbody.Set1(nod(OAS, ns,
		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
			nod(OADD, nod(OLEN, ns, nil), na))))

	l = append(l, nx)

	nn := temp(types.Types[TINT])
	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)

	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
	nx.Etype = 1
	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]

	// Store each argument into successive slots, bumping n between stores.
	ls = n.List.Slice()[1:]
	for i, n := range ls {
		nx = nod(OINDEX, ns, nn) // s[n] ...
		nx.SetBounded(true)
		l = append(l, nod(OAS, nx, n)) // s[n] = arg
		if i+1 < len(ls) {
			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
		}
	}

	typecheckslice(l, Etop)
	walkstmtlist(l)
	init.Append(l...)
	return ns
}

// Lower copy(a, b) to a memmove call or a runtime call.
//
// init {
//   n := len(a)
//   if n > len(b) { n = len(b) }
//   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
// }
// n;
//
// Also works if b is a string.
//
func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
	if n.Left.Type.Elem().HasHeapPointer() {
		// Pointer elements require the write-barrier-aware helper.
		Curfn.Func.setWBPos(n.Pos)
		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
	}

	if runtimecall {
		if n.Right.Type.IsString() {
			fn := syslook("slicestringcopy")
			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
			return mkcall1(fn, n.Type, init, n.Left, n.Right)
		}

		fn := syslook("slicecopy")
		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
	}

	n.Left = walkexpr(n.Left, init)
	n.Right = walkexpr(n.Right, init)
	nl := temp(n.Left.Type)
	nr := temp(n.Right.Type)
	var l []*Node
	l = append(l, nod(OAS, nl, n.Left))
	l = append(l, nod(OAS, nr, n.Right))

	nfrm := nod(OSPTR, nr, nil)
	nto := nod(OSPTR, nl, nil)

	nlen := temp(types.Types[TINT])

	// n = len(to)
	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))

	// if n > len(frm) { n = len(frm) }
	nif := nod(OIF, nil, nil)

	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
	l = append(l, nif)

	// Call memmove.
	fn := syslook("memmove")

	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
	nwid := temp(types.Types[TUINTPTR])
	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))

	typecheckslice(l, Etop)
	walkstmtlist(l)
	init.Append(l...)
	return nlen
}

// eqfor returns the function node for comparing two values of type t,
// and reports whether that function also takes a size argument.
func eqfor(t *types.Type) (n *Node, needsize bool) {
	// Should only arrive here with large memory or
	// a struct/array containing a non-memory field/element.
	// Small memory is handled inline, and single non-memory
	// is handled during type check (OCMPSTR etc).
	switch a, _ := algtype1(t); a {
	case AMEM:
		n := syslook("memequal")
		n = substArgTypes(n, t, t)
		return n, true
	case ASPECIAL:
		// Use the generated type-specific .eq function.
		sym := typesymprefix(".eq", t)
		n := newname(sym)
		n.SetClass(PFUNC)
		ntype := nod(OTFUNC, nil, nil)
		ntype.List.Append(anonfield(types.NewPtr(t)))
		ntype.List.Append(anonfield(types.NewPtr(t)))
		ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
		ntype = typecheck(ntype, Etype)
		n.Type = ntype.Type
		return n, false
	}
	Fatalf("eqfor %v", t)
	return nil, false
}

// The result of walkcompare MUST be assigned back to n, e.g.
//	n.Left = walkcompare(n.Left, init)
func walkcompare(n *Node, init *Nodes) *Node {
	// Given interface value l and concrete value r, rewrite
	//   l == r
	// into types-equal && data-equal.
	// This is efficient, avoids allocations, and avoids runtime calls.
	var l, r *Node
	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
		l = n.Left
		r = n.Right
	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
		l = n.Right
		r = n.Left
	}

	if l != nil {
		// Handle both == and !=.
		eq := n.Op
		var andor Op
		if eq == OEQ {
			andor = OANDAND
		} else {
			andor = OOROR
		}
		// Check for types equal.
		// For empty interface, this is:
		//   l.tab == type(r)
		// For non-empty interface, this is:
		//   l.tab != nil && l.tab._type == type(r)
		var eqtype *Node
		tab := nod(OITAB, l, nil)
		rtyp := typename(r.Type)
		if l.Type.IsEmptyInterface() {
			tab.Type = types.NewPtr(types.Types[TUINT8])
			tab.SetTypecheck(1)
			eqtype = nod(eq, tab, rtyp)
		} else {
			nonnil := nod(brcom(eq), nodnil(), tab)
			match := nod(eq, itabType(tab), rtyp)
			eqtype = nod(andor, nonnil, match)
		}
		// Check for data equal.
		eqdata := nod(eq, ifaceData(l, r.Type), r)
		// Put it all together.
		expr := nod(andor, eqtype, eqdata)
		n = finishcompare(n, expr, init)
		return n
	}

	// Must be comparison of array or struct.
	// Otherwise back end handles it.
	// While we're here, decide whether to
	// inline or call an eq alg.
	t := n.Left.Type
	var inline bool

	maxcmpsize := int64(4)
	unalignedLoad := false
	switch thearch.LinkArch.Family {
	case sys.AMD64, sys.ARM64, sys.S390X:
		// Keep this low enough, to generate less code than function call.
		maxcmpsize = 16
		unalignedLoad = true
	case sys.I386:
		maxcmpsize = 8
		unalignedLoad = true
	}

	switch t.Etype {
	default:
		return n
	case TARRAY:
		// We can compare several elements at once with 2/4/8 byte integer compares
		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
	case TSTRUCT:
		inline = t.NumFields() <= 4
	}

	// Strip no-op conversions to reach the underlying lvalues.
	cmpl := n.Left
	for cmpl != nil && cmpl.Op == OCONVNOP {
		cmpl = cmpl.Left
	}
	cmpr := n.Right
	for cmpr != nil && cmpr.Op == OCONVNOP {
		cmpr = cmpr.Left
	}

	// Chose not to inline. Call equality function directly.
	if !inline {
		// Composite literals must be materialized into temporaries
		// so we can take their addresses.
		if isvaluelit(cmpl) {
			var_ := temp(cmpl.Type)
			anylit(cmpl, var_, init)
			cmpl = var_
		}
		if isvaluelit(cmpr) {
			var_ := temp(cmpr.Type)
			anylit(cmpr, var_, init)
			cmpr = var_
		}
		if !islvalue(cmpl) || !islvalue(cmpr) {
			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
		}

		// eq algs take pointers
		pl := temp(types.NewPtr(t))
		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
		al.Right.Etype = 1 // addr does not escape
		al = typecheck(al, Etop)
		init.Append(al)

		pr := temp(types.NewPtr(t))
		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
		ar.Right.Etype = 1 // addr does not escape
		ar = typecheck(ar, Etop)
		init.Append(ar)

		fn, needsize := eqfor(t)
		call := nod(OCALL, fn, nil)
		call.List.Append(pl)
		call.List.Append(pr)
		if needsize {
			call.List.Append(nodintconst(t.Width))
		}
		res := call
		if n.Op != OEQ {
			res = nod(ONOT, res, nil)
		}
		n = finishcompare(n, res, init)
		return n
	}

	// inline: build boolean expression comparing element by element
	andor := OANDAND
	if n.Op == ONE {
		andor = OOROR
	}
	var expr *Node
	compare := func(el, er *Node) {
		a := nod(n.Op, el, er)
		if expr == nil {
			expr = a
		} else {
			expr = nod(andor, expr, a)
		}
	}
	cmpl = safeexpr(cmpl, init)
	cmpr = safeexpr(cmpr, init)
	if t.IsStruct() {
		for _, f := range t.Fields().Slice() {
			sym := f.Sym
			if sym.IsBlank() {
				continue
			}
			compare(
				nodSym(OXDOT, cmpl, sym),
				nodSym(OXDOT, cmpr, sym),
			)
		}
	} else {
		step := int64(1)
		remains := t.NumElem() * t.Elem().Width
		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
		for i := int64(0); remains > 0; {
			var convType *types.Type
			switch {
			case remains >= 8 && combine64bit:
				convType = types.Types[TINT64]
				step = 8 / t.Elem().Width
			case remains >= 4 && combine32bit:
				convType = types.Types[TUINT32]
				step = 4 / t.Elem().Width
			case remains >= 2 && combine16bit:
				convType = types.Types[TUINT16]
				step = 2 / t.Elem().Width
			default:
				step = 1
			}
			if step == 1 {
				compare(
					nod(OINDEX, cmpl, nodintconst(int64(i))),
					nod(OINDEX, cmpr, nodintconst(int64(i))),
				)
				i++
				remains -= t.Elem().Width
			} else {
				elemType := t.Elem().ToUnsigned()
				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
				cmplw = conv(cmplw, elemType) // convert to unsigned
				cmplw = conv(cmplw, convType) // widen
				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
				cmprw = conv(cmprw, elemType)
				cmprw = conv(cmprw, convType)
				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
				// ssa will generate a single large load.
				for offset := int64(1); offset < step; offset++ {
					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
					lb = conv(lb, elemType)
					lb = conv(lb, convType)
					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
					cmplw = nod(OOR, cmplw, lb)
					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
					rb = conv(rb, elemType)
					rb = conv(rb, convType)
					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
					cmprw = nod(OOR, cmprw, rb)
				}
				compare(cmplw, cmprw)
				i += step
				remains -= step * t.Elem().Width
			}
		}
	}
	if expr == nil {
		// Zero-length array or all-blank struct: constant result.
		expr = nodbool(n.Op == OEQ)
	}
	n = finishcompare(n, expr, init)
	return n
}

// The result of finishcompare MUST be assigned back to n, e.g.
//	n.Left = finishcompare(n.Left, x, r, init)
func finishcompare(n, r *Node, init *Nodes) *Node {
	// Use nn here to avoid passing r to typecheck.
	nn := r
	nn = typecheck(nn, Erv)
	nn = walkexpr(nn, init)
	r = nn
	if r.Type != n.Type {
		// Preserve the original node's (possibly untyped) type.
		r = nod(OCONVNOP, r, nil)
		r.Type = n.Type
		r.SetTypecheck(1)
		nn = r
	}
	return nn
}

// isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
func (n *Node) isIntOrdering() bool {
	switch n.Op {
	case OLE, OLT, OGE, OGT:
	default:
		return false
	}
	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
}

// walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
// n must be an OANDAND or OOROR node.
// The result of walkinrange MUST be assigned back to n, e.g.
//	n.Left = walkinrange(n.Left)
func walkinrange(n *Node, init *Nodes) *Node {
	// We are looking for something equivalent to a opl b OP b opr c, where:
	// * a, b, and c have integer type
	// * b is side-effect-free
	// * opl and opr are each < or ≤
	// * OP is &&
	l := n.Left
	r := n.Right
	if !l.isIntOrdering() || !r.isIntOrdering() {
		return n
	}

	// Find b, if it exists, and rename appropriately.
	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
	a, opl, b := l.Left, l.Op, l.Right
	x, opr, c := r.Left, r.Op, r.Right
	for i := 0; ; i++ {
		if samesafeexpr(b, x) {
			break
		}
		if i == 3 {
			// Tried all permutations and couldn't find an appropriate b == x.
			return n
		}
		if i&1 == 0 {
			a, opl, b = b, brrev(opl), a
		} else {
			x, opr, c = c, brrev(opr), x
		}
	}

	// If n.Op is ||, apply de Morgan.
	// Negate the internal ops now; we'll negate the top level op at the end.
	// Henceforth assume &&.
	negateResult := n.Op == OOROR
	if negateResult {
		opl = brcom(opl)
		opr = brcom(opr)
	}

	cmpdir := func(o Op) int {
		switch o {
		case OLE, OLT:
			return -1
		case OGE, OGT:
			return +1
		}
		Fatalf("walkinrange cmpdir %v", o)
		return 0
	}
	if cmpdir(opl) != cmpdir(opr) {
		// Not a range check; something like b < a && b < c.
		return n
	}

	switch opl {
	case OGE, OGT:
		// We have something like a > b && b ≥ c.
		// Switch and reverse ops and rename constants,
		// to make it look like a ≤ b && b < c.
		a, c = c, a
		opl, opr = brrev(opr), brrev(opl)
	}

	// We must ensure that c-a is non-negative.
	// For now, require a and c to be constants.
	// In the future, we could also support a == 0 and c == len/cap(...).
	// Unfortunately, by this point, most len/cap expressions have been
	// stored into temporary variables.
	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
		return n
	}

	if opl == OLT {
		// We have a < b && ...
		// We need a ≤ b && ... to safely use unsigned comparison tricks.
		// If a is not the maximum constant for b's type,
		// we can increment a and switch to ≤.
		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
			return n
		}
		a = nodintconst(a.Int64() + 1)
		opl = OLE
	}

	bound := c.Int64() - a.Int64()
	if bound < 0 {
		// Bad news. Something like 5 <= x && x < 3.
		// Rare in practice, and we still need to generate side-effects,
		// so just leave it alone.
		return n
	}

	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
	// which is equivalent to uint(b-a) < uint(c-a).
	ut := b.Type.ToUnsigned()
	lhs := conv(nod(OSUB, b, a), ut)
	rhs := nodintconst(bound)
	if negateResult {
		// Negate top level.
		opr = brcom(opr)
	}
	cmp := nod(opr, lhs, rhs)
	cmp.Pos = n.Pos
	// Carry over any init statements from the discarded operands.
	cmp = addinit(cmp, l.Ninit.Slice())
	cmp = addinit(cmp, r.Ninit.Slice())
	// Typecheck the AST rooted at cmp...
	cmp = typecheck(cmp, Erv)
	// ...but then reset cmp's type to match n's type.
	cmp.Type = n.Type
	cmp = walkexpr(cmp, init)
	return cmp
}

// return 1 if integer n must be in range [0, max), 0 otherwise
func bounded(n *Node, max int64) bool {
	if n.Type == nil || !n.Type.IsInteger() {
		return false
	}

	sign := n.Type.IsSigned()
	bits := int32(8 * n.Type.Width)

	if smallintconst(n) {
		v := n.Int64()
		return 0 <= v && v < max
	}

	switch n.Op {
	case OAND:
		// Masking with a constant bounds the result by that constant.
		v := int64(-1)
		if smallintconst(n.Left) {
			v = n.Left.Int64()
		} else if smallintconst(n.Right) {
			v = n.Right.Int64()
		}

		if 0 <= v && v < max {
			return true
		}

	case OMOD:
		// x % v is in [0, v-1] for unsigned x.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			if 0 <= v && v <= max {
				return true
			}
		}

	case ODIV:
		// Unsigned division by a constant shrinks the bit width;
		// fall through to the final width-based check below.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ORSH:
		// Unsigned right shift by a constant shrinks the bit width.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}

// usemethod checks interface method calls for uses of reflect.Type.Method.
// usemethod inspects the called method type t and, when its signature
// matches reflect.Type.Method or reflect.Type.MethodByName, marks the
// current function (Curfn) as using reflect method lookup so the linker
// keeps the necessary method metadata.
func usemethod(n *Node) {
	t := n.Left.Type

	// Looking for either of:
	//	Method(int) reflect.Method
	//	MethodByName(string) (reflect.Method, bool)
	//
	// TODO(crawshaw): improve precision of match by working out
	// how to check the method name.
	if n := t.NumParams(); n != 1 {
		return
	}
	if n := t.NumResults(); n != 1 && n != 2 {
		return
	}
	p0 := t.Params().Field(0)
	res0 := t.Results().Field(0)
	var res1 *types.Field
	if t.NumResults() == 2 {
		res1 = t.Results().Field(1)
	}

	if res1 == nil {
		// One result: signature must be Method(int) reflect.Method.
		if p0.Type.Etype != TINT {
			return
		}
	} else {
		// Two results: signature must be MethodByName(string) (reflect.Method, bool).
		if !p0.Type.IsString() {
			return
		}
		if !res1.Type.IsBoolean() {
			return
		}
	}

	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
	// (including global variables such as numImports - was issue #19028).
	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
		Curfn.Func.SetReflectMethod(true)
	}
}

// usefield records a use of a tracked struct field (one annotated with a
// go:"track" note) in the current function's FieldTrack set, for the
// linker's field-tracking feature. It is a no-op unless field tracking
// is enabled in the build (objabi.Fieldtrack_enabled).
func usefield(n *Node) {
	if objabi.Fieldtrack_enabled == 0 {
		return
	}

	switch n.Op {
	default:
		Fatalf("usefield %v", n.Op)

	case ODOT, ODOTPTR:
		break
	}
	if n.Sym == nil {
		// No field name. This DOTPTR was built by the compiler for access
		// to runtime data structures. Ignore.
		return
	}

	t := n.Left.Type
	if t.IsPtr() {
		t = t.Elem()
	}
	field := dotField[typeSymKey{t.Orig, n.Sym}]
	if field == nil {
		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
	}
	// Only fields carrying a go:"track" note in their tag are tracked.
	if !strings.Contains(field.Note, "go:\"track\"") {
		return
	}

	outer := n.Left.Type
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym == nil {
		yyerror("tracked field must be in named struct type")
	}
	if !exportname(field.Sym.Name) {
		yyerror("tracked field must be exported (upper case)")
	}

	sym := tracksym(outer, field)
	if Curfn.Func.FieldTrack == nil {
		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
	}
	Curfn.Func.FieldTrack[sym] = struct{}{}
}

// candiscardlist reports whether every node in l can be discarded
// (see candiscard).
func candiscardlist(l Nodes) bool {
	for _, n := range l.Slice() {
		if !candiscard(n) {
			return false
		}
	}
	return true
}

// candiscard reports whether the expression n can be evaluated and its
// result discarded without observable effect: no side effects and no
// possible runtime panic (e.g. division by zero or a bad make size).
func candiscard(n *Node) bool {
	if n == nil {
		return true
	}

	switch n.Op {
	default:
		return false

	// Discardable as long as the subpieces are.
	case ONAME,
		ONONAME,
		OTYPE,
		OPACK,
		OLITERAL,
		OADD,
		OSUB,
		OOR,
		OXOR,
		OADDSTR,
		OADDR,
		OANDAND,
		OARRAYBYTESTR,
		OARRAYRUNESTR,
		OSTRARRAYBYTE,
		OSTRARRAYRUNE,
		OCAP,
		OCMPIFACE,
		OCMPSTR,
		OCOMPLIT,
		OMAPLIT,
		OSTRUCTLIT,
		OARRAYLIT,
		OSLICELIT,
		OPTRLIT,
		OCONV,
		OCONVIFACE,
		OCONVNOP,
		ODOT,
		OEQ,
		ONE,
		OLT,
		OLE,
		OGT,
		OGE,
		OKEY,
		OSTRUCTKEY,
		OLEN,
		OMUL,
		OLSH,
		ORSH,
		OAND,
		OANDNOT,
		ONEW,
		ONOT,
		OCOM,
		OPLUS,
		OMINUS,
		OOROR,
		OPAREN,
		ORUNESTR,
		OREAL,
		OIMAG,
		OCOMPLEX:
		break

	// Discardable as long as we know it's not division by zero.
	case ODIV, OMOD:
		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
			break
		}
		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
			break
		}
		return false

	// Discardable as long as we know it won't fail because of a bad size.
	case OMAKECHAN, OMAKEMAP:
		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
			break
		}
		return false

	// Difficult to tell what sizes are okay.
	case OMAKESLICE:
		return false
	}

	// The node itself is fine; require the same of all of its children.
	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
		return false
	}

	return true
}

// rewrite
//	print(x, y, z)
// into
//	func(a1, a2, a3) {
//		print(a1, a2, a3)
//	}(x, y, z)
// and same for println.

// walkprintfunc_prgen numbers the generated print·%d wrapper functions
// so each gets a unique symbol.
var walkprintfunc_prgen int

// The result of walkprintfunc MUST be assigned back to n, e.g.
// n.Left = walkprintfunc(n.Left, init)
//
// walkprintfunc wraps a print/println statement n in a freshly generated
// function (named print·%d) taking the print arguments as parameters,
// and returns a call to that function with the original arguments.
// The generated function is appended to xtop for later compilation.
func walkprintfunc(n *Node, init *Nodes) *Node {
	if n.Ninit.Len() != 0 {
		walkstmtlist(n.Ninit.Slice())
		init.AppendNodes(&n.Ninit)
	}

	// Build the wrapper's parameter list: one named field a0, a1, ...
	// per print argument, matching each argument's type.
	t := nod(OTFUNC, nil, nil)
	var printargs []*Node
	for i, n1 := range n.List.Slice() {
		buf := fmt.Sprintf("a%d", i)
		a := namedfield(buf, n1.Type)
		t.List.Append(a)
		printargs = append(printargs, a.Left)
	}

	// Declare the wrapper at top level: save and clear Curfn so dclfunc
	// starts a new function scope, and restore it afterwards.
	oldfn := Curfn
	Curfn = nil

	walkprintfunc_prgen++
	sym := lookupN("print·%d", walkprintfunc_prgen)
	fn := dclfunc(sym, t)

	// The wrapper body is the same print op applied to the parameters.
	a := nod(n.Op, nil, nil)
	a.List.Set(printargs)
	a = typecheck(a, Etop)
	a = walkstmt(a)

	fn.Nbody.Set1(a)

	funcbody()

	fn = typecheck(fn, Etop)
	typecheckslice(fn.Nbody.Slice(), Etop)
	xtop = append(xtop, fn)
	Curfn = oldfn

	// Replace the original statement with a call to the wrapper,
	// passing the original print arguments.
	a = nod(OCALL, nil, nil)
	a.Left = fn.Func.Nname
	a.List.Set(n.List.Slice())
	a = typecheck(a, Etop)
	a = walkexpr(a, init)
	return a
}

// substArgTypes substitutes the given list of types for
// successive occurrences of the "any" placeholder in the
// type syntax expression n.Type.
// The result of substArgTypes MUST be assigned back to old, e.g.
// 	n.Left = substArgTypes(n.Left, t1, t2)
func substArgTypes(old *Node, types_ ...*types.Type) *Node {
	n := *old // make shallow copy

	for _, t := range types_ {
		dowidth(t)
	}
	// SubstAny consumes types_ as it substitutes; anything left over
	// means the caller supplied more types than there were "any" slots.
	n.Type = types.SubstAny(n.Type, &types_)
	if len(types_) > 0 {
		Fatalf("substArgTypes: too many argument types")
	}
	return &n
}