// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"cmd/internal/objabi"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const tmpstringbufsize = 32

// walk rewrites the body of fn from high-level AST form into the lower-level
// form expected by the back end: it reports unused variables, then runs
// walkstmt/walkexpr over the body, zeroes results, and performs heap moves
// for escaping parameters. Debug['W'] dumps the body before and after.
func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	// Remember the position so the error-reporting loop below does not
	// permanently disturb lineno.
	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
			ln.Name.Defn.Left.Name.SetUsed(true)
		}
	}

	// Report "declared and not used" for each remaining unused local.
	// Names starting with '&' are compiler-generated and skipped.
	for _, ln := range fn.Func.Dcl {
		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Name.Used() {
				continue
			}
			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
			defn.Left.Name.SetUsed(true) // suppress repeats
		} else {
			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		// Don't bother lowering a body that already failed typechecking.
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	zeroResults()
	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

// walkstmtlist walks each statement in s, storing the (possibly replaced)
// node back into the slice.
func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

// samelist reports whether a and b contain the same nodes,
// compared by pointer identity, in the same order.
func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

// paramoutheap reports whether fn has an output parameter that lives on the
// heap (either a stack copy of a heap param or an address-taken result).
// It relies on Dcl ordering: parameters precede PAUTO locals, so the scan
// can stop at the first PAUTO.
func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class() {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken() {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adjustargs adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	callfunc := n.Left
	for _, arg := range callfunc.List.Slice() {
		if arg.Op != OAS {
			Fatalf("call arg not assignment")
		}
		lhs := arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			Fatalf("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	Fatalf("call arg assign not indreg(SP)")
		lhs.Xoffset += int64(adjust)
	}
}

// The result of walkstmt MUST be assigned back to n, e.g.
// 	n.Left = walkstmt(n.Left)
//
// walkstmt lowers a single statement node: expressions embedded in the
// statement are walked with walkexpr, some ops are rewritten into runtime
// calls (e.g. a discarded receive becomes chanrecv1), and defer/go/gosecure
// calls get their argument offsets adjusted.
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		if n.Typecheck() == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		// Detach the init list while walking so a replacement node
		// doesn't lose it; reattach afterwards via addinit.
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck() == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		// <-c with the result discarded becomes chanrecv1(c, nil).
		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		break

	case ODCL:
		v := n.Left
		if v.Class() == PAUTOHEAP {
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			// Replace the declaration with an assignment of the
			// preallocated storage to the variable's heap address.
			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
			nn.SetColas(true)
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		Curfn.Func.SetHasDefer(true)
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR, OFORUNTIL:
		if n.Left != nil {
			// Walk the condition with a detached init list so the
			// init statements stay attached to the condition node.
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	// gotee extension: gosecure statements accept plain function calls only.
	case OGOSECURE:
		switch n.Left.Op {
		case OCALLFUNC:
			n.Left = walkexpr(n.Left, &n.Ninit)
		default:
			Dump("nottop", n)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			for _, ln := range Curfn.Func.Dcl {
				cl := ln.Class()
				if cl == PAUTO || cl == PAUTOHEAP {
					// Dcl lists parameters first; locals mark the end.
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			break
		}

		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		n = walkrange(n)
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

// isSmallMakeSlice reports whether n is a make([]T, len, cap) whose length
// and capacity are small integer constants, so the slice's backing array can
// be allocated on the stack: the element total must stay under 1<<16 bytes.
func isSmallMakeSlice(n *Node) bool {
	if n.Op != OMAKESLICE {
		return false
	}
	l := n.Left
	r := n.Right
	if r == nil {
		// make([]T, n) — capacity defaults to the length.
		r = l
	}
	t := n.Type

	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types expressions are calculated.
// compile-time constants are evaluated.
// complex side effects like statements are appended to init
func walkexprlist(s []*Node, init *Nodes) {
	for i := range s {
		s[i] = walkexpr(s[i], init)
	}
}

// walkexprlistsafe is like walkexprlist but first makes each expression
// safe to evaluate more than once (via safeexpr).
func walkexprlistsafe(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = safeexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// walkexprlistcheap is like walkexprlist but first replaces each expression
// with a cheap (side-effect-free to re-evaluate) copy via cheapexpr.
func walkexprlistcheap(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = cheapexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
410 func convFuncName(from, to *types.Type) string { 411 tkind := to.Tie() 412 switch from.Tie() { 413 case 'I': 414 switch tkind { 415 case 'I': 416 return "convI2I" 417 } 418 case 'T': 419 switch tkind { 420 case 'E': 421 switch { 422 case from.Size() == 2 && from.Align == 2: 423 return "convT2E16" 424 case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from): 425 return "convT2E32" 426 case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from): 427 return "convT2E64" 428 case from.IsString(): 429 return "convT2Estring" 430 case from.IsSlice(): 431 return "convT2Eslice" 432 case !types.Haspointers(from): 433 return "convT2Enoptr" 434 } 435 return "convT2E" 436 case 'I': 437 switch { 438 case from.Size() == 2 && from.Align == 2: 439 return "convT2I16" 440 case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from): 441 return "convT2I32" 442 case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from): 443 return "convT2I64" 444 case from.IsString(): 445 return "convT2Istring" 446 case from.IsSlice(): 447 return "convT2Islice" 448 case !types.Haspointers(from): 449 return "convT2Inoptr" 450 } 451 return "convT2I" 452 } 453 } 454 Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie()) 455 panic("unreachable") 456 } 457 458 // The result of walkexpr MUST be assigned back to n, e.g. 459 // n.Left = walkexpr(n.Left, init) 460 func walkexpr(n *Node, init *Nodes) *Node { 461 if n == nil { 462 return n 463 } 464 465 // Eagerly checkwidth all expressions for the back end. 466 if n.Type != nil && !n.Type.WidthCalculated() { 467 switch n.Type.Etype { 468 case TBLANK, TNIL, TIDEAL: 469 default: 470 checkwidth(n.Type) 471 } 472 } 473 474 if init == &n.Ninit { 475 // not okay to use n->ninit when walking n, 476 // because we might replace n with some other node 477 // and would lose the init list. 
478 Fatalf("walkexpr init == &n->ninit") 479 } 480 481 if n.Ninit.Len() != 0 { 482 walkstmtlist(n.Ninit.Slice()) 483 init.AppendNodes(&n.Ninit) 484 } 485 486 lno := setlineno(n) 487 488 if Debug['w'] > 1 { 489 Dump("walk-before", n) 490 } 491 492 if n.Typecheck() != 1 { 493 Fatalf("missed typecheck: %+v", n) 494 } 495 496 if n.Op == ONAME && n.Class() == PAUTOHEAP { 497 nn := nod(OIND, n.Name.Param.Heapaddr, nil) 498 nn = typecheck(nn, Erv) 499 nn = walkexpr(nn, init) 500 nn.Left.SetNonNil(true) 501 return nn 502 } 503 504 opswitch: 505 switch n.Op { 506 default: 507 Dump("walk", n) 508 Fatalf("walkexpr: switch 1 unknown op %+S", n) 509 510 case ONONAME, OINDREGSP, OEMPTY, OGETG: 511 512 case OTYPE, ONAME, OLITERAL: 513 // TODO(mdempsky): Just return n; see discussion on CL 38655. 514 // Perhaps refactor to use Node.mayBeShared for these instead. 515 // If these return early, make sure to still call 516 // stringsym for constant strings. 517 518 case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER, 519 OIND, OSPTR, OITAB, OIDATA, OADDR: 520 n.Left = walkexpr(n.Left, init) 521 522 case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR: 523 n.Left = walkexpr(n.Left, init) 524 n.Right = walkexpr(n.Right, init) 525 526 case ODOT: 527 usefield(n) 528 n.Left = walkexpr(n.Left, init) 529 530 case ODOTTYPE, ODOTTYPE2: 531 n.Left = walkexpr(n.Left, init) 532 // Set up interface type addresses for back end. 533 n.Right = typename(n.Type) 534 if n.Op == ODOTTYPE { 535 n.Right.Right = typename(n.Left.Type) 536 } 537 if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 538 n.List.Set1(itabname(n.Type, n.Left.Type)) 539 } 540 541 case ODOTPTR: 542 usefield(n) 543 if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 { 544 // No actual copy will be generated, so emit an explicit nil check. 
545 n.Left = cheapexpr(n.Left, init) 546 547 checknil(n.Left, init) 548 } 549 550 n.Left = walkexpr(n.Left, init) 551 552 case OLEN, OCAP: 553 n.Left = walkexpr(n.Left, init) 554 555 // replace len(*[10]int) with 10. 556 // delayed until now to preserve side effects. 557 t := n.Left.Type 558 559 if t.IsPtr() { 560 t = t.Elem() 561 } 562 if t.IsArray() { 563 safeexpr(n.Left, init) 564 nodconst(n, n.Type, t.NumElem()) 565 n.SetTypecheck(1) 566 } 567 568 case OLSH, ORSH: 569 n.Left = walkexpr(n.Left, init) 570 n.Right = walkexpr(n.Right, init) 571 t := n.Left.Type 572 n.SetBounded(bounded(n.Right, 8*t.Width)) 573 if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { 574 Warn("shift bounds check elided") 575 } 576 577 case OCOMPLEX: 578 // Use results from call expression as arguments for complex. 579 if n.Left == nil && n.Right == nil { 580 n.Left = n.List.First() 581 n.Right = n.List.Second() 582 } 583 n.Left = walkexpr(n.Left, init) 584 n.Right = walkexpr(n.Right, init) 585 586 case OEQ, ONE: 587 n.Left = walkexpr(n.Left, init) 588 n.Right = walkexpr(n.Right, init) 589 590 // Disable safemode while compiling this code: the code we 591 // generate internally can refer to unsafe.Pointer. 592 // In this case it can happen if we need to generate an == 593 // for a struct containing a reflect.Value, which itself has 594 // an unexported field of type unsafe.Pointer. 595 old_safemode := safemode 596 safemode = false 597 n = walkcompare(n, init) 598 safemode = old_safemode 599 600 case OANDAND, OOROR: 601 n.Left = walkexpr(n.Left, init) 602 603 // cannot put side effects from n.Right on init, 604 // because they cannot run before n.Left is checked. 605 // save elsewhere and store on the eventual n.Right. 
606 var ll Nodes 607 608 n.Right = walkexpr(n.Right, &ll) 609 n.Right = addinit(n.Right, ll.Slice()) 610 n = walkinrange(n, init) 611 612 case OPRINT, OPRINTN: 613 walkexprlist(n.List.Slice(), init) 614 n = walkprint(n, init) 615 616 case OPANIC: 617 n = mkcall("gopanic", nil, init, n.Left) 618 619 case ORECOVER: 620 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 621 622 case OCLOSUREVAR, OCFUNC: 623 n.SetAddable(true) 624 625 case OCALLINTER: 626 usemethod(n) 627 t := n.Left.Type 628 if n.List.Len() != 0 && n.List.First().Op == OAS { 629 break 630 } 631 n.Left = walkexpr(n.Left, init) 632 walkexprlist(n.List.Slice(), init) 633 ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 634 n.List.Set(reorder1(ll)) 635 636 case OCALLFUNC: 637 if n.Left.Op == OCLOSURE { 638 // Transform direct call of a closure to call of a normal function. 639 // transformclosure already did all preparation work. 640 641 // Prepend captured variables to argument list. 642 n.List.Prepend(n.Left.Func.Enter.Slice()...) 643 644 n.Left.Func.Enter.Set(nil) 645 646 // Replace OCLOSURE with ONAME/PFUNC. 647 n.Left = n.Left.Func.Closure.Func.Nname 648 649 // Update type of OCALLFUNC node. 650 // Output arguments had not changed, but their offsets could. 
651 if n.Left.Type.NumResults() == 1 { 652 n.Type = n.Left.Type.Results().Field(0).Type 653 } else { 654 n.Type = n.Left.Type.Results() 655 } 656 } 657 658 t := n.Left.Type 659 if n.List.Len() != 0 && n.List.First().Op == OAS { 660 break 661 } 662 663 n.Left = walkexpr(n.Left, init) 664 walkexprlist(n.List.Slice(), init) 665 666 ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 667 n.List.Set(reorder1(ll)) 668 669 case OCALLMETH: 670 t := n.Left.Type 671 if n.List.Len() != 0 && n.List.First().Op == OAS { 672 break 673 } 674 n.Left = walkexpr(n.Left, init) 675 walkexprlist(n.List.Slice(), init) 676 ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 677 lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 678 ll = append(ll, lr...) 679 n.Left.Left = nil 680 updateHasCall(n.Left) 681 n.List.Set(reorder1(ll)) 682 683 case OAS: 684 init.AppendNodes(&n.Ninit) 685 686 n.Left = walkexpr(n.Left, init) 687 n.Left = safeexpr(n.Left, init) 688 689 if oaslit(n, init) { 690 break 691 } 692 693 if n.Right == nil { 694 // TODO(austin): Check all "implicit zeroing" 695 break 696 } 697 698 if !instrumenting && iszero(n.Right) { 699 break 700 } 701 702 switch n.Right.Op { 703 default: 704 n.Right = walkexpr(n.Right, init) 705 706 case ORECV: 707 // x = <-c; n.Left is x, n.Right.Left is c. 708 // orderstmt made sure x is addressable. 709 n.Right.Left = walkexpr(n.Right.Left, init) 710 711 n1 := nod(OADDR, n.Left, nil) 712 r := n.Right.Left // the channel 713 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1) 714 n = walkexpr(n, init) 715 break opswitch 716 717 case OAPPEND: 718 // x = append(...) 719 r := n.Right 720 if r.Type.Elem().NotInHeap() { 721 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 722 } 723 if r.Isddd() { 724 r = appendslice(r, init) // also works for append(slice, string). 
725 } else { 726 r = walkappend(r, init, n) 727 } 728 n.Right = r 729 if r.Op == OAPPEND { 730 // Left in place for back end. 731 // Do not add a new write barrier. 732 // Set up address of type for back end. 733 r.Left = typename(r.Type.Elem()) 734 break opswitch 735 } 736 // Otherwise, lowered for race detector. 737 // Treat as ordinary assignment. 738 } 739 740 if n.Left != nil && n.Right != nil { 741 n = convas(n, init) 742 } 743 744 case OAS2: 745 init.AppendNodes(&n.Ninit) 746 walkexprlistsafe(n.List.Slice(), init) 747 walkexprlistsafe(n.Rlist.Slice(), init) 748 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 749 ll = reorder3(ll) 750 n = liststmt(ll) 751 752 // a,b,... = fn() 753 case OAS2FUNC: 754 init.AppendNodes(&n.Ninit) 755 756 r := n.Rlist.First() 757 walkexprlistsafe(n.List.Slice(), init) 758 r = walkexpr(r, init) 759 760 if isIntrinsicCall(r) { 761 n.Rlist.Set1(r) 762 break 763 } 764 init.Append(r) 765 766 ll := ascompatet(n.List, r.Type) 767 n = liststmt(ll) 768 769 // x, y = <-c 770 // orderstmt made sure x is addressable. 
771 case OAS2RECV: 772 init.AppendNodes(&n.Ninit) 773 774 r := n.Rlist.First() 775 walkexprlistsafe(n.List.Slice(), init) 776 r.Left = walkexpr(r.Left, init) 777 var n1 *Node 778 if isblank(n.List.First()) { 779 n1 = nodnil() 780 } else { 781 n1 = nod(OADDR, n.List.First(), nil) 782 } 783 n1.Etype = 1 // addr does not escape 784 fn := chanfn("chanrecv2", 2, r.Left.Type) 785 ok := n.List.Second() 786 call := mkcall1(fn, ok.Type, init, r.Left, n1) 787 n = nod(OAS, ok, call) 788 n = typecheck(n, Etop) 789 790 // a,b = m[i] 791 case OAS2MAPR: 792 init.AppendNodes(&n.Ninit) 793 794 r := n.Rlist.First() 795 walkexprlistsafe(n.List.Slice(), init) 796 r.Left = walkexpr(r.Left, init) 797 r.Right = walkexpr(r.Right, init) 798 t := r.Left.Type 799 800 fast := mapfast(t) 801 var key *Node 802 if fast != mapslow { 803 // fast versions take key by value 804 key = r.Right 805 } else { 806 // standard version takes key by reference 807 // orderexpr made sure key is addressable. 808 key = nod(OADDR, r.Right, nil) 809 } 810 811 // from: 812 // a,b = m[i] 813 // to: 814 // var,b = mapaccess2*(t, m, i) 815 // a = *var 816 a := n.List.First() 817 818 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 819 fn := mapfn(mapaccess2[fast], t) 820 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 821 } else { 822 fn := mapfn("mapaccess2_fat", t) 823 z := zeroaddr(w) 824 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 825 } 826 827 // mapaccess2* returns a typed bool, but due to spec changes, 828 // the boolean result of i.(T) is now untyped so we make it the 829 // same type as the variable on the lhs. 
830 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 831 r.Type.Field(1).Type = ok.Type 832 } 833 n.Rlist.Set1(r) 834 n.Op = OAS2FUNC 835 836 // don't generate a = *var if a is _ 837 if !isblank(a) { 838 var_ := temp(types.NewPtr(t.Val())) 839 var_.SetTypecheck(1) 840 var_.SetNonNil(true) // mapaccess always returns a non-nil pointer 841 n.List.SetFirst(var_) 842 n = walkexpr(n, init) 843 init.Append(n) 844 n = nod(OAS, a, nod(OIND, var_, nil)) 845 } 846 847 n = typecheck(n, Etop) 848 n = walkexpr(n, init) 849 850 case ODELETE: 851 init.AppendNodes(&n.Ninit) 852 map_ := n.List.First() 853 key := n.List.Second() 854 map_ = walkexpr(map_, init) 855 key = walkexpr(key, init) 856 857 t := map_.Type 858 fast := mapfast(t) 859 if fast == mapslow { 860 // orderstmt made sure key is addressable. 861 key = nod(OADDR, key, nil) 862 } 863 n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key) 864 865 case OAS2DOTTYPE: 866 walkexprlistsafe(n.List.Slice(), init) 867 n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init)) 868 869 case OCONVIFACE: 870 n.Left = walkexpr(n.Left, init) 871 872 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 873 if isdirectiface(n.Left.Type) { 874 var t *Node 875 if n.Type.IsEmptyInterface() { 876 t = typename(n.Left.Type) 877 } else { 878 t = itabname(n.Left.Type, n.Type) 879 } 880 l := nod(OEFACE, t, n.Left) 881 l.Type = n.Type 882 l.SetTypecheck(n.Typecheck()) 883 n = l 884 break 885 } 886 887 if staticbytes == nil { 888 staticbytes = newname(Runtimepkg.Lookup("staticbytes")) 889 staticbytes.SetClass(PEXTERN) 890 staticbytes.Type = types.NewArray(types.Types[TUINT8], 256) 891 zerobase = newname(Runtimepkg.Lookup("zerobase")) 892 zerobase.SetClass(PEXTERN) 893 zerobase.Type = types.Types[TUINTPTR] 894 } 895 896 // Optimize convT2{E,I} for many cases in which T is not pointer-shaped, 897 // by using an existing addressable value identical to n.Left 898 // or creating one on the stack. 
899 var value *Node 900 switch { 901 case n.Left.Type.Size() == 0: 902 // n.Left is zero-sized. Use zerobase. 903 cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246. 904 value = zerobase 905 case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()): 906 // n.Left is a bool/byte. Use staticbytes[n.Left]. 907 n.Left = cheapexpr(n.Left, init) 908 value = nod(OINDEX, staticbytes, byteindex(n.Left)) 909 value.SetBounded(true) 910 case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly(): 911 // n.Left is a readonly global; use it directly. 912 value = n.Left 913 case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024: 914 // n.Left does not escape. Use a stack temporary initialized to n.Left. 915 value = temp(n.Left.Type) 916 init.Append(typecheck(nod(OAS, value, n.Left), Etop)) 917 } 918 919 if value != nil { 920 // Value is identical to n.Left. 921 // Construct the interface directly: {type/itab, &value}. 922 var t *Node 923 if n.Type.IsEmptyInterface() { 924 t = typename(n.Left.Type) 925 } else { 926 t = itabname(n.Left.Type, n.Type) 927 } 928 l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv)) 929 l.Type = n.Type 930 l.SetTypecheck(n.Typecheck()) 931 n = l 932 break 933 } 934 935 // Implement interface to empty interface conversion. 936 // tmp = i.itab 937 // if tmp != nil { 938 // tmp = tmp.type 939 // } 940 // e = iface{tmp, i.data} 941 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 942 // Evaluate the input interface. 943 c := temp(n.Left.Type) 944 init.Append(nod(OAS, c, n.Left)) 945 946 // Get the itab out of the interface. 947 tmp := temp(types.NewPtr(types.Types[TUINT8])) 948 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 949 950 // Get the type out of the itab. 
951 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 952 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 953 init.Append(nif) 954 955 // Build the result. 956 e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8]))) 957 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 958 e.SetTypecheck(1) 959 n = e 960 break 961 } 962 963 var ll []*Node 964 if n.Type.IsEmptyInterface() { 965 if !n.Left.Type.IsInterface() { 966 ll = append(ll, typename(n.Left.Type)) 967 } 968 } else { 969 if n.Left.Type.IsInterface() { 970 ll = append(ll, typename(n.Type)) 971 } else { 972 ll = append(ll, itabname(n.Left.Type, n.Type)) 973 } 974 } 975 976 if n.Left.Type.IsInterface() { 977 ll = append(ll, n.Left) 978 } else { 979 // regular types are passed by reference to avoid C vararg calls 980 // orderexpr arranged for n.Left to be a temporary for all 981 // the conversions it could see. comparison of an interface 982 // with a non-interface, especially in a switch on interface value 983 // with non-interface cases, is not visible to orderstmt, so we 984 // have to fall back on allocating a temp here. 985 if islvalue(n.Left) { 986 ll = append(ll, nod(OADDR, n.Left, nil)) 987 } else { 988 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 989 } 990 dowidth(n.Left.Type) 991 } 992 993 fn := syslook(convFuncName(n.Left.Type, n.Type)) 994 fn = substArgTypes(fn, n.Left.Type, n.Type) 995 dowidth(fn.Type) 996 n = nod(OCALL, fn, nil) 997 n.List.Set(ll) 998 n = typecheck(n, Erv) 999 n = walkexpr(n, init) 1000 1001 case OCONV, OCONVNOP: 1002 if thearch.SoftFloat { 1003 // For the soft-float case, ssa.go handles these conversions. 
1004 goto oconv_walkexpr 1005 } 1006 switch thearch.LinkArch.Family { 1007 case sys.ARM, sys.MIPS: 1008 if n.Left.Type.IsFloat() { 1009 switch n.Type.Etype { 1010 case TINT64: 1011 n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1012 break opswitch 1013 case TUINT64: 1014 n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1015 break opswitch 1016 } 1017 } 1018 1019 if n.Type.IsFloat() { 1020 switch n.Left.Type.Etype { 1021 case TINT64: 1022 n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type) 1023 break opswitch 1024 case TUINT64: 1025 n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type) 1026 break opswitch 1027 } 1028 } 1029 1030 case sys.I386: 1031 if n.Left.Type.IsFloat() { 1032 switch n.Type.Etype { 1033 case TINT64: 1034 n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1035 break opswitch 1036 case TUINT64: 1037 n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1038 break opswitch 1039 case TUINT32, TUINT, TUINTPTR: 1040 n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1041 break opswitch 1042 } 1043 } 1044 if n.Type.IsFloat() { 1045 switch n.Left.Type.Etype { 1046 case TINT64: 1047 n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type) 1048 break opswitch 1049 case TUINT64: 1050 n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type) 1051 break opswitch 1052 case TUINT32, TUINT, TUINTPTR: 1053 n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type) 1054 break opswitch 1055 } 1056 } 1057 } 1058 1059 oconv_walkexpr: 1060 n.Left = walkexpr(n.Left, init) 1061 1062 case OANDNOT: 1063 n.Left = walkexpr(n.Left, init) 1064 n.Op = OAND 1065 n.Right = 
nod(OCOM, n.Right, nil) 1066 n.Right = typecheck(n.Right, Erv) 1067 n.Right = walkexpr(n.Right, init) 1068 1069 case ODIV, OMOD: 1070 n.Left = walkexpr(n.Left, init) 1071 n.Right = walkexpr(n.Right, init) 1072 1073 // rewrite complex div into function call. 1074 et := n.Left.Type.Etype 1075 1076 if isComplex[et] && n.Op == ODIV { 1077 t := n.Type 1078 n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128])) 1079 n = conv(n, t) 1080 break 1081 } 1082 1083 // Nothing to do for float divisions. 1084 if isFloat[et] { 1085 break 1086 } 1087 1088 // rewrite 64-bit div and mod on 32-bit architectures. 1089 // TODO: Remove this code once we can introduce 1090 // runtime calls late in SSA processing. 1091 if Widthreg < 8 && (et == TINT64 || et == TUINT64) { 1092 if n.Right.Op == OLITERAL { 1093 // Leave div/mod by constant powers of 2. 1094 // The SSA backend will handle those. 1095 switch et { 1096 case TINT64: 1097 c := n.Right.Int64() 1098 if c < 0 { 1099 c = -c 1100 } 1101 if c != 0 && c&(c-1) == 0 { 1102 break opswitch 1103 } 1104 case TUINT64: 1105 c := uint64(n.Right.Int64()) 1106 if c != 0 && c&(c-1) == 0 { 1107 break opswitch 1108 } 1109 } 1110 } 1111 var fn string 1112 if et == TINT64 { 1113 fn = "int64" 1114 } else { 1115 fn = "uint64" 1116 } 1117 if n.Op == ODIV { 1118 fn += "div" 1119 } else { 1120 fn += "mod" 1121 } 1122 n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et])) 1123 } 1124 1125 case OINDEX: 1126 n.Left = walkexpr(n.Left, init) 1127 1128 // save the original node for bounds checking elision. 1129 // If it was a ODIV/OMOD walk might rewrite it. 1130 r := n.Right 1131 1132 n.Right = walkexpr(n.Right, init) 1133 1134 // if range of type cannot exceed static array bound, 1135 // disable bounds check. 
1136 if n.Bounded() { 1137 break 1138 } 1139 t := n.Left.Type 1140 if t != nil && t.IsPtr() { 1141 t = t.Elem() 1142 } 1143 if t.IsArray() { 1144 n.SetBounded(bounded(r, t.NumElem())) 1145 if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) { 1146 Warn("index bounds check elided") 1147 } 1148 if smallintconst(n.Right) && !n.Bounded() { 1149 yyerror("index out of bounds") 1150 } 1151 } else if Isconst(n.Left, CTSTR) { 1152 n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string))))) 1153 if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) { 1154 Warn("index bounds check elided") 1155 } 1156 if smallintconst(n.Right) && !n.Bounded() { 1157 yyerror("index out of bounds") 1158 } 1159 } 1160 1161 if Isconst(n.Right, CTINT) { 1162 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1163 yyerror("index out of bounds") 1164 } 1165 } 1166 1167 case OINDEXMAP: 1168 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1169 n.Left = walkexpr(n.Left, init) 1170 n.Right = walkexpr(n.Right, init) 1171 map_ := n.Left 1172 key := n.Right 1173 t := map_.Type 1174 if n.Etype == 1 { 1175 // This m[k] expression is on the left-hand side of an assignment. 1176 fast := mapfast(t) 1177 if fast == mapslow { 1178 // standard version takes key by reference. 1179 // orderexpr made sure key is addressable. 1180 key = nod(OADDR, key, nil) 1181 } 1182 n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key) 1183 } else { 1184 // m[k] is not the target of an assignment. 1185 fast := mapfast(t) 1186 if fast == mapslow { 1187 // standard version takes key by reference. 1188 // orderexpr made sure key is addressable. 
1189 key = nod(OADDR, key, nil) 1190 } 1191 1192 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1193 n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key) 1194 } else { 1195 z := zeroaddr(w) 1196 n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z) 1197 } 1198 } 1199 n.Type = types.NewPtr(t.Val()) 1200 n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers. 1201 n = nod(OIND, n, nil) 1202 n.Type = t.Val() 1203 n.SetTypecheck(1) 1204 1205 case ORECV: 1206 Fatalf("walkexpr ORECV") // should see inside OAS only 1207 1208 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1209 n.Left = walkexpr(n.Left, init) 1210 low, high, max := n.SliceBounds() 1211 low = walkexpr(low, init) 1212 if low != nil && iszero(low) { 1213 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1214 low = nil 1215 } 1216 high = walkexpr(high, init) 1217 max = walkexpr(max, init) 1218 n.SetSliceBounds(low, high, max) 1219 if n.Op.IsSlice3() { 1220 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1221 // Reduce x[i:j:cap(x)] to x[i:j]. 
1222 if n.Op == OSLICE3 { 1223 n.Op = OSLICE 1224 } else { 1225 n.Op = OSLICEARR 1226 } 1227 n = reduceSlice(n) 1228 } 1229 } else { 1230 n = reduceSlice(n) 1231 } 1232 1233 case ONEW: 1234 if n.Esc == EscNone { 1235 if n.Type.Elem().Width >= 1<<16 { 1236 Fatalf("large ONEW with EscNone: %v", n) 1237 } 1238 r := temp(n.Type.Elem()) 1239 r = nod(OAS, r, nil) // zero temp 1240 r = typecheck(r, Etop) 1241 init.Append(r) 1242 r = nod(OADDR, r.Left, nil) 1243 r = typecheck(r, Erv) 1244 n = r 1245 } else { 1246 n = callnew(n.Type.Elem()) 1247 } 1248 1249 case OCMPSTR: 1250 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1251 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1252 // TODO(marvin): Fix Node.EType type union. 1253 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1254 r = typecheck(r, Erv) 1255 r = walkexpr(r, init) 1256 r.Type = n.Type 1257 n = r 1258 break 1259 } 1260 1261 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1262 var cs, ncs *Node // const string, non-const string 1263 switch { 1264 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1265 // ignore; will be constant evaluated 1266 case Isconst(n.Left, CTSTR): 1267 cs = n.Left 1268 ncs = n.Right 1269 case Isconst(n.Right, CTSTR): 1270 cs = n.Right 1271 ncs = n.Left 1272 } 1273 if cs != nil { 1274 cmp := Op(n.Etype) 1275 // Our comparison below assumes that the non-constant string 1276 // is on the left hand side, so rewrite "" cmp x to x cmp "". 1277 // See issue 24817. 1278 if Isconst(n.Left, CTSTR) { 1279 cmp = brrev(cmp) 1280 } 1281 1282 // maxRewriteLen was chosen empirically. 1283 // It is the value that minimizes cmd/go file size 1284 // across most architectures. 1285 // See the commit description for CL 26758 for details. 
1286 maxRewriteLen := 6 1287 // Some architectures can load unaligned byte sequence as 1 word. 1288 // So we can cover longer strings with the same amount of code. 1289 canCombineLoads := false 1290 combine64bit := false 1291 // TODO: does this improve performance on any other architectures? 1292 switch thearch.LinkArch.Family { 1293 case sys.AMD64: 1294 // Larger compare require longer instructions, so keep this reasonably low. 1295 // Data from CL 26758 shows that longer strings are rare. 1296 // If we really want we can do 16 byte SSE comparisons in the future. 1297 maxRewriteLen = 16 1298 canCombineLoads = true 1299 combine64bit = true 1300 case sys.I386: 1301 maxRewriteLen = 8 1302 canCombineLoads = true 1303 } 1304 var and Op 1305 switch cmp { 1306 case OEQ: 1307 and = OANDAND 1308 case ONE: 1309 and = OOROR 1310 default: 1311 // Don't do byte-wise comparisons for <, <=, etc. 1312 // They're fairly complicated. 1313 // Length-only checks are ok, though. 1314 maxRewriteLen = 0 1315 } 1316 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1317 if len(s) > 0 { 1318 ncs = safeexpr(ncs, init) 1319 } 1320 // TODO(marvin): Fix Node.EType type union. 
1321 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1322 remains := len(s) 1323 for i := 0; remains > 0; { 1324 if remains == 1 || !canCombineLoads { 1325 cb := nodintconst(int64(s[i])) 1326 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1327 r = nod(and, r, nod(cmp, ncb, cb)) 1328 remains-- 1329 i++ 1330 continue 1331 } 1332 var step int 1333 var convType *types.Type 1334 switch { 1335 case remains >= 8 && combine64bit: 1336 convType = types.Types[TINT64] 1337 step = 8 1338 case remains >= 4: 1339 convType = types.Types[TUINT32] 1340 step = 4 1341 case remains >= 2: 1342 convType = types.Types[TUINT16] 1343 step = 2 1344 } 1345 ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i))) 1346 ncsubstr = conv(ncsubstr, convType) 1347 csubstr := int64(s[i]) 1348 // Calculate large constant from bytes as sequence of shifts and ors. 1349 // Like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ... 1350 // ssa will combine this into a single large load. 1351 for offset := 1; offset < step; offset++ { 1352 b := nod(OINDEX, ncs, nodintconst(int64(i+offset))) 1353 b = conv(b, convType) 1354 b = nod(OLSH, b, nodintconst(int64(8*offset))) 1355 ncsubstr = nod(OOR, ncsubstr, b) 1356 csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset) 1357 } 1358 csubstrPart := nodintconst(csubstr) 1359 // Compare "step" bytes as once 1360 r = nod(and, r, nod(cmp, csubstrPart, ncsubstr)) 1361 remains -= step 1362 i += step 1363 } 1364 r = typecheck(r, Erv) 1365 r = walkexpr(r, init) 1366 r.Type = n.Type 1367 n = r 1368 break 1369 } 1370 } 1371 1372 var r *Node 1373 // TODO(marvin): Fix Node.EType type union. 
1374 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1375 // prepare for rewrite below 1376 n.Left = cheapexpr(n.Left, init) 1377 n.Right = cheapexpr(n.Right, init) 1378 1379 lstr := conv(n.Left, types.Types[TSTRING]) 1380 rstr := conv(n.Right, types.Types[TSTRING]) 1381 lptr := nod(OSPTR, lstr, nil) 1382 rptr := nod(OSPTR, rstr, nil) 1383 llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR]) 1384 rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR]) 1385 1386 fn := syslook("memequal") 1387 fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8]) 1388 r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen) 1389 1390 // quick check of len before full compare for == or !=. 1391 // memequal then tests equality up to length len. 1392 // TODO(marvin): Fix Node.EType type union. 1393 if Op(n.Etype) == OEQ { 1394 // len(left) == len(right) && memequal(left, right, len) 1395 r = nod(OANDAND, nod(OEQ, llen, rlen), r) 1396 } else { 1397 // len(left) != len(right) || !memequal(left, right, len) 1398 r = nod(ONOT, r, nil) 1399 r = nod(OOROR, nod(ONE, llen, rlen), r) 1400 } 1401 1402 r = typecheck(r, Erv) 1403 r = walkexpr(r, nil) 1404 } else { 1405 // sys_cmpstring(s1, s2) :: 0 1406 r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING])) 1407 // TODO(marvin): Fix Node.EType type union. 1408 r = nod(Op(n.Etype), r, nodintconst(0)) 1409 } 1410 1411 r = typecheck(r, Erv) 1412 if !n.Type.IsBoolean() { 1413 Fatalf("cmp %v", n.Type) 1414 } 1415 r.Type = n.Type 1416 n = r 1417 1418 case OADDSTR: 1419 n = addstr(n, init) 1420 1421 case OAPPEND: 1422 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 
1423 Fatalf("append outside assignment") 1424 1425 case OCOPY: 1426 n = copyany(n, init, instrumenting && !compiling_runtime) 1427 1428 // cannot use chanfn - closechan takes any, not chan any 1429 case OCLOSE: 1430 fn := syslook("closechan") 1431 1432 fn = substArgTypes(fn, n.Left.Type) 1433 n = mkcall1(fn, nil, init, n.Left) 1434 1435 case OMAKECHAN: 1436 // When size fits into int, use makechan instead of 1437 // makechan64, which is faster and shorter on 32 bit platforms. 1438 size := n.Left 1439 fnname := "makechan64" 1440 argtype := types.Types[TINT64] 1441 1442 // Type checking guarantees that TIDEAL size is positive and fits in an int. 1443 // The case of size overflow when converting TUINT or TUINTPTR to TINT 1444 // will be handled by the negative range checks in makechan during runtime. 1445 if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 { 1446 fnname = "makechan" 1447 argtype = types.Types[TINT] 1448 } 1449 1450 n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype)) 1451 1452 case OMAKEMAP: 1453 t := n.Type 1454 hmapType := hmap(t) 1455 hint := n.Left 1456 1457 // var h *hmap 1458 var h *Node 1459 if n.Esc == EscNone { 1460 // Allocate hmap on stack. 1461 1462 // var hv hmap 1463 hv := temp(hmapType) 1464 zero := nod(OAS, hv, nil) 1465 zero = typecheck(zero, Etop) 1466 init.Append(zero) 1467 // h = &hv 1468 h = nod(OADDR, hv, nil) 1469 1470 // Allocate one bucket pointed to by hmap.buckets on stack if hint 1471 // is not larger than BUCKETSIZE. In case hint is larger than 1472 // BUCKETSIZE runtime.makemap will allocate the buckets on the heap. 1473 // Maximum key and value size is 128 bytes, larger objects 1474 // are stored with an indirection. So max bucket size is 2048+eps. 
1475 if !Isconst(hint, CTINT) || 1476 !(hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) > 0) { 1477 // var bv bmap 1478 bv := temp(bmap(t)) 1479 1480 zero = nod(OAS, bv, nil) 1481 zero = typecheck(zero, Etop) 1482 init.Append(zero) 1483 1484 // b = &bv 1485 b := nod(OADDR, bv, nil) 1486 1487 // h.buckets = b 1488 bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap 1489 na := nod(OAS, nodSym(ODOT, h, bsym), b) 1490 na = typecheck(na, Etop) 1491 init.Append(na) 1492 } 1493 } 1494 1495 if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 { 1496 // Handling make(map[any]any) and 1497 // make(map[any]any, hint) where hint <= BUCKETSIZE 1498 // special allows for faster map initialization and 1499 // improves binary size by using calls with fewer arguments. 1500 // For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false 1501 // and no buckets will be allocated by makemap. Therefore, 1502 // no buckets need to be allocated in this code path. 1503 if n.Esc == EscNone { 1504 // Only need to initialize h.hash0 since 1505 // hmap h has been allocated on the stack already. 1506 // h.hash0 = fastrand() 1507 rand := mkcall("fastrand", types.Types[TUINT32], init) 1508 hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap 1509 a := nod(OAS, nodSym(ODOT, h, hashsym), rand) 1510 a = typecheck(a, Etop) 1511 a = walkexpr(a, init) 1512 init.Append(a) 1513 n = nod(OCONVNOP, h, nil) 1514 n.Type = t 1515 n = typecheck(n, Erv) 1516 } else { 1517 // Call runtime.makehmap to allocate an 1518 // hmap on the heap and initialize hmap's hash0 field. 1519 fn := syslook("makemap_small") 1520 fn = substArgTypes(fn, t.Key(), t.Val()) 1521 n = mkcall1(fn, n.Type, init) 1522 } 1523 } else { 1524 if n.Esc != EscNone { 1525 h = nodnil() 1526 } 1527 // Map initialization with a variable or large hint is 1528 // more complicated. We therefore generate a call to 1529 // runtime.makemap to intialize hmap and allocate the 1530 // map buckets. 
1531 1532 // When hint fits into int, use makemap instead of 1533 // makemap64, which is faster and shorter on 32 bit platforms. 1534 fnname := "makemap64" 1535 argtype := types.Types[TINT64] 1536 1537 // Type checking guarantees that TIDEAL hint is positive and fits in an int. 1538 // See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function. 1539 // The case of hint overflow when converting TUINT or TUINTPTR to TINT 1540 // will be handled by the negative range checks in makemap during runtime. 1541 if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 { 1542 fnname = "makemap" 1543 argtype = types.Types[TINT] 1544 } 1545 1546 fn := syslook(fnname) 1547 fn = substArgTypes(fn, hmapType, t.Key(), t.Val()) 1548 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h) 1549 } 1550 1551 case OMAKESLICE: 1552 l := n.Left 1553 r := n.Right 1554 if r == nil { 1555 r = safeexpr(l, init) 1556 l = r 1557 } 1558 t := n.Type 1559 if n.Esc == EscNone { 1560 if !isSmallMakeSlice(n) { 1561 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1562 } 1563 // var arr [r]T 1564 // n = arr[:l] 1565 t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T 1566 var_ := temp(t) 1567 a := nod(OAS, var_, nil) // zero temp 1568 a = typecheck(a, Etop) 1569 init.Append(a) 1570 r := nod(OSLICE, var_, nil) // arr[:l] 1571 r.SetSliceBounds(nil, l, nil) 1572 r = conv(r, n.Type) // in case n.Type is named. 1573 r = typecheck(r, Erv) 1574 r = walkexpr(r, init) 1575 n = r 1576 } else { 1577 // n escapes; set up a call to makeslice. 1578 // When len and cap can fit into int, use makeslice instead of 1579 // makeslice64, which is faster and shorter on 32 bit platforms. 
1580 1581 if t.Elem().NotInHeap() { 1582 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1583 } 1584 1585 len, cap := l, r 1586 1587 fnname := "makeslice64" 1588 argtype := types.Types[TINT64] 1589 1590 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int. 1591 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1592 // will be handled by the negative range checks in makeslice during runtime. 1593 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1594 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1595 fnname = "makeslice" 1596 argtype = types.Types[TINT] 1597 } 1598 1599 fn := syslook(fnname) 1600 fn = substArgTypes(fn, t.Elem()) // any-1 1601 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1602 } 1603 1604 case ORUNESTR: 1605 a := nodnil() 1606 if n.Esc == EscNone { 1607 t := types.NewArray(types.Types[TUINT8], 4) 1608 var_ := temp(t) 1609 a = nod(OADDR, var_, nil) 1610 } 1611 1612 // intstring(*[4]byte, rune) 1613 n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64])) 1614 1615 case OARRAYBYTESTR: 1616 a := nodnil() 1617 if n.Esc == EscNone { 1618 // Create temporary buffer for string on stack. 1619 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1620 1621 a = nod(OADDR, temp(t), nil) 1622 } 1623 1624 // slicebytetostring(*[32]byte, []byte) string; 1625 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1626 1627 // slicebytetostringtmp([]byte) string; 1628 case OARRAYBYTESTRTMP: 1629 n.Left = walkexpr(n.Left, init) 1630 1631 if !instrumenting { 1632 // Let the backend handle OARRAYBYTESTRTMP directly 1633 // to avoid a function call to slicebytetostringtmp. 
1634 break 1635 } 1636 1637 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1638 1639 // slicerunetostring(*[32]byte, []rune) string; 1640 case OARRAYRUNESTR: 1641 a := nodnil() 1642 1643 if n.Esc == EscNone { 1644 // Create temporary buffer for string on stack. 1645 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1646 1647 a = nod(OADDR, temp(t), nil) 1648 } 1649 1650 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1651 1652 // stringtoslicebyte(*32[byte], string) []byte; 1653 case OSTRARRAYBYTE: 1654 a := nodnil() 1655 1656 if n.Esc == EscNone { 1657 // Create temporary buffer for slice on stack. 1658 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1659 1660 a = nod(OADDR, temp(t), nil) 1661 } 1662 1663 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING])) 1664 1665 case OSTRARRAYBYTETMP: 1666 // []byte(string) conversion that creates a slice 1667 // referring to the actual string bytes. 1668 // This conversion is handled later by the backend and 1669 // is only for use by internal compiler optimizations 1670 // that know that the slice won't be mutated. 1671 // The only such case today is: 1672 // for i, c := range []byte(string) 1673 n.Left = walkexpr(n.Left, init) 1674 1675 // stringtoslicerune(*[32]rune, string) []rune 1676 case OSTRARRAYRUNE: 1677 a := nodnil() 1678 1679 if n.Esc == EscNone { 1680 // Create temporary buffer for slice on stack. 
1681 t := types.NewArray(types.Types[TINT32], tmpstringbufsize) 1682 1683 a = nod(OADDR, temp(t), nil) 1684 } 1685 1686 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1687 1688 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1689 case OCMPIFACE: 1690 if !eqtype(n.Left.Type, n.Right.Type) { 1691 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1692 } 1693 var fn *Node 1694 if n.Left.Type.IsEmptyInterface() { 1695 fn = syslook("efaceeq") 1696 } else { 1697 fn = syslook("ifaceeq") 1698 } 1699 1700 n.Right = cheapexpr(n.Right, init) 1701 n.Left = cheapexpr(n.Left, init) 1702 lt := nod(OITAB, n.Left, nil) 1703 rt := nod(OITAB, n.Right, nil) 1704 ld := nod(OIDATA, n.Left, nil) 1705 rd := nod(OIDATA, n.Right, nil) 1706 ld.Type = types.Types[TUNSAFEPTR] 1707 rd.Type = types.Types[TUNSAFEPTR] 1708 ld.SetTypecheck(1) 1709 rd.SetTypecheck(1) 1710 call := mkcall1(fn, n.Type, init, lt, ld, rd) 1711 1712 // Check itable/type before full compare. 1713 // Note: short-circuited because order matters. 1714 // TODO(marvin): Fix Node.EType type union. 1715 var cmp *Node 1716 if Op(n.Etype) == OEQ { 1717 cmp = nod(OANDAND, nod(OEQ, lt, rt), call) 1718 } else { 1719 cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil)) 1720 } 1721 cmp = typecheck(cmp, Erv) 1722 cmp = walkexpr(cmp, init) 1723 cmp.Type = n.Type 1724 n = cmp 1725 1726 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1727 if isStaticCompositeLiteral(n) && !canSSAType(n.Type) { 1728 // n can be directly represented in the read-only data section. 1729 // Make direct reference to the static data. See issue 12841. 
1730 vstat := staticname(n.Type) 1731 vstat.Name.SetReadonly(true) 1732 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1733 n = vstat 1734 n = typecheck(n, Erv) 1735 break 1736 } 1737 var_ := temp(n.Type) 1738 anylit(n, var_, init) 1739 n = var_ 1740 1741 case OSEND: 1742 n1 := n.Right 1743 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1744 n1 = walkexpr(n1, init) 1745 n1 = nod(OADDR, n1, nil) 1746 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1) 1747 1748 case OCLOSURE: 1749 n = walkclosure(n, init) 1750 1751 case OCALLPART: 1752 n = walkpartialcall(n, init) 1753 } 1754 1755 // Expressions that are constant at run time but not 1756 // considered const by the language spec are not turned into 1757 // constants until walk. For example, if n is y%1 == 0, the 1758 // walk of y%1 may have replaced it by 0. 1759 // Check whether n with its updated args is itself now a constant. 1760 t := n.Type 1761 evconst(n) 1762 if n.Type != t { 1763 Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type) 1764 } 1765 if n.Op == OLITERAL { 1766 n = typecheck(n, Erv) 1767 // Emit string symbol now to avoid emitting 1768 // any concurrently during the backend. 1769 if s, ok := n.Val().U.(string); ok { 1770 _ = stringsym(n.Pos, s) 1771 } 1772 } 1773 1774 updateHasCall(n) 1775 1776 if Debug['w'] != 0 && n != nil { 1777 Dump("walk", n) 1778 } 1779 1780 lineno = lno 1781 return n 1782 } 1783 1784 // TODO(josharian): combine this with its caller and simplify 1785 func reduceSlice(n *Node) *Node { 1786 low, high, max := n.SliceBounds() 1787 if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) { 1788 // Reduce x[i:len(x)] to x[i:]. 1789 high = nil 1790 } 1791 n.SetSliceBounds(low, high, max) 1792 if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { 1793 // Reduce x[:] to x. 
1794 if Debug_slice > 0 { 1795 Warn("slice: omit slice operation") 1796 } 1797 return n.Left 1798 } 1799 return n 1800 } 1801 1802 func ascompatee1(l *Node, r *Node, init *Nodes) *Node { 1803 // convas will turn map assigns into function calls, 1804 // making it impossible for reorder3 to work. 1805 n := nod(OAS, l, r) 1806 1807 if l.Op == OINDEXMAP { 1808 return n 1809 } 1810 1811 return convas(n, init) 1812 } 1813 1814 func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node { 1815 // check assign expression list to 1816 // an expression list. called in 1817 // expr-list = expr-list 1818 1819 // ensure order of evaluation for function calls 1820 for i := range nl { 1821 nl[i] = safeexpr(nl[i], init) 1822 } 1823 for i1 := range nr { 1824 nr[i1] = safeexpr(nr[i1], init) 1825 } 1826 1827 var nn []*Node 1828 i := 0 1829 for ; i < len(nl); i++ { 1830 if i >= len(nr) { 1831 break 1832 } 1833 // Do not generate 'x = x' during return. See issue 4014. 1834 if op == ORETURN && samesafeexpr(nl[i], nr[i]) { 1835 continue 1836 } 1837 nn = append(nn, ascompatee1(nl[i], nr[i], init)) 1838 } 1839 1840 // cannot happen: caller checked that lists had same length 1841 if i < len(nl) || i < len(nr) { 1842 var nln, nrn Nodes 1843 nln.Set(nl) 1844 nrn.Set(nr) 1845 Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname()) 1846 } 1847 return nn 1848 } 1849 1850 // l is an lv and rt is the type of an rv 1851 // return 1 if this implies a function call 1852 // evaluating the lv or a function call 1853 // in the conversion of the types 1854 func fncall(l *Node, rt *types.Type) bool { 1855 if l.HasCall() || l.Op == OINDEXMAP { 1856 return true 1857 } 1858 if eqtype(l.Type, rt) { 1859 return false 1860 } 1861 return true 1862 } 1863 1864 // check assign type list to 1865 // an expression list. 
// called in
//	expr-list = func()
//
// ascompatet builds the assignments of a multi-valued call's results
// (the fields of the argument-struct type nr) to the left-hand
// expressions nl, returning the statements to run after the call.
// Blank ("_") left-hand sides are skipped entirely.
func ascompatet(nl Nodes, nr *types.Type) []*Node {
	if nl.Len() != nr.NumFields() {
		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
	}

	// nn collects assignments made directly from the result slots;
	// mm collects assignments that must be deferred until every result
	// slot has been read (see fncall check below).
	var nn, mm Nodes
	for i, l := range nl.Slice() {
		if isblank(l) {
			continue
		}
		r := nr.Field(i)

		// any lv that causes a fn call must be
		// deferred until all the return arguments
		// have been pulled from the output arguments
		if fncall(l, r.Type) {
			tmp := temp(r.Type)
			tmp = typecheck(tmp, Erv)
			a := nod(OAS, l, tmp)
			a = convas(a, &mm)
			mm.Append(a)
			// Read the result slot into the temporary now; the real
			// destination l is assigned later from mm.
			l = tmp
		}

		// nodarg(r, 0) denotes the outgoing result slot for field r
		// (an OINDREGSP off SP; see nodarg below).
		a := nod(OAS, l, nodarg(r, 0))
		a = convas(a, &nn)
		updateHasCall(a)
		if a.HasCall() {
			Dump("ascompatet ucount", a)
			Fatalf("ascompatet: too many function calls evaluating parameters")
		}

		nn.Append(a)
	}
	// Direct reads of the result slots first, deferred assignments after.
	return append(nn.Slice(), mm.Slice()...)
}

// nodarg returns a Node for the function argument denoted by t,
// which is either the entire function argument or result struct (t is a struct *types.Type)
// or a specific argument (t is a *types.Field within a struct *types.Type).
//
// If fp is 0, the node is for use by a caller invoking the given
// function, preparing the arguments before the call
// or retrieving the results after the call.
// In this case, the node will correspond to an outgoing argument
// slot like 8(SP).
//
// If fp is 1, the node is for use by the function itself
// (the callee), to retrieve its arguments or write its results.
// In this case the node will be an ONAME with an appropriate
// type and offset.
func nodarg(t interface{}, fp int) *Node {
	var n *Node

	// funarg records whether t describes parameters or results; it is
	// consulted at the end to pick PPARAM vs PPARAMOUT when fp == 1.
	var funarg types.Funarg
	switch t := t.(type) {
	default:
		Fatalf("bad nodarg %T(%v)", t, t)

	case *types.Type:
		// Entire argument struct, not just one arg
		if !t.IsFuncArgStruct() {
			Fatalf("nodarg: bad type %v", t)
		}
		funarg = t.StructType().Funarg

		// Build fake variable name for whole arg struct.
		n = newname(lookup(".args"))
		n.Type = t
		first := t.Field(0)
		if first == nil {
			Fatalf("nodarg: bad struct")
		}
		if first.Offset == BADWIDTH {
			Fatalf("nodarg: offset not computed for %v", t)
		}
		// The struct node's offset is the offset of its first field.
		n.Xoffset = first.Offset

	case *types.Field:
		funarg = t.Funarg
		if fp == 1 {
			// NOTE(rsc): This should be using t.Nname directly,
			// except in the case where t.Nname.Sym is the blank symbol and
			// so the assignment would be discarded during code generation.
			// In that case we need to make a new node, and there is no harm
			// in optimization passes to doing so. But otherwise we should
			// definitely be using the actual declaration and not a newly built node.
			// The extra Fatalf checks here are verifying that this is the case,
			// without changing the actual logic (at time of writing, it's getting
			// toward time for the Go 1.7 beta).
			// At some quieter time (assuming we've never seen these Fatalfs happen)
			// we could change this code to use "expect" directly.
			expect := asNode(t.Nname)
			if expect.isParamHeapCopy() {
				// Parameter was moved to the heap; compare against its
				// stack copy, which is what appears in Curfn.Func.Dcl.
				expect = expect.Name.Param.Stackcopy
			}

			// Prefer the real declared node from the function's dcl list.
			for _, n := range Curfn.Func.Dcl {
				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
					if n != expect {
						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
					}
					return n
				}
			}

			if !expect.Sym.IsBlank() {
				Fatalf("nodarg: did not find node in dcl list: %v", expect)
			}
		}

		// Build fake name for individual variable.
		// This is safe because if there was a real declared name
		// we'd have used it above.
		n = newname(lookup("__"))
		n.Type = t.Type
		if t.Offset == BADWIDTH {
			Fatalf("nodarg: offset not computed for %v", t)
		}
		n.Xoffset = t.Offset
		n.Orig = asNode(t.Nname)
	}

	// Rewrite argument named _ to __,
	// or else the assignment to _ will be
	// discarded during code generation.
	if isblank(n) {
		n.Sym = lookup("__")
	}

	switch fp {
	default:
		Fatalf("bad fp")

	case 0: // preparing arguments for call
		n.Op = OINDREGSP
		n.Xoffset += Ctxt.FixedFrameSize()

	case 1: // reading arguments inside call
		n.SetClass(PPARAM)
		if funarg == types.FunargResults {
			n.SetClass(PPARAMOUT)
		}
	}

	n.SetTypecheck(1)
	n.SetAddrtaken(true) // keep optimizers at bay
	return n
}

// package all the arguments that match a ... T parameter into a []T.
// mkdotargslice builds the []T (typ) slice holding args, the arguments
// that correspond to a trailing ...T parameter. ddd, if non-nil, is the
// call's ... node and supplies the escape class and any preallocated
// backing storage.
func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
	esc := uint16(EscUnknown)
	if ddd != nil {
		esc = ddd.Esc
	}

	if len(args) == 0 {
		// No variadic arguments: pass a nil slice of the right type.
		// (esc is irrelevant on this path — nothing is allocated.)
		n := nodnil()
		n.Type = typ
		return n
	}

	// Build a slice composite literal holding the arguments.
	n := nod(OCOMPLIT, nil, typenod(typ))
	if ddd != nil && prealloc[ddd] != nil {
		prealloc[n] = prealloc[ddd] // temporary to use
	}
	n.List.Set(args)
	n.Esc = esc
	n = typecheck(n, Erv)
	if n.Type == nil {
		Fatalf("mkdotargslice: typecheck failed")
	}
	n = walkexpr(n, init)
	return n
}

// check assign expression list to
// a type list. called in
//	return expr-list
//	func(expr-list)
//
// ascompatte assigns each argument in rhs to the corresponding parameter
// slot of lhs (via nodarg with the given fp), returning the assignment
// statements. isddd reports whether the call itself used "...".
func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
	// f(g()) where g has multiple return values
	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
		// optimization - can do block copy
		if eqtypenoname(rhs[0].Type, lhs) {
			nl := nodarg(lhs, fp)
			nr := nod(OCONVNOP, rhs[0], nil)
			nr.Type = nl.Type
			n := convas(nod(OAS, nl, nr), init)
			n.SetTypecheck(1)
			return []*Node{n}
		}

		// conversions involved.
		// copy into temporaries.
		var tmps []*Node
		for _, nr := range rhs[0].Type.FieldSlice() {
			tmps = append(tmps, temp(nr.Type))
		}

		// tmp1, tmp2, ... = g()
		a := nod(OAS2, nil, nil)
		a.List.Set(tmps)
		a.Rlist.Set(rhs)
		a = typecheck(a, Etop)
		a = walkstmt(a)
		init.Append(a)

		// Continue below as if the temporaries were the arguments.
		rhs = tmps
	}

	// For each parameter (LHS), assign its corresponding argument (RHS).
	// If there's a ... parameter (which is only valid as the final
	// parameter) and this is not a ... call expression,
	// then assign the remaining arguments as a slice.
	var nn []*Node
	for i, nl := range lhs.FieldSlice() {
		var nr *Node
		if nl.Isddd() && !isddd {
			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
		} else {
			nr = rhs[i]
		}

		a := nod(OAS, nodarg(nl, fp), nr)
		a = convas(a, init)
		a.SetTypecheck(1)
		nn = append(nn, a)
	}

	return nn
}

// generate code for print
//
// walkprint lowers a print/println statement nn into a sequence of
// runtime calls (printlock, print<type>..., printunlock), returned as
// the Ninit of an OEMPTY statement.
func walkprint(nn *Node, init *Nodes) *Node {
	// Hoist all the argument evaluation up before the lock.
	walkexprlistcheap(nn.List.Slice(), init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op == OPRINTN {
		s := nn.List.Slice()
		t := make([]*Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, nodstr(" "))
			}
			t = append(t, n)
		}
		t = append(t, nodstr("\n"))
		nn.List.Set(t)
	}

	// Collapse runs of constant strings.
	s := nn.List.Slice()
	t := make([]*Node, 0, len(s))
	for i := 0; i < len(s); {
		var strs []string
		for i < len(s) && Isconst(s[i], CTSTR) {
			strs = append(strs, s[i].Val().U.(string))
			i++
		}
		if len(strs) > 0 {
			t = append(t, nodstr(strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.List.Set(t)

	calls := []*Node{mkcall("printlock", nil, init)}
	for i, n := range nn.List.Slice() {
		// Give untyped constants a concrete default type so a runtime
		// print routine can be selected.
		if n.Op == OLITERAL {
			switch n.Val().Ctype() {
			case CTRUNE:
				n = defaultlit(n, types.Runetype)

			case CTINT:
				n = defaultlit(n, types.Types[TINT64])

			case CTFLT:
				n = defaultlit(n, types.Types[TFLOAT64])
			}
		}

		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
			n = defaultlit(n, types.Types[TINT64])
		}
		n = defaultlit(n, nil)
		nn.List.SetIndex(i, n)
		if n.Type == nil || n.Type.Etype == TFORW {
			continue
		}

		// Select the runtime print routine for this argument's type.
		var on *Node
		switch n.Type.Etype {
		case TINTER:
			if n.Type.IsEmptyInterface() {
				on = syslook("printeface")
			} else {
				on = syslook("printiface")
			}
			on = substArgTypes(on, n.Type) // any-1
		case TPTR32, TPTR64, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
			on = syslook("printpointer")
			on = substArgTypes(on, n.Type) // any-1
		case TSLICE:
			on = syslook("printslice")
			on = substArgTypes(on, n.Type) // any-1
		case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
			// The runtime's "hex" type prints unsigned values in hex.
			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
				on = syslook("printhex")
			} else {
				on = syslook("printuint")
			}
		case TINT, TINT8, TINT16, TINT32, TINT64:
			on = syslook("printint")
		case TFLOAT32, TFLOAT64:
			on = syslook("printfloat")
		case TCOMPLEX64, TCOMPLEX128:
			on = syslook("printcomplex")
		case TBOOL:
			on = syslook("printbool")
		case TSTRING:
			// Constant " " and "\n" get dedicated, smaller routines.
			cs := ""
			if Isconst(n, CTSTR) {
				cs = n.Val().U.(string)
			}
			switch cs {
			case " ":
				on = syslook("printsp")
			case "\n":
				on = syslook("printnl")
			default:
				on = syslook("printstring")
			}
		default:
			badtype(OPRINT, n.Type, nil)
			continue
		}

		r := nod(OCALL, on, nil)
		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
			t := params[0].Type
			if !eqtype(t, n.Type) {
				n = nod(OCONV, n, nil)
				n.Type = t
			}
			r.List.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheckslice(calls, Etop)
	walkexprlist(calls, init)

	// Package the calls as the init list of an empty statement.
	r := nod(OEMPTY, nil, nil)
	r = typecheck(r, Etop)
	r = walkexpr(r, init)
	r.Ninit.Set(calls)
	return r
}

// callnew builds a call to runtime.newobject allocating a zeroed value
// of type t on the heap and returning a non-nil *t.
func callnew(t *types.Type) *Node {
	if t.NotInHeap() {
		yyerror("%v is go:notinheap; heap allocation disallowed", t)
	}
	dowidth(t)
	fn := syslook("newobject")
	fn = substArgTypes(fn, t)
	v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
	v.SetNonNil(true) // newobject never returns nil
	return v
}

// iscallret reports whether n (after peeling wrappers via outervalue)
// refers to a function call result slot (OINDREGSP).
func iscallret(n *Node) bool {
	if n == nil {
		return false
	}
	n = outervalue(n)
	return n.Op == OINDREGSP
}

// isReflectHeaderDataField reports whether l is an expression p.Data
// where p has type reflect.SliceHeader or reflect.StringHeader.
func isReflectHeaderDataField(l *Node) bool {
	if l.Type != types.Types[TUINTPTR] {
		return false
	}

	var tsym *types.Sym
	switch l.Op {
	case ODOT:
		tsym = l.Left.Type.Sym
	case ODOTPTR:
		tsym = l.Left.Type.Elem().Sym
	default:
		return false
	}

	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
		return false
	}
	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
}

// convas walks an OAS node n, inserting the conversion of the RHS to the
// LHS type when the two differ. Assignments to blank only get default
// typing of the RHS.
func convas(n *Node, init *Nodes) *Node {
	if n.Op != OAS {
		Fatalf("convas: not OAS %v", n.Op)
	}
	// Recompute the HasCall flag after any rewriting below.
	defer updateHasCall(n)

	n.SetTypecheck(1)

	if n.Left == nil || n.Right == nil {
		return n
	}

	lt := n.Left.Type
	rt := n.Right.Type
	if lt == nil || rt == nil {
		return n
	}

	if isblank(n.Left) {
		n.Right = defaultlit(n.Right, nil)
		return n
	}

	if !eqtype(lt, rt) {
		n.Right = assignconv(n.Right, lt, "assignment")
		n.Right = walkexpr(n.Right, init)
	}
	dowidth(n.Right.Type)

	return n
}

// from ascompat[te]
// evaluating actual function arguments.
//	f(a,b)
// if there is exactly one function expr,
// then it is done first.
otherwise must 2313 // make temp variables 2314 func reorder1(all []*Node) []*Node { 2315 if len(all) == 1 { 2316 return all 2317 } 2318 2319 funcCalls := 0 2320 for _, n := range all { 2321 updateHasCall(n) 2322 if n.HasCall() { 2323 funcCalls++ 2324 } 2325 } 2326 if funcCalls == 0 { 2327 return all 2328 } 2329 2330 var g []*Node // fncalls assigned to tempnames 2331 var f *Node // last fncall assigned to stack 2332 var r []*Node // non fncalls and tempnames assigned to stack 2333 d := 0 2334 for _, n := range all { 2335 if !n.HasCall() { 2336 r = append(r, n) 2337 continue 2338 } 2339 2340 d++ 2341 if d == funcCalls { 2342 f = n 2343 continue 2344 } 2345 2346 // make assignment of fncall to tempname 2347 a := temp(n.Right.Type) 2348 2349 a = nod(OAS, a, n.Right) 2350 g = append(g, a) 2351 2352 // put normal arg assignment on list 2353 // with fncall replaced by tempname 2354 n.Right = a.Left 2355 2356 r = append(r, n) 2357 } 2358 2359 if f != nil { 2360 g = append(g, f) 2361 } 2362 return append(g, r...) 2363 } 2364 2365 // from ascompat[ee] 2366 // a,b = c,d 2367 // simultaneous assignment. there cannot 2368 // be later use of an earlier lvalue. 2369 // 2370 // function calls have been removed. 2371 func reorder3(all []*Node) []*Node { 2372 // If a needed expression may be affected by an 2373 // earlier assignment, make an early copy of that 2374 // expression and use the copy instead. 2375 var early []*Node 2376 2377 var mapinit Nodes 2378 for i, n := range all { 2379 l := n.Left 2380 2381 // Save subexpressions needed on left side. 2382 // Drill through non-dereferences. 
2383 for { 2384 if l.Op == ODOT || l.Op == OPAREN { 2385 l = l.Left 2386 continue 2387 } 2388 2389 if l.Op == OINDEX && l.Left.Type.IsArray() { 2390 l.Right = reorder3save(l.Right, all, i, &early) 2391 l = l.Left 2392 continue 2393 } 2394 2395 break 2396 } 2397 2398 switch l.Op { 2399 default: 2400 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2401 2402 case ONAME: 2403 break 2404 2405 case OINDEX, OINDEXMAP: 2406 l.Left = reorder3save(l.Left, all, i, &early) 2407 l.Right = reorder3save(l.Right, all, i, &early) 2408 if l.Op == OINDEXMAP { 2409 all[i] = convas(all[i], &mapinit) 2410 } 2411 2412 case OIND, ODOTPTR: 2413 l.Left = reorder3save(l.Left, all, i, &early) 2414 } 2415 2416 // Save expression on right side. 2417 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2418 } 2419 2420 early = append(mapinit.Slice(), early...) 2421 return append(early, all...) 2422 } 2423 2424 // if the evaluation of *np would be affected by the 2425 // assignments in all up to but not including the ith assignment, 2426 // copy into a temporary during *early and 2427 // replace *np with that temp. 2428 // The result of reorder3save MUST be assigned back to n, e.g. 2429 // n.Left = reorder3save(n.Left, all, i, early) 2430 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2431 if !aliased(n, all, i) { 2432 return n 2433 } 2434 2435 q := temp(n.Type) 2436 q = nod(OAS, q, n) 2437 q = typecheck(q, Etop) 2438 *early = append(*early, q) 2439 return q.Left 2440 } 2441 2442 // what's the outer value that a write to n affects? 2443 // outer value means containing struct or array. 
2444 func outervalue(n *Node) *Node { 2445 for { 2446 switch n.Op { 2447 case OXDOT: 2448 Fatalf("OXDOT in walk") 2449 case ODOT, OPAREN, OCONVNOP: 2450 n = n.Left 2451 continue 2452 case OINDEX: 2453 if n.Left.Type != nil && n.Left.Type.IsArray() { 2454 n = n.Left 2455 continue 2456 } 2457 } 2458 2459 return n 2460 } 2461 } 2462 2463 // Is it possible that the computation of n might be 2464 // affected by writes in as up to but not including the ith element? 2465 func aliased(n *Node, all []*Node, i int) bool { 2466 if n == nil { 2467 return false 2468 } 2469 2470 // Treat all fields of a struct as referring to the whole struct. 2471 // We could do better but we would have to keep track of the fields. 2472 for n.Op == ODOT { 2473 n = n.Left 2474 } 2475 2476 // Look for obvious aliasing: a variable being assigned 2477 // during the all list and appearing in n. 2478 // Also record whether there are any writes to main memory. 2479 // Also record whether there are any writes to variables 2480 // whose addresses have been taken. 2481 memwrite := false 2482 varwrite := false 2483 for _, an := range all[:i] { 2484 a := outervalue(an.Left) 2485 2486 for a.Op == ODOT { 2487 a = a.Left 2488 } 2489 2490 if a.Op != ONAME { 2491 memwrite = true 2492 continue 2493 } 2494 2495 switch n.Class() { 2496 default: 2497 varwrite = true 2498 continue 2499 2500 case PAUTO, PPARAM, PPARAMOUT: 2501 if n.Addrtaken() { 2502 varwrite = true 2503 continue 2504 } 2505 2506 if vmatch2(a, n) { 2507 // Direct hit. 2508 return true 2509 } 2510 } 2511 } 2512 2513 // The variables being written do not appear in n. 2514 // However, n might refer to computed addresses 2515 // that are being written. 2516 2517 // If no computed addresses are affected by the writes, no aliasing. 2518 if !memwrite && !varwrite { 2519 return false 2520 } 2521 2522 // If n does not refer to computed addresses 2523 // (that is, if n only refers to variables whose addresses 2524 // have not been taken), no aliasing. 
2525 if varexpr(n) { 2526 return false 2527 } 2528 2529 // Otherwise, both the writes and n refer to computed memory addresses. 2530 // Assume that they might conflict. 2531 return true 2532 } 2533 2534 // does the evaluation of n only refer to variables 2535 // whose addresses have not been taken? 2536 // (and no other memory) 2537 func varexpr(n *Node) bool { 2538 if n == nil { 2539 return true 2540 } 2541 2542 switch n.Op { 2543 case OLITERAL: 2544 return true 2545 2546 case ONAME: 2547 switch n.Class() { 2548 case PAUTO, PPARAM, PPARAMOUT: 2549 if !n.Addrtaken() { 2550 return true 2551 } 2552 } 2553 2554 return false 2555 2556 case OADD, 2557 OSUB, 2558 OOR, 2559 OXOR, 2560 OMUL, 2561 ODIV, 2562 OMOD, 2563 OLSH, 2564 ORSH, 2565 OAND, 2566 OANDNOT, 2567 OPLUS, 2568 OMINUS, 2569 OCOM, 2570 OPAREN, 2571 OANDAND, 2572 OOROR, 2573 OCONV, 2574 OCONVNOP, 2575 OCONVIFACE, 2576 ODOTTYPE: 2577 return varexpr(n.Left) && varexpr(n.Right) 2578 2579 case ODOT: // but not ODOTPTR 2580 // Should have been handled in aliased. 2581 Fatalf("varexpr unexpected ODOT") 2582 } 2583 2584 // Be conservative. 2585 return false 2586 } 2587 2588 // is the name l mentioned in r? 2589 func vmatch2(l *Node, r *Node) bool { 2590 if r == nil { 2591 return false 2592 } 2593 switch r.Op { 2594 // match each right given left 2595 case ONAME: 2596 return l == r 2597 2598 case OLITERAL: 2599 return false 2600 } 2601 2602 if vmatch2(l, r.Left) { 2603 return true 2604 } 2605 if vmatch2(l, r.Right) { 2606 return true 2607 } 2608 for _, n := range r.List.Slice() { 2609 if vmatch2(l, n) { 2610 return true 2611 } 2612 } 2613 return false 2614 } 2615 2616 // is any name mentioned in l also mentioned in r? 
2617 // called by sinit.go 2618 func vmatch1(l *Node, r *Node) bool { 2619 // isolate all left sides 2620 if l == nil || r == nil { 2621 return false 2622 } 2623 switch l.Op { 2624 case ONAME: 2625 switch l.Class() { 2626 case PPARAM, PAUTO: 2627 break 2628 2629 default: 2630 // assignment to non-stack variable must be 2631 // delayed if right has function calls. 2632 if r.HasCall() { 2633 return true 2634 } 2635 } 2636 2637 return vmatch2(l, r) 2638 2639 case OLITERAL: 2640 return false 2641 } 2642 2643 if vmatch1(l.Left, r) { 2644 return true 2645 } 2646 if vmatch1(l.Right, r) { 2647 return true 2648 } 2649 for _, n := range l.List.Slice() { 2650 if vmatch1(n, r) { 2651 return true 2652 } 2653 } 2654 return false 2655 } 2656 2657 // paramstoheap returns code to allocate memory for heap-escaped parameters 2658 // and to copy non-result parameters' values from the stack. 2659 func paramstoheap(params *types.Type) []*Node { 2660 var nn []*Node 2661 for _, t := range params.Fields().Slice() { 2662 v := asNode(t.Nname) 2663 if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result 2664 v = nil 2665 } 2666 if v == nil { 2667 continue 2668 } 2669 2670 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil { 2671 nn = append(nn, walkstmt(nod(ODCL, v, nil))) 2672 if stackcopy.Class() == PPARAM { 2673 nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop))) 2674 } 2675 } 2676 } 2677 2678 return nn 2679 } 2680 2681 // zeroResults zeros the return values at the start of the function. 2682 // We need to do this very early in the function. Defer might stop a 2683 // panic and show the return values as they exist at the time of 2684 // panic. For precise stacks, the garbage collector assumes results 2685 // are always live, so we need to zero them before any allocations, 2686 // even allocations to move params/results to the heap. 2687 // The generated code is added to Curfn's Enter list. 
2688 func zeroResults() { 2689 lno := lineno 2690 lineno = Curfn.Pos 2691 for _, f := range Curfn.Type.Results().Fields().Slice() { 2692 if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil { 2693 // The local which points to the return value is the 2694 // thing that needs zeroing. This is already handled 2695 // by a Needzero annotation in plive.go:livenessepilogue. 2696 continue 2697 } 2698 // Zero the stack location containing f. 2699 Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil)) 2700 } 2701 lineno = lno 2702 } 2703 2704 // returnsfromheap returns code to copy values for heap-escaped parameters 2705 // back to the stack. 2706 func returnsfromheap(params *types.Type) []*Node { 2707 var nn []*Node 2708 for _, t := range params.Fields().Slice() { 2709 v := asNode(t.Nname) 2710 if v == nil { 2711 continue 2712 } 2713 if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT { 2714 nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop))) 2715 } 2716 } 2717 2718 return nn 2719 } 2720 2721 // heapmoves generates code to handle migrating heap-escaped parameters 2722 // between the stack and the heap. The generated code is added to Curfn's 2723 // Enter and Exit lists. 2724 func heapmoves() { 2725 lno := lineno 2726 lineno = Curfn.Pos 2727 nn := paramstoheap(Curfn.Type.Recvs()) 2728 nn = append(nn, paramstoheap(Curfn.Type.Params())...) 2729 nn = append(nn, paramstoheap(Curfn.Type.Results())...) 2730 Curfn.Func.Enter.Append(nn...) 2731 lineno = Curfn.Func.Endlineno 2732 Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...) 
2733 lineno = lno 2734 } 2735 2736 func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node { 2737 if fn.Type == nil || fn.Type.Etype != TFUNC { 2738 Fatalf("mkcall %v %v", fn, fn.Type) 2739 } 2740 2741 n := fn.Type.NumParams() 2742 if n != len(va) { 2743 Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va)) 2744 } 2745 2746 r := nod(OCALL, fn, nil) 2747 r.List.Set(va) 2748 if fn.Type.NumResults() > 0 { 2749 r = typecheck(r, Erv|Efnstruct) 2750 } else { 2751 r = typecheck(r, Etop) 2752 } 2753 r = walkexpr(r, init) 2754 r.Type = t 2755 return r 2756 } 2757 2758 func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node { 2759 return vmkcall(syslook(name), t, init, args) 2760 } 2761 2762 func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node { 2763 return vmkcall(fn, t, init, args) 2764 } 2765 2766 func conv(n *Node, t *types.Type) *Node { 2767 if eqtype(n.Type, t) { 2768 return n 2769 } 2770 n = nod(OCONV, n, nil) 2771 n.Type = t 2772 n = typecheck(n, Erv) 2773 return n 2774 } 2775 2776 // byteindex converts n, which is byte-sized, to a uint8. 2777 // We cannot use conv, because we allow converting bool to uint8 here, 2778 // which is forbidden in user code. 
2779 func byteindex(n *Node) *Node { 2780 if eqtype(n.Type, types.Types[TUINT8]) { 2781 return n 2782 } 2783 n = nod(OCONV, n, nil) 2784 n.Type = types.Types[TUINT8] 2785 n.SetTypecheck(1) 2786 return n 2787 } 2788 2789 func chanfn(name string, n int, t *types.Type) *Node { 2790 if !t.IsChan() { 2791 Fatalf("chanfn %v", t) 2792 } 2793 fn := syslook(name) 2794 switch n { 2795 default: 2796 Fatalf("chanfn %d", n) 2797 case 1: 2798 fn = substArgTypes(fn, t.Elem()) 2799 case 2: 2800 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2801 } 2802 return fn 2803 } 2804 2805 func mapfn(name string, t *types.Type) *Node { 2806 if !t.IsMap() { 2807 Fatalf("mapfn %v", t) 2808 } 2809 fn := syslook(name) 2810 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2811 return fn 2812 } 2813 2814 func mapfndel(name string, t *types.Type) *Node { 2815 if !t.IsMap() { 2816 Fatalf("mapfn %v", t) 2817 } 2818 fn := syslook(name) 2819 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2820 return fn 2821 } 2822 2823 const ( 2824 mapslow = iota 2825 mapfast32 2826 mapfast32ptr 2827 mapfast64 2828 mapfast64ptr 2829 mapfaststr 2830 nmapfast 2831 ) 2832 2833 type mapnames [nmapfast]string 2834 2835 func mkmapnames(base string, ptr string) mapnames { 2836 return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"} 2837 } 2838 2839 var mapaccess1 = mkmapnames("mapaccess1", "") 2840 var mapaccess2 = mkmapnames("mapaccess2", "") 2841 var mapassign = mkmapnames("mapassign", "ptr") 2842 var mapdelete = mkmapnames("mapdelete", "") 2843 2844 func mapfast(t *types.Type) int { 2845 // Check ../../runtime/hashmap.go:maxValueSize before changing. 
2846 if t.Val().Width > 128 { 2847 return mapslow 2848 } 2849 switch algtype(t.Key()) { 2850 case AMEM32: 2851 if !t.Key().HasHeapPointer() { 2852 return mapfast32 2853 } 2854 if Widthptr == 4 { 2855 return mapfast32ptr 2856 } 2857 Fatalf("small pointer %v", t.Key()) 2858 case AMEM64: 2859 if !t.Key().HasHeapPointer() { 2860 return mapfast64 2861 } 2862 if Widthptr == 8 { 2863 return mapfast64ptr 2864 } 2865 // Two-word object, at least one of which is a pointer. 2866 // Use the slow path. 2867 case ASTRING: 2868 return mapfaststr 2869 } 2870 return mapslow 2871 } 2872 2873 func writebarrierfn(name string, l *types.Type, r *types.Type) *Node { 2874 fn := syslook(name) 2875 fn = substArgTypes(fn, l, r) 2876 return fn 2877 } 2878 2879 func addstr(n *Node, init *Nodes) *Node { 2880 // orderexpr rewrote OADDSTR to have a list of strings. 2881 c := n.List.Len() 2882 2883 if c < 2 { 2884 Fatalf("addstr count %d too small", c) 2885 } 2886 2887 buf := nodnil() 2888 if n.Esc == EscNone { 2889 sz := int64(0) 2890 for _, n1 := range n.List.Slice() { 2891 if n1.Op == OLITERAL { 2892 sz += int64(len(n1.Val().U.(string))) 2893 } 2894 } 2895 2896 // Don't allocate the buffer if the result won't fit. 2897 if sz < tmpstringbufsize { 2898 // Create temporary buffer for result string on stack. 2899 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 2900 2901 buf = nod(OADDR, temp(t), nil) 2902 } 2903 } 2904 2905 // build list of string arguments 2906 args := []*Node{buf} 2907 for _, n2 := range n.List.Slice() { 2908 args = append(args, conv(n2, types.Types[TSTRING])) 2909 } 2910 2911 var fn string 2912 if c <= 5 { 2913 // small numbers of strings use direct runtime helpers. 2914 // note: orderexpr knows this cutoff too. 2915 fn = fmt.Sprintf("concatstring%d", c) 2916 } else { 2917 // large numbers of strings are passed to the runtime as a slice. 
2918 fn = "concatstrings" 2919 2920 t := types.NewSlice(types.Types[TSTRING]) 2921 slice := nod(OCOMPLIT, nil, typenod(t)) 2922 if prealloc[n] != nil { 2923 prealloc[slice] = prealloc[n] 2924 } 2925 slice.List.Set(args[1:]) // skip buf arg 2926 args = []*Node{buf, slice} 2927 slice.Esc = EscNone 2928 } 2929 2930 cat := syslook(fn) 2931 r := nod(OCALL, cat, nil) 2932 r.List.Set(args) 2933 r = typecheck(r, Erv) 2934 r = walkexpr(r, init) 2935 r.Type = n.Type 2936 2937 return r 2938 } 2939 2940 // expand append(l1, l2...) to 2941 // init { 2942 // s := l1 2943 // n := len(s) + len(l2) 2944 // // Compare as uint so growslice can panic on overflow. 2945 // if uint(n) > uint(cap(s)) { 2946 // s = growslice(s, n) 2947 // } 2948 // s = s[:n] 2949 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2950 // } 2951 // s 2952 // 2953 // l2 is allowed to be a string. 2954 func appendslice(n *Node, init *Nodes) *Node { 2955 walkexprlistsafe(n.List.Slice(), init) 2956 2957 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2958 // and n are name or literal, but those may index the slice we're 2959 // modifying here. Fix explicitly. 
2960 ls := n.List.Slice() 2961 for i1, n1 := range ls { 2962 ls[i1] = cheapexpr(n1, init) 2963 } 2964 2965 l1 := n.List.First() 2966 l2 := n.List.Second() 2967 2968 var l []*Node 2969 2970 // var s []T 2971 s := temp(l1.Type) 2972 l = append(l, nod(OAS, s, l1)) // s = l1 2973 2974 // n := len(s) + len(l2) 2975 nn := temp(types.Types[TINT]) 2976 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2977 2978 // if uint(n) > uint(cap(s)) 2979 nif := nod(OIF, nil, nil) 2980 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2981 nif.Left.Left.Type = types.Types[TUINT] 2982 nif.Left.Right.Type = types.Types[TUINT] 2983 2984 // instantiate growslice(Type*, []any, int) []any 2985 fn := syslook("growslice") 2986 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2987 2988 // s = growslice(T, s, n) 2989 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2990 l = append(l, nif) 2991 2992 // s = s[:n] 2993 nt := nod(OSLICE, s, nil) 2994 nt.SetSliceBounds(nil, nn, nil) 2995 nt.Etype = 1 2996 l = append(l, nod(OAS, s, nt)) 2997 2998 if l1.Type.Elem().HasHeapPointer() { 2999 // copy(s[len(l1):], l2) 3000 nptr1 := nod(OSLICE, s, nil) 3001 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 3002 nptr1.Etype = 1 3003 nptr2 := l2 3004 Curfn.Func.setWBPos(n.Pos) 3005 fn := syslook("typedslicecopy") 3006 fn = substArgTypes(fn, l1.Type, l2.Type) 3007 var ln Nodes 3008 ln.Set(l) 3009 nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 3010 l = append(ln.Slice(), nt) 3011 } else if instrumenting && !compiling_runtime { 3012 // rely on runtime to instrument copy. 
3013 // copy(s[len(l1):], l2) 3014 nptr1 := nod(OSLICE, s, nil) 3015 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 3016 nptr1.Etype = 1 3017 nptr2 := l2 3018 3019 var ln Nodes 3020 ln.Set(l) 3021 var nt *Node 3022 if l2.Type.IsString() { 3023 fn := syslook("slicestringcopy") 3024 fn = substArgTypes(fn, l1.Type, l2.Type) 3025 nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2) 3026 } else { 3027 fn := syslook("slicecopy") 3028 fn = substArgTypes(fn, l1.Type, l2.Type) 3029 nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 3030 } 3031 3032 l = append(ln.Slice(), nt) 3033 } else { 3034 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 3035 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 3036 nptr1.SetBounded(true) 3037 3038 nptr1 = nod(OADDR, nptr1, nil) 3039 3040 nptr2 := nod(OSPTR, l2, nil) 3041 3042 fn := syslook("memmove") 3043 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 3044 3045 var ln Nodes 3046 ln.Set(l) 3047 nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln) 3048 3049 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 3050 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 3051 l = append(ln.Slice(), nt) 3052 } 3053 3054 typecheckslice(l, Etop) 3055 walkstmtlist(l) 3056 init.Append(l...) 3057 return s 3058 } 3059 3060 // Rewrite append(src, x, y, z) so that any side effects in 3061 // x, y, z (including runtime panics) are evaluated in 3062 // initialization statements before the append. 3063 // For normal code generation, stop there and leave the 3064 // rest to cgen_append. 3065 // 3066 // For race detector, expand append(src, a [, b]* ) to 3067 // 3068 // init { 3069 // s := src 3070 // const argc = len(args) - 1 3071 // if cap(s) - len(s) < argc { 3072 // s = growslice(s, len(s)+argc) 3073 // } 3074 // n := len(s) 3075 // s = s[:n+argc] 3076 // s[n] = a 3077 // s[n+1] = b 3078 // ... 
3079 // } 3080 // s 3081 func walkappend(n *Node, init *Nodes, dst *Node) *Node { 3082 if !samesafeexpr(dst, n.List.First()) { 3083 n.List.SetFirst(safeexpr(n.List.First(), init)) 3084 n.List.SetFirst(walkexpr(n.List.First(), init)) 3085 } 3086 walkexprlistsafe(n.List.Slice()[1:], init) 3087 3088 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 3089 // and n are name or literal, but those may index the slice we're 3090 // modifying here. Fix explicitly. 3091 // Using cheapexpr also makes sure that the evaluation 3092 // of all arguments (and especially any panics) happen 3093 // before we begin to modify the slice in a visible way. 3094 ls := n.List.Slice()[1:] 3095 for i, n := range ls { 3096 ls[i] = cheapexpr(n, init) 3097 } 3098 3099 nsrc := n.List.First() 3100 3101 argc := n.List.Len() - 1 3102 if argc < 1 { 3103 return nsrc 3104 } 3105 3106 // General case, with no function calls left as arguments. 3107 // Leave for gen, except that instrumentation requires old form. 3108 if !instrumenting || compiling_runtime { 3109 return n 3110 } 3111 3112 var l []*Node 3113 3114 ns := temp(nsrc.Type) 3115 l = append(l, nod(OAS, ns, nsrc)) // s = src 3116 3117 na := nodintconst(int64(argc)) // const argc 3118 nx := nod(OIF, nil, nil) // if cap(s) - len(s) < argc 3119 nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na) 3120 3121 fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T) 3122 fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem()) 3123 3124 nx.Nbody.Set1(nod(OAS, ns, 3125 mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns, 3126 nod(OADD, nod(OLEN, ns, nil), na)))) 3127 3128 l = append(l, nx) 3129 3130 nn := temp(types.Types[TINT]) 3131 l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s) 3132 3133 nx = nod(OSLICE, ns, nil) // ...s[:n+argc] 3134 nx.SetSliceBounds(nil, nod(OADD, nn, na), nil) 3135 nx.Etype = 1 3136 l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc] 3137 3138 ls = 
n.List.Slice()[1:] 3139 for i, n := range ls { 3140 nx = nod(OINDEX, ns, nn) // s[n] ... 3141 nx.SetBounded(true) 3142 l = append(l, nod(OAS, nx, n)) // s[n] = arg 3143 if i+1 < len(ls) { 3144 l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1 3145 } 3146 } 3147 3148 typecheckslice(l, Etop) 3149 walkstmtlist(l) 3150 init.Append(l...) 3151 return ns 3152 } 3153 3154 // Lower copy(a, b) to a memmove call or a runtime call. 3155 // 3156 // init { 3157 // n := len(a) 3158 // if n > len(b) { n = len(b) } 3159 // memmove(a.ptr, b.ptr, n*sizeof(elem(a))) 3160 // } 3161 // n; 3162 // 3163 // Also works if b is a string. 3164 // 3165 func copyany(n *Node, init *Nodes, runtimecall bool) *Node { 3166 if n.Left.Type.Elem().HasHeapPointer() { 3167 Curfn.Func.setWBPos(n.Pos) 3168 fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type) 3169 return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right) 3170 } 3171 3172 if runtimecall { 3173 if n.Right.Type.IsString() { 3174 fn := syslook("slicestringcopy") 3175 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 3176 return mkcall1(fn, n.Type, init, n.Left, n.Right) 3177 } 3178 3179 fn := syslook("slicecopy") 3180 fn = substArgTypes(fn, n.Left.Type, n.Right.Type) 3181 return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width)) 3182 } 3183 3184 n.Left = walkexpr(n.Left, init) 3185 n.Right = walkexpr(n.Right, init) 3186 nl := temp(n.Left.Type) 3187 nr := temp(n.Right.Type) 3188 var l []*Node 3189 l = append(l, nod(OAS, nl, n.Left)) 3190 l = append(l, nod(OAS, nr, n.Right)) 3191 3192 nfrm := nod(OSPTR, nr, nil) 3193 nto := nod(OSPTR, nl, nil) 3194 3195 nlen := temp(types.Types[TINT]) 3196 3197 // n = len(to) 3198 l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil))) 3199 3200 // if n > len(frm) { n = len(frm) } 3201 nif := nod(OIF, nil, nil) 3202 3203 nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil)) 3204 nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil))) 3205 l = 
append(l, nif) 3206 3207 // Call memmove. 3208 fn := syslook("memmove") 3209 3210 fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem()) 3211 nwid := temp(types.Types[TUINTPTR]) 3212 l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR]))) 3213 nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width)) 3214 l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid)) 3215 3216 typecheckslice(l, Etop) 3217 walkstmtlist(l) 3218 init.Append(l...) 3219 return nlen 3220 } 3221 3222 func eqfor(t *types.Type) (n *Node, needsize bool) { 3223 // Should only arrive here with large memory or 3224 // a struct/array containing a non-memory field/element. 3225 // Small memory is handled inline, and single non-memory 3226 // is handled during type check (OCMPSTR etc). 3227 switch a, _ := algtype1(t); a { 3228 case AMEM: 3229 n := syslook("memequal") 3230 n = substArgTypes(n, t, t) 3231 return n, true 3232 case ASPECIAL: 3233 sym := typesymprefix(".eq", t) 3234 n := newname(sym) 3235 n.SetClass(PFUNC) 3236 ntype := nod(OTFUNC, nil, nil) 3237 ntype.List.Append(anonfield(types.NewPtr(t))) 3238 ntype.List.Append(anonfield(types.NewPtr(t))) 3239 ntype.Rlist.Append(anonfield(types.Types[TBOOL])) 3240 ntype = typecheck(ntype, Etype) 3241 n.Type = ntype.Type 3242 return n, false 3243 } 3244 Fatalf("eqfor %v", t) 3245 return nil, false 3246 } 3247 3248 // The result of walkcompare MUST be assigned back to n, e.g. 3249 // n.Left = walkcompare(n.Left, init) 3250 func walkcompare(n *Node, init *Nodes) *Node { 3251 // Given interface value l and concrete value r, rewrite 3252 // l == r 3253 // into types-equal && data-equal. 3254 // This is efficient, avoids allocations, and avoids runtime calls. 3255 var l, r *Node 3256 if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() { 3257 l = n.Left 3258 r = n.Right 3259 } else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() { 3260 l = n.Right 3261 r = n.Left 3262 } 3263 3264 if l != nil { 3265 // Handle both == and !=. 
3266 eq := n.Op 3267 var andor Op 3268 if eq == OEQ { 3269 andor = OANDAND 3270 } else { 3271 andor = OOROR 3272 } 3273 // Check for types equal. 3274 // For empty interface, this is: 3275 // l.tab == type(r) 3276 // For non-empty interface, this is: 3277 // l.tab != nil && l.tab._type == type(r) 3278 var eqtype *Node 3279 tab := nod(OITAB, l, nil) 3280 rtyp := typename(r.Type) 3281 if l.Type.IsEmptyInterface() { 3282 tab.Type = types.NewPtr(types.Types[TUINT8]) 3283 tab.SetTypecheck(1) 3284 eqtype = nod(eq, tab, rtyp) 3285 } else { 3286 nonnil := nod(brcom(eq), nodnil(), tab) 3287 match := nod(eq, itabType(tab), rtyp) 3288 eqtype = nod(andor, nonnil, match) 3289 } 3290 // Check for data equal. 3291 eqdata := nod(eq, ifaceData(l, r.Type), r) 3292 // Put it all together. 3293 expr := nod(andor, eqtype, eqdata) 3294 n = finishcompare(n, expr, init) 3295 return n 3296 } 3297 3298 // Must be comparison of array or struct. 3299 // Otherwise back end handles it. 3300 // While we're here, decide whether to 3301 // inline or call an eq alg. 3302 t := n.Left.Type 3303 var inline bool 3304 3305 maxcmpsize := int64(4) 3306 unalignedLoad := false 3307 switch thearch.LinkArch.Family { 3308 case sys.AMD64, sys.ARM64, sys.S390X: 3309 // Keep this low enough, to generate less code than function call. 3310 maxcmpsize = 16 3311 unalignedLoad = true 3312 case sys.I386: 3313 maxcmpsize = 8 3314 unalignedLoad = true 3315 } 3316 3317 switch t.Etype { 3318 default: 3319 return n 3320 case TARRAY: 3321 // We can compare several elements at once with 2/4/8 byte integer compares 3322 inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize)) 3323 case TSTRUCT: 3324 inline = t.NumFields() <= 4 3325 } 3326 3327 cmpl := n.Left 3328 for cmpl != nil && cmpl.Op == OCONVNOP { 3329 cmpl = cmpl.Left 3330 } 3331 cmpr := n.Right 3332 for cmpr != nil && cmpr.Op == OCONVNOP { 3333 cmpr = cmpr.Left 3334 } 3335 3336 // Chose not to inline. 
Call equality function directly. 3337 if !inline { 3338 if isvaluelit(cmpl) { 3339 var_ := temp(cmpl.Type) 3340 anylit(cmpl, var_, init) 3341 cmpl = var_ 3342 } 3343 if isvaluelit(cmpr) { 3344 var_ := temp(cmpr.Type) 3345 anylit(cmpr, var_, init) 3346 cmpr = var_ 3347 } 3348 if !islvalue(cmpl) || !islvalue(cmpr) { 3349 Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) 3350 } 3351 3352 // eq algs take pointers 3353 pl := temp(types.NewPtr(t)) 3354 al := nod(OAS, pl, nod(OADDR, cmpl, nil)) 3355 al.Right.Etype = 1 // addr does not escape 3356 al = typecheck(al, Etop) 3357 init.Append(al) 3358 3359 pr := temp(types.NewPtr(t)) 3360 ar := nod(OAS, pr, nod(OADDR, cmpr, nil)) 3361 ar.Right.Etype = 1 // addr does not escape 3362 ar = typecheck(ar, Etop) 3363 init.Append(ar) 3364 3365 fn, needsize := eqfor(t) 3366 call := nod(OCALL, fn, nil) 3367 call.List.Append(pl) 3368 call.List.Append(pr) 3369 if needsize { 3370 call.List.Append(nodintconst(t.Width)) 3371 } 3372 res := call 3373 if n.Op != OEQ { 3374 res = nod(ONOT, res, nil) 3375 } 3376 n = finishcompare(n, res, init) 3377 return n 3378 } 3379 3380 // inline: build boolean expression comparing element by element 3381 andor := OANDAND 3382 if n.Op == ONE { 3383 andor = OOROR 3384 } 3385 var expr *Node 3386 compare := func(el, er *Node) { 3387 a := nod(n.Op, el, er) 3388 if expr == nil { 3389 expr = a 3390 } else { 3391 expr = nod(andor, expr, a) 3392 } 3393 } 3394 cmpl = safeexpr(cmpl, init) 3395 cmpr = safeexpr(cmpr, init) 3396 if t.IsStruct() { 3397 for _, f := range t.Fields().Slice() { 3398 sym := f.Sym 3399 if sym.IsBlank() { 3400 continue 3401 } 3402 compare( 3403 nodSym(OXDOT, cmpl, sym), 3404 nodSym(OXDOT, cmpr, sym), 3405 ) 3406 } 3407 } else { 3408 step := int64(1) 3409 remains := t.NumElem() * t.Elem().Width 3410 combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger() 3411 combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger() 3412 
combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger() 3413 for i := int64(0); remains > 0; { 3414 var convType *types.Type 3415 switch { 3416 case remains >= 8 && combine64bit: 3417 convType = types.Types[TINT64] 3418 step = 8 / t.Elem().Width 3419 case remains >= 4 && combine32bit: 3420 convType = types.Types[TUINT32] 3421 step = 4 / t.Elem().Width 3422 case remains >= 2 && combine16bit: 3423 convType = types.Types[TUINT16] 3424 step = 2 / t.Elem().Width 3425 default: 3426 step = 1 3427 } 3428 if step == 1 { 3429 compare( 3430 nod(OINDEX, cmpl, nodintconst(int64(i))), 3431 nod(OINDEX, cmpr, nodintconst(int64(i))), 3432 ) 3433 i++ 3434 remains -= t.Elem().Width 3435 } else { 3436 elemType := t.Elem().ToUnsigned() 3437 cmplw := nod(OINDEX, cmpl, nodintconst(int64(i))) 3438 cmplw = conv(cmplw, elemType) // convert to unsigned 3439 cmplw = conv(cmplw, convType) // widen 3440 cmprw := nod(OINDEX, cmpr, nodintconst(int64(i))) 3441 cmprw = conv(cmprw, elemType) 3442 cmprw = conv(cmprw, convType) 3443 // For code like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ... 3444 // ssa will generate a single large load. 3445 for offset := int64(1); offset < step; offset++ { 3446 lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset))) 3447 lb = conv(lb, elemType) 3448 lb = conv(lb, convType) 3449 lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset))) 3450 cmplw = nod(OOR, cmplw, lb) 3451 rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset))) 3452 rb = conv(rb, elemType) 3453 rb = conv(rb, convType) 3454 rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset))) 3455 cmprw = nod(OOR, cmprw, rb) 3456 } 3457 compare(cmplw, cmprw) 3458 i += step 3459 remains -= step * t.Elem().Width 3460 } 3461 } 3462 } 3463 if expr == nil { 3464 expr = nodbool(n.Op == OEQ) 3465 } 3466 n = finishcompare(n, expr, init) 3467 return n 3468 } 3469 3470 // The result of finishcompare MUST be assigned back to n, e.g. 
//	n.Left = finishcompare(n.Left, x, init)
// finishcompare typechecks and walks the replacement expression r and
// returns it as the new form of comparison node n, coercing r's type
// back to n's type if the typecheck produced a different (but
// equivalent) type.
func finishcompare(n, r *Node, init *Nodes) *Node {
	// Use nn here to avoid passing r to typecheck.
	nn := r
	nn = typecheck(nn, Erv)
	nn = walkexpr(nn, init)
	r = nn
	if r.Type != n.Type {
		// Preserve the type callers saw on n: wrap in a no-op
		// conversion and mark it already typechecked.
		r = nod(OCONVNOP, r, nil)
		r.Type = n.Type
		r.SetTypecheck(1)
		nn = r
	}
	return nn
}

// isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
func (n *Node) isIntOrdering() bool {
	switch n.Op {
	case OLE, OLT, OGE, OGT:
	default:
		return false
	}
	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
}

// walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
// n must be an OANDAND or OOROR node.
// The result of walkinrange MUST be assigned back to n, e.g.
//	n.Left = walkinrange(n.Left)
// If no rewrite applies, n is returned unchanged.
func walkinrange(n *Node, init *Nodes) *Node {
	// We are looking for something equivalent to a opl b OP b opr c, where:
	// * a, b, and c have integer type
	// * b is side-effect-free
	// * opl and opr are each < or ≤
	// * OP is &&
	l := n.Left
	r := n.Right
	if !l.isIntOrdering() || !r.isIntOrdering() {
		return n
	}

	// Find b, if it exists, and rename appropriately.
	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
	a, opl, b := l.Left, l.Op, l.Right
	x, opr, c := r.Left, r.Op, r.Right
	// Try the four orientations: flip the left comparison, the right
	// comparison, or both, reversing the operator each time (brrev),
	// until the middle operands b and x denote the same safe expression.
	for i := 0; ; i++ {
		if samesafeexpr(b, x) {
			break
		}
		if i == 3 {
			// Tried all permutations and couldn't find an appropriate b == x.
			return n
		}
		if i&1 == 0 {
			a, opl, b = b, brrev(opl), a
		} else {
			x, opr, c = c, brrev(opr), x
		}
	}

	// If n.Op is ||, apply de Morgan.
	// Negate the internal ops now; we'll negate the top level op at the end.
	// Henceforth assume &&.
	negateResult := n.Op == OOROR
	if negateResult {
		opl = brcom(opl)
		opr = brcom(opr)
	}

	// cmpdir classifies an ordering op by direction: -1 for </≤, +1 for >/≥.
	cmpdir := func(o Op) int {
		switch o {
		case OLE, OLT:
			return -1
		case OGE, OGT:
			return +1
		}
		Fatalf("walkinrange cmpdir %v", o)
		return 0
	}
	if cmpdir(opl) != cmpdir(opr) {
		// Not a range check; something like b < a && b < c.
		return n
	}

	switch opl {
	case OGE, OGT:
		// We have something like a > b && b ≥ c.
		// Switch and reverse ops and rename constants,
		// to make it look like a ≤ b && b < c.
		a, c = c, a
		opl, opr = brrev(opr), brrev(opl)
	}

	// We must ensure that c-a is non-negative.
	// For now, require a and c to be constants.
	// In the future, we could also support a == 0 and c == len/cap(...).
	// Unfortunately, by this point, most len/cap expressions have been
	// stored into temporary variables.
	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
		return n
	}

	// Ensure that Int64() does not overflow on a and c (it'll happen
	// for any const above 2**63; see issue #27143).
	if !a.CanInt64() || !c.CanInt64() {
		return n
	}

	if opl == OLT {
		// We have a < b && ...
		// We need a ≤ b && ... to safely use unsigned comparison tricks.
		// If a is not the maximum constant for b's type,
		// we can increment a and switch to ≤.
		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
			return n
		}
		a = nodintconst(a.Int64() + 1)
		opl = OLE
	}

	bound := c.Int64() - a.Int64()
	if bound < 0 {
		// Bad news. Something like 5 <= x && x < 3.
		// Rare in practice, and we still need to generate side-effects,
		// so just leave it alone.
		return n
	}

	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
	// which is equivalent to uint(b-a) < uint(c-a).
	ut := b.Type.ToUnsigned()
	lhs := conv(nod(OSUB, b, a), ut)
	rhs := nodintconst(bound)
	if negateResult {
		// Negate top level.
		opr = brcom(opr)
	}
	cmp := nod(opr, lhs, rhs)
	cmp.Pos = n.Pos
	// Keep any init statements from the original sub-comparisons so
	// their side effects still run.
	cmp = addinit(cmp, l.Ninit.Slice())
	cmp = addinit(cmp, r.Ninit.Slice())
	// Typecheck the AST rooted at cmp...
	cmp = typecheck(cmp, Erv)
	// ...but then reset cmp's type to match n's type.
	cmp.Type = n.Type
	cmp = walkexpr(cmp, init)
	return cmp
}

// bounded reports whether the integer expression n is provably in the
// range [0, max). It is conservative: a false result only means the
// bound could not be established, not that n is out of range.
func bounded(n *Node, max int64) bool {
	if n.Type == nil || !n.Type.IsInteger() {
		return false
	}

	sign := n.Type.IsSigned()
	bits := int32(8 * n.Type.Width)

	if smallintconst(n) {
		v := n.Int64()
		return 0 <= v && v < max
	}

	switch n.Op {
	case OAND:
		// x & v is at most v (for non-negative constant v).
		v := int64(-1)
		if smallintconst(n.Left) {
			v = n.Left.Int64()
		} else if smallintconst(n.Right) {
			v = n.Right.Int64()
		}

		if 0 <= v && v < max {
			return true
		}

	case OMOD:
		// Unsigned x % v is in [0, v), so v ≤ max suffices.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			if 0 <= v && v <= max {
				return true
			}
		}

	case ODIV:
		// Unsigned x / v narrows the effective bit width; shrink
		// bits accordingly and fall through to the final check.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ORSH:
		// Unsigned x >> v: shifting out all bits yields 0;
		// otherwise the effective bit width shrinks by v.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	// An unsigned value of width bits lies in [0, 2**bits);
	// bits ≤ 62 keeps 1<<bits representable in int64.
	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}

// usemethod checks interface method calls for uses of reflect.Type.Method.
// usemethod inspects the called method type n.Left.Type and, if its
// signature matches reflect.Type.Method or reflect.Type.MethodByName,
// marks the current function as using reflection methods
// (Curfn.Func.SetReflectMethod), so the linker keeps method metadata.
func usemethod(n *Node) {
	t := n.Left.Type

	// Looking for either of:
	//	Method(int) reflect.Method
	//	MethodByName(string) (reflect.Method, bool)
	//
	// TODO(crawshaw): improve precision of match by working out
	// how to check the method name.
	if n := t.NumParams(); n != 1 {
		return
	}
	if n := t.NumResults(); n != 1 && n != 2 {
		return
	}
	p0 := t.Params().Field(0)
	res0 := t.Results().Field(0)
	var res1 *types.Field
	if t.NumResults() == 2 {
		res1 = t.Results().Field(1)
	}

	if res1 == nil {
		// One result: must be Method(int).
		if p0.Type.Etype != TINT {
			return
		}
	} else {
		// Two results: must be MethodByName(string) (..., bool).
		if !p0.Type.IsString() {
			return
		}
		if !res1.Type.IsBoolean() {
			return
		}
	}

	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
	// (including global variables such as numImports - was issue #19028).
	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
		Curfn.Func.SetReflectMethod(true)
	}
}

// usefield records, when field tracking is enabled, that the current
// function accesses the struct field selected by n (an ODOT/ODOTPTR
// whose field carries a go:"track" note), adding the field's tracking
// symbol to Curfn.Func.FieldTrack.
func usefield(n *Node) {
	if objabi.Fieldtrack_enabled == 0 {
		return
	}

	switch n.Op {
	default:
		Fatalf("usefield %v", n.Op)

	case ODOT, ODOTPTR:
		break
	}
	if n.Sym == nil {
		// No field name. This DOTPTR was built by the compiler for access
		// to runtime data structures. Ignore.
		return
	}

	t := n.Left.Type
	if t.IsPtr() {
		t = t.Elem()
	}
	field := dotField[typeSymKey{t.Orig, n.Sym}]
	if field == nil {
		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
	}
	// Only fields annotated with a go:"track" struct-tag note are tracked.
	if !strings.Contains(field.Note, "go:\"track\"") {
		return
	}

	outer := n.Left.Type
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym == nil {
		yyerror("tracked field must be in named struct type")
	}
	if !exportname(field.Sym.Name) {
		yyerror("tracked field must be exported (upper case)")
	}

	sym := tracksym(outer, field)
	if Curfn.Func.FieldTrack == nil {
		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
	}
	Curfn.Func.FieldTrack[sym] = struct{}{}
}

// candiscardlist reports whether every node in l can be discarded
// without observable effect (see candiscard).
func candiscardlist(l Nodes) bool {
	for _, n := range l.Slice() {
		if !candiscard(n) {
			return false
		}
	}
	return true
}

// candiscard reports whether the expression n can be dropped entirely
// without changing program behavior: it must be side-effect-free and
// unable to panic (e.g. no possible division by zero or bad make size).
func candiscard(n *Node) bool {
	// A nil node contributes nothing, so it is trivially discardable.
	if n == nil {
		return true
	}

	switch n.Op {
	default:
		return false

	// Discardable as long as the subpieces are.
	case ONAME,
		ONONAME,
		OTYPE,
		OPACK,
		OLITERAL,
		OADD,
		OSUB,
		OOR,
		OXOR,
		OADDSTR,
		OADDR,
		OANDAND,
		OARRAYBYTESTR,
		OARRAYRUNESTR,
		OSTRARRAYBYTE,
		OSTRARRAYRUNE,
		OCAP,
		OCMPIFACE,
		OCMPSTR,
		OCOMPLIT,
		OMAPLIT,
		OSTRUCTLIT,
		OARRAYLIT,
		OSLICELIT,
		OPTRLIT,
		OCONV,
		OCONVIFACE,
		OCONVNOP,
		ODOT,
		OEQ,
		ONE,
		OLT,
		OLE,
		OGT,
		OGE,
		OKEY,
		OSTRUCTKEY,
		OLEN,
		OMUL,
		OLSH,
		ORSH,
		OAND,
		OANDNOT,
		ONEW,
		ONOT,
		OCOM,
		OPLUS,
		OMINUS,
		OOROR,
		OPAREN,
		ORUNESTR,
		OREAL,
		OIMAG,
		OCOMPLEX:
		break

	// Discardable as long as we know it's not division by zero.
	case ODIV, OMOD:
		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
			break
		}
		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
			break
		}
		return false

	// Discardable as long as we know it won't fail because of a bad size.
	case OMAKECHAN, OMAKEMAP:
		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
			break
		}
		return false

	// Difficult to tell what sizes are okay.
	case OMAKESLICE:
		return false
	}

	// The node itself is safe; recurse into every child list.
	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
		return false
	}

	return true
}

// rewrite
//	print(x, y, z)
// into
//	func(a1, a2, a3) {
//		print(a1, a2, a3)
//	}(x, y, z)
// and same for println.

// walkprintfunc_prgen numbers the generated print·%d wrapper functions
// so each call site gets a unique symbol.
var walkprintfunc_prgen int

// The result of walkprintfunc MUST be assigned back to n, e.g.
//	n.Left = walkprintfunc(n.Left, init)
// walkprintfunc wraps a print/println statement n in a generated
// function literal (see the rewrite sketch above) and returns the
// walked call to that function. The wrapper is appended to xtop for
// later compilation.
func walkprintfunc(n *Node, init *Nodes) *Node {
	// Hoist any init statements of n out to the caller's init list.
	if n.Ninit.Len() != 0 {
		walkstmtlist(n.Ninit.Slice())
		init.AppendNodes(&n.Ninit)
	}

	// Build the wrapper's parameter list: one field a0, a1, ... per
	// print argument, with matching types.
	t := nod(OTFUNC, nil, nil)
	var printargs []*Node
	for i, n1 := range n.List.Slice() {
		buf := fmt.Sprintf("a%d", i)
		a := namedfield(buf, n1.Type)
		t.List.Append(a)
		printargs = append(printargs, a.Left)
	}

	// Declare the wrapper at top level: temporarily clear Curfn while
	// declaring it, and restore the caller's function afterwards.
	oldfn := Curfn
	Curfn = nil

	walkprintfunc_prgen++
	sym := lookupN("print·%d", walkprintfunc_prgen)
	fn := dclfunc(sym, t)

	// The wrapper body is the same print/println op applied to the
	// freshly declared parameters.
	a := nod(n.Op, nil, nil)
	a.List.Set(printargs)
	a = typecheck(a, Etop)
	a = walkstmt(a)

	fn.Nbody.Set1(a)

	funcbody()

	fn = typecheck(fn, Etop)
	typecheckslice(fn.Nbody.Slice(), Etop)
	xtop = append(xtop, fn)
	Curfn = oldfn

	// Replace n with a call to the wrapper, passing the original
	// argument expressions.
	a = nod(OCALL, nil, nil)
	a.Left = fn.Func.Nname
	a.List.Set(n.List.Slice())
	a = typecheck(a, Etop)
	a = walkexpr(a, init)
	return a
}

// substArgTypes substitutes the given list of types for
// successive occurrences of the "any" placeholder in the
// type syntax expression n.Type.
// The result of substArgTypes MUST be assigned back to old, e.g.
//	n.Left = substArgTypes(n.Left, t1, t2)
func substArgTypes(old *Node, types_ ...*types.Type) *Node {
	n := *old // make shallow copy

	// Sizes must be computed before the types are used in a signature.
	for _, t := range types_ {
		dowidth(t)
	}
	// SubstAny consumes types_ as it substitutes; leftovers mean the
	// caller supplied more types than there were "any" placeholders.
	n.Type = types.SubstAny(n.Type, &types_)
	if len(types_) > 0 {
		Fatalf("substArgTypes: too many argument types")
	}
	return &n
}