// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package gc

import (
	"cmd/compile/internal/types"
	"cmd/internal/objabi"
	"cmd/internal/sys"
	"fmt"
	"strings"
)

// The constant is known to runtime.
const tmpstringbufsize = 32

// walk is the entry point for the walk pass on a single function.
// It runs a final typecheck over fn's declared locals, reports any
// "declared and not used" errors, and then walks the function body,
// lowering statements and expressions for the back end.
func walk(fn *Node) {
	Curfn = fn

	if Debug['W'] != 0 {
		s := fmt.Sprintf("\nbefore %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	// Remember the position so error reporting below does not
	// permanently change the current line.
	lno := lineno

	// Final typecheck for any unused variables.
	for i, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
			ln = typecheck(ln, Erv|Easgn)
			fn.Func.Dcl[i] = ln
		}
	}

	// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
	for _, ln := range fn.Func.Dcl {
		if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
			ln.Name.Defn.Left.Name.SetUsed(true)
		}
	}

	// Report any local that was declared but never used.
	for _, ln := range fn.Func.Dcl {
		// Skip non-locals, names beginning with '&' (presumably
		// compiler-generated — TODO confirm), and anything used.
		if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
			continue
		}
		if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
			if defn.Left.Name.Used() {
				continue
			}
			yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
			defn.Left.Name.SetUsed(true) // suppress repeats
		} else {
			yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
		}
	}

	lineno = lno
	if nerrors != 0 {
		return
	}
	walkstmtlist(Curfn.Nbody.Slice())
	if Debug['W'] != 0 {
		s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Nbody)
	}

	zeroResults()
	heapmoves()
	if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
		s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
		dumplist(s, Curfn.Func.Enter)
	}
}

// walkstmtlist walks every statement in s, replacing each element
// with its walked form.
func walkstmtlist(s []*Node) {
	for i := range s {
		s[i] = walkstmt(s[i])
	}
}

// samelist reports whether a and b contain exactly the same nodes,
// in the same order (pointer identity, not structural equality).
func samelist(a, b []*Node) bool {
	if len(a) != len(b) {
		return false
	}
	for i, n := range a {
		if n != b[i] {
			return false
		}
	}
	return true
}

// paramoutheap reports whether any output parameter of fn is a stack
// copy of a heap-allocated param or has had its address taken.
func paramoutheap(fn *Node) bool {
	for _, ln := range fn.Func.Dcl {
		switch ln.Class() {
		case PPARAMOUT:
			if ln.isParamStackCopy() || ln.Addrtaken() {
				return true
			}

		case PAUTO:
			// stop early - parameters are over
			return false
		}
	}

	return false
}

// adds "adjust" to all the argument locations for the call n.
// n must be a defer or go node that has already been walked.
func adjustargs(n *Node, adjust int) {
	callfunc := n.Left
	for _, arg := range callfunc.List.Slice() {
		if arg.Op != OAS {
			Fatalf("call arg not assignment")
		}
		lhs := arg.Left
		if lhs.Op == ONAME {
			// This is a temporary introduced by reorder1.
			// The real store to the stack appears later in the arg list.
			continue
		}

		if lhs.Op != OINDREGSP {
			Fatalf("call argument store does not use OINDREGSP")
		}

		// can't really check this in machine-indep code.
		//if(lhs->val.u.reg != D_SP)
		//	Fatalf("call arg assign not indreg(SP)")
		lhs.Xoffset += int64(adjust)
	}
}

// The result of walkstmt MUST be assigned back to n, e.g.
// n.Left = walkstmt(n.Left)
func walkstmt(n *Node) *Node {
	if n == nil {
		return n
	}

	setlineno(n)

	walkstmtlist(n.Ninit.Slice())

	switch n.Op {
	default:
		if n.Op == ONAME {
			yyerror("%v is not a top level statement", n.Sym)
		} else {
			yyerror("%v is not a top level statement", n.Op)
		}
		Dump("nottop", n)

	// Expression statements: walk as an expression, keeping the
	// init list hoisted out so walkexpr doesn't see init == &n.Ninit.
	case OAS,
		OASOP,
		OAS2,
		OAS2DOTTYPE,
		OAS2RECV,
		OAS2FUNC,
		OAS2MAPR,
		OCLOSE,
		OCOPY,
		OCALLMETH,
		OCALLINTER,
		OCALL,
		OCALLFUNC,
		ODELETE,
		OSEND,
		OPRINT,
		OPRINTN,
		OPANIC,
		OEMPTY,
		ORECOVER,
		OGETG:
		if n.Typecheck() == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		wascopy := n.Op == OCOPY
		init := n.Ninit
		n.Ninit.Set(nil)
		n = walkexpr(n, &init)
		n = addinit(n, init.Slice())
		if wascopy && n.Op == OCONVNOP {
			n.Op = OEMPTY // don't leave plain values as statements.
		}

	// special case for a receive where we throw away
	// the value received.
	case ORECV:
		if n.Typecheck() == 0 {
			Fatalf("missing typecheck: %+v", n)
		}
		init := n.Ninit
		n.Ninit.Set(nil)

		// <-c becomes chanrecv1(c, nil): receive and discard.
		n.Left = walkexpr(n.Left, &init)
		n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
		n = walkexpr(n, &init)

		n = addinit(n, init.Slice())

	// Statements with nothing to lower.
	case OBREAK,
		OCONTINUE,
		OFALL,
		OGOTO,
		OLABEL,
		ODCLCONST,
		ODCLTYPE,
		OCHECKNIL,
		OVARKILL,
		OVARLIVE:
		break

	case ODCL:
		v := n.Left
		if v.Class() == PAUTOHEAP {
			if compiling_runtime {
				yyerror("%v escapes to heap, not allowed in runtime.", v)
			}
			if prealloc[v] == nil {
				prealloc[v] = callnew(v.Type)
			}
			// Rewrite the declaration as an assignment of the
			// preallocated heap cell to the variable's heap address.
			nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
			nn.SetColas(true)
			nn = typecheck(nn, Etop)
			return walkstmt(nn)
		}

	case OBLOCK:
		walkstmtlist(n.List.Slice())

	case OXCASE:
		yyerror("case statement out of place")
		n.Op = OCASE
		fallthrough

	case OCASE:
		n.Right = walkstmt(n.Right)

	case ODEFER:
		Curfn.Func.SetHasDefer(true)
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case OFOR, OFORUNTIL:
		if n.Left != nil {
			walkstmtlist(n.Left.Ninit.Slice())
			init := n.Left.Ninit
			n.Left.Ninit.Set(nil)
			n.Left = walkexpr(n.Left, &init)
			n.Left = addinit(n.Left, init.Slice())
		}

		n.Right = walkstmt(n.Right)
		walkstmtlist(n.Nbody.Slice())

	case OIF:
		n.Left = walkexpr(n.Left, &n.Ninit)
		walkstmtlist(n.Nbody.Slice())
		walkstmtlist(n.Rlist.Slice())

	case OPROC:
		switch n.Left.Op {
		case OPRINT, OPRINTN:
			n.Left = walkprintfunc(n.Left, &n.Ninit)

		case OCOPY:
			n.Left = copyany(n.Left, &n.Ninit, true)

		default:
			n.Left = walkexpr(n.Left, &n.Ninit)
		}

		// make room for size & fn arguments.
		adjustargs(n, 2*Widthptr)

	case ORETURN:
		walkexprlist(n.List.Slice(), &n.Ninit)
		if n.List.Len() == 0 {
			break
		}
		if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
			// assign to the function out parameters,
			// so that reorder3 can fix up conflicts
			var rl []*Node

			for _, ln := range Curfn.Func.Dcl {
				cl := ln.Class()
				if cl == PAUTO || cl == PAUTOHEAP {
					break
				}
				if cl == PPARAMOUT {
					if ln.isParamStackCopy() {
						ln = walkexpr(typecheck(nod(OIND, ln.Name.Param.Heapaddr, nil), Erv), nil)
					}
					rl = append(rl, ln)
				}
			}

			if got, want := n.List.Len(), len(rl); got != want {
				// order should have rewritten multi-value function calls
				// with explicit OAS2FUNC nodes.
				Fatalf("expected %v return arguments, have %v", want, got)
			}

			if samelist(rl, n.List.Slice()) {
				// special return in disguise
				n.List.Set(nil)

				break
			}

			// move function calls out, to make reorder3's job easier.
			walkexprlistsafe(n.List.Slice(), &n.Ninit)

			ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
			n.List.Set(reorder3(ll))
			break
		}

		ll := ascompatte(nil, false, Curfn.Type.Results(), n.List.Slice(), 1, &n.Ninit)
		n.List.Set(ll)

	case ORETJMP:
		break

	case OSELECT:
		walkselect(n)

	case OSWITCH:
		walkswitch(n)

	case ORANGE:
		n = walkrange(n)
	}

	if n.Op == ONAME {
		Fatalf("walkstmt ended up with name: %+v", n)
	}
	return n
}

// isSmallMakeSlice reports whether n is a make([]T, len, cap) whose
// length and capacity are small integer constants and whose backing
// array would be smaller than 1<<16 bytes.
func isSmallMakeSlice(n *Node) bool {
	if n.Op != OMAKESLICE {
		return false
	}
	l := n.Left
	r := n.Right
	if r == nil {
		// make([]T, n) — capacity defaults to the length.
		r = l
	}
	t := n.Type

	return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
}

// walk the whole tree of the body of an
// expression or simple statement.
// the types expressions are calculated.
// compile-time constants are evaluated.
// complex side effects like statements are appended to init
func walkexprlist(s []*Node, init *Nodes) {
	for i := range s {
		s[i] = walkexpr(s[i], init)
	}
}

// walkexprlistsafe is like walkexprlist, but first replaces each
// expression with a safe (side-effect-free to re-evaluate) copy.
func walkexprlistsafe(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = safeexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// walkexprlistcheap is like walkexprlist, but first replaces each
// expression with a cheap copy via cheapexpr.
func walkexprlistcheap(s []*Node, init *Nodes) {
	for i, n := range s {
		s[i] = cheapexpr(n, init)
		s[i] = walkexpr(s[i], init)
	}
}

// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
func convFuncName(from, to *types.Type) string {
	tkind := to.Tie()
	switch from.Tie() {
	case 'I':
		switch tkind {
		case 'I':
			return "convI2I"
		}
	case 'T':
		switch tkind {
		case 'E':
			// Prefer a size/pointer-specialized variant when the
			// concrete type qualifies; fall back to convT2E.
			switch {
			case from.Size() == 2 && from.Align == 2:
				return "convT2E16"
			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
				return "convT2E32"
			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
				return "convT2E64"
			case from.IsString():
				return "convT2Estring"
			case from.IsSlice():
				return "convT2Eslice"
			case !types.Haspointers(from):
				return "convT2Enoptr"
			}
			return "convT2E"
		case 'I':
			// Same specialization scheme as the T-to-E cases above.
			switch {
			case from.Size() == 2 && from.Align == 2:
				return "convT2I16"
			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
				return "convT2I32"
			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
				return "convT2I64"
			case from.IsString():
				return "convT2Istring"
			case from.IsSlice():
				return "convT2Islice"
			case !types.Haspointers(from):
				return "convT2Inoptr"
			}
			return "convT2I"
		}
	}
	Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
	panic("unreachable")
}

// The result of walkexpr MUST be assigned back to n, e.g.
// n.Left = walkexpr(n.Left, init)
func walkexpr(n *Node, init *Nodes) *Node {
	if n == nil {
		return n
	}

	// Eagerly checkwidth all expressions for the back end.
	if n.Type != nil && !n.Type.WidthCalculated() {
		switch n.Type.Etype {
		case TBLANK, TNIL, TIDEAL:
		default:
			checkwidth(n.Type)
		}
	}

	if init == &n.Ninit {
		// not okay to use n->ninit when walking n,
		// because we might replace n with some other node
		// and would lose the init list.
467 Fatalf("walkexpr init == &n->ninit") 468 } 469 470 if n.Ninit.Len() != 0 { 471 walkstmtlist(n.Ninit.Slice()) 472 init.AppendNodes(&n.Ninit) 473 } 474 475 lno := setlineno(n) 476 477 if Debug['w'] > 1 { 478 Dump("walk-before", n) 479 } 480 481 if n.Typecheck() != 1 { 482 Fatalf("missed typecheck: %+v", n) 483 } 484 485 if n.Op == ONAME && n.Class() == PAUTOHEAP { 486 nn := nod(OIND, n.Name.Param.Heapaddr, nil) 487 nn = typecheck(nn, Erv) 488 nn = walkexpr(nn, init) 489 nn.Left.SetNonNil(true) 490 return nn 491 } 492 493 opswitch: 494 switch n.Op { 495 default: 496 Dump("walk", n) 497 Fatalf("walkexpr: switch 1 unknown op %+S", n) 498 499 case ONONAME, OINDREGSP, OEMPTY, OGETG: 500 501 case OTYPE, ONAME, OLITERAL: 502 // TODO(mdempsky): Just return n; see discussion on CL 38655. 503 // Perhaps refactor to use Node.mayBeShared for these instead. 504 // If these return early, make sure to still call 505 // stringsym for constant strings. 506 507 case ONOT, OMINUS, OPLUS, OCOM, OREAL, OIMAG, ODOTMETH, ODOTINTER, 508 OIND, OSPTR, OITAB, OIDATA, OADDR: 509 n.Left = walkexpr(n.Left, init) 510 511 case OEFACE, OAND, OSUB, OMUL, OLT, OLE, OGE, OGT, OADD, OOR, OXOR: 512 n.Left = walkexpr(n.Left, init) 513 n.Right = walkexpr(n.Right, init) 514 515 case ODOT: 516 usefield(n) 517 n.Left = walkexpr(n.Left, init) 518 519 case ODOTTYPE, ODOTTYPE2: 520 n.Left = walkexpr(n.Left, init) 521 // Set up interface type addresses for back end. 522 n.Right = typename(n.Type) 523 if n.Op == ODOTTYPE { 524 n.Right.Right = typename(n.Left.Type) 525 } 526 if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 527 n.List.Set1(itabname(n.Type, n.Left.Type)) 528 } 529 530 case ODOTPTR: 531 usefield(n) 532 if n.Op == ODOTPTR && n.Left.Type.Elem().Width == 0 { 533 // No actual copy will be generated, so emit an explicit nil check. 
534 n.Left = cheapexpr(n.Left, init) 535 536 checknil(n.Left, init) 537 } 538 539 n.Left = walkexpr(n.Left, init) 540 541 case OLEN, OCAP: 542 n.Left = walkexpr(n.Left, init) 543 544 // replace len(*[10]int) with 10. 545 // delayed until now to preserve side effects. 546 t := n.Left.Type 547 548 if t.IsPtr() { 549 t = t.Elem() 550 } 551 if t.IsArray() { 552 safeexpr(n.Left, init) 553 nodconst(n, n.Type, t.NumElem()) 554 n.SetTypecheck(1) 555 } 556 557 case OLSH, ORSH: 558 n.Left = walkexpr(n.Left, init) 559 n.Right = walkexpr(n.Right, init) 560 t := n.Left.Type 561 n.SetBounded(bounded(n.Right, 8*t.Width)) 562 if Debug['m'] != 0 && n.Etype != 0 && !Isconst(n.Right, CTINT) { 563 Warn("shift bounds check elided") 564 } 565 566 case OCOMPLEX: 567 // Use results from call expression as arguments for complex. 568 if n.Left == nil && n.Right == nil { 569 n.Left = n.List.First() 570 n.Right = n.List.Second() 571 } 572 n.Left = walkexpr(n.Left, init) 573 n.Right = walkexpr(n.Right, init) 574 575 case OEQ, ONE: 576 n.Left = walkexpr(n.Left, init) 577 n.Right = walkexpr(n.Right, init) 578 579 // Disable safemode while compiling this code: the code we 580 // generate internally can refer to unsafe.Pointer. 581 // In this case it can happen if we need to generate an == 582 // for a struct containing a reflect.Value, which itself has 583 // an unexported field of type unsafe.Pointer. 584 old_safemode := safemode 585 safemode = false 586 n = walkcompare(n, init) 587 safemode = old_safemode 588 589 case OANDAND, OOROR: 590 n.Left = walkexpr(n.Left, init) 591 592 // cannot put side effects from n.Right on init, 593 // because they cannot run before n.Left is checked. 594 // save elsewhere and store on the eventual n.Right. 
595 var ll Nodes 596 597 n.Right = walkexpr(n.Right, &ll) 598 n.Right = addinit(n.Right, ll.Slice()) 599 n = walkinrange(n, init) 600 601 case OPRINT, OPRINTN: 602 walkexprlist(n.List.Slice(), init) 603 n = walkprint(n, init) 604 605 case OPANIC: 606 n = mkcall("gopanic", nil, init, n.Left) 607 608 case ORECOVER: 609 n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil)) 610 611 case OCLOSUREVAR, OCFUNC: 612 n.SetAddable(true) 613 614 case OCALLINTER: 615 usemethod(n) 616 t := n.Left.Type 617 if n.List.Len() != 0 && n.List.First().Op == OAS { 618 break 619 } 620 n.Left = walkexpr(n.Left, init) 621 walkexprlist(n.List.Slice(), init) 622 ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 623 n.List.Set(reorder1(ll)) 624 625 case OCALLFUNC: 626 if n.Left.Op == OCLOSURE { 627 // Transform direct call of a closure to call of a normal function. 628 // transformclosure already did all preparation work. 629 630 // Prepend captured variables to argument list. 631 n.List.Prepend(n.Left.Func.Enter.Slice()...) 632 633 n.Left.Func.Enter.Set(nil) 634 635 // Replace OCLOSURE with ONAME/PFUNC. 636 n.Left = n.Left.Func.Closure.Func.Nname 637 638 // Update type of OCALLFUNC node. 639 // Output arguments had not changed, but their offsets could. 
640 if n.Left.Type.NumResults() == 1 { 641 n.Type = n.Left.Type.Results().Field(0).Type 642 } else { 643 n.Type = n.Left.Type.Results() 644 } 645 } 646 647 t := n.Left.Type 648 if n.List.Len() != 0 && n.List.First().Op == OAS { 649 break 650 } 651 652 n.Left = walkexpr(n.Left, init) 653 walkexprlist(n.List.Slice(), init) 654 655 ll := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 656 n.List.Set(reorder1(ll)) 657 658 case OCALLMETH: 659 t := n.Left.Type 660 if n.List.Len() != 0 && n.List.First().Op == OAS { 661 break 662 } 663 n.Left = walkexpr(n.Left, init) 664 walkexprlist(n.List.Slice(), init) 665 ll := ascompatte(n, false, t.Recvs(), []*Node{n.Left.Left}, 0, init) 666 lr := ascompatte(n, n.Isddd(), t.Params(), n.List.Slice(), 0, init) 667 ll = append(ll, lr...) 668 n.Left.Left = nil 669 updateHasCall(n.Left) 670 n.List.Set(reorder1(ll)) 671 672 case OAS: 673 init.AppendNodes(&n.Ninit) 674 675 n.Left = walkexpr(n.Left, init) 676 n.Left = safeexpr(n.Left, init) 677 678 if oaslit(n, init) { 679 break 680 } 681 682 if n.Right == nil { 683 // TODO(austin): Check all "implicit zeroing" 684 break 685 } 686 687 if !instrumenting && iszero(n.Right) { 688 break 689 } 690 691 switch n.Right.Op { 692 default: 693 n.Right = walkexpr(n.Right, init) 694 695 case ORECV: 696 // x = <-c; n.Left is x, n.Right.Left is c. 697 // orderstmt made sure x is addressable. 698 n.Right.Left = walkexpr(n.Right.Left, init) 699 700 n1 := nod(OADDR, n.Left, nil) 701 r := n.Right.Left // the channel 702 n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1) 703 n = walkexpr(n, init) 704 break opswitch 705 706 case OAPPEND: 707 // x = append(...) 708 r := n.Right 709 if r.Type.Elem().NotInHeap() { 710 yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem()) 711 } 712 if r.Isddd() { 713 r = appendslice(r, init) // also works for append(slice, string). 
714 } else { 715 r = walkappend(r, init, n) 716 } 717 n.Right = r 718 if r.Op == OAPPEND { 719 // Left in place for back end. 720 // Do not add a new write barrier. 721 // Set up address of type for back end. 722 r.Left = typename(r.Type.Elem()) 723 break opswitch 724 } 725 // Otherwise, lowered for race detector. 726 // Treat as ordinary assignment. 727 } 728 729 if n.Left != nil && n.Right != nil { 730 n = convas(n, init) 731 } 732 733 case OAS2: 734 init.AppendNodes(&n.Ninit) 735 walkexprlistsafe(n.List.Slice(), init) 736 walkexprlistsafe(n.Rlist.Slice(), init) 737 ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init) 738 ll = reorder3(ll) 739 n = liststmt(ll) 740 741 // a,b,... = fn() 742 case OAS2FUNC: 743 init.AppendNodes(&n.Ninit) 744 745 r := n.Rlist.First() 746 walkexprlistsafe(n.List.Slice(), init) 747 r = walkexpr(r, init) 748 749 if isIntrinsicCall(r) { 750 n.Rlist.Set1(r) 751 break 752 } 753 init.Append(r) 754 755 ll := ascompatet(n.List, r.Type) 756 n = liststmt(ll) 757 758 // x, y = <-c 759 // orderstmt made sure x is addressable. 
760 case OAS2RECV: 761 init.AppendNodes(&n.Ninit) 762 763 r := n.Rlist.First() 764 walkexprlistsafe(n.List.Slice(), init) 765 r.Left = walkexpr(r.Left, init) 766 var n1 *Node 767 if isblank(n.List.First()) { 768 n1 = nodnil() 769 } else { 770 n1 = nod(OADDR, n.List.First(), nil) 771 } 772 n1.Etype = 1 // addr does not escape 773 fn := chanfn("chanrecv2", 2, r.Left.Type) 774 ok := n.List.Second() 775 call := mkcall1(fn, ok.Type, init, r.Left, n1) 776 n = nod(OAS, ok, call) 777 n = typecheck(n, Etop) 778 779 // a,b = m[i] 780 case OAS2MAPR: 781 init.AppendNodes(&n.Ninit) 782 783 r := n.Rlist.First() 784 walkexprlistsafe(n.List.Slice(), init) 785 r.Left = walkexpr(r.Left, init) 786 r.Right = walkexpr(r.Right, init) 787 t := r.Left.Type 788 789 fast := mapfast(t) 790 var key *Node 791 if fast != mapslow { 792 // fast versions take key by value 793 key = r.Right 794 } else { 795 // standard version takes key by reference 796 // orderexpr made sure key is addressable. 797 key = nod(OADDR, r.Right, nil) 798 } 799 800 // from: 801 // a,b = m[i] 802 // to: 803 // var,b = mapaccess2*(t, m, i) 804 // a = *var 805 a := n.List.First() 806 807 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 808 fn := mapfn(mapaccess2[fast], t) 809 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key) 810 } else { 811 fn := mapfn("mapaccess2_fat", t) 812 z := zeroaddr(w) 813 r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z) 814 } 815 816 // mapaccess2* returns a typed bool, but due to spec changes, 817 // the boolean result of i.(T) is now untyped so we make it the 818 // same type as the variable on the lhs. 
819 if ok := n.List.Second(); !isblank(ok) && ok.Type.IsBoolean() { 820 r.Type.Field(1).Type = ok.Type 821 } 822 n.Rlist.Set1(r) 823 n.Op = OAS2FUNC 824 825 // don't generate a = *var if a is _ 826 if !isblank(a) { 827 var_ := temp(types.NewPtr(t.Val())) 828 var_.SetTypecheck(1) 829 var_.SetNonNil(true) // mapaccess always returns a non-nil pointer 830 n.List.SetFirst(var_) 831 n = walkexpr(n, init) 832 init.Append(n) 833 n = nod(OAS, a, nod(OIND, var_, nil)) 834 } 835 836 n = typecheck(n, Etop) 837 n = walkexpr(n, init) 838 839 case ODELETE: 840 init.AppendNodes(&n.Ninit) 841 map_ := n.List.First() 842 key := n.List.Second() 843 map_ = walkexpr(map_, init) 844 key = walkexpr(key, init) 845 846 t := map_.Type 847 fast := mapfast(t) 848 if fast == mapslow { 849 // orderstmt made sure key is addressable. 850 key = nod(OADDR, key, nil) 851 } 852 n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key) 853 854 case OAS2DOTTYPE: 855 walkexprlistsafe(n.List.Slice(), init) 856 n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init)) 857 858 case OCONVIFACE: 859 n.Left = walkexpr(n.Left, init) 860 861 // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped. 862 if isdirectiface(n.Left.Type) { 863 var t *Node 864 if n.Type.IsEmptyInterface() { 865 t = typename(n.Left.Type) 866 } else { 867 t = itabname(n.Left.Type, n.Type) 868 } 869 l := nod(OEFACE, t, n.Left) 870 l.Type = n.Type 871 l.SetTypecheck(n.Typecheck()) 872 n = l 873 break 874 } 875 876 if staticbytes == nil { 877 staticbytes = newname(Runtimepkg.Lookup("staticbytes")) 878 staticbytes.SetClass(PEXTERN) 879 staticbytes.Type = types.NewArray(types.Types[TUINT8], 256) 880 zerobase = newname(Runtimepkg.Lookup("zerobase")) 881 zerobase.SetClass(PEXTERN) 882 zerobase.Type = types.Types[TUINTPTR] 883 } 884 885 // Optimize convT2{E,I} for many cases in which T is not pointer-shaped, 886 // by using an existing addressable value identical to n.Left 887 // or creating one on the stack. 
888 var value *Node 889 switch { 890 case n.Left.Type.Size() == 0: 891 // n.Left is zero-sized. Use zerobase. 892 cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246. 893 value = zerobase 894 case n.Left.Type.IsBoolean() || (n.Left.Type.Size() == 1 && n.Left.Type.IsInteger()): 895 // n.Left is a bool/byte. Use staticbytes[n.Left]. 896 n.Left = cheapexpr(n.Left, init) 897 value = nod(OINDEX, staticbytes, byteindex(n.Left)) 898 value.SetBounded(true) 899 case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly(): 900 // n.Left is a readonly global; use it directly. 901 value = n.Left 902 case !n.Left.Type.IsInterface() && n.Esc == EscNone && n.Left.Type.Width <= 1024: 903 // n.Left does not escape. Use a stack temporary initialized to n.Left. 904 value = temp(n.Left.Type) 905 init.Append(typecheck(nod(OAS, value, n.Left), Etop)) 906 } 907 908 if value != nil { 909 // Value is identical to n.Left. 910 // Construct the interface directly: {type/itab, &value}. 911 var t *Node 912 if n.Type.IsEmptyInterface() { 913 t = typename(n.Left.Type) 914 } else { 915 t = itabname(n.Left.Type, n.Type) 916 } 917 l := nod(OEFACE, t, typecheck(nod(OADDR, value, nil), Erv)) 918 l.Type = n.Type 919 l.SetTypecheck(n.Typecheck()) 920 n = l 921 break 922 } 923 924 // Implement interface to empty interface conversion. 925 // tmp = i.itab 926 // if tmp != nil { 927 // tmp = tmp.type 928 // } 929 // e = iface{tmp, i.data} 930 if n.Type.IsEmptyInterface() && n.Left.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() { 931 // Evaluate the input interface. 932 c := temp(n.Left.Type) 933 init.Append(nod(OAS, c, n.Left)) 934 935 // Get the itab out of the interface. 936 tmp := temp(types.NewPtr(types.Types[TUINT8])) 937 init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv))) 938 939 // Get the type out of the itab. 
940 nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), Erv), nil) 941 nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp))) 942 init.Append(nif) 943 944 // Build the result. 945 e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8]))) 946 e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE. 947 e.SetTypecheck(1) 948 n = e 949 break 950 } 951 952 var ll []*Node 953 if n.Type.IsEmptyInterface() { 954 if !n.Left.Type.IsInterface() { 955 ll = append(ll, typename(n.Left.Type)) 956 } 957 } else { 958 if n.Left.Type.IsInterface() { 959 ll = append(ll, typename(n.Type)) 960 } else { 961 ll = append(ll, itabname(n.Left.Type, n.Type)) 962 } 963 } 964 965 if n.Left.Type.IsInterface() { 966 ll = append(ll, n.Left) 967 } else { 968 // regular types are passed by reference to avoid C vararg calls 969 // orderexpr arranged for n.Left to be a temporary for all 970 // the conversions it could see. comparison of an interface 971 // with a non-interface, especially in a switch on interface value 972 // with non-interface cases, is not visible to orderstmt, so we 973 // have to fall back on allocating a temp here. 
974 if islvalue(n.Left) { 975 ll = append(ll, nod(OADDR, n.Left, nil)) 976 } else { 977 ll = append(ll, nod(OADDR, copyexpr(n.Left, n.Left.Type, init), nil)) 978 } 979 dowidth(n.Left.Type) 980 } 981 982 fn := syslook(convFuncName(n.Left.Type, n.Type)) 983 fn = substArgTypes(fn, n.Left.Type, n.Type) 984 dowidth(fn.Type) 985 n = nod(OCALL, fn, nil) 986 n.List.Set(ll) 987 n = typecheck(n, Erv) 988 n = walkexpr(n, init) 989 990 case OCONV, OCONVNOP: 991 switch thearch.LinkArch.Family { 992 case sys.ARM, sys.MIPS: 993 if n.Left.Type.IsFloat() { 994 switch n.Type.Etype { 995 case TINT64: 996 n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 997 break opswitch 998 case TUINT64: 999 n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1000 break opswitch 1001 } 1002 } 1003 1004 if n.Type.IsFloat() { 1005 switch n.Left.Type.Etype { 1006 case TINT64: 1007 n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type) 1008 break opswitch 1009 case TUINT64: 1010 n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type) 1011 break opswitch 1012 } 1013 } 1014 1015 case sys.I386: 1016 if n.Left.Type.IsFloat() { 1017 switch n.Type.Etype { 1018 case TINT64: 1019 n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1020 break opswitch 1021 case TUINT64: 1022 n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1023 break opswitch 1024 case TUINT32, TUINT, TUINTPTR: 1025 n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64])) 1026 break opswitch 1027 } 1028 } 1029 if n.Type.IsFloat() { 1030 switch n.Left.Type.Etype { 1031 case TINT64: 1032 n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type) 1033 break opswitch 1034 case TUINT64: 1035 n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, 
conv(n.Left, types.Types[TUINT64])), n.Type) 1036 break opswitch 1037 case TUINT32, TUINT, TUINTPTR: 1038 n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type) 1039 break opswitch 1040 } 1041 } 1042 } 1043 1044 n.Left = walkexpr(n.Left, init) 1045 1046 case OANDNOT: 1047 n.Left = walkexpr(n.Left, init) 1048 n.Op = OAND 1049 n.Right = nod(OCOM, n.Right, nil) 1050 n.Right = typecheck(n.Right, Erv) 1051 n.Right = walkexpr(n.Right, init) 1052 1053 case ODIV, OMOD: 1054 n.Left = walkexpr(n.Left, init) 1055 n.Right = walkexpr(n.Right, init) 1056 1057 // rewrite complex div into function call. 1058 et := n.Left.Type.Etype 1059 1060 if isComplex[et] && n.Op == ODIV { 1061 t := n.Type 1062 n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128])) 1063 n = conv(n, t) 1064 break 1065 } 1066 1067 // Nothing to do for float divisions. 1068 if isFloat[et] { 1069 break 1070 } 1071 1072 // rewrite 64-bit div and mod on 32-bit architectures. 1073 // TODO: Remove this code once we can introduce 1074 // runtime calls late in SSA processing. 1075 if Widthreg < 8 && (et == TINT64 || et == TUINT64) { 1076 if n.Right.Op == OLITERAL { 1077 // Leave div/mod by constant powers of 2. 1078 // The SSA backend will handle those. 
1079 switch et { 1080 case TINT64: 1081 c := n.Right.Int64() 1082 if c < 0 { 1083 c = -c 1084 } 1085 if c != 0 && c&(c-1) == 0 { 1086 break opswitch 1087 } 1088 case TUINT64: 1089 c := uint64(n.Right.Int64()) 1090 if c != 0 && c&(c-1) == 0 { 1091 break opswitch 1092 } 1093 } 1094 } 1095 var fn string 1096 if et == TINT64 { 1097 fn = "int64" 1098 } else { 1099 fn = "uint64" 1100 } 1101 if n.Op == ODIV { 1102 fn += "div" 1103 } else { 1104 fn += "mod" 1105 } 1106 n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et])) 1107 } 1108 1109 case OINDEX: 1110 n.Left = walkexpr(n.Left, init) 1111 1112 // save the original node for bounds checking elision. 1113 // If it was a ODIV/OMOD walk might rewrite it. 1114 r := n.Right 1115 1116 n.Right = walkexpr(n.Right, init) 1117 1118 // if range of type cannot exceed static array bound, 1119 // disable bounds check. 1120 if n.Bounded() { 1121 break 1122 } 1123 t := n.Left.Type 1124 if t != nil && t.IsPtr() { 1125 t = t.Elem() 1126 } 1127 if t.IsArray() { 1128 n.SetBounded(bounded(r, t.NumElem())) 1129 if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) { 1130 Warn("index bounds check elided") 1131 } 1132 if smallintconst(n.Right) && !n.Bounded() { 1133 yyerror("index out of bounds") 1134 } 1135 } else if Isconst(n.Left, CTSTR) { 1136 n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string))))) 1137 if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) { 1138 Warn("index bounds check elided") 1139 } 1140 if smallintconst(n.Right) && !n.Bounded() { 1141 yyerror("index out of bounds") 1142 } 1143 } 1144 1145 if Isconst(n.Right, CTINT) { 1146 if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 { 1147 yyerror("index out of bounds") 1148 } 1149 } 1150 1151 case OINDEXMAP: 1152 // Replace m[k] with *map{access1,assign}(maptype, m, &k) 1153 n.Left = walkexpr(n.Left, init) 1154 n.Right = walkexpr(n.Right, init) 1155 map_ := n.Left 1156 key 
:= n.Right 1157 t := map_.Type 1158 if n.Etype == 1 { 1159 // This m[k] expression is on the left-hand side of an assignment. 1160 fast := mapfast(t) 1161 if fast == mapslow { 1162 // standard version takes key by reference. 1163 // orderexpr made sure key is addressable. 1164 key = nod(OADDR, key, nil) 1165 } 1166 n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key) 1167 } else { 1168 // m[k] is not the target of an assignment. 1169 fast := mapfast(t) 1170 if fast == mapslow { 1171 // standard version takes key by reference. 1172 // orderexpr made sure key is addressable. 1173 key = nod(OADDR, key, nil) 1174 } 1175 1176 if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero 1177 n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key) 1178 } else { 1179 z := zeroaddr(w) 1180 n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z) 1181 } 1182 } 1183 n.Type = types.NewPtr(t.Val()) 1184 n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers. 1185 n = nod(OIND, n, nil) 1186 n.Type = t.Val() 1187 n.SetTypecheck(1) 1188 1189 case ORECV: 1190 Fatalf("walkexpr ORECV") // should see inside OAS only 1191 1192 case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR: 1193 n.Left = walkexpr(n.Left, init) 1194 low, high, max := n.SliceBounds() 1195 low = walkexpr(low, init) 1196 if low != nil && iszero(low) { 1197 // Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k]. 1198 low = nil 1199 } 1200 high = walkexpr(high, init) 1201 max = walkexpr(max, init) 1202 n.SetSliceBounds(low, high, max) 1203 if n.Op.IsSlice3() { 1204 if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) { 1205 // Reduce x[i:j:cap(x)] to x[i:j]. 
1206 if n.Op == OSLICE3 { 1207 n.Op = OSLICE 1208 } else { 1209 n.Op = OSLICEARR 1210 } 1211 n = reduceSlice(n) 1212 } 1213 } else { 1214 n = reduceSlice(n) 1215 } 1216 1217 case ONEW: 1218 if n.Esc == EscNone { 1219 if n.Type.Elem().Width >= 1<<16 { 1220 Fatalf("large ONEW with EscNone: %v", n) 1221 } 1222 r := temp(n.Type.Elem()) 1223 r = nod(OAS, r, nil) // zero temp 1224 r = typecheck(r, Etop) 1225 init.Append(r) 1226 r = nod(OADDR, r.Left, nil) 1227 r = typecheck(r, Erv) 1228 n = r 1229 } else { 1230 n = callnew(n.Type.Elem()) 1231 } 1232 1233 case OCMPSTR: 1234 // s + "badgerbadgerbadger" == "badgerbadgerbadger" 1235 if (Op(n.Etype) == OEQ || Op(n.Etype) == ONE) && Isconst(n.Right, CTSTR) && n.Left.Op == OADDSTR && n.Left.List.Len() == 2 && Isconst(n.Left.List.Second(), CTSTR) && strlit(n.Right) == strlit(n.Left.List.Second()) { 1236 // TODO(marvin): Fix Node.EType type union. 1237 r := nod(Op(n.Etype), nod(OLEN, n.Left.List.First(), nil), nodintconst(0)) 1238 r = typecheck(r, Erv) 1239 r = walkexpr(r, init) 1240 r.Type = n.Type 1241 n = r 1242 break 1243 } 1244 1245 // Rewrite comparisons to short constant strings as length+byte-wise comparisons. 1246 var cs, ncs *Node // const string, non-const string 1247 switch { 1248 case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR): 1249 // ignore; will be constant evaluated 1250 case Isconst(n.Left, CTSTR): 1251 cs = n.Left 1252 ncs = n.Right 1253 case Isconst(n.Right, CTSTR): 1254 cs = n.Right 1255 ncs = n.Left 1256 } 1257 if cs != nil { 1258 cmp := Op(n.Etype) 1259 // maxRewriteLen was chosen empirically. 1260 // It is the value that minimizes cmd/go file size 1261 // across most architectures. 1262 // See the commit description for CL 26758 for details. 1263 maxRewriteLen := 6 1264 // Some architectures can load unaligned byte sequence as 1 word. 1265 // So we can cover longer strings with the same amount of code. 
1266 canCombineLoads := false 1267 combine64bit := false 1268 // TODO: does this improve performance on any other architectures? 1269 switch thearch.LinkArch.Family { 1270 case sys.AMD64: 1271 // Larger compare require longer instructions, so keep this reasonably low. 1272 // Data from CL 26758 shows that longer strings are rare. 1273 // If we really want we can do 16 byte SSE comparisons in the future. 1274 maxRewriteLen = 16 1275 canCombineLoads = true 1276 combine64bit = true 1277 case sys.I386: 1278 maxRewriteLen = 8 1279 canCombineLoads = true 1280 } 1281 var and Op 1282 switch cmp { 1283 case OEQ: 1284 and = OANDAND 1285 case ONE: 1286 and = OOROR 1287 default: 1288 // Don't do byte-wise comparisons for <, <=, etc. 1289 // They're fairly complicated. 1290 // Length-only checks are ok, though. 1291 maxRewriteLen = 0 1292 } 1293 if s := cs.Val().U.(string); len(s) <= maxRewriteLen { 1294 if len(s) > 0 { 1295 ncs = safeexpr(ncs, init) 1296 } 1297 // TODO(marvin): Fix Node.EType type union. 1298 r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s)))) 1299 remains := len(s) 1300 for i := 0; remains > 0; { 1301 if remains == 1 || !canCombineLoads { 1302 cb := nodintconst(int64(s[i])) 1303 ncb := nod(OINDEX, ncs, nodintconst(int64(i))) 1304 r = nod(and, r, nod(cmp, ncb, cb)) 1305 remains-- 1306 i++ 1307 continue 1308 } 1309 var step int 1310 var convType *types.Type 1311 switch { 1312 case remains >= 8 && combine64bit: 1313 convType = types.Types[TINT64] 1314 step = 8 1315 case remains >= 4: 1316 convType = types.Types[TUINT32] 1317 step = 4 1318 case remains >= 2: 1319 convType = types.Types[TUINT16] 1320 step = 2 1321 } 1322 ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i))) 1323 ncsubstr = conv(ncsubstr, convType) 1324 csubstr := int64(s[i]) 1325 // Calculate large constant from bytes as sequence of shifts and ors. 1326 // Like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ... 1327 // ssa will combine this into a single large load. 
1328 for offset := 1; offset < step; offset++ { 1329 b := nod(OINDEX, ncs, nodintconst(int64(i+offset))) 1330 b = conv(b, convType) 1331 b = nod(OLSH, b, nodintconst(int64(8*offset))) 1332 ncsubstr = nod(OOR, ncsubstr, b) 1333 csubstr = csubstr | int64(s[i+offset])<<uint8(8*offset) 1334 } 1335 csubstrPart := nodintconst(csubstr) 1336 // Compare "step" bytes as once 1337 r = nod(and, r, nod(cmp, csubstrPart, ncsubstr)) 1338 remains -= step 1339 i += step 1340 } 1341 r = typecheck(r, Erv) 1342 r = walkexpr(r, init) 1343 r.Type = n.Type 1344 n = r 1345 break 1346 } 1347 } 1348 1349 var r *Node 1350 // TODO(marvin): Fix Node.EType type union. 1351 if Op(n.Etype) == OEQ || Op(n.Etype) == ONE { 1352 // prepare for rewrite below 1353 n.Left = cheapexpr(n.Left, init) 1354 n.Right = cheapexpr(n.Right, init) 1355 1356 lstr := conv(n.Left, types.Types[TSTRING]) 1357 rstr := conv(n.Right, types.Types[TSTRING]) 1358 lptr := nod(OSPTR, lstr, nil) 1359 rptr := nod(OSPTR, rstr, nil) 1360 llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR]) 1361 rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR]) 1362 1363 fn := syslook("memequal") 1364 fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8]) 1365 r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen) 1366 1367 // quick check of len before full compare for == or !=. 1368 // memequal then tests equality up to length len. 1369 // TODO(marvin): Fix Node.EType type union. 
1370 if Op(n.Etype) == OEQ { 1371 // len(left) == len(right) && memequal(left, right, len) 1372 r = nod(OANDAND, nod(OEQ, llen, rlen), r) 1373 } else { 1374 // len(left) != len(right) || !memequal(left, right, len) 1375 r = nod(ONOT, r, nil) 1376 r = nod(OOROR, nod(ONE, llen, rlen), r) 1377 } 1378 1379 r = typecheck(r, Erv) 1380 r = walkexpr(r, nil) 1381 } else { 1382 // sys_cmpstring(s1, s2) :: 0 1383 r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING])) 1384 // TODO(marvin): Fix Node.EType type union. 1385 r = nod(Op(n.Etype), r, nodintconst(0)) 1386 } 1387 1388 r = typecheck(r, Erv) 1389 if !n.Type.IsBoolean() { 1390 Fatalf("cmp %v", n.Type) 1391 } 1392 r.Type = n.Type 1393 n = r 1394 1395 case OADDSTR: 1396 n = addstr(n, init) 1397 1398 case OAPPEND: 1399 // order should make sure we only see OAS(node, OAPPEND), which we handle above. 1400 Fatalf("append outside assignment") 1401 1402 case OCOPY: 1403 n = copyany(n, init, instrumenting && !compiling_runtime) 1404 1405 // cannot use chanfn - closechan takes any, not chan any 1406 case OCLOSE: 1407 fn := syslook("closechan") 1408 1409 fn = substArgTypes(fn, n.Left.Type) 1410 n = mkcall1(fn, nil, init, n.Left) 1411 1412 case OMAKECHAN: 1413 // When size fits into int, use makechan instead of 1414 // makechan64, which is faster and shorter on 32 bit platforms. 1415 size := n.Left 1416 fnname := "makechan64" 1417 argtype := types.Types[TINT64] 1418 1419 // Type checking guarantees that TIDEAL size is positive and fits in an int. 1420 // The case of size overflow when converting TUINT or TUINTPTR to TINT 1421 // will be handled by the negative range checks in makechan during runtime. 
1422 if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 { 1423 fnname = "makechan" 1424 argtype = types.Types[TINT] 1425 } 1426 1427 n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype)) 1428 1429 case OMAKEMAP: 1430 t := n.Type 1431 hmapType := hmap(t) 1432 hint := n.Left 1433 1434 // var h *hmap 1435 var h *Node 1436 if n.Esc == EscNone { 1437 // Allocate hmap on stack. 1438 1439 // var hv hmap 1440 hv := temp(hmapType) 1441 zero := nod(OAS, hv, nil) 1442 zero = typecheck(zero, Etop) 1443 init.Append(zero) 1444 // h = &hv 1445 h = nod(OADDR, hv, nil) 1446 1447 // Allocate one bucket pointed to by hmap.buckets on stack if hint 1448 // is not larger than BUCKETSIZE. In case hint is larger than 1449 // BUCKETSIZE runtime.makemap will allocate the buckets on the heap. 1450 // Maximum key and value size is 128 bytes, larger objects 1451 // are stored with an indirection. So max bucket size is 2048+eps. 1452 if !Isconst(hint, CTINT) || 1453 !(hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) > 0) { 1454 // var bv bmap 1455 bv := temp(bmap(t)) 1456 1457 zero = nod(OAS, bv, nil) 1458 zero = typecheck(zero, Etop) 1459 init.Append(zero) 1460 1461 // b = &bv 1462 b := nod(OADDR, bv, nil) 1463 1464 // h.buckets = b 1465 bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap 1466 na := nod(OAS, nodSym(ODOT, h, bsym), b) 1467 na = typecheck(na, Etop) 1468 init.Append(na) 1469 } 1470 } 1471 1472 if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 { 1473 // Handling make(map[any]any) and 1474 // make(map[any]any, hint) where hint <= BUCKETSIZE 1475 // special allows for faster map initialization and 1476 // improves binary size by using calls with fewer arguments. 1477 // For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false 1478 // and no buckets will be allocated by makemap. Therefore, 1479 // no buckets need to be allocated in this code path. 
1480 if n.Esc == EscNone { 1481 // Only need to initialize h.hash0 since 1482 // hmap h has been allocated on the stack already. 1483 // h.hash0 = fastrand() 1484 rand := mkcall("fastrand", types.Types[TUINT32], init) 1485 hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap 1486 a := nod(OAS, nodSym(ODOT, h, hashsym), rand) 1487 a = typecheck(a, Etop) 1488 a = walkexpr(a, init) 1489 init.Append(a) 1490 n = nod(OCONVNOP, h, nil) 1491 n.Type = t 1492 n = typecheck(n, Erv) 1493 } else { 1494 // Call runtime.makehmap to allocate an 1495 // hmap on the heap and initialize hmap's hash0 field. 1496 fn := syslook("makemap_small") 1497 fn = substArgTypes(fn, t.Key(), t.Val()) 1498 n = mkcall1(fn, n.Type, init) 1499 } 1500 } else { 1501 if n.Esc != EscNone { 1502 h = nodnil() 1503 } 1504 // Map initialization with a variable or large hint is 1505 // more complicated. We therefore generate a call to 1506 // runtime.makemap to intialize hmap and allocate the 1507 // map buckets. 1508 1509 // When hint fits into int, use makemap instead of 1510 // makemap64, which is faster and shorter on 32 bit platforms. 1511 fnname := "makemap64" 1512 argtype := types.Types[TINT64] 1513 1514 // Type checking guarantees that TIDEAL hint is positive and fits in an int. 1515 // See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function. 1516 // The case of hint overflow when converting TUINT or TUINTPTR to TINT 1517 // will be handled by the negative range checks in makemap during runtime. 
1518 if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 { 1519 fnname = "makemap" 1520 argtype = types.Types[TINT] 1521 } 1522 1523 fn := syslook(fnname) 1524 fn = substArgTypes(fn, hmapType, t.Key(), t.Val()) 1525 n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h) 1526 } 1527 1528 case OMAKESLICE: 1529 l := n.Left 1530 r := n.Right 1531 if r == nil { 1532 r = safeexpr(l, init) 1533 l = r 1534 } 1535 t := n.Type 1536 if n.Esc == EscNone { 1537 if !isSmallMakeSlice(n) { 1538 Fatalf("non-small OMAKESLICE with EscNone: %v", n) 1539 } 1540 // var arr [r]T 1541 // n = arr[:l] 1542 t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T 1543 var_ := temp(t) 1544 a := nod(OAS, var_, nil) // zero temp 1545 a = typecheck(a, Etop) 1546 init.Append(a) 1547 r := nod(OSLICE, var_, nil) // arr[:l] 1548 r.SetSliceBounds(nil, l, nil) 1549 r = conv(r, n.Type) // in case n.Type is named. 1550 r = typecheck(r, Erv) 1551 r = walkexpr(r, init) 1552 n = r 1553 } else { 1554 // n escapes; set up a call to makeslice. 1555 // When len and cap can fit into int, use makeslice instead of 1556 // makeslice64, which is faster and shorter on 32 bit platforms. 1557 1558 if t.Elem().NotInHeap() { 1559 yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem()) 1560 } 1561 1562 len, cap := l, r 1563 1564 fnname := "makeslice64" 1565 argtype := types.Types[TINT64] 1566 1567 // Type checking guarantees that TIDEAL len/cap are positive and fit in an int. 1568 // The case of len or cap overflow when converting TUINT or TUINTPTR to TINT 1569 // will be handled by the negative range checks in makeslice during runtime. 
1570 if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) && 1571 (cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) { 1572 fnname = "makeslice" 1573 argtype = types.Types[TINT] 1574 } 1575 1576 fn := syslook(fnname) 1577 fn = substArgTypes(fn, t.Elem()) // any-1 1578 n = mkcall1(fn, t, init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype)) 1579 } 1580 1581 case ORUNESTR: 1582 a := nodnil() 1583 if n.Esc == EscNone { 1584 t := types.NewArray(types.Types[TUINT8], 4) 1585 var_ := temp(t) 1586 a = nod(OADDR, var_, nil) 1587 } 1588 1589 // intstring(*[4]byte, rune) 1590 n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64])) 1591 1592 case OARRAYBYTESTR: 1593 a := nodnil() 1594 if n.Esc == EscNone { 1595 // Create temporary buffer for string on stack. 1596 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1597 1598 a = nod(OADDR, temp(t), nil) 1599 } 1600 1601 // slicebytetostring(*[32]byte, []byte) string; 1602 n = mkcall("slicebytetostring", n.Type, init, a, n.Left) 1603 1604 // slicebytetostringtmp([]byte) string; 1605 case OARRAYBYTESTRTMP: 1606 n.Left = walkexpr(n.Left, init) 1607 1608 if !instrumenting { 1609 // Let the backend handle OARRAYBYTESTRTMP directly 1610 // to avoid a function call to slicebytetostringtmp. 1611 break 1612 } 1613 1614 n = mkcall("slicebytetostringtmp", n.Type, init, n.Left) 1615 1616 // slicerunetostring(*[32]byte, []rune) string; 1617 case OARRAYRUNESTR: 1618 a := nodnil() 1619 1620 if n.Esc == EscNone { 1621 // Create temporary buffer for string on stack. 1622 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1623 1624 a = nod(OADDR, temp(t), nil) 1625 } 1626 1627 n = mkcall("slicerunetostring", n.Type, init, a, n.Left) 1628 1629 // stringtoslicebyte(*32[byte], string) []byte; 1630 case OSTRARRAYBYTE: 1631 a := nodnil() 1632 1633 if n.Esc == EscNone { 1634 // Create temporary buffer for slice on stack. 
1635 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 1636 1637 a = nod(OADDR, temp(t), nil) 1638 } 1639 1640 n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING])) 1641 1642 case OSTRARRAYBYTETMP: 1643 // []byte(string) conversion that creates a slice 1644 // referring to the actual string bytes. 1645 // This conversion is handled later by the backend and 1646 // is only for use by internal compiler optimizations 1647 // that know that the slice won't be mutated. 1648 // The only such case today is: 1649 // for i, c := range []byte(string) 1650 n.Left = walkexpr(n.Left, init) 1651 1652 // stringtoslicerune(*[32]rune, string) []rune 1653 case OSTRARRAYRUNE: 1654 a := nodnil() 1655 1656 if n.Esc == EscNone { 1657 // Create temporary buffer for slice on stack. 1658 t := types.NewArray(types.Types[TINT32], tmpstringbufsize) 1659 1660 a = nod(OADDR, temp(t), nil) 1661 } 1662 1663 n = mkcall("stringtoslicerune", n.Type, init, a, n.Left) 1664 1665 // ifaceeq(i1 any-1, i2 any-2) (ret bool); 1666 case OCMPIFACE: 1667 if !eqtype(n.Left.Type, n.Right.Type) { 1668 Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type) 1669 } 1670 var fn *Node 1671 if n.Left.Type.IsEmptyInterface() { 1672 fn = syslook("efaceeq") 1673 } else { 1674 fn = syslook("ifaceeq") 1675 } 1676 1677 n.Right = cheapexpr(n.Right, init) 1678 n.Left = cheapexpr(n.Left, init) 1679 lt := nod(OITAB, n.Left, nil) 1680 rt := nod(OITAB, n.Right, nil) 1681 ld := nod(OIDATA, n.Left, nil) 1682 rd := nod(OIDATA, n.Right, nil) 1683 ld.Type = types.Types[TUNSAFEPTR] 1684 rd.Type = types.Types[TUNSAFEPTR] 1685 ld.SetTypecheck(1) 1686 rd.SetTypecheck(1) 1687 call := mkcall1(fn, n.Type, init, lt, ld, rd) 1688 1689 // Check itable/type before full compare. 1690 // Note: short-circuited because order matters. 1691 // TODO(marvin): Fix Node.EType type union. 
1692 var cmp *Node 1693 if Op(n.Etype) == OEQ { 1694 cmp = nod(OANDAND, nod(OEQ, lt, rt), call) 1695 } else { 1696 cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil)) 1697 } 1698 cmp = typecheck(cmp, Erv) 1699 cmp = walkexpr(cmp, init) 1700 cmp.Type = n.Type 1701 n = cmp 1702 1703 case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT: 1704 if isStaticCompositeLiteral(n) && !canSSAType(n.Type) { 1705 // n can be directly represented in the read-only data section. 1706 // Make direct reference to the static data. See issue 12841. 1707 vstat := staticname(n.Type) 1708 vstat.Name.SetReadonly(true) 1709 fixedlit(inInitFunction, initKindStatic, n, vstat, init) 1710 n = vstat 1711 n = typecheck(n, Erv) 1712 break 1713 } 1714 var_ := temp(n.Type) 1715 anylit(n, var_, init) 1716 n = var_ 1717 1718 case OSEND: 1719 n1 := n.Right 1720 n1 = assignconv(n1, n.Left.Type.Elem(), "chan send") 1721 n1 = walkexpr(n1, init) 1722 n1 = nod(OADDR, n1, nil) 1723 n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1) 1724 1725 case OCLOSURE: 1726 n = walkclosure(n, init) 1727 1728 case OCALLPART: 1729 n = walkpartialcall(n, init) 1730 } 1731 1732 // Expressions that are constant at run time but not 1733 // considered const by the language spec are not turned into 1734 // constants until walk. For example, if n is y%1 == 0, the 1735 // walk of y%1 may have replaced it by 0. 1736 // Check whether n with its updated args is itself now a constant. 1737 t := n.Type 1738 evconst(n) 1739 if n.Type != t { 1740 Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type) 1741 } 1742 if n.Op == OLITERAL { 1743 n = typecheck(n, Erv) 1744 // Emit string symbol now to avoid emitting 1745 // any concurrently during the backend. 
		// n.Op == OLITERAL here (tail of walkexpr, whose body begins
		// earlier in the file): emit any string symbol now so none is
		// created concurrently during the backend.
		if s, ok := n.Val().U.(string); ok {
			_ = stringsym(n.Pos, s)
		}
	}

	updateHasCall(n)

	if Debug['w'] != 0 && n != nil {
		Dump("walk", n)
	}

	lineno = lno
	return n
}

// reduceSlice removes redundant bounds from the slice expression n:
// x[i:len(x)] becomes x[i:], and a bare x[:] is reduced to x itself
// (only for OSLICE/OSLICESTR, where dropping the operation cannot
// change the result type).
// The result MUST be assigned back by the caller.
// TODO(josharian): combine this with its caller and simplify
func reduceSlice(n *Node) *Node {
	low, high, max := n.SliceBounds()
	if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
		// Reduce x[i:len(x)] to x[i:].
		high = nil
	}
	n.SetSliceBounds(low, high, max)
	if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
		// Reduce x[:] to x.
		if Debug_slice > 0 {
			Warn("slice: omit slice operation")
		}
		return n.Left
	}
	return n
}

// ascompatee1 builds the single assignment l = r for ascompatee,
// running it through convas except for map assignments.
func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
	// convas will turn map assigns into function calls,
	// making it impossible for reorder3 to work.
	n := nod(OAS, l, r)

	if l.Op == OINDEXMAP {
		return n
	}

	return convas(n, init)
}

func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
	// check assign expression list to
	// an expression list. called in
	//	expr-list = expr-list

	// ensure order of evaluation for function calls
	for i := range nl {
		nl[i] = safeexpr(nl[i], init)
	}
	for i1 := range nr {
		nr[i1] = safeexpr(nr[i1], init)
	}

	var nn []*Node
	i := 0
	for ; i < len(nl); i++ {
		if i >= len(nr) {
			break
		}
		// Do not generate 'x = x' during return. See issue 4014.
		if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
			continue
		}
		nn = append(nn, ascompatee1(nl[i], nr[i], init))
	}

	// cannot happen: caller checked that lists had same length
	if i < len(nl) || i < len(nr) {
		var nln, nrn Nodes
		nln.Set(nl)
		nrn.Set(nr)
		Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
	}
	return nn
}

// l is an lv and rt is the type of an rv
// return 1 if this implies a function call
// evaluating the lv or a function call
// in the conversion of the types
//
// fncall reports whether the assignment of a value of type rt to l may
// involve a function call: l itself contains a call, l is a map index
// (map assignment lowers to a runtime call), or the types differ so a
// conversion is needed.
func fncall(l *Node, rt *types.Type) bool {
	if l.HasCall() || l.Op == OINDEXMAP {
		return true
	}
	if eqtype(l.Type, rt) {
		return false
	}
	return true
}

// check assign type list to
// an expression list. called in
//	expr-list = func()
func ascompatet(nl Nodes, nr *types.Type) []*Node {
	if nl.Len() != nr.NumFields() {
		Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
	}

	// nn holds the direct result-slot copies; mm holds the deferred
	// assignments that must run after all results have been read.
	var nn, mm Nodes
	for i, l := range nl.Slice() {
		if isblank(l) {
			continue
		}
		r := nr.Field(i)

		// any lv that causes a fn call must be
		// deferred until all the return arguments
		// have been pulled from the output arguments
		if fncall(l, r.Type) {
			tmp := temp(r.Type)
			tmp = typecheck(tmp, Erv)
			a := nod(OAS, l, tmp)
			a = convas(a, &mm)
			mm.Append(a)
			l = tmp
		}

		a := nod(OAS, l, nodarg(r, 0))
		a = convas(a, &nn)
		updateHasCall(a)
		if a.HasCall() {
			Dump("ascompatet ucount", a)
			Fatalf("ascompatet: too many function calls evaluating parameters")
		}

		nn.Append(a)
	}
	// Direct copies first, deferred (call-involving) assignments after.
	return append(nn.Slice(), mm.Slice()...)
}

// nodarg returns a Node for the function argument denoted by t,
// which is either the entire function argument or result struct (t is a struct *types.Type)
// or a specific argument (t is a *types.Field within a struct *types.Type).
//
// If fp is 0, the node is for use by a caller invoking the given
// function, preparing the arguments before the call
// or retrieving the results after the call.
// In this case, the node will correspond to an outgoing argument
// slot like 8(SP).
//
// If fp is 1, the node is for use by the function itself
// (the callee), to retrieve its arguments or write its results.
// In this case the node will be an ONAME with an appropriate
// type and offset.
func nodarg(t interface{}, fp int) *Node {
	var n *Node

	var funarg types.Funarg
	switch t := t.(type) {
	default:
		Fatalf("bad nodarg %T(%v)", t, t)

	case *types.Type:
		// Entire argument struct, not just one arg
		if !t.IsFuncArgStruct() {
			Fatalf("nodarg: bad type %v", t)
		}
		funarg = t.StructType().Funarg

		// Build fake variable name for whole arg struct.
		n = newname(lookup(".args"))
		n.Type = t
		first := t.Field(0)
		if first == nil {
			Fatalf("nodarg: bad struct")
		}
		if first.Offset == BADWIDTH {
			Fatalf("nodarg: offset not computed for %v", t)
		}
		n.Xoffset = first.Offset

	case *types.Field:
		funarg = t.Funarg
		if fp == 1 {
			// NOTE(rsc): This should be using t.Nname directly,
			// except in the case where t.Nname.Sym is the blank symbol and
			// so the assignment would be discarded during code generation.
			// In that case we need to make a new node, and there is no harm
			// in optimization passes to doing so. But otherwise we should
			// definitely be using the actual declaration and not a newly built node.
			// The extra Fatalf checks here are verifying that this is the case,
			// without changing the actual logic (at time of writing, it's getting
			// toward time for the Go 1.7 beta).
			// At some quieter time (assuming we've never seen these Fatalfs happen)
			// we could change this code to use "expect" directly.
			expect := asNode(t.Nname)
			if expect.isParamHeapCopy() {
				expect = expect.Name.Param.Stackcopy
			}

			// Prefer the actual declared parameter node from the
			// current function's declaration list when one exists.
			for _, n := range Curfn.Func.Dcl {
				if (n.Class() == PPARAM || n.Class() == PPARAMOUT) && !t.Sym.IsBlank() && n.Sym == t.Sym {
					if n != expect {
						Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
					}
					return n
				}
			}

			if !expect.Sym.IsBlank() {
				Fatalf("nodarg: did not find node in dcl list: %v", expect)
			}
		}

		// Build fake name for individual variable.
		// This is safe because if there was a real declared name
		// we'd have used it above.
		n = newname(lookup("__"))
		n.Type = t.Type
		if t.Offset == BADWIDTH {
			Fatalf("nodarg: offset not computed for %v", t)
		}
		n.Xoffset = t.Offset
		n.Orig = asNode(t.Nname)
	}

	// Rewrite argument named _ to __,
	// or else the assignment to _ will be
	// discarded during code generation.
	if isblank(n) {
		n.Sym = lookup("__")
	}

	switch fp {
	default:
		Fatalf("bad fp")

	case 0: // preparing arguments for call
		n.Op = OINDREGSP
		n.Xoffset += Ctxt.FixedFrameSize()

	case 1: // reading arguments inside call
		n.SetClass(PPARAM)
		if funarg == types.FunargResults {
			n.SetClass(PPARAMOUT)
		}
	}

	n.SetTypecheck(1)
	n.SetAddrtaken(true) // keep optimizers at bay
	return n
}

// package all the arguments that match a ... T parameter into a []T.
// mkdotargslice builds the []T composite literal (or a typed nil when
// there are no arguments) that packages args for a ...T parameter.
// ddd, if non-nil, supplies the escape class and any preallocated
// backing storage recorded by an earlier pass.
func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
	esc := uint16(EscUnknown)
	if ddd != nil {
		esc = ddd.Esc
	}

	if len(args) == 0 {
		// Zero arguments: pass a nil slice of the right type.
		n := nodnil()
		n.Type = typ
		return n
	}

	n := nod(OCOMPLIT, nil, typenod(typ))
	if ddd != nil && prealloc[ddd] != nil {
		prealloc[n] = prealloc[ddd] // temporary to use
	}
	n.List.Set(args)
	n.Esc = esc
	n = typecheck(n, Erv)
	if n.Type == nil {
		Fatalf("mkdotargslice: typecheck failed")
	}
	n = walkexpr(n, init)
	return n
}

// check assign expression list to
// a type list. called in
//	return expr-list
//	func(expr-list)
func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
	// f(g()) where g has multiple return values
	if len(rhs) == 1 && rhs[0].Type.IsFuncArgStruct() {
		// optimization - can do block copy
		if eqtypenoname(rhs[0].Type, lhs) {
			nl := nodarg(lhs, fp)
			nr := nod(OCONVNOP, rhs[0], nil)
			nr.Type = nl.Type
			n := convas(nod(OAS, nl, nr), init)
			n.SetTypecheck(1)
			return []*Node{n}
		}

		// conversions involved.
		// copy into temporaries.
		var tmps []*Node
		for _, nr := range rhs[0].Type.FieldSlice() {
			tmps = append(tmps, temp(nr.Type))
		}

		a := nod(OAS2, nil, nil)
		a.List.Set(tmps)
		a.Rlist.Set(rhs)
		a = typecheck(a, Etop)
		a = walkstmt(a)
		init.Append(a)

		rhs = tmps
	}

	// For each parameter (LHS), assign its corresponding argument (RHS).
	// If there's a ... parameter (which is only valid as the final
	// parameter) and this is not a ... call expression,
	// then assign the remaining arguments as a slice.
	var nn []*Node
	for i, nl := range lhs.FieldSlice() {
		var nr *Node
		if nl.Isddd() && !isddd {
			// Package the remaining arguments into a []T for the
			// variadic parameter.
			nr = mkdotargslice(nl.Type, rhs[i:], init, call.Right)
		} else {
			nr = rhs[i]
		}

		a := nod(OAS, nodarg(nl, fp), nr)
		a = convas(a, init)
		a.SetTypecheck(1)
		nn = append(nn, a)
	}

	return nn
}

// generate code for print
func walkprint(nn *Node, init *Nodes) *Node {
	// Hoist all the argument evaluation up before the lock.
	walkexprlistcheap(nn.List.Slice(), init)

	// For println, add " " between elements and "\n" at the end.
	if nn.Op == OPRINTN {
		s := nn.List.Slice()
		t := make([]*Node, 0, len(s)*2)
		for i, n := range s {
			if i != 0 {
				t = append(t, nodstr(" "))
			}
			t = append(t, n)
		}
		t = append(t, nodstr("\n"))
		nn.List.Set(t)
	}

	// Collapse runs of constant strings.
	s := nn.List.Slice()
	t := make([]*Node, 0, len(s))
	for i := 0; i < len(s); {
		var strs []string
		for i < len(s) && Isconst(s[i], CTSTR) {
			strs = append(strs, s[i].Val().U.(string))
			i++
		}
		if len(strs) > 0 {
			t = append(t, nodstr(strings.Join(strs, "")))
		}
		if i < len(s) {
			t = append(t, s[i])
			i++
		}
	}
	nn.List.Set(t)

	// Build the sequence of runtime print calls, bracketed by
	// printlock/printunlock.
	calls := []*Node{mkcall("printlock", nil, init)}
	for i, n := range nn.List.Slice() {
		// Give untyped constants a concrete default type before
		// choosing the runtime print routine.
		if n.Op == OLITERAL {
			switch n.Val().Ctype() {
			case CTRUNE:
				n = defaultlit(n, types.Runetype)

			case CTINT:
				n = defaultlit(n, types.Types[TINT64])

			case CTFLT:
				n = defaultlit(n, types.Types[TFLOAT64])
			}
		}

		if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
			n = defaultlit(n, types.Types[TINT64])
		}
		n = defaultlit(n, nil)
		nn.List.SetIndex(i, n)
		if n.Type == nil || n.Type.Etype == TFORW {
			continue
		}

		// Select the runtime routine by the argument's type.
		var on *Node
		switch n.Type.Etype {
		case TINTER:
			if n.Type.IsEmptyInterface() {
				on = syslook("printeface")
			} else {
				on = syslook("printiface")
			}
			on = substArgTypes(on, n.Type) // any-1
		case TPTR32, TPTR64, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
			on = syslook("printpointer")
			on = substArgTypes(on, n.Type) // any-1
		case TSLICE:
			on = syslook("printslice")
			on = substArgTypes(on, n.Type) // any-1
		case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
			// runtime.hex values print in hexadecimal.
			if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
				on = syslook("printhex")
			} else {
				on = syslook("printuint")
			}
		case TINT, TINT8, TINT16, TINT32, TINT64:
			on = syslook("printint")
		case TFLOAT32, TFLOAT64:
			on = syslook("printfloat")
		case TCOMPLEX64, TCOMPLEX128:
			on = syslook("printcomplex")
		case TBOOL:
			on = syslook("printbool")
		case TSTRING:
			// Special-case the separator and newline strings added above.
			cs := ""
			if Isconst(n, CTSTR) {
				cs = n.Val().U.(string)
			}
			switch cs {
			case " ":
				on = syslook("printsp")
			case "\n":
				on = syslook("printnl")
			default:
				on = syslook("printstring")
			}
		default:
			badtype(OPRINT, n.Type, nil)
			continue
		}

		r := nod(OCALL, on, nil)
		if params := on.Type.Params().FieldSlice(); len(params) > 0 {
			t := params[0].Type
			if !eqtype(t, n.Type) {
				n = nod(OCONV, n, nil)
				n.Type = t
			}
			r.List.Append(n)
		}
		calls = append(calls, r)
	}

	calls = append(calls, mkcall("printunlock", nil, init))

	typecheckslice(calls, Etop)
	walkexprlist(calls, init)

	// Return an empty statement carrying the calls as its init list.
	r := nod(OEMPTY, nil, nil)
	r = typecheck(r, Etop)
	r = walkexpr(r, init)
	r.Ninit.Set(calls)
	return r
}

// callnew builds a call to runtime.newobject allocating a new value of
// type t and returning a *t.
func callnew(t *types.Type) *Node {
	if t.NotInHeap() {
		yyerror("%v is go:notinheap; heap allocation disallowed", t)
	}
	dowidth(t)
	fn := syslook("newobject")
	fn = substArgTypes(fn, t)
	v := mkcall1(fn,
		types.NewPtr(t), nil, typename(t))
	v.SetNonNil(true) // newobject never returns nil
	return v
}

// iscallret reports whether n refers to a call's result area, i.e.
// its outer value is an OINDREGSP slot.
func iscallret(n *Node) bool {
	if n == nil {
		return false
	}
	n = outervalue(n)
	return n.Op == OINDREGSP
}

// isstack reports whether n refers to stack-allocated storage:
// an SP-relative slot (OINDREGSP) or a local, parameter, or result
// variable (PAUTO, PPARAM, PPARAMOUT).
func isstack(n *Node) bool {
	n = outervalue(n)

	// If n is *autotmp and autotmp = &foo, replace n with foo.
	// We introduce such temps when initializing struct literals.
	if n.Op == OIND && n.Left.Op == ONAME && n.Left.IsAutoTmp() {
		defn := n.Left.Name.Defn
		if defn != nil && defn.Op == OAS && defn.Right.Op == OADDR {
			n = defn.Right.Left
		}
	}

	switch n.Op {
	case OINDREGSP:
		return true

	case ONAME:
		switch n.Class() {
		case PAUTO, PPARAM, PPARAMOUT:
			return true
		}
	}

	return false
}

// isReflectHeaderDataField reports whether l is an expression p.Data
// where p has type reflect.SliceHeader or reflect.StringHeader.
func isReflectHeaderDataField(l *Node) bool {
	// Data fields in both header types are uintptr.
	if l.Type != types.Types[TUINTPTR] {
		return false
	}

	var tsym *types.Sym
	switch l.Op {
	case ODOT:
		tsym = l.Left.Type.Sym
	case ODOTPTR:
		tsym = l.Left.Type.Elem().Sym
	default:
		return false
	}

	if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
		return false
	}
	return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
}

// convas walks the OAS assignment n, inserting a conversion of the
// right-hand side when its type differs from the left-hand side's.
// Assignments to blank only get their RHS given a default type.
func convas(n *Node, init *Nodes) *Node {
	if n.Op != OAS {
		Fatalf("convas: not OAS %v", n.Op)
	}
	defer updateHasCall(n)

	n.SetTypecheck(1)

	if n.Left == nil || n.Right == nil {
		return n
	}

	lt := n.Left.Type
	rt := n.Right.Type
	if lt == nil || rt == nil {
		return n
	}

	if isblank(n.Left) {
		n.Right = defaultlit(n.Right, nil)
		return n
	}

	if !eqtype(lt, rt) {
		n.Right = assignconv(n.Right, lt, "assignment")
n.Right = walkexpr(n.Right, init) 2305 } 2306 dowidth(n.Right.Type) 2307 2308 return n 2309 } 2310 2311 // from ascompat[te] 2312 // evaluating actual function arguments. 2313 // f(a,b) 2314 // if there is exactly one function expr, 2315 // then it is done first. otherwise must 2316 // make temp variables 2317 func reorder1(all []*Node) []*Node { 2318 if len(all) == 1 { 2319 return all 2320 } 2321 2322 funcCalls := 0 2323 for _, n := range all { 2324 updateHasCall(n) 2325 if n.HasCall() { 2326 funcCalls++ 2327 } 2328 } 2329 if funcCalls == 0 { 2330 return all 2331 } 2332 2333 var g []*Node // fncalls assigned to tempnames 2334 var f *Node // last fncall assigned to stack 2335 var r []*Node // non fncalls and tempnames assigned to stack 2336 d := 0 2337 for _, n := range all { 2338 if !n.HasCall() { 2339 r = append(r, n) 2340 continue 2341 } 2342 2343 d++ 2344 if d == funcCalls { 2345 f = n 2346 continue 2347 } 2348 2349 // make assignment of fncall to tempname 2350 a := temp(n.Right.Type) 2351 2352 a = nod(OAS, a, n.Right) 2353 g = append(g, a) 2354 2355 // put normal arg assignment on list 2356 // with fncall replaced by tempname 2357 n.Right = a.Left 2358 2359 r = append(r, n) 2360 } 2361 2362 if f != nil { 2363 g = append(g, f) 2364 } 2365 return append(g, r...) 2366 } 2367 2368 // from ascompat[ee] 2369 // a,b = c,d 2370 // simultaneous assignment. there cannot 2371 // be later use of an earlier lvalue. 2372 // 2373 // function calls have been removed. 2374 func reorder3(all []*Node) []*Node { 2375 // If a needed expression may be affected by an 2376 // earlier assignment, make an early copy of that 2377 // expression and use the copy instead. 2378 var early []*Node 2379 2380 var mapinit Nodes 2381 for i, n := range all { 2382 l := n.Left 2383 2384 // Save subexpressions needed on left side. 2385 // Drill through non-dereferences. 
2386 for { 2387 if l.Op == ODOT || l.Op == OPAREN { 2388 l = l.Left 2389 continue 2390 } 2391 2392 if l.Op == OINDEX && l.Left.Type.IsArray() { 2393 l.Right = reorder3save(l.Right, all, i, &early) 2394 l = l.Left 2395 continue 2396 } 2397 2398 break 2399 } 2400 2401 switch l.Op { 2402 default: 2403 Fatalf("reorder3 unexpected lvalue %#v", l.Op) 2404 2405 case ONAME: 2406 break 2407 2408 case OINDEX, OINDEXMAP: 2409 l.Left = reorder3save(l.Left, all, i, &early) 2410 l.Right = reorder3save(l.Right, all, i, &early) 2411 if l.Op == OINDEXMAP { 2412 all[i] = convas(all[i], &mapinit) 2413 } 2414 2415 case OIND, ODOTPTR: 2416 l.Left = reorder3save(l.Left, all, i, &early) 2417 } 2418 2419 // Save expression on right side. 2420 all[i].Right = reorder3save(all[i].Right, all, i, &early) 2421 } 2422 2423 early = append(mapinit.Slice(), early...) 2424 return append(early, all...) 2425 } 2426 2427 // if the evaluation of *np would be affected by the 2428 // assignments in all up to but not including the ith assignment, 2429 // copy into a temporary during *early and 2430 // replace *np with that temp. 2431 // The result of reorder3save MUST be assigned back to n, e.g. 2432 // n.Left = reorder3save(n.Left, all, i, early) 2433 func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node { 2434 if !aliased(n, all, i) { 2435 return n 2436 } 2437 2438 q := temp(n.Type) 2439 q = nod(OAS, q, n) 2440 q = typecheck(q, Etop) 2441 *early = append(*early, q) 2442 return q.Left 2443 } 2444 2445 // what's the outer value that a write to n affects? 2446 // outer value means containing struct or array. 
func outervalue(n *Node) *Node {
	for {
		switch n.Op {
		case OXDOT:
			Fatalf("OXDOT in walk")
		case ODOT, OPAREN, OCONVNOP:
			// Field selection, parens, and no-op conversions do not
			// change which object is written; keep drilling.
			n = n.Left
			continue
		case OINDEX:
			// Only array indexing stays within the containing object;
			// slice indexing writes through a pointer.
			if n.Left.Type != nil && n.Left.Type.IsArray() {
				n = n.Left
				continue
			}
		}

		return n
	}
}

// Is it possible that the computation of n might be
// affected by writes in as up to but not including the ith element?
func aliased(n *Node, all []*Node, i int) bool {
	if n == nil {
		return false
	}

	// Treat all fields of a struct as referring to the whole struct.
	// We could do better but we would have to keep track of the fields.
	for n.Op == ODOT {
		n = n.Left
	}

	// Look for obvious aliasing: a variable being assigned
	// during the all list and appearing in n.
	// Also record whether there are any writes to main memory.
	// Also record whether there are any writes to variables
	// whose addresses have been taken.
	memwrite := false
	varwrite := false
	for _, an := range all[:i] {
		a := outervalue(an.Left)

		for a.Op == ODOT {
			a = a.Left
		}

		if a.Op != ONAME {
			memwrite = true
			continue
		}

		// NOTE(review): this switches on the class of n (the expression
		// being read), not a (the variable being written). Later Go
		// versions restructure this check around the written variable;
		// as written it is conservative when n is not a simple stack
		// variable — confirm before changing.
		switch n.Class() {
		default:
			varwrite = true
			continue

		case PAUTO, PPARAM, PPARAMOUT:
			if n.Addrtaken() {
				varwrite = true
				continue
			}

			if vmatch2(a, n) {
				// Direct hit.
				return true
			}
		}
	}

	// The variables being written do not appear in n.
	// However, n might refer to computed addresses
	// that are being written.

	// If no computed addresses are affected by the writes, no aliasing.
	if !memwrite && !varwrite {
		return false
	}

	// If n does not refer to computed addresses
	// (that is, if n only refers to variables whose addresses
	// have not been taken), no aliasing.
	if varexpr(n) {
		return false
	}

	// Otherwise, both the writes and n refer to computed memory addresses.
	// Assume that they might conflict.
	return true
}

// does the evaluation of n only refer to variables
// whose addresses have not been taken?
// (and no other memory)
func varexpr(n *Node) bool {
	if n == nil {
		return true
	}

	switch n.Op {
	case OLITERAL:
		return true

	case ONAME:
		switch n.Class() {
		case PAUTO, PPARAM, PPARAMOUT:
			if !n.Addrtaken() {
				return true
			}
		}

		return false

	case OADD,
		OSUB,
		OOR,
		OXOR,
		OMUL,
		ODIV,
		OMOD,
		OLSH,
		ORSH,
		OAND,
		OANDNOT,
		OPLUS,
		OMINUS,
		OCOM,
		OPAREN,
		OANDAND,
		OOROR,
		OCONV,
		OCONVNOP,
		OCONVIFACE,
		ODOTTYPE:
		return varexpr(n.Left) && varexpr(n.Right)

	case ODOT: // but not ODOTPTR
		// Should have been handled in aliased.
		Fatalf("varexpr unexpected ODOT")
	}

	// Be conservative.
	return false
}

// is the name l mentioned in r?
func vmatch2(l *Node, r *Node) bool {
	if r == nil {
		return false
	}
	switch r.Op {
	// match each right given left
	case ONAME:
		return l == r

	case OLITERAL:
		return false
	}

	if vmatch2(l, r.Left) {
		return true
	}
	if vmatch2(l, r.Right) {
		return true
	}
	for _, n := range r.List.Slice() {
		if vmatch2(l, n) {
			return true
		}
	}
	return false
}

// is any name mentioned in l also mentioned in r?
// called by sinit.go
func vmatch1(l *Node, r *Node) bool {
	// isolate all left sides
	if l == nil || r == nil {
		return false
	}
	switch l.Op {
	case ONAME:
		switch l.Class() {
		case PPARAM, PAUTO:
			break

		default:
			// assignment to non-stack variable must be
			// delayed if right has function calls.
			if r.HasCall() {
				return true
			}
		}

		return vmatch2(l, r)

	case OLITERAL:
		return false
	}

	// Recurse through l's operands and list, reporting a match if any
	// name within l occurs in r.
	if vmatch1(l.Left, r) {
		return true
	}
	if vmatch1(l.Right, r) {
		return true
	}
	for _, n := range l.List.Slice() {
		if vmatch1(n, r) {
			return true
		}
	}
	return false
}

// paramstoheap returns code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack.
func paramstoheap(params *types.Type) []*Node {
	var nn []*Node
	for _, t := range params.Fields().Slice() {
		v := asNode(t.Nname)
		if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
			v = nil
		}
		if v == nil {
			continue
		}

		// Stackcopy is non-nil only for params whose home moved to the
		// heap; the ODCL triggers the heap allocation, and PPARAM
		// values (not results) are copied in from their stack slot.
		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
			nn = append(nn, walkstmt(nod(ODCL, v, nil)))
			if stackcopy.Class() == PPARAM {
				nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), Etop)))
			}
		}
	}

	return nn
}

// zeroResults zeros the return values at the start of the function.
// We need to do this very early in the function. Defer might stop a
// panic and show the return values as they exist at the time of
// panic. For precise stacks, the garbage collector assumes results
// are always live, so we need to zero them before any allocations,
// even allocations to move params/results to the heap.
// The generated code is added to Curfn's Enter list.
func zeroResults() {
	lno := lineno
	lineno = Curfn.Pos
	for _, f := range Curfn.Type.Results().Fields().Slice() {
		if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
			// The local which points to the return value is the
			// thing that needs zeroing. This is already handled
			// by a Needzero annotation in plive.go:livenessepilogue.
			continue
		}
		// Zero the stack location containing f.
		Curfn.Func.Enter.Append(nod(OAS, nodarg(f, 1), nil))
	}
	lineno = lno
}

// returnsfromheap returns code to copy values for heap-escaped parameters
// back to the stack.
func returnsfromheap(params *types.Type) []*Node {
	var nn []*Node
	for _, t := range params.Fields().Slice() {
		v := asNode(t.Nname)
		if v == nil {
			continue
		}
		// Only result parameters are copied back out of the heap copy.
		if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
			nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), Etop)))
		}
	}

	return nn
}

// heapmoves generates code to handle migrating heap-escaped parameters
// between the stack and the heap. The generated code is added to Curfn's
// Enter and Exit lists.
func heapmoves() {
	lno := lineno
	lineno = Curfn.Pos
	nn := paramstoheap(Curfn.Type.Recvs())
	nn = append(nn, paramstoheap(Curfn.Type.Params())...)
	nn = append(nn, paramstoheap(Curfn.Type.Results())...)
	Curfn.Func.Enter.Append(nn...)
	// Exit code runs at function end; attribute it to the closing line.
	lineno = Curfn.Func.Endlineno
	Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
	lineno = lno
}

// vmkcall builds, typechecks, and walks a call to fn with arguments va,
// forcing the result type to t. fn must have TFUNC type with exactly
// len(va) parameters.
func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
	if fn.Type == nil || fn.Type.Etype != TFUNC {
		Fatalf("mkcall %v %v", fn, fn.Type)
	}

	n := fn.Type.NumParams()
	if n != len(va) {
		Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
	}

	r := nod(OCALL, fn, nil)
	r.List.Set(va)
	if fn.Type.NumResults() > 0 {
		r = typecheck(r, Erv|Efnstruct)
	} else {
		r = typecheck(r, Etop)
	}
	r = walkexpr(r, init)
	r.Type = t
	return r
}

// mkcall builds a call to the runtime function with the given name.
func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
	return vmkcall(syslook(name), t, init, args)
}

// mkcall1 builds a call to an already-looked-up function node fn.
func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
	return vmkcall(fn, t, init, args)
}

// conv converts n to type t by wrapping it in an OCONV, unless it
// already has exactly type t.
func conv(n *Node, t *types.Type) *Node {
	if eqtype(n.Type, t) {
		return n
	}
	n = nod(OCONV, n, nil)
	n.Type = t
	n = typecheck(n, Erv)
	return n
}

// byteindex converts n, which is byte-sized, to a uint8.
// We cannot use conv, because we allow converting bool to uint8 here,
// which is forbidden in user code.
2782 func byteindex(n *Node) *Node { 2783 if eqtype(n.Type, types.Types[TUINT8]) { 2784 return n 2785 } 2786 n = nod(OCONV, n, nil) 2787 n.Type = types.Types[TUINT8] 2788 n.SetTypecheck(1) 2789 return n 2790 } 2791 2792 func chanfn(name string, n int, t *types.Type) *Node { 2793 if !t.IsChan() { 2794 Fatalf("chanfn %v", t) 2795 } 2796 fn := syslook(name) 2797 switch n { 2798 default: 2799 Fatalf("chanfn %d", n) 2800 case 1: 2801 fn = substArgTypes(fn, t.Elem()) 2802 case 2: 2803 fn = substArgTypes(fn, t.Elem(), t.Elem()) 2804 } 2805 return fn 2806 } 2807 2808 func mapfn(name string, t *types.Type) *Node { 2809 if !t.IsMap() { 2810 Fatalf("mapfn %v", t) 2811 } 2812 fn := syslook(name) 2813 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key(), t.Val()) 2814 return fn 2815 } 2816 2817 func mapfndel(name string, t *types.Type) *Node { 2818 if !t.IsMap() { 2819 Fatalf("mapfn %v", t) 2820 } 2821 fn := syslook(name) 2822 fn = substArgTypes(fn, t.Key(), t.Val(), t.Key()) 2823 return fn 2824 } 2825 2826 const ( 2827 mapslow = iota 2828 mapfast32 2829 mapfast64 2830 mapfaststr 2831 nmapfast 2832 ) 2833 2834 type mapnames [nmapfast]string 2835 2836 func mkmapnames(base string) mapnames { 2837 return mapnames{base, base + "_fast32", base + "_fast64", base + "_faststr"} 2838 } 2839 2840 var mapaccess1 = mkmapnames("mapaccess1") 2841 var mapaccess2 = mkmapnames("mapaccess2") 2842 var mapassign = mkmapnames("mapassign") 2843 var mapdelete = mkmapnames("mapdelete") 2844 2845 func mapfast(t *types.Type) int { 2846 // Check ../../runtime/hashmap.go:maxValueSize before changing. 
2847 if t.Val().Width > 128 { 2848 return mapslow 2849 } 2850 switch algtype(t.Key()) { 2851 case AMEM32: 2852 return mapfast32 2853 case AMEM64: 2854 return mapfast64 2855 case ASTRING: 2856 return mapfaststr 2857 } 2858 return mapslow 2859 } 2860 2861 func writebarrierfn(name string, l *types.Type, r *types.Type) *Node { 2862 fn := syslook(name) 2863 fn = substArgTypes(fn, l, r) 2864 return fn 2865 } 2866 2867 func addstr(n *Node, init *Nodes) *Node { 2868 // orderexpr rewrote OADDSTR to have a list of strings. 2869 c := n.List.Len() 2870 2871 if c < 2 { 2872 Fatalf("addstr count %d too small", c) 2873 } 2874 2875 buf := nodnil() 2876 if n.Esc == EscNone { 2877 sz := int64(0) 2878 for _, n1 := range n.List.Slice() { 2879 if n1.Op == OLITERAL { 2880 sz += int64(len(n1.Val().U.(string))) 2881 } 2882 } 2883 2884 // Don't allocate the buffer if the result won't fit. 2885 if sz < tmpstringbufsize { 2886 // Create temporary buffer for result string on stack. 2887 t := types.NewArray(types.Types[TUINT8], tmpstringbufsize) 2888 2889 buf = nod(OADDR, temp(t), nil) 2890 } 2891 } 2892 2893 // build list of string arguments 2894 args := []*Node{buf} 2895 for _, n2 := range n.List.Slice() { 2896 args = append(args, conv(n2, types.Types[TSTRING])) 2897 } 2898 2899 var fn string 2900 if c <= 5 { 2901 // small numbers of strings use direct runtime helpers. 2902 // note: orderexpr knows this cutoff too. 2903 fn = fmt.Sprintf("concatstring%d", c) 2904 } else { 2905 // large numbers of strings are passed to the runtime as a slice. 
2906 fn = "concatstrings" 2907 2908 t := types.NewSlice(types.Types[TSTRING]) 2909 slice := nod(OCOMPLIT, nil, typenod(t)) 2910 if prealloc[n] != nil { 2911 prealloc[slice] = prealloc[n] 2912 } 2913 slice.List.Set(args[1:]) // skip buf arg 2914 args = []*Node{buf, slice} 2915 slice.Esc = EscNone 2916 } 2917 2918 cat := syslook(fn) 2919 r := nod(OCALL, cat, nil) 2920 r.List.Set(args) 2921 r = typecheck(r, Erv) 2922 r = walkexpr(r, init) 2923 r.Type = n.Type 2924 2925 return r 2926 } 2927 2928 // expand append(l1, l2...) to 2929 // init { 2930 // s := l1 2931 // n := len(s) + len(l2) 2932 // // Compare as uint so growslice can panic on overflow. 2933 // if uint(n) > uint(cap(s)) { 2934 // s = growslice(s, n) 2935 // } 2936 // s = s[:n] 2937 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 2938 // } 2939 // s 2940 // 2941 // l2 is allowed to be a string. 2942 func appendslice(n *Node, init *Nodes) *Node { 2943 walkexprlistsafe(n.List.Slice(), init) 2944 2945 // walkexprlistsafe will leave OINDEX (s[n]) alone if both s 2946 // and n are name or literal, but those may index the slice we're 2947 // modifying here. Fix explicitly. 
2948 ls := n.List.Slice() 2949 for i1, n1 := range ls { 2950 ls[i1] = cheapexpr(n1, init) 2951 } 2952 2953 l1 := n.List.First() 2954 l2 := n.List.Second() 2955 2956 var l []*Node 2957 2958 // var s []T 2959 s := temp(l1.Type) 2960 l = append(l, nod(OAS, s, l1)) // s = l1 2961 2962 // n := len(s) + len(l2) 2963 nn := temp(types.Types[TINT]) 2964 l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil)))) 2965 2966 // if uint(n) > uint(cap(s)) 2967 nif := nod(OIF, nil, nil) 2968 nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil)) 2969 nif.Left.Left.Type = types.Types[TUINT] 2970 nif.Left.Right.Type = types.Types[TUINT] 2971 2972 // instantiate growslice(Type*, []any, int) []any 2973 fn := syslook("growslice") 2974 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 2975 2976 // s = growslice(T, s, n) 2977 nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(s.Type.Elem()), s, nn))) 2978 l = append(l, nif) 2979 2980 // s = s[:n] 2981 nt := nod(OSLICE, s, nil) 2982 nt.SetSliceBounds(nil, nn, nil) 2983 nt.Etype = 1 2984 l = append(l, nod(OAS, s, nt)) 2985 2986 if l1.Type.Elem().HasHeapPointer() { 2987 // copy(s[len(l1):], l2) 2988 nptr1 := nod(OSLICE, s, nil) 2989 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 2990 nptr1.Etype = 1 2991 nptr2 := l2 2992 Curfn.Func.setWBPos(n.Pos) 2993 fn := syslook("typedslicecopy") 2994 fn = substArgTypes(fn, l1.Type, l2.Type) 2995 var ln Nodes 2996 ln.Set(l) 2997 nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2) 2998 l = append(ln.Slice(), nt) 2999 } else if instrumenting && !compiling_runtime { 3000 // rely on runtime to instrument copy. 
3001 // copy(s[len(l1):], l2) 3002 nptr1 := nod(OSLICE, s, nil) 3003 nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil) 3004 nptr1.Etype = 1 3005 nptr2 := l2 3006 3007 var ln Nodes 3008 ln.Set(l) 3009 var nt *Node 3010 if l2.Type.IsString() { 3011 fn := syslook("slicestringcopy") 3012 fn = substArgTypes(fn, l1.Type, l2.Type) 3013 nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2) 3014 } else { 3015 fn := syslook("slicecopy") 3016 fn = substArgTypes(fn, l1.Type, l2.Type) 3017 nt = mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width)) 3018 } 3019 3020 l = append(ln.Slice(), nt) 3021 } else { 3022 // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T)) 3023 nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil)) 3024 nptr1.SetBounded(true) 3025 3026 nptr1 = nod(OADDR, nptr1, nil) 3027 3028 nptr2 := nod(OSPTR, l2, nil) 3029 3030 fn := syslook("memmove") 3031 fn = substArgTypes(fn, s.Type.Elem(), s.Type.Elem()) 3032 3033 var ln Nodes 3034 ln.Set(l) 3035 nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln) 3036 3037 nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width)) 3038 nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid) 3039 l = append(ln.Slice(), nt) 3040 } 3041 3042 typecheckslice(l, Etop) 3043 walkstmtlist(l) 3044 init.Append(l...) 3045 return s 3046 } 3047 3048 // Rewrite append(src, x, y, z) so that any side effects in 3049 // x, y, z (including runtime panics) are evaluated in 3050 // initialization statements before the append. 3051 // For normal code generation, stop there and leave the 3052 // rest to cgen_append. 3053 // 3054 // For race detector, expand append(src, a [, b]* ) to 3055 // 3056 // init { 3057 // s := src 3058 // const argc = len(args) - 1 3059 // if cap(s) - len(s) < argc { 3060 // s = growslice(s, len(s)+argc) 3061 // } 3062 // n := len(s) 3063 // s = s[:n+argc] 3064 // s[n] = a 3065 // s[n+1] = b 3066 // ... 
//   }
// s
func walkappend(n *Node, init *Nodes, dst *Node) *Node {
	if !samesafeexpr(dst, n.List.First()) {
		n.List.SetFirst(safeexpr(n.List.First(), init))
		n.List.SetFirst(walkexpr(n.List.First(), init))
	}
	walkexprlistsafe(n.List.Slice()[1:], init)

	// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
	// and n are name or literal, but those may index the slice we're
	// modifying here. Fix explicitly.
	// Using cheapexpr also makes sure that the evaluation
	// of all arguments (and especially any panics) happen
	// before we begin to modify the slice in a visible way.
	ls := n.List.Slice()[1:]
	for i, n := range ls {
		ls[i] = cheapexpr(n, init)
	}

	nsrc := n.List.First()

	argc := n.List.Len() - 1
	if argc < 1 {
		// append(src) with no extra arguments: result is src itself.
		return nsrc
	}

	// General case, with no function calls left as arguments.
	// Leave for gen, except that instrumentation requires old form.
	if !instrumenting || compiling_runtime {
		return n
	}

	var l []*Node

	ns := temp(nsrc.Type)
	l = append(l, nod(OAS, ns, nsrc)) // s = src

	na := nodintconst(int64(argc)) // const argc
	nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
	nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)

	fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
	fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())

	nx.Nbody.Set1(nod(OAS, ns,
		mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
			nod(OADD, nod(OLEN, ns, nil), na))))

	l = append(l, nx)

	nn := temp(types.Types[TINT])
	l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)

	nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
	nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
	nx.Etype = 1
	l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]

	ls = n.List.Slice()[1:]
	for i, n := range ls {
		nx = nod(OINDEX, ns, nn) // s[n] ...
		nx.SetBounded(true)
		l = append(l, nod(OAS, nx, n)) // s[n] = arg
		if i+1 < len(ls) {
			l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
		}
	}

	typecheckslice(l, Etop)
	walkstmtlist(l)
	init.Append(l...)
	return ns
}

// Lower copy(a, b) to a memmove call or a runtime call.
//
// init {
//   n := len(a)
//   if n > len(b) { n = len(b) }
//   memmove(a.ptr, b.ptr, n*sizeof(elem(a)))
// }
// n;
//
// Also works if b is a string.
//
func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
	if n.Left.Type.Elem().HasHeapPointer() {
		// Pointer-containing elements need write barriers; delegate
		// the whole copy to the runtime.
		Curfn.Func.setWBPos(n.Pos)
		fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
		return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
	}

	if runtimecall {
		if n.Right.Type.IsString() {
			fn := syslook("slicestringcopy")
			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
			return mkcall1(fn, n.Type, init, n.Left, n.Right)
		}

		fn := syslook("slicecopy")
		fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
		return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
	}

	n.Left = walkexpr(n.Left, init)
	n.Right = walkexpr(n.Right, init)
	nl := temp(n.Left.Type)
	nr := temp(n.Right.Type)
	var l []*Node
	l = append(l, nod(OAS, nl, n.Left))
	l = append(l, nod(OAS, nr, n.Right))

	nfrm := nod(OSPTR, nr, nil)
	nto := nod(OSPTR, nl, nil)

	nlen := temp(types.Types[TINT])

	// n = len(to)
	l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))

	// if n > len(frm) { n = len(frm) }
	nif := nod(OIF, nil, nil)

	nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
	nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
	l = append(l, nif)

	// Call memmove.
	fn := syslook("memmove")

	fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
	nwid := temp(types.Types[TUINTPTR])
	l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
	nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
	l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))

	typecheckslice(l, Etop)
	walkstmtlist(l)
	init.Append(l...)
	return nlen
}

// eqfor returns the function node implementing equality for type t, and
// whether the callers must also pass t's size as a final argument.
func eqfor(t *types.Type) (n *Node, needsize bool) {
	// Should only arrive here with large memory or
	// a struct/array containing a non-memory field/element.
	// Small memory is handled inline, and single non-memory
	// is handled during type check (OCMPSTR etc).
	switch a, _ := algtype1(t); a {
	case AMEM:
		n := syslook("memequal")
		n = substArgTypes(n, t, t)
		return n, true
	case ASPECIAL:
		// Per-type generated equality function: func(*T, *T) bool.
		sym := typesymprefix(".eq", t)
		n := newname(sym)
		n.SetClass(PFUNC)
		ntype := nod(OTFUNC, nil, nil)
		ntype.List.Append(anonfield(types.NewPtr(t)))
		ntype.List.Append(anonfield(types.NewPtr(t)))
		ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
		ntype = typecheck(ntype, Etype)
		n.Type = ntype.Type
		return n, false
	}
	Fatalf("eqfor %v", t)
	return nil, false
}

// The result of walkcompare MUST be assigned back to n, e.g.
//	n.Left = walkcompare(n.Left, init)
func walkcompare(n *Node, init *Nodes) *Node {
	// Given interface value l and concrete value r, rewrite
	//   l == r
	// into types-equal && data-equal.
	// This is efficient, avoids allocations, and avoids runtime calls.
	var l, r *Node
	if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
		l = n.Left
		r = n.Right
	} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
		l = n.Right
		r = n.Left
	}

	if l != nil {
		// Handle both == and !=.
		eq := n.Op
		var andor Op
		if eq == OEQ {
			andor = OANDAND
		} else {
			andor = OOROR
		}
		// Check for types equal.
		// For empty interface, this is:
		//   l.tab == type(r)
		// For non-empty interface, this is:
		//   l.tab != nil && l.tab._type == type(r)
		var eqtype *Node
		tab := nod(OITAB, l, nil)
		rtyp := typename(r.Type)
		if l.Type.IsEmptyInterface() {
			tab.Type = types.NewPtr(types.Types[TUINT8])
			tab.SetTypecheck(1)
			eqtype = nod(eq, tab, rtyp)
		} else {
			nonnil := nod(brcom(eq), nodnil(), tab)
			match := nod(eq, itabType(tab), rtyp)
			eqtype = nod(andor, nonnil, match)
		}
		// Check for data equal.
		eqdata := nod(eq, ifaceData(l, r.Type), r)
		// Put it all together.
		expr := nod(andor, eqtype, eqdata)
		n = finishcompare(n, expr, init)
		return n
	}

	// Must be comparison of array or struct.
	// Otherwise back end handles it.
	// While we're here, decide whether to
	// inline or call an eq alg.
	t := n.Left.Type
	var inline bool

	maxcmpsize := int64(4)
	unalignedLoad := false
	switch thearch.LinkArch.Family {
	case sys.AMD64, sys.ARM64, sys.S390X:
		// Keep this low enough, to generate less code than function call.
		maxcmpsize = 16
		unalignedLoad = true
	case sys.I386:
		maxcmpsize = 8
		unalignedLoad = true
	}

	switch t.Etype {
	default:
		return n
	case TARRAY:
		// We can compare several elements at once with 2/4/8 byte integer compares
		inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
	case TSTRUCT:
		inline = t.NumFields() <= 4
	}

	// Strip no-op conversions so the operands can be addressed directly.
	cmpl := n.Left
	for cmpl != nil && cmpl.Op == OCONVNOP {
		cmpl = cmpl.Left
	}
	cmpr := n.Right
	for cmpr != nil && cmpr.Op == OCONVNOP {
		cmpr = cmpr.Left
	}

	// Chose not to inline. Call equality function directly.
	if !inline {
		// Composite literals must be materialized in a temporary
		// before their address can be taken.
		if isvaluelit(cmpl) {
			var_ := temp(cmpl.Type)
			anylit(cmpl, var_, init)
			cmpl = var_
		}
		if isvaluelit(cmpr) {
			var_ := temp(cmpr.Type)
			anylit(cmpr, var_, init)
			cmpr = var_
		}
		if !islvalue(cmpl) || !islvalue(cmpr) {
			Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
		}

		// eq algs take pointers
		pl := temp(types.NewPtr(t))
		al := nod(OAS, pl, nod(OADDR, cmpl, nil))
		al.Right.Etype = 1 // addr does not escape
		al = typecheck(al, Etop)
		init.Append(al)

		pr := temp(types.NewPtr(t))
		ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
		ar.Right.Etype = 1 // addr does not escape
		ar = typecheck(ar, Etop)
		init.Append(ar)

		fn, needsize := eqfor(t)
		call := nod(OCALL, fn, nil)
		call.List.Append(pl)
		call.List.Append(pr)
		if needsize {
			call.List.Append(nodintconst(t.Width))
		}
		res := call
		if n.Op != OEQ {
			res = nod(ONOT, res, nil)
		}
		n = finishcompare(n, res, init)
		return n
	}

	// inline: build boolean expression comparing element by element
	andor := OANDAND
	if n.Op == ONE {
		andor = OOROR
	}
	var expr *Node
	compare := func(el, er *Node) {
		a := nod(n.Op, el, er)
		if expr == nil {
			expr = a
		} else {
			expr = nod(andor, expr, a)
		}
	}
	cmpl = safeexpr(cmpl, init)
	cmpr = safeexpr(cmpr, init)
	if t.IsStruct() {
		for _, f := range t.Fields().Slice() {
			sym := f.Sym
			if sym.IsBlank() {
				continue
			}
			compare(
				nodSym(OXDOT, cmpl, sym),
				nodSym(OXDOT, cmpr, sym),
			)
		}
	} else {
		step := int64(1)
		remains := t.NumElem() * t.Elem().Width
		combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
		combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
		combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
		for i := int64(0); remains > 0; {
			var convType *types.Type
			switch {
			case remains >= 8 && combine64bit:
				convType = types.Types[TINT64]
				step = 8 / t.Elem().Width
			case remains >= 4 && combine32bit:
				convType = types.Types[TUINT32]
				step = 4 / t.Elem().Width
			case remains >= 2 && combine16bit:
				convType = types.Types[TUINT16]
				step = 2 / t.Elem().Width
			default:
				step = 1
			}
			if step == 1 {
				compare(
					nod(OINDEX, cmpl, nodintconst(int64(i))),
					nod(OINDEX, cmpr, nodintconst(int64(i))),
				)
				i++
				remains -= t.Elem().Width
			} else {
				cmplw := nod(OINDEX, cmpl, nodintconst(int64(i)))
				cmplw = conv(cmplw, convType)
				cmprw := nod(OINDEX, cmpr, nodintconst(int64(i)))
				cmprw = conv(cmprw, convType)
				// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
				// ssa will generate a single large load.
				for offset := int64(1); offset < step; offset++ {
					lb := nod(OINDEX, cmpl, nodintconst(int64(i+offset)))
					lb = conv(lb, convType)
					lb = nod(OLSH, lb, nodintconst(int64(8*t.Elem().Width*offset)))
					cmplw = nod(OOR, cmplw, lb)
					rb := nod(OINDEX, cmpr, nodintconst(int64(i+offset)))
					rb = conv(rb, convType)
					rb = nod(OLSH, rb, nodintconst(int64(8*t.Elem().Width*offset)))
					cmprw = nod(OOR, cmprw, rb)
				}
				compare(cmplw, cmprw)
				i += step
				remains -= step * t.Elem().Width
			}
		}
	}
	if expr == nil {
		// Zero comparable parts (e.g. all fields blank): constant result.
		expr = nodbool(n.Op == OEQ)
	}
	n = finishcompare(n, expr, init)
	return n
}

// The result of finishcompare MUST be assigned back to n, e.g.
//	n.Left = finishcompare(n.Left, x, r, init)
func finishcompare(n, r *Node, init *Nodes) *Node {
	// Use nn here to avoid passing r to typecheck.
	nn := r
	nn = typecheck(nn, Erv)
	nn = walkexpr(nn, init)
	r = nn
	if r.Type != n.Type {
		r = nod(OCONVNOP, r, nil)
		r.Type = n.Type
		r.SetTypecheck(1)
		nn = r
	}
	return nn
}

// isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
func (n *Node) isIntOrdering() bool {
	switch n.Op {
	case OLE, OLT, OGE, OGT:
	default:
		return false
	}
	return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
}

// walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
// n must be an OANDAND or OOROR node.
// The result of walkinrange MUST be assigned back to n, e.g.
//	n.Left = walkinrange(n.Left)
func walkinrange(n *Node, init *Nodes) *Node {
	// We are looking for something equivalent to a opl b OP b opr c, where:
	// * a, b, and c have integer type
	// * b is side-effect-free
	// * opl and opr are each < or ≤
	// * OP is &&
	l := n.Left
	r := n.Right
	if !l.isIntOrdering() || !r.isIntOrdering() {
		return n
	}

	// Find b, if it exists, and rename appropriately.
	// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
	// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
	a, opl, b := l.Left, l.Op, l.Right
	x, opr, c := r.Left, r.Op, r.Right
	for i := 0; ; i++ {
		if samesafeexpr(b, x) {
			break
		}
		if i == 3 {
			// Tried all permutations and couldn't find an appropriate b == x.
			return n
		}
		if i&1 == 0 {
			a, opl, b = b, brrev(opl), a
		} else {
			x, opr, c = c, brrev(opr), x
		}
	}

	// If n.Op is ||, apply de Morgan.
	// Negate the internal ops now; we'll negate the top level op at the end.
	// Henceforth assume &&.
	negateResult := n.Op == OOROR
	if negateResult {
		opl = brcom(opl)
		opr = brcom(opr)
	}

	cmpdir := func(o Op) int {
		switch o {
		case OLE, OLT:
			return -1
		case OGE, OGT:
			return +1
		}
		Fatalf("walkinrange cmpdir %v", o)
		return 0
	}
	if cmpdir(opl) != cmpdir(opr) {
		// Not a range check; something like b < a && b < c.
		return n
	}

	switch opl {
	case OGE, OGT:
		// We have something like a > b && b ≥ c.
		// Switch and reverse ops and rename constants,
		// to make it look like a ≤ b && b < c.
		a, c = c, a
		opl, opr = brrev(opr), brrev(opl)
	}

	// We must ensure that c-a is non-negative.
	// For now, require a and c to be constants.
	// In the future, we could also support a == 0 and c == len/cap(...).
	// Unfortunately, by this point, most len/cap expressions have been
	// stored into temporary variables.
	if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
		return n
	}

	if opl == OLT {
		// We have a < b && ...
		// We need a ≤ b && ... to safely use unsigned comparison tricks.
		// If a is not the maximum constant for b's type,
		// we can increment a and switch to ≤.
		if a.Int64() >= maxintval[b.Type.Etype].Int64() {
			return n
		}
		a = nodintconst(a.Int64() + 1)
		opl = OLE
	}

	bound := c.Int64() - a.Int64()
	if bound < 0 {
		// Bad news. Something like 5 <= x && x < 3.
		// Rare in practice, and we still need to generate side-effects,
		// so just leave it alone.
		return n
	}

	// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
	// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
	// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
	// which is equivalent to uint(b-a) < uint(c-a).
	ut := b.Type.ToUnsigned()
	lhs := conv(nod(OSUB, b, a), ut)
	rhs := nodintconst(bound)
	if negateResult {
		// Negate top level.
		opr = brcom(opr)
	}
	cmp := nod(opr, lhs, rhs)
	cmp.Pos = n.Pos
	cmp = addinit(cmp, l.Ninit.Slice())
	cmp = addinit(cmp, r.Ninit.Slice())
	// Typecheck the AST rooted at cmp...
	cmp = typecheck(cmp, Erv)
	// ...but then reset cmp's type to match n's type.
	cmp.Type = n.Type
	cmp = walkexpr(cmp, init)
	return cmp
}

// bounded reports whether the integer expression n is provably in the
// range [0, max).
func bounded(n *Node, max int64) bool {
	if n.Type == nil || !n.Type.IsInteger() {
		return false
	}

	sign := n.Type.IsSigned()
	bits := int32(8 * n.Type.Width)

	if smallintconst(n) {
		v := n.Int64()
		return 0 <= v && v < max
	}

	switch n.Op {
	case OAND:
		// x & mask is bounded by the constant mask.
		v := int64(-1)
		if smallintconst(n.Left) {
			v = n.Left.Int64()
		} else if smallintconst(n.Right) {
			v = n.Right.Int64()
		}

		if 0 <= v && v < max {
			return true
		}

	case OMOD:
		// unsigned x % v is in [0, v-1].
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			if 0 <= v && v <= max {
				return true
			}
		}

	case ODIV:
		// unsigned x / v narrows the effective bit width;
		// the final width check below uses the reduced bits.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			for bits > 0 && v >= 2 {
				bits--
				v >>= 1
			}
		}

	case ORSH:
		// unsigned x >> v: shifting by at least the width yields 0;
		// otherwise the effective bit width shrinks by v.
		if !sign && smallintconst(n.Right) {
			v := n.Right.Int64()
			if v > int64(bits) {
				return true
			}
			bits -= int32(v)
		}
	}

	if !sign && bits <= 62 && 1<<uint(bits) <= max {
		return true
	}

	return false
}

// usemethod checks interface method calls for uses of reflect.Type.Method.
// usemethod marks the current function as using reflect.Type.Method (or
// MethodByName) when the interface call n matches either signature, so the
// linker keeps the necessary method metadata alive.
func usemethod(n *Node) {
	t := n.Left.Type

	// Looking for either of:
	//	Method(int) reflect.Method
	//	MethodByName(string) (reflect.Method, bool)
	//
	// TODO(crawshaw): improve precision of match by working out
	// how to check the method name.
	if n := t.NumParams(); n != 1 {
		return
	}
	if n := t.NumResults(); n != 1 && n != 2 {
		return
	}
	p0 := t.Params().Field(0)
	res0 := t.Results().Field(0)
	var res1 *types.Field
	if t.NumResults() == 2 {
		res1 = t.Results().Field(1)
	}

	if res1 == nil {
		// Method(int) reflect.Method shape.
		if p0.Type.Etype != TINT {
			return
		}
	} else {
		// MethodByName(string) (reflect.Method, bool) shape.
		if !p0.Type.IsString() {
			return
		}
		if !res1.Type.IsBoolean() {
			return
		}
	}

	// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
	// (including global variables such as numImports - was issue #19028).
	if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
		Curfn.Func.SetReflectMethod(true)
	}
}

// usefield records, for the field-tracking experiment, that the current
// function accesses the struct field selected by n (an ODOT/ODOTPTR).
// Fields must be tagged go:"track", exported, and declared in a named
// struct type; violations are reported as errors.
func usefield(n *Node) {
	if objabi.Fieldtrack_enabled == 0 {
		// Field tracking is off; nothing to record.
		return
	}

	switch n.Op {
	default:
		Fatalf("usefield %v", n.Op)

	case ODOT, ODOTPTR:
		break
	}
	if n.Sym == nil {
		// No field name. This DOTPTR was built by the compiler for access
		// to runtime data structures. Ignore.
		return
	}

	t := n.Left.Type
	if t.IsPtr() {
		t = t.Elem()
	}
	field := dotField[typeSymKey{t.Orig, n.Sym}]
	if field == nil {
		Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
	}
	if !strings.Contains(field.Note, "go:\"track\"") {
		// Only fields explicitly tagged for tracking are recorded.
		return
	}

	outer := n.Left.Type
	if outer.IsPtr() {
		outer = outer.Elem()
	}
	if outer.Sym == nil {
		yyerror("tracked field must be in named struct type")
	}
	if !exportname(field.Sym.Name) {
		yyerror("tracked field must be exported (upper case)")
	}

	sym := tracksym(outer, field)
	if Curfn.Func.FieldTrack == nil {
		Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
	}
	Curfn.Func.FieldTrack[sym] = struct{}{}
}

// candiscardlist reports whether every node in l can be discarded
// without observable effect.
func candiscardlist(l Nodes) bool {
	for _, n := range l.Slice() {
		if !candiscard(n) {
			return false
		}
	}
	return true
}

// candiscard reports whether the evaluation of n can be discarded
// entirely: n and all of its subexpressions are side-effect free and
// cannot fail at run time.
func candiscard(n *Node) bool {
	if n == nil {
		return true
	}

	switch n.Op {
	default:
		return false

	// Discardable as long as the subpieces are.
	case ONAME,
		ONONAME,
		OTYPE,
		OPACK,
		OLITERAL,
		OADD,
		OSUB,
		OOR,
		OXOR,
		OADDSTR,
		OADDR,
		OANDAND,
		OARRAYBYTESTR,
		OARRAYRUNESTR,
		OSTRARRAYBYTE,
		OSTRARRAYRUNE,
		OCAP,
		OCMPIFACE,
		OCMPSTR,
		OCOMPLIT,
		OMAPLIT,
		OSTRUCTLIT,
		OARRAYLIT,
		OSLICELIT,
		OPTRLIT,
		OCONV,
		OCONVIFACE,
		OCONVNOP,
		ODOT,
		OEQ,
		ONE,
		OLT,
		OLE,
		OGT,
		OGE,
		OKEY,
		OSTRUCTKEY,
		OLEN,
		OMUL,
		OLSH,
		ORSH,
		OAND,
		OANDNOT,
		ONEW,
		ONOT,
		OCOM,
		OPLUS,
		OMINUS,
		OOROR,
		OPAREN,
		ORUNESTR,
		OREAL,
		OIMAG,
		OCOMPLEX:
		break

	// Discardable as long as we know it's not division by zero.
	case ODIV, OMOD:
		if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
			break
		}
		if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
			break
		}
		return false

	// Discardable as long as we know it won't fail because of a bad size.
	case OMAKECHAN, OMAKEMAP:
		if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
			break
		}
		return false

	// Difficult to tell what sizes are okay.
	case OMAKESLICE:
		return false
	}

	// The node itself is fine; recursively check all of its children.
	if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
		return false
	}

	return true
}

// rewrite
//	print(x, y, z)
// into
//	func(a1, a2, a3) {
//		print(a1, a2, a3)
//	}(x, y, z)
// and same for println.

// walkprintfunc_prgen numbers the generated print wrapper functions
// so each gets a unique symbol.
var walkprintfunc_prgen int

// The result of walkprintfunc MUST be assigned back to n, e.g.
3868 // n.Left = walkprintfunc(n.Left, init) 3869 func walkprintfunc(n *Node, init *Nodes) *Node { 3870 if n.Ninit.Len() != 0 { 3871 walkstmtlist(n.Ninit.Slice()) 3872 init.AppendNodes(&n.Ninit) 3873 } 3874 3875 t := nod(OTFUNC, nil, nil) 3876 var printargs []*Node 3877 for i, n1 := range n.List.Slice() { 3878 buf := fmt.Sprintf("a%d", i) 3879 a := namedfield(buf, n1.Type) 3880 t.List.Append(a) 3881 printargs = append(printargs, a.Left) 3882 } 3883 3884 oldfn := Curfn 3885 Curfn = nil 3886 3887 walkprintfunc_prgen++ 3888 sym := lookupN("print·%d", walkprintfunc_prgen) 3889 fn := dclfunc(sym, t) 3890 3891 a := nod(n.Op, nil, nil) 3892 a.List.Set(printargs) 3893 a = typecheck(a, Etop) 3894 a = walkstmt(a) 3895 3896 fn.Nbody.Set1(a) 3897 3898 funcbody() 3899 3900 fn = typecheck(fn, Etop) 3901 typecheckslice(fn.Nbody.Slice(), Etop) 3902 xtop = append(xtop, fn) 3903 Curfn = oldfn 3904 3905 a = nod(OCALL, nil, nil) 3906 a.Left = fn.Func.Nname 3907 a.List.Set(n.List.Slice()) 3908 a = typecheck(a, Etop) 3909 a = walkexpr(a, init) 3910 return a 3911 } 3912 3913 // substArgTypes substitutes the given list of types for 3914 // successive occurrences of the "any" placeholder in the 3915 // type syntax expression n.Type. 3916 // The result of substArgTypes MUST be assigned back to old, e.g. 3917 // n.Left = substArgTypes(n.Left, t1, t2) 3918 func substArgTypes(old *Node, types_ ...*types.Type) *Node { 3919 n := *old // make shallow copy 3920 3921 for _, t := range types_ { 3922 dowidth(t) 3923 } 3924 n.Type = types.SubstAny(n.Type, &types_) 3925 if len(types_) > 0 { 3926 Fatalf("substArgTypes: too many argument types") 3927 } 3928 return &n 3929 }