// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Portable half of code generator; mainly statements and control flow.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
)

// TODO: labellist should become part of a "compilation state" for functions.
var labellist []*Label

// Sysfunc returns an ONAME node referring to the named runtime function,
// marked with class PFUNC.
func Sysfunc(name string) *Node {
	n := newname(Pkglookup(name, Runtimepkg))
	n.Class = PFUNC
	return n
}

// addrescapes tags node n as having had its address taken
// by "increasing" the "value" of n.Esc to EscHeap.
// Storage is allocated as necessary to allow the address
// to be taken.
func addrescapes(n *Node) {
	switch n.Op {
	// probably a type error already.
	// dump("addrescapes", n);
	default:
		break

	case ONAME:
		if n == nodfp {
			break
		}

		// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
		// on PPARAM it means something different.
		if n.Class == PAUTO && n.Esc == EscNever {
			break
		}

		// If a closure reference escapes, mark the outer variable as escaping.
		if n.isClosureVar() {
			addrescapes(n.Name.Defn)
			break
		}

		if n.Class != PPARAM && n.Class != PPARAMOUT && n.Class != PAUTO {
			break
		}

		// This is a plain parameter or local variable that needs to move to the heap,
		// but possibly for the function outside the one we're compiling.
		// That is, if we have:
		//
		//	func f(x int) {
		//		func() {
		//			global = &x
		//		}
		//	}
		//
		// then we're analyzing the inner closure but we need to move x to the
		// heap in f, not in the inner closure. Flip over to f before calling moveToHeap.
		oldfn := Curfn
		Curfn = n.Name.Curfn
		if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
			Curfn = Curfn.Func.Closure
		}
		// Use the line number of the function we moved to, so the
		// "moved to heap" diagnostic points at the right place.
		ln := lineno
		lineno = Curfn.Lineno
		moveToHeap(n)
		Curfn = oldfn
		lineno = ln

	case OIND, ODOTPTR:
		break

	// ODOTPTR has already been introduced,
	// so these are the non-pointer ODOT and OINDEX.
	// In &x[0], if x is a slice, then x does not
	// escape--the pointer inside x does, but that
	// is always a heap pointer anyway.
	case ODOT, OINDEX, OPAREN, OCONVNOP:
		if !n.Left.Type.IsSlice() {
			addrescapes(n.Left)
		}
	}
}

// isParamStackCopy reports whether this is the on-stack copy of a
// function parameter that moved to the heap.
func (n *Node) isParamStackCopy() bool {
	return n.Op == ONAME && (n.Class == PPARAM || n.Class == PPARAMOUT) && n.Name.Heapaddr != nil
}

// isParamHeapCopy reports whether this is the on-heap copy of
// a function parameter that moved to the heap.
func (n *Node) isParamHeapCopy() bool {
	return n.Op == ONAME && n.Class == PAUTOHEAP && n.Name.Param.Stackcopy != nil
}

// moveToHeap records the parameter or local variable n as moved to the heap.
func moveToHeap(n *Node) {
	if Debug['r'] != 0 {
		Dump("MOVE", n)
	}
	if compiling_runtime {
		Yyerror("%v escapes to heap, not allowed in runtime.", n)
	}
	if n.Class == PAUTOHEAP {
		Dump("n", n)
		Fatalf("double move to heap")
	}

	// Allocate a local stack variable to hold the pointer to the heap copy.
	// temp will add it to the function declaration list automatically.
	heapaddr := temp(Ptrto(n.Type))
	heapaddr.Sym = Lookup("&" + n.Sym.Name)
	heapaddr.Orig.Sym = heapaddr.Sym

	// Parameters have a local stack copy used at function start/end
	// in addition to the copy in the heap that may live longer than
	// the function.
	if n.Class == PPARAM || n.Class == PPARAMOUT {
		if n.Xoffset == BADWIDTH {
			Fatalf("addrescapes before param assignment")
		}

		// We rewrite n below to be a heap variable (indirection of heapaddr).
		// Preserve a copy so we can still write code referring to the original,
		// and substitute that copy into the function declaration list
		// so that analyses of the local (on-stack) variables use it.
		stackcopy := Nod(ONAME, nil, nil)
		stackcopy.Sym = n.Sym
		stackcopy.Type = n.Type
		stackcopy.Xoffset = n.Xoffset
		stackcopy.Class = n.Class
		stackcopy.Name.Heapaddr = heapaddr
		if n.Class == PPARAM {
			stackcopy.SetNotLiveAtEnd(true)
		}
		if n.Class == PPARAMOUT {
			// Make sure the pointer to the heap copy is kept live throughout the function.
			// The function could panic at any point, and then a defer could recover.
			// Thus, we need the pointer to the heap copy always available so the
			// post-deferreturn code can copy the return value back to the stack.
			// See issue 16095.
			heapaddr.setIsOutputParamHeapAddr(true)
		}
		n.Name.Param.Stackcopy = stackcopy

		// Substitute the stackcopy into the function variable list so that
		// liveness and other analyses use the underlying stack slot
		// and not the now-pseudo-variable n.
		found := false
		for i, d := range Curfn.Func.Dcl {
			if d == n {
				Curfn.Func.Dcl[i] = stackcopy
				found = true
				break
			}
			// Parameters are before locals, so can stop early.
			// This limits the search even in functions with many local variables.
			if d.Class == PAUTO {
				break
			}
		}
		if !found {
			Fatalf("cannot find %v in local variable list", n)
		}
		Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
	}

	// Modify n in place so that uses of n now mean indirection of the heapaddr.
	n.Class = PAUTOHEAP
	n.Ullman = 2
	n.Xoffset = 0
	n.Name.Heapaddr = heapaddr
	n.Esc = EscHeap
	if Debug['m'] != 0 {
		fmt.Printf("%v: moved to heap: %v\n", n.Line(), n)
	}
}

// clearlabels detaches every recorded label from its symbol and
// empties labellist, resetting label state between functions.
func clearlabels() {
	for _, l := range labellist {
		l.Sym.Label = nil
	}
	labellist = labellist[:0]
}

// newlab returns the Label for the OLABEL or goto statement n,
// creating it and adding it to labellist on first use.
// For OLABEL it records (or diagnoses a duplicate) definition;
// otherwise it records n as a use of the label.
func newlab(n *Node) *Label {
	s := n.Left.Sym
	lab := s.Label
	if lab == nil {
		lab = new(Label)
		lab.Sym = s
		s.Label = lab
		if n.Used {
			lab.Used = true
		}
		labellist = append(labellist, lab)
	}

	if n.Op == OLABEL {
		if lab.Def != nil {
			Yyerror("label %v already defined at %v", s, lab.Def.Line())
		} else {
			lab.Def = n
		}
	} else {
		lab.Use = append(lab.Use, n)
	}

	return lab
}

// checkgoto diagnoses a goto (from) to a label (to) that would jump
// into a block or over a variable declaration, by walking the two
// dclstack chains (linked via Sym.Link) back to their common suffix.
// There is a copy of checkgoto in the new SSA backend.
// Please keep them in sync.
func checkgoto(from *Node, to *Node) {
	if from.Sym == to.Sym {
		return
	}

	// Measure the depth of each scope chain.
	nf := 0
	for fs := from.Sym; fs != nil; fs = fs.Link {
		nf++
	}
	nt := 0
	for fs := to.Sym; fs != nil; fs = fs.Link {
		nt++
	}
	fs := from.Sym
	for ; nf > nt; nf-- {
		fs = fs.Link
	}
	if fs != to.Sym {
		lno := lineno
		setlineno(from)

		// decide what to complain about.
		// prefer to complain about 'into block' over declarations,
		// so scan backward to find most recent block or else dcl.
		var block *Sym

		var dcl *Sym
		ts := to.Sym
		for ; nt > nf; nt-- {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
		}

		for ts != fs {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
			fs = fs.Link
		}

		if block != nil {
			Yyerror("goto %v jumps into block starting at %v", from.Left.Sym, linestr(block.Lastlineno))
		} else {
			Yyerror("goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, linestr(dcl.Lastlineno))
		}
		lineno = lno
	}
}

// stmtlabel returns the label whose definition is attached to
// statement n (for example, the label on a for/switch/select that
// break/continue can target), or nil if there is none.
func stmtlabel(n *Node) *Label {
	if n.Sym != nil {
		lab := n.Sym.Label
		if lab != nil {
			if lab.Def != nil {
				if lab.Def.Name.Defn == n {
					return lab
				}
			}
		}
	}
	return nil
}

// Genlist compiles each statement in l, in order.
func Genlist(l Nodes) {
	for _, n := range l.Slice() {
		gen(n)
	}
}

// generate code to start new proc running call n.
// proc is 1 for go statements and 2 for defer statements
// (see the OPROC and ODEFER cases in gen).
func cgen_proc(n *Node, proc int) {
	switch n.Left.Op {
	default:
		Fatalf("cgen_proc: unknown call %v", n.Left.Op)

	case OCALLMETH:
		cgen_callmeth(n.Left, proc)

	case OCALLINTER:
		cgen_callinter(n.Left, nil, proc)

	case OCALLFUNC:
		cgen_call(n.Left, proc)
	}
}

// generate declaration.
// have to allocate heap copy
// for escaped variables.
// cgen_dcl generates code for the declaration of n.
// By this point escaped variables must already have been moved to the
// heap, so a PAUTOHEAP class here is an internal error.
func cgen_dcl(n *Node) {
	if Debug['g'] != 0 {
		Dump("\ncgen-dcl", n)
	}
	if n.Op != ONAME {
		Dump("cgen_dcl", n)
		Fatalf("cgen_dcl")
	}

	if n.Class == PAUTOHEAP {
		Fatalf("cgen_dcl %v", n)
	}
}

// generate discard of value:
// evaluate nr for side effects only, discarding the result.
func cgen_discard(nr *Node) {
	if nr == nil {
		return
	}

	switch nr.Op {
	case ONAME:
		if nr.Class != PAUTOHEAP && nr.Class != PEXTERN && nr.Class != PFUNC {
			gused(nr)
		}

	// binary
	case OADD,
		OAND,
		ODIV,
		OEQ,
		OGE,
		OGT,
		OLE,
		OLSH,
		OLT,
		OMOD,
		OMUL,
		ONE,
		OOR,
		ORSH,
		OSUB,
		OXOR:
		cgen_discard(nr.Left)

		cgen_discard(nr.Right)

	// unary
	case OCAP,
		OCOM,
		OLEN,
		OMINUS,
		ONOT,
		OPLUS:
		cgen_discard(nr.Left)

	case OIND:
		// The only side effect of *p is the nil check.
		Cgen_checknil(nr.Left)

	// special enough to just evaluate
	default:
		var tmp Node
		Tempname(&tmp, nr.Type)

		Cgen_as(&tmp, nr)
		gused(&tmp)
	}
}

// clearslim generates code to zero a slim node.
// Clearslim generates code to zero the "slim" (scalar, non-fat) node n
// by assigning it a zero literal of the appropriate kind.
func Clearslim(n *Node) {
	var z Node
	z.Op = OLITERAL
	z.Type = n.Type
	z.Addable = true

	switch Simtype[n.Type.Etype] {
	case TCOMPLEX64, TCOMPLEX128:
		z.SetVal(Val{new(Mpcplx)})
		z.Val().U.(*Mpcplx).Real.SetFloat64(0.0)
		z.Val().U.(*Mpcplx).Imag.SetFloat64(0.0)

	case TFLOAT32, TFLOAT64:
		var zero Mpflt
		zero.SetFloat64(0.0)
		z.SetVal(Val{&zero})

	case TPTR32, TPTR64, TCHAN, TMAP:
		z.SetVal(Val{new(NilVal)})

	case TBOOL:
		z.SetVal(Val{false})

	case TINT8,
		TINT16,
		TINT32,
		TINT64,
		TUINT8,
		TUINT16,
		TUINT32,
		TUINT64:
		z.SetVal(Val{new(Mpint)})
		z.Val().U.(*Mpint).SetInt64(0)

	default:
		Fatalf("clearslim called on type %v", n.Type)
	}

	ullmancalc(&z)
	Cgen(&z, n)
}

// generate:
//	res = iface{typ, data}
// n->left is typ
// n->right is data
func Cgen_eface(n *Node, res *Node) {
	// the right node of an eface may contain function calls that uses res as an argument,
	// so it's important that it is done first
	tmp := temp(Types[Tptr])
	Cgen(n.Right, tmp)

	Gvardef(res)

	// Write the data word (second word of the eface).
	dst := *res
	dst.Type = Types[Tptr]
	dst.Xoffset += int64(Widthptr)
	Cgen(tmp, &dst)

	// Write the type word (first word of the eface).
	dst.Xoffset -= int64(Widthptr)
	Cgen(n.Left, &dst)
}

// generate one of:
//	res, resok = x.(T)
//	res = x.(T) (when resok == nil)
// n.Left is x
// n.Type is T
func cgen_dottype(n *Node, res, resok *Node, wb bool) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	}
	//	iface := n.Left
	//	r1 := iword(iface)
	//	if n.Left is non-empty interface {
	//		r1 = *r1
	//	}
	//	if r1 == T {
	//		res = idata(iface)
	//		resok = true
	//	} else {
	//		assert[EI]2T(x, T, nil) // (when resok == nil; does not return)
	//		resok = false // (when resok != nil)
	//	}
	//
	var iface Node
	Igen(n.Left, &iface, res)
	var r1, r2 Node
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, nil)
	iface.Type = byteptr
	Cgen(&iface, &r1)
	if !n.Left.Type.IsEmptyInterface() {
		// Holding itab, want concrete type in second word.
		// Skip the load if the itab is nil (interface is nil).
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2 = r1
		r2.Op = OINDREG
		r2.Xoffset = int64(Widthptr)
		Cgen(&r2, &r1)
		Patch(p, Pc)
	}
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	Regfree(&r2) // not needed for success path; reclaimed on one failure path
	iface.Xoffset += int64(Widthptr)
	Cgen(&iface, &r1)
	Regfree(&iface)

	if resok == nil {
		// On failure, call panicdottype; the assertion does not return.
		r1.Type = res.Type
		cgen_wb(&r1, res, wb)
		q := Gbranch(obj.AJMP, nil, 0)
		Patch(p, Pc)
		Regrealloc(&r2) // reclaim from above, for this failure path
		fn := syslook("panicdottype")
		dowidth(fn.Type)
		call := Nod(OCALLFUNC, fn, nil)
		r1.Type = byteptr
		r2.Type = byteptr
		call.List.Set([]*Node{&r1, &r2, typename(n.Left.Type)})
		call.List.Set(ascompatte(OCALLFUNC, call, false, fn.Type.Params(), call.List.Slice(), 0, nil))
		gen(call)
		Regfree(&r1)
		Regfree(&r2)
		Thearch.Gins(obj.AUNDEF, nil, nil)
		Patch(q, Pc)
	} else {
		// This half is handling the res, resok = x.(T) case,
		// which is called from gen, not cgen, and is consequently fussier
		// about blank assignments. We have to avoid calling cgen for those.
		r1.Type = res.Type
		if !isblank(res) {
			cgen_wb(&r1, res, wb)
		}
		Regfree(&r1)
		if !isblank(resok) {
			Cgen(Nodbool(true), resok)
		}
		q := Gbranch(obj.AJMP, nil, 0)
		Patch(p, Pc)
		if !isblank(res) {
			n := nodnil()
			n.Type = res.Type
			Cgen(n, res)
		}
		if !isblank(resok) {
			Cgen(Nodbool(false), resok)
		}
		Patch(q, Pc)
	}
}

// generate:
//	res, resok = x.(T)
// n.Left is x
// n.Type is T
func Cgen_As2dottype(n, res, resok *Node) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	}
	//	iface := n.Left
	//	r1 := iword(iface)
	//	if n.Left is non-empty interface {
	//		r1 = *r1
	//	}
	//	if r1 == T {
	//		res = idata(iface)
	//		resok = true
	//	} else {
	//		res = nil
	//		resok = false
	//	}
	//
	var iface Node
	Igen(n.Left, &iface, nil)
	var r1, r2 Node
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, res)
	iface.Type = byteptr
	Cgen(&iface, &r1)
	if !n.Left.Type.IsEmptyInterface() {
		// Holding itab, want concrete type in second word.
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2 = r1
		r2.Op = OINDREG
		r2.Xoffset = int64(Widthptr)
		Cgen(&r2, &r1)
		Patch(p, Pc)
	}
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	iface.Type = n.Type
	iface.Xoffset += int64(Widthptr)
	Cgen(&iface, &r1)
	if iface.Op != 0 {
		Regfree(&iface)
	}
	Cgen(&r1, res)
	q := Gbranch(obj.AJMP, nil, 0)
	Patch(p, Pc)

	// Failure path: call panicdottype (does not return).
	fn := syslook("panicdottype")
	dowidth(fn.Type)
	call := Nod(OCALLFUNC, fn, nil)
	call.List.Set([]*Node{&r1, &r2, typename(n.Left.Type)})
	call.List.Set(ascompatte(OCALLFUNC, call, false, fn.Type.Params(), call.List.Slice(), 0, nil))
	gen(call)
	Regfree(&r1)
	Regfree(&r2)
	Thearch.Gins(obj.AUNDEF, nil, nil)
	Patch(q, Pc)
}

// gather series of offsets
// >=0 is direct addressed field
// <0 is pointer to next field (+1)
// Dotoffset fills oary (capacity 10) from the chain of ODOT/ODOTPTR
// nodes rooted at n, stores the base node in *nn, and returns the
// number of entries written (*nn is set to nil on overflow).
func Dotoffset(n *Node, oary []int64, nn **Node) int {
	var i int

	switch n.Op {
	case ODOT:
		if n.Xoffset == BADWIDTH {
			Dump("bad width in dotoffset", n)
			Fatalf("bad width in dotoffset")
		}

		i = Dotoffset(n.Left, oary, nn)
		if i > 0 {
			// Fold a direct field access into the previous entry.
			if oary[i-1] >= 0 {
				oary[i-1] += n.Xoffset
			} else {
				oary[i-1] -= n.Xoffset
			}
			break
		}

		if i < 10 {
			oary[i] = n.Xoffset
			i++
		}

	case ODOTPTR:
		if n.Xoffset == BADWIDTH {
			Dump("bad width in dotoffset", n)
			Fatalf("bad width in dotoffset")
		}

		i = Dotoffset(n.Left, oary, nn)
		if i < 10 {
			oary[i] = -(n.Xoffset + 1)
			i++
		}

	default:
		*nn = n
		return 0
	}

	if i >= 10 {
		*nn = nil
	}
	return i
}

// Tempname makes a new anonymous ("off the books") PAUTO temporary of
// type t in the current function and stores it in *nn.
func Tempname(nn *Node, t *Type) {
	if Curfn == nil {
		Fatalf("no curfn for tempname")
	}
	if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
		Dump("Tempname", Curfn)
		Fatalf("adding tempname to wrong closure function")
	}

	if t == nil {
		Yyerror("tempname called with nil type")
		t = Types[TINT32]
	}

	// Give each tmp a different name so that there is
	// a chance to register-allocate them.
	s := LookupN("autotmp_", statuniqgen)
	statuniqgen++
	n := Nod(ONAME, nil, nil)
	n.Sym = s
	s.Def = n
	n.Type = t
	n.Class = PAUTO
	n.Addable = true
	n.Ullman = 1
	n.Esc = EscNever
	n.Name.Curfn = Curfn
	Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)

	dowidth(t)
	n.Xoffset = 0
	*nn = *n
}

// temp returns a new, already-marked-used temporary of type t.
func temp(t *Type) *Node {
	var n Node
	Tempname(&n, t)
	n.Sym.Def.Used = true
	return n.Orig
}

// gen compiles statement n (and its Ninit list).
func gen(n *Node) {
	//dump("gen", n);

	lno := setlineno(n)

	wasregalloc := Anyregalloc()

	if n == nil {
		goto ret
	}

	if n.Ninit.Len() > 0 {
		Genlist(n.Ninit)
	}

	setlineno(n)

	switch n.Op {
	default:
		Fatalf("gen: unknown op %v", Nconv(n, FmtShort|FmtSign))

	case OCASE,
		OFALL,
		OXCASE,
		OXFALL,
		ODCLCONST,
		ODCLFUNC,
		ODCLTYPE:
		break

	case OEMPTY:
		break

	case OBLOCK:
		Genlist(n.List)

	case OLABEL:
		if isblanksym(n.Left.Sym) {
			break
		}

		lab := newlab(n)

		// if there are pending gotos, resolve them all to the current pc.
		var p2 *obj.Prog
		for p1 := lab.Gotopc; p1 != nil; p1 = p2 {
			p2 = unpatch(p1)
			Patch(p1, Pc)
		}

		lab.Gotopc = nil
		if lab.Labelpc == nil {
			lab.Labelpc = Pc
		}

		if n.Name.Defn != nil {
			switch n.Name.Defn.Op {
			// so stmtlabel can find the label
			case OFOR, OSWITCH, OSELECT:
				n.Name.Defn.Sym = lab.Sym
			}
		}

	// if label is defined, emit jump to it.
	// otherwise save list of pending gotos in lab->gotopc.
	// the list is linked through the normal jump target field
	// to avoid a second list. (the jumps are actually still
	// valid code, since they're just going to another goto
	// to the same label. we'll unwind it when we learn the pc
	// of the label in the OLABEL case above.)
	case OGOTO:
		lab := newlab(n)

		if lab.Labelpc != nil {
			gjmp(lab.Labelpc)
		} else {
			lab.Gotopc = gjmp(lab.Gotopc)
		}

	case OBREAK:
		if n.Left != nil {
			// Labeled break.
			lab := n.Left.Sym.Label
			if lab == nil {
				Yyerror("break label not defined: %v", n.Left.Sym)
				break
			}

			lab.Used = true
			if lab.Breakpc == nil {
				Yyerror("invalid break label %v", n.Left.Sym)
				break
			}

			gjmp(lab.Breakpc)
			break
		}

		if breakpc == nil {
			Yyerror("break is not in a loop")
			break
		}

		gjmp(breakpc)

	case OCONTINUE:
		if n.Left != nil {
			// Labeled continue.
			lab := n.Left.Sym.Label
			if lab == nil {
				Yyerror("continue label not defined: %v", n.Left.Sym)
				break
			}

			lab.Used = true
			if lab.Continpc == nil {
				Yyerror("invalid continue label %v", n.Left.Sym)
				break
			}

			gjmp(lab.Continpc)
			break
		}

		if continpc == nil {
			Yyerror("continue is not in a loop")
			break
		}

		gjmp(continpc)

	case OFOR:
		sbreak := breakpc
		p1 := gjmp(nil)      // goto test
		breakpc = gjmp(nil)  // break:	goto done
		scontin := continpc
		continpc = Pc

		// define break and continue labels
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
			lab.Continpc = continpc
		}

		gen(n.Right)                     // contin:	incr
		Patch(p1, Pc)                    // test:
		Bgen(n.Left, false, -1, breakpc) // if(!test) goto break
		Genlist(n.Nbody)                 // body
		gjmp(continpc)
		Patch(breakpc, Pc) // done:
		continpc = scontin
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
			lab.Continpc = nil
		}

	case OIF:
		p1 := gjmp(nil)                          // goto test
		p2 := gjmp(nil)                          // p2:	goto else
		Patch(p1, Pc)                            // test:
		Bgen(n.Left, false, int(-n.Likely), p2)  // if(!test) goto p2
		Genlist(n.Nbody)                         // then
		p3 := gjmp(nil)                          // goto done
		Patch(p2, Pc)                            // else:
		Genlist(n.Rlist)                         // else
		Patch(p3, Pc)                            // done:

	case OSWITCH:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break:	goto done

		// define break label
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
		}

		Patch(p1, Pc)      // test:
		Genlist(n.Nbody)   // switch(test) body
		Patch(breakpc, Pc) // done:
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
		}

	case OSELECT:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break:	goto done

		// define break label
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
		}

		Patch(p1, Pc)      // test:
		Genlist(n.Nbody)   // select() body
		Patch(breakpc, Pc) // done:
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
		}

	case ODCL:
		cgen_dcl(n.Left)

	case OAS:
		if gen_as_init(n, false) {
			break
		}
		Cgen_as(n.Left, n.Right)

	case OASWB:
		Cgen_as_wb(n.Left, n.Right, true)

	case OAS2DOTTYPE:
		cgen_dottype(n.Rlist.First(), n.List.First(), n.List.Second(), needwritebarrier(n.List.First(), n.Rlist.First()))

	case OCALLMETH:
		cgen_callmeth(n, 0)

	case OCALLINTER:
		cgen_callinter(n, nil, 0)

	case OCALLFUNC:
		cgen_call(n, 0)

	case OPROC:
		cgen_proc(n, 1)

	case ODEFER:
		cgen_proc(n, 2)

	case ORETURN, ORETJMP:
		cgen_ret(n)

	// Function calls turned into compiler intrinsics.
	// At top level, can just ignore the call and make sure to preserve side effects in the argument, if any.
	case OGETG:
		// nothing
	case OSQRT:
		cgen_discard(n.Left)

	case OCHECKNIL:
		Cgen_checknil(n.Left)

	case OVARKILL:
		Gvarkill(n.Left)

	case OVARLIVE:
		Gvarlive(n.Left)
	}

ret:
	if Anyregalloc() != wasregalloc {
		Dump("node", n)
		Fatalf("registers left allocated")
	}

	lineno = lno
}

// Cgen_as generates an assignment nl = nr without a write barrier.
func Cgen_as(nl, nr *Node) {
	Cgen_as_wb(nl, nr, false)
}

// Cgen_as_wb generates the assignment nl = nr, emitting write
// barriers when wb is true. Blank or nil destinations discard nr;
// a nil or zero-valued nr clears nl.
func Cgen_as_wb(nl, nr *Node, wb bool) {
	if Debug['g'] != 0 {
		op := "cgen_as"
		if wb {
			op = "cgen_as_wb"
		}
		Dump(op, nl)
		Dump(op+" = ", nr)
	}

	for nr != nil && nr.Op == OCONVNOP {
		nr = nr.Left
	}

	if nl == nil || isblank(nl) {
		cgen_discard(nr)
		return
	}

	if nr == nil || iszero(nr) {
		tl := nl.Type
		if tl == nil {
			return
		}
		if Isfat(tl) {
			if nl.Op == ONAME {
				Gvardef(nl)
			}
			Thearch.Clearfat(nl)
			return
		}

		Clearslim(nl)
		return
	}

	tl := nl.Type
	if tl == nil {
		return
	}

	cgen_wb(nr, nl, wb)
}

// cgen_callmeth compiles the method call n by rewriting it as an
// ordinary function call with the receiver as first argument.
func cgen_callmeth(n *Node, proc int) {
	// generate a rewrite in n2 for the method call
	// (p.f)(...) goes to (f)(p,...)

	l := n.Left

	if l.Op != ODOTMETH {
		Fatalf("cgen_callmeth: not dotmethod: %v", l)
	}

	n2 := *n
	n2.Op = OCALLFUNC
	n2.Left = newname(l.Sym)
	n2.Left.Type = l.Type

	if n2.Left.Op == ONAME {
		n2.Left.Class = PFUNC
	}
	cgen_call(&n2, proc)
}

// CgenTemp creates a temporary node, assigns n to it, and returns it.
func CgenTemp(n *Node) *Node {
	var tmp Node
	Tempname(&tmp, n.Type)
	Cgen(n, &tmp)
	return &tmp
}

// checklabels reports undefined, unused, and unresolved labels, and
// runs checkgoto for every recorded use of each defined label.
func checklabels() {
	for _, lab := range labellist {
		if lab.Def == nil {
			for _, n := range lab.Use {
				yyerrorl(n.Lineno, "label %v not defined", lab.Sym)
			}
			continue
		}

		if lab.Use == nil && !lab.Used {
			yyerrorl(lab.Def.Lineno, "label %v defined and not used", lab.Sym)
			continue
		}

		if lab.Gotopc != nil {
			Fatalf("label %v never resolved", lab.Sym)
		}
		for _, n := range lab.Use {
			checkgoto(n, lab.Def)
		}
	}
}

// Componentgen copies a composite value by moving its individual components.
// Slices, strings and interfaces are supported. Small structs or arrays with
// elements of basic type are also supported.
// nr is nil when assigning a zero value.
func Componentgen(nr, nl *Node) bool {
	return componentgen_wb(nr, nl, false)
}

// componentgen_wb is like componentgen but if wb==true emits write barriers for pointer updates.
// It reports whether it handled the copy; false means the caller must
// fall back to a different code path.
func componentgen_wb(nr, nl *Node, wb bool) bool {
	// Don't generate any code for complete copy of a variable into itself.
	// It's useless, and the VARDEF will incorrectly mark the old value as dead.
	// (This check assumes that the arguments passed to componentgen did not
	// themselves come from Igen, or else we could have Op==ONAME but
	// with a Type and Xoffset describing an individual field, not the entire
	// variable.)
	if nl.Op == ONAME && nl == nr {
		return true
	}

	// Count number of moves required to move components.
	// If using write barrier, can only emit one pointer.
	// TODO(rsc): Allow more pointers, for reflect.Value.
	const maxMoves = 8
	n := 0
	numPtr := 0
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		n++
		if Simtype[t.Etype] == Tptr && t != itable {
			numPtr++
		}
		return n <= maxMoves && (!wb || numPtr <= 1)
	})
	if n > maxMoves || wb && numPtr > 1 {
		return false
	}

	// Must call emitVardef after evaluating rhs but before writing to lhs.
	emitVardef := func() {
		// Emit vardef if needed.
		if nl.Op == ONAME {
			switch nl.Type.Etype {
			case TARRAY, TSLICE, TSTRING, TINTER, TSTRUCT:
				Gvardef(nl)
			}
		}
	}

	isConstString := Isconst(nr, CTSTR)

	if !cadable(nl) && nr != nil && !cadable(nr) && !isConstString {
		return false
	}

	var nodl Node
	if cadable(nl) {
		nodl = *nl
	} else {
		if nr != nil && !cadable(nr) && !isConstString {
			return false
		}
		if nr == nil || isConstString || nl.Ullman >= nr.Ullman {
			Igen(nl, &nodl, nil)
			defer Regfree(&nodl)
		}
	}
	lbase := nodl.Xoffset

	// Special case: zeroing.
	var nodr Node
	if nr == nil {
		// When zeroing, prepare a register containing zero.
		// TODO(rsc): Check that this is actually generating the best code.
		if Thearch.REGZERO != 0 {
			// cpu has a dedicated zero register
			Nodreg(&nodr, Types[TUINT], Thearch.REGZERO)
		} else {
			// no dedicated zero register
			var zero Node
			Nodconst(&zero, nl.Type, 0)
			Regalloc(&nodr, Types[TUINT], nil)
			Thearch.Gmove(&zero, &nodr)
			defer Regfree(&nodr)
		}

		emitVardef()
		visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
			nodl.Type = t
			nodl.Xoffset = lbase + offset
			nodr.Type = t
			if t.IsFloat() {
				// TODO(rsc): Cache zero register like we do for integers?
				Clearslim(&nodl)
			} else {
				Thearch.Gmove(&nodr, &nodl)
			}
			return true
		})
		return true
	}

	// Special case: assignment of string constant.
	if isConstString {
		emitVardef()

		// base
		nodl.Type = Ptrto(Types[TUINT8])
		Regalloc(&nodr, Types[Tptr], nil)
		p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &nodr)
		Datastring(nr.Val().U.(string), &p.From)
		p.From.Type = obj.TYPE_ADDR
		Thearch.Gmove(&nodr, &nodl)
		Regfree(&nodr)

		// length
		nodl.Type = Types[Simtype[TUINT]]
		nodl.Xoffset += int64(Array_nel) - int64(Array_array)
		Nodconst(&nodr, nodl.Type, int64(len(nr.Val().U.(string))))
		Thearch.Gmove(&nodr, &nodl)
		return true
	}

	// General case: copy nl = nr.
	nodr = *nr
	if !cadable(nr) {
		if nr.Ullman >= UINF && nodl.Op == OINDREG {
			Fatalf("miscompile")
		}
		Igen(nr, &nodr, nil)
		defer Regfree(&nodr)
	}
	rbase := nodr.Xoffset

	if nodl.Op == 0 {
		Igen(nl, &nodl, nil)
		defer Regfree(&nodl)
		lbase = nodl.Xoffset
	}

	emitVardef()
	var (
		ptrType   *Type
		ptrOffset int64
	)
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		// Defer the single pointer component so its write barrier
		// runs after all non-pointer components are copied.
		if wb && Simtype[t.Etype] == Tptr && t != itable {
			if ptrType != nil {
				Fatalf("componentgen_wb %v", Tconv(nl.Type, 0))
			}
			ptrType = t
			ptrOffset = offset
			return true
		}
		nodl.Type = t
		nodl.Xoffset = lbase + offset
		nodr.Type = t
		nodr.Xoffset = rbase + offset
		Thearch.Gmove(&nodr, &nodl)
		return true
	})
	if ptrType != nil {
		nodl.Type = ptrType
		nodl.Xoffset = lbase + ptrOffset
		nodr.Type = ptrType
		nodr.Xoffset = rbase + ptrOffset
		cgen_wbptr(&nodr, &nodl)
	}
	return true
}

// visitComponents walks the individual components of the type t,
// walking into array elements, struct fields, the real and imaginary
// parts of complex numbers, and on 32-bit systems the high and
// low halves of 64-bit integers.
// It calls f for each such component, passing the component (aka element)
// type and memory offset, assuming t starts at startOffset.
// If f ever returns false, visitComponents returns false without any more
// calls to f. Otherwise visitComponents returns true.
func visitComponents(t *Type, startOffset int64, f func(elem *Type, elemOffset int64) bool) bool {
	switch t.Etype {
	case TINT64:
		if Widthreg == 8 {
			break
		}
		// NOTE: Assuming little endian (signed top half at offset 4).
		// We don't have any 32-bit big-endian systems.
		if !Thearch.LinkArch.InFamily(sys.ARM, sys.I386) {
			Fatalf("unknown 32-bit architecture")
		}
		return f(Types[TUINT32], startOffset) &&
			f(Types[TINT32], startOffset+4)

	case TUINT64:
		if Widthreg == 8 {
			break
		}
		return f(Types[TUINT32], startOffset) &&
			f(Types[TUINT32], startOffset+4)

	case TCOMPLEX64:
		return f(Types[TFLOAT32], startOffset) &&
			f(Types[TFLOAT32], startOffset+4)

	case TCOMPLEX128:
		return f(Types[TFLOAT64], startOffset) &&
			f(Types[TFLOAT64], startOffset+8)

	case TINTER:
		return f(itable, startOffset) &&
			f(Ptrto(Types[TUINT8]), startOffset+int64(Widthptr))

	case TSTRING:
		return f(Ptrto(Types[TUINT8]), startOffset) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Widthptr))

	case TSLICE:
		return f(Ptrto(t.Elem()), startOffset+int64(Array_array)) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Array_nel)) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Array_cap))

	case TARRAY:
		// Short-circuit [1e6]struct{}.
		if t.Elem().Width == 0 {
			return true
		}

		for i := int64(0); i < t.NumElem(); i++ {
			if !visitComponents(t.Elem(), startOffset+i*t.Elem().Width, f) {
				return false
			}
		}
		return true

	case TSTRUCT:
		for _, field := range t.Fields().Slice() {
			if !visitComponents(field.Type, startOffset+field.Offset, f) {
				return false
			}
		}
		return true
	}
	return f(t, startOffset)
}

// cadable reports whether n is a directly addressable named variable.
func cadable(n *Node) bool {
	// Note: Not sure why you can have n.Op == ONAME without n.Addable, but you can.
	return n.Addable && n.Op == ONAME
}