github.com/letsencrypt/go@v0.0.0-20160714163537-4054769a31f6/src/cmd/compile/internal/gc/gen.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Portable half of code generator; mainly statements and control flow.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
)

// TODO: labellist should become part of a "compilation state" for functions.
var labellist []*Label

func Sysfunc(name string) *Node {
	n := newname(Pkglookup(name, Runtimepkg))
	n.Class = PFUNC
	return n
}

// addrescapes tags node n as having had its address taken
// by "increasing" the "value" of n.Esc to EscHeap.
// Storage is allocated as necessary to allow the address
// to be taken.
func addrescapes(n *Node) {
	switch n.Op {
	// probably a type error already.
	// dump("addrescapes", n);
	default:
		break

	case ONAME:
		if n == nodfp {
			break
		}

		// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
		// on PPARAM it means something different.
		if n.Class == PAUTO && n.Esc == EscNever {
			break
		}

		// If a closure reference escapes, mark the outer variable as escaping.
		if n.isClosureVar() {
			addrescapes(n.Name.Defn)
			break
		}

		if n.Class != PPARAM && n.Class != PPARAMOUT && n.Class != PAUTO {
			break
		}

		// This is a plain parameter or local variable that needs to move to the heap,
		// but possibly for the function outside the one we're compiling.
		// That is, if we have:
		//
		//	func f(x int) {
		//		func() {
		//			global = &x
		//		}()
		//	}
		//
		// then we're analyzing the inner closure but we need to move x to the
		// heap in f, not in the inner closure. Flip over to f before calling moveToHeap.
		oldfn := Curfn
		Curfn = n.Name.Curfn
		if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
			Curfn = Curfn.Func.Closure
		}
		ln := lineno
		lineno = Curfn.Lineno
		moveToHeap(n)
		Curfn = oldfn
		lineno = ln

	case OIND, ODOTPTR:
		break

	// ODOTPTR has already been introduced,
	// so these are the non-pointer ODOT and OINDEX.
	// In &x[0], if x is a slice, then x does not
	// escape--the pointer inside x does, but that
	// is always a heap pointer anyway.
	case ODOT, OINDEX, OPAREN, OCONVNOP:
		if !n.Left.Type.IsSlice() {
			addrescapes(n.Left)
		}
	}
}

// isParamStackCopy reports whether this is the on-stack copy of a
// function parameter that moved to the heap.
func (n *Node) isParamStackCopy() bool {
	return n.Op == ONAME && (n.Class == PPARAM || n.Class == PPARAMOUT) && n.Name.Heapaddr != nil
}

// isParamHeapCopy reports whether this is the on-heap copy of
// a function parameter that moved to the heap.
func (n *Node) isParamHeapCopy() bool {
	return n.Op == ONAME && n.Class == PAUTOHEAP && n.Name.Param.Stackcopy != nil
}

// paramClass reports the parameter class (PPARAM or PPARAMOUT)
// of the node, which may be an unmoved on-stack parameter
// or the on-heap or on-stack copy of a parameter that moved to the heap.
// If the node is not a parameter, paramClass returns Pxxx.
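// For example, in
//
//	func f(x int) (y int)
//
// paramClass reports PPARAM for x and PPARAMOUT for y, and it continues to
// report those classes even after escape analysis moves either variable to
// the heap, via the Stackcopy link set up in moveToHeap.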
func (n *Node) paramClass() Class {
	if n.Op != ONAME {
		return Pxxx
	}
	if n.Class == PPARAM || n.Class == PPARAMOUT {
		return n.Class
	}
	if n.isParamHeapCopy() {
		return n.Name.Param.Stackcopy.Class
	}
	return Pxxx
}

// moveToHeap records the parameter or local variable n as moved to the heap.
func moveToHeap(n *Node) {
	if Debug['r'] != 0 {
		Dump("MOVE", n)
	}
	if compiling_runtime {
		Yyerror("%v escapes to heap, not allowed in runtime.", n)
	}
	if n.Class == PAUTOHEAP {
		Dump("n", n)
		Fatalf("double move to heap")
	}

	// Allocate a local stack variable to hold the pointer to the heap copy.
	// temp will add it to the function declaration list automatically.
	heapaddr := temp(Ptrto(n.Type))
	heapaddr.Sym = Lookup("&" + n.Sym.Name)
	heapaddr.Orig.Sym = heapaddr.Sym

	// Parameters have a local stack copy used at function start/end
	// in addition to the copy in the heap that may live longer than
	// the function.
	if n.Class == PPARAM || n.Class == PPARAMOUT {
		if n.Xoffset == BADWIDTH {
			Fatalf("addrescapes before param assignment")
		}

		// We rewrite n below to be a heap variable (indirection of heapaddr).
		// Preserve a copy so we can still write code referring to the original,
		// and substitute that copy into the function declaration list
		// so that analyses of the local (on-stack) variables use it.
		stackcopy := Nod(ONAME, nil, nil)
		stackcopy.Sym = n.Sym
		stackcopy.Type = n.Type
		stackcopy.Xoffset = n.Xoffset
		stackcopy.Class = n.Class
		stackcopy.Name.Heapaddr = heapaddr
		if n.Class == PPARAM {
			stackcopy.SetNotLiveAtEnd(true)
		}
		if n.Class == PPARAMOUT {
			// Make sure the pointer to the heap copy is kept live throughout the function.
			// The function could panic at any point, and then a defer could recover.
			// Thus, we need the pointer to the heap copy always available so the
			// post-deferreturn code can copy the return value back to the stack.
			// See issue 16095.
			heapaddr.setIsOutputParamHeapAddr(true)
		}
		n.Name.Param.Stackcopy = stackcopy

		// Substitute the stackcopy into the function variable list so that
		// liveness and other analyses use the underlying stack slot
		// and not the now-pseudo-variable n.
		found := false
		for i, d := range Curfn.Func.Dcl {
			if d == n {
				Curfn.Func.Dcl[i] = stackcopy
				found = true
				break
			}
			// Parameters are before locals, so can stop early.
			// This limits the search even in functions with many local variables.
			if d.Class == PAUTO {
				break
			}
		}
		if !found {
			Fatalf("cannot find %v in local variable list", n)
		}
		Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
	}

	// Modify n in place so that uses of n now mean indirection of the heapaddr.
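	// From here on the variable's storage is the heap copy reached through
	// heapaddr; any stack copy is visible only through the Stackcopy link set above.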
	n.Class = PAUTOHEAP
	n.Ullman = 2
	n.Xoffset = 0
	n.Name.Heapaddr = heapaddr
	n.Esc = EscHeap
	if Debug['m'] != 0 {
		fmt.Printf("%v: moved to heap: %v\n", n.Line(), n)
	}
}

func clearlabels() {
	for _, l := range labellist {
		l.Sym.Label = nil
	}
	labellist = labellist[:0]
}

func newlab(n *Node) *Label {
	s := n.Left.Sym
	lab := s.Label
	if lab == nil {
		lab = new(Label)
		lab.Sym = s
		s.Label = lab
		labellist = append(labellist, lab)
	}

	if n.Op == OLABEL {
		if lab.Def != nil {
			Yyerror("label %v already defined at %v", s, lab.Def.Line())
		} else {
			lab.Def = n
		}
	} else {
		lab.Use = append(lab.Use, n)
	}

	return lab
}

// There is a copy of checkgoto in the new SSA backend.
// Please keep them in sync.
func checkgoto(from *Node, to *Node) {
	if from.Sym == to.Sym {
		return
	}

	nf := 0
	for fs := from.Sym; fs != nil; fs = fs.Link {
		nf++
	}
	nt := 0
	for fs := to.Sym; fs != nil; fs = fs.Link {
		nt++
	}
	fs := from.Sym
	for ; nf > nt; nf-- {
		fs = fs.Link
	}
	if fs != to.Sym {
		lno := lineno
		setlineno(from)

		// decide what to complain about.
		// prefer to complain about 'into block' over declarations,
		// so scan backward to find most recent block or else dcl.
		var block *Sym

		var dcl *Sym
		ts := to.Sym
		for ; nt > nf; nt-- {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
		}

		for ts != fs {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
			fs = fs.Link
		}

		if block != nil {
			Yyerror("goto %v jumps into block starting at %v", from.Left.Sym, linestr(block.Lastlineno))
		} else {
			Yyerror("goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, linestr(dcl.Lastlineno))
		}
		lineno = lno
	}
}

func stmtlabel(n *Node) *Label {
	if n.Sym != nil {
		lab := n.Sym.Label
		if lab != nil {
			if lab.Def != nil {
				if lab.Def.Name.Defn == n {
					return lab
				}
			}
		}
	}
	return nil
}

// compile statements
func Genlist(l Nodes) {
	for _, n := range l.Slice() {
		gen(n)
	}
}

// generate code to start new proc running call n.
func cgen_proc(n *Node, proc int) {
	switch n.Left.Op {
	default:
		Fatalf("cgen_proc: unknown call %v", n.Left.Op)

	case OCALLMETH:
		cgen_callmeth(n.Left, proc)

	case OCALLINTER:
		cgen_callinter(n.Left, nil, proc)

	case OCALLFUNC:
		cgen_call(n.Left, proc)
	}
}

// generate declaration.
// have to allocate heap copy
// for escaped variables.
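// (Escaped variables have already been rewritten by moveToHeap, so a
// PAUTOHEAP node reaching cgen_dcl is an internal error.)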
func cgen_dcl(n *Node) {
	if Debug['g'] != 0 {
		Dump("\ncgen-dcl", n)
	}
	if n.Op != ONAME {
		Dump("cgen_dcl", n)
		Fatalf("cgen_dcl")
	}

	if n.Class == PAUTOHEAP {
		Fatalf("cgen_dcl %v", n)
	}
}

// generate discard of value
func cgen_discard(nr *Node) {
	if nr == nil {
		return
	}

	switch nr.Op {
	case ONAME:
		if nr.Class != PAUTOHEAP && nr.Class != PEXTERN && nr.Class != PFUNC {
			gused(nr)
		}

	// binary
	case OADD,
		OAND,
		ODIV,
		OEQ,
		OGE,
		OGT,
		OLE,
		OLSH,
		OLT,
		OMOD,
		OMUL,
		ONE,
		OOR,
		ORSH,
		OSUB,
		OXOR:
		cgen_discard(nr.Left)

		cgen_discard(nr.Right)

	// unary
	case OCAP,
		OCOM,
		OLEN,
		OMINUS,
		ONOT,
		OPLUS:
		cgen_discard(nr.Left)

	case OIND:
		Cgen_checknil(nr.Left)

	// special enough to just evaluate
	default:
		var tmp Node
		Tempname(&tmp, nr.Type)

		Cgen_as(&tmp, nr)
		gused(&tmp)
	}
}

// clearslim generates code to zero a slim node.
func Clearslim(n *Node) {
	var z Node
	z.Op = OLITERAL
	z.Type = n.Type
	z.Addable = true

	switch Simtype[n.Type.Etype] {
	case TCOMPLEX64, TCOMPLEX128:
		z.SetVal(Val{new(Mpcplx)})
		z.Val().U.(*Mpcplx).Real.SetFloat64(0.0)
		z.Val().U.(*Mpcplx).Imag.SetFloat64(0.0)

	case TFLOAT32, TFLOAT64:
		var zero Mpflt
		zero.SetFloat64(0.0)
		z.SetVal(Val{&zero})

	case TPTR32, TPTR64, TCHAN, TMAP:
		z.SetVal(Val{new(NilVal)})

	case TBOOL:
		z.SetVal(Val{false})

	case TINT8,
		TINT16,
		TINT32,
		TINT64,
		TUINT8,
		TUINT16,
		TUINT32,
		TUINT64:
		z.SetVal(Val{new(Mpint)})
		z.Val().U.(*Mpint).SetInt64(0)

	default:
		Fatalf("clearslim called on type %v", n.Type)
	}

	ullmancalc(&z)
	Cgen(&z, n)
}

// generate:
//	res = iface{typ, data}
// n.Left is typ
// n.Right is data
func Cgen_eface(n *Node, res *Node) {
	// the right node of an eface may contain function calls that use res as an argument,
	// so it's important that it is done first

	tmp := temp(Types[Tptr])
	Cgen(n.Right, tmp)

	Gvardef(res)

	dst := *res
	dst.Type = Types[Tptr]
	dst.Xoffset += int64(Widthptr)
	Cgen(tmp, &dst)

	dst.Xoffset -= int64(Widthptr)
	Cgen(n.Left, &dst)
}

// generate one of:
//	res, resok = x.(T)
//	res = x.(T) (when resok == nil)
// n.Left is x
// n.Type is T
func cgen_dottype(n *Node, res, resok *Node, wb bool) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	}
	// iface := n.Left
	// r1 := iword(iface)
	// if n.Left is non-empty interface {
	//	r1 = *r1
	// }
	// if r1 == T {
	//	res = idata(iface)
	//	resok = true
	// } else {
	//	assert[EI]2T(x, T, nil) // (when resok == nil; does not return)
	//	resok = false           // (when resok != nil)
	// }
	//
	var iface Node
	Igen(n.Left, &iface, res)
	var r1, r2 Node
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, nil)
	iface.Type = byteptr
	Cgen(&iface, &r1)
	if !n.Left.Type.IsEmptyInterface() {
		// Holding itab, want concrete type in second word.
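		// A nil itab (interface holding no value) must not be dereferenced;
		// branch around the load so r1 stays nil and the type check below fails.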
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2 = r1
		r2.Op = OINDREG
		r2.Xoffset = int64(Widthptr)
		Cgen(&r2, &r1)
		Patch(p, Pc)
	}
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	Regfree(&r2) // not needed for success path; reclaimed on one failure path
	iface.Xoffset += int64(Widthptr)
	Cgen(&iface, &r1)
	Regfree(&iface)

	if resok == nil {
		r1.Type = res.Type
		cgen_wb(&r1, res, wb)
		q := Gbranch(obj.AJMP, nil, 0)
		Patch(p, Pc)
		Regrealloc(&r2) // reclaim from above, for this failure path
		fn := syslook("panicdottype")
		dowidth(fn.Type)
		call := Nod(OCALLFUNC, fn, nil)
		r1.Type = byteptr
		r2.Type = byteptr
		call.List.Set([]*Node{&r1, &r2, typename(n.Left.Type)})
		call.List.Set(ascompatte(OCALLFUNC, call, false, fn.Type.Params(), call.List.Slice(), 0, nil))
		gen(call)
		Regfree(&r1)
		Regfree(&r2)
		Thearch.Gins(obj.AUNDEF, nil, nil)
		Patch(q, Pc)
	} else {
		// This half is handling the res, resok = x.(T) case,
		// which is called from gen, not cgen, and is consequently fussier
		// about blank assignments. We have to avoid calling cgen for those.
		r1.Type = res.Type
		if !isblank(res) {
			cgen_wb(&r1, res, wb)
		}
		Regfree(&r1)
		if !isblank(resok) {
			Cgen(Nodbool(true), resok)
		}
		q := Gbranch(obj.AJMP, nil, 0)
		Patch(p, Pc)
		if !isblank(res) {
			n := nodnil()
			n.Type = res.Type
			Cgen(n, res)
		}
		if !isblank(resok) {
			Cgen(Nodbool(false), resok)
		}
		Patch(q, Pc)
	}
}

// generate:
//	res, resok = x.(T)
// n.Left is x
// n.Type is T
func Cgen_As2dottype(n, res, resok *Node) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	}
	// iface := n.Left
	// r1 := iword(iface)
	// if n.Left is non-empty interface {
	//	r1 = *r1
	// }
	// if r1 == T {
	//	res = idata(iface)
	//	resok = true
	// } else {
	//	res = nil
	//	resok = false
	// }
	//
	var iface Node
	Igen(n.Left, &iface, nil)
	var r1, r2 Node
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, res)
	iface.Type = byteptr
	Cgen(&iface, &r1)
	if !n.Left.Type.IsEmptyInterface() {
		// Holding itab, want concrete type in second word.
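		// As above, skip the dereference when the itab is nil so that the
		// comparison against the target type below takes the failure path.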
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2 = r1
		r2.Op = OINDREG
		r2.Xoffset = int64(Widthptr)
		Cgen(&r2, &r1)
		Patch(p, Pc)
	}
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	iface.Type = n.Type
	iface.Xoffset += int64(Widthptr)
	Cgen(&iface, &r1)
	if iface.Op != 0 {
		Regfree(&iface)
	}
	Cgen(&r1, res)
	q := Gbranch(obj.AJMP, nil, 0)
	Patch(p, Pc)

	fn := syslook("panicdottype")
	dowidth(fn.Type)
	call := Nod(OCALLFUNC, fn, nil)
	call.List.Set([]*Node{&r1, &r2, typename(n.Left.Type)})
	call.List.Set(ascompatte(OCALLFUNC, call, false, fn.Type.Params(), call.List.Slice(), 0, nil))
	gen(call)
	Regfree(&r1)
	Regfree(&r2)
	Thearch.Gins(obj.AUNDEF, nil, nil)
	Patch(q, Pc)
}

// gather series of offsets
// >=0 is direct addressed field
// <0 is pointer to next field (+1)
func Dotoffset(n *Node, oary []int64, nn **Node) int {
	var i int

	switch n.Op {
	case ODOT:
		if n.Xoffset == BADWIDTH {
			Dump("bad width in dotoffset", n)
			Fatalf("bad width in dotoffset")
		}

		i = Dotoffset(n.Left, oary, nn)
		if i > 0 {
			if oary[i-1] >= 0 {
				oary[i-1] += n.Xoffset
			} else {
				oary[i-1] -= n.Xoffset
			}
			break
		}

		if i < 10 {
			oary[i] = n.Xoffset
			i++
		}

	case ODOTPTR:
		if n.Xoffset == BADWIDTH {
			Dump("bad width in dotoffset", n)
			Fatalf("bad width in dotoffset")
		}

		i = Dotoffset(n.Left, oary, nn)
		if i < 10 {
			oary[i] = -(n.Xoffset + 1)
			i++
		}

	default:
		*nn = n
		return 0
	}

	if i >= 10 {
		*nn = nil
	}
	return i
}

// make a new temporary, off the books.
func Tempname(nn *Node, t *Type) {
	if Curfn == nil {
		Fatalf("no curfn for tempname")
	}
	if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
		Dump("Tempname", Curfn)
		Fatalf("adding tempname to wrong closure function")
	}

	if t == nil {
		Yyerror("tempname called with nil type")
		t = Types[TINT32]
	}

	// give each tmp a different name so that there is
	// a chance to registerize them
	s := LookupN("autotmp_", statuniqgen)
	statuniqgen++
	n := Nod(ONAME, nil, nil)
	n.Sym = s
	s.Def = n
	n.Type = t
	n.Class = PAUTO
	n.Addable = true
	n.Ullman = 1
	n.Esc = EscNever
	n.Name.Curfn = Curfn
	Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)

	dowidth(t)
	n.Xoffset = 0
	*nn = *n
}

func temp(t *Type) *Node {
	var n Node
	Tempname(&n, t)
	n.Sym.Def.Used = true
	return n.Orig
}

func gen(n *Node) {
	//dump("gen", n);

	lno := setlineno(n)

	wasregalloc := Anyregalloc()

	if n == nil {
		goto ret
	}

	if n.Ninit.Len() > 0 {
		Genlist(n.Ninit)
	}

	setlineno(n)

	switch n.Op {
	default:
		Fatalf("gen: unknown op %v", Nconv(n, FmtShort|FmtSign))

	case OCASE,
		OFALL,
		OXCASE,
		OXFALL,
		ODCLCONST,
		ODCLFUNC,
		ODCLTYPE:
		break

	case OEMPTY:
		break

	case OBLOCK:
		Genlist(n.List)

	case OLABEL:
		if isblanksym(n.Left.Sym) {
			break
		}

		lab := newlab(n)

		// if there are pending gotos, resolve them all to the current pc.
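		// The pending gotos are chained through their own jump target fields
		// (see the OGOTO case below); unpatch recovers each link before the
		// jump is redirected to the current pc.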
		var p2 *obj.Prog
		for p1 := lab.Gotopc; p1 != nil; p1 = p2 {
			p2 = unpatch(p1)
			Patch(p1, Pc)
		}

		lab.Gotopc = nil
		if lab.Labelpc == nil {
			lab.Labelpc = Pc
		}

		if n.Name.Defn != nil {
			switch n.Name.Defn.Op {
			// so stmtlabel can find the label
			case OFOR, OSWITCH, OSELECT:
				n.Name.Defn.Sym = lab.Sym
			}
		}

	// if label is defined, emit jump to it.
	// otherwise save list of pending gotos in lab->gotopc.
	// the list is linked through the normal jump target field
	// to avoid a second list. (the jumps are actually still
	// valid code, since they're just going to another goto
	// to the same label. we'll unwind it when we learn the pc
	// of the label in the OLABEL case above.)
	case OGOTO:
		lab := newlab(n)

		if lab.Labelpc != nil {
			gjmp(lab.Labelpc)
		} else {
			lab.Gotopc = gjmp(lab.Gotopc)
		}

	case OBREAK:
		if n.Left != nil {
			lab := n.Left.Sym.Label
			if lab == nil {
				Yyerror("break label not defined: %v", n.Left.Sym)
				break
			}

			lab.Used = true
			if lab.Breakpc == nil {
				Yyerror("invalid break label %v", n.Left.Sym)
				break
			}

			gjmp(lab.Breakpc)
			break
		}

		if breakpc == nil {
			Yyerror("break is not in a loop")
			break
		}

		gjmp(breakpc)

	case OCONTINUE:
		if n.Left != nil {
			lab := n.Left.Sym.Label
			if lab == nil {
				Yyerror("continue label not defined: %v", n.Left.Sym)
				break
			}

			lab.Used = true
			if lab.Continpc == nil {
				Yyerror("invalid continue label %v", n.Left.Sym)
				break
			}

			gjmp(lab.Continpc)
			break
		}

		if continpc == nil {
			Yyerror("continue is not in a loop")
			break
		}

		gjmp(continpc)

	case OFOR:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break: goto done
		scontin := continpc
		continpc = Pc

		// define break and continue labels
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
			lab.Continpc = continpc
		}

		gen(n.Right)                     // contin: incr
		Patch(p1, Pc)                    // test:
		Bgen(n.Left, false, -1, breakpc) // if(!test) goto break
		Genlist(n.Nbody)                 // body
		gjmp(continpc)
		Patch(breakpc, Pc) // done:
		continpc = scontin
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
			lab.Continpc = nil
		}

	case OIF:
		p1 := gjmp(nil)                         // goto test
		p2 := gjmp(nil)                         // p2: goto else
		Patch(p1, Pc)                           // test:
		Bgen(n.Left, false, int(-n.Likely), p2) // if(!test) goto p2
		Genlist(n.Nbody)                        // then
		p3 := gjmp(nil)                         // goto done
		Patch(p2, Pc)                           // else:
		Genlist(n.Rlist)                        // else
		Patch(p3, Pc)                           // done:

	case OSWITCH:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break: goto done

		// define break label
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
		}

		Patch(p1, Pc)      // test:
		Genlist(n.Nbody)   // switch(test) body
		Patch(breakpc, Pc) // done:
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
		}

	case OSELECT:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break: goto done

		// define break label
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
		}

		Patch(p1, Pc)      // test:
		Genlist(n.Nbody)   // select() body
		Patch(breakpc, Pc) // done:
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
		}

	case ODCL:
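		// Variable declaration: no code to emit; cgen_dcl only sanity-checks the node.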
		cgen_dcl(n.Left)

	case OAS:
		if gen_as_init(n, false) {
			break
		}
		Cgen_as(n.Left, n.Right)

	case OASWB:
		Cgen_as_wb(n.Left, n.Right, true)

	case OAS2DOTTYPE:
		cgen_dottype(n.Rlist.First(), n.List.First(), n.List.Second(), needwritebarrier(n.List.First(), n.Rlist.First()))

	case OCALLMETH:
		cgen_callmeth(n, 0)

	case OCALLINTER:
		cgen_callinter(n, nil, 0)

	case OCALLFUNC:
		cgen_call(n, 0)

	case OPROC:
		cgen_proc(n, 1)

	case ODEFER:
		cgen_proc(n, 2)

	case ORETURN, ORETJMP:
		cgen_ret(n)

	// Function calls turned into compiler intrinsics.
	// At top level, can just ignore the call and make sure to preserve side effects in the argument, if any.
	case OGETG:
		// nothing
	case OSQRT:
		cgen_discard(n.Left)

	case OCHECKNIL:
		Cgen_checknil(n.Left)

	case OVARKILL:
		Gvarkill(n.Left)

	case OVARLIVE:
		Gvarlive(n.Left)
	}

ret:
	if Anyregalloc() != wasregalloc {
		Dump("node", n)
		Fatalf("registers left allocated")
	}

	lineno = lno
}

func Cgen_as(nl, nr *Node) {
	Cgen_as_wb(nl, nr, false)
}

func Cgen_as_wb(nl, nr *Node, wb bool) {
	if Debug['g'] != 0 {
		op := "cgen_as"
		if wb {
			op = "cgen_as_wb"
		}
		Dump(op, nl)
		Dump(op+" = ", nr)
	}

	for nr != nil && nr.Op == OCONVNOP {
		nr = nr.Left
	}

	if nl == nil || isblank(nl) {
		cgen_discard(nr)
		return
	}

	if nr == nil || iszero(nr) {
		tl := nl.Type
		if tl == nil {
			return
		}
		if Isfat(tl) {
			if nl.Op == ONAME {
				Gvardef(nl)
			}
			Thearch.Clearfat(nl)
			return
		}

		Clearslim(nl)
		return
	}

	tl := nl.Type
	if tl == nil {
		return
	}

	cgen_wb(nr, nl, wb)
}

func cgen_callmeth(n *Node, proc int) {
	// generate a rewrite in n2 for the method call
	// (p.f)(...) goes to (f)(p,...)

	l := n.Left

	if l.Op != ODOTMETH {
		Fatalf("cgen_callmeth: not dotmethod: %v", l)
	}

	n2 := *n
	n2.Op = OCALLFUNC
	n2.Left = newname(l.Sym)
	n2.Left.Type = l.Type

	if n2.Left.Op == ONAME {
		n2.Left.Class = PFUNC
	}
	cgen_call(&n2, proc)
}

// CgenTemp creates a temporary node, assigns n to it, and returns it.
func CgenTemp(n *Node) *Node {
	var tmp Node
	Tempname(&tmp, n.Type)
	Cgen(n, &tmp)
	return &tmp
}

func checklabels() {
	for _, lab := range labellist {
		if lab.Def == nil {
			for _, n := range lab.Use {
				yyerrorl(n.Lineno, "label %v not defined", lab.Sym)
			}
			continue
		}

		if lab.Use == nil && !lab.Used {
			yyerrorl(lab.Def.Lineno, "label %v defined and not used", lab.Sym)
			continue
		}

		if lab.Gotopc != nil {
			Fatalf("label %v never resolved", lab.Sym)
		}
		for _, n := range lab.Use {
			checkgoto(n, lab.Def)
		}
	}
}

// Componentgen copies a composite value by moving its individual components.
// Slices, strings and interfaces are supported. Small structs or arrays with
// elements of basic type are also supported.
// nr is nil when assigning a zero value.
func Componentgen(nr, nl *Node) bool {
	return componentgen_wb(nr, nl, false)
}

// componentgen_wb is like componentgen but if wb==true emits write barriers for pointer updates.
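// Both report whether the copy could be decomposed into simple component
// moves; when they return false (too many components, or more than one
// pointer while wb is set), the caller must copy the value some other way.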
func componentgen_wb(nr, nl *Node, wb bool) bool {
	// Don't generate any code for complete copy of a variable into itself.
	// It's useless, and the VARDEF will incorrectly mark the old value as dead.
	// (This check assumes that the arguments passed to componentgen did not
	// themselves come from Igen, or else we could have Op==ONAME but
	// with a Type and Xoffset describing an individual field, not the entire
	// variable.)
	if nl.Op == ONAME && nl == nr {
		return true
	}

	// Count number of moves required to move components.
	// If using write barrier, can only emit one pointer.
	// TODO(rsc): Allow more pointers, for reflect.Value.
	const maxMoves = 8
	n := 0
	numPtr := 0
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		n++
		if Simtype[t.Etype] == Tptr && t != itable {
			numPtr++
		}
		return n <= maxMoves && (!wb || numPtr <= 1)
	})
	if n > maxMoves || wb && numPtr > 1 {
		return false
	}

	// Must call emitVardef after evaluating rhs but before writing to lhs.
	emitVardef := func() {
		// Emit vardef if needed.
		if nl.Op == ONAME {
			switch nl.Type.Etype {
			case TARRAY, TSLICE, TSTRING, TINTER, TSTRUCT:
				Gvardef(nl)
			}
		}
	}

	isConstString := Isconst(nr, CTSTR)

	if !cadable(nl) && nr != nil && !cadable(nr) && !isConstString {
		return false
	}

	var nodl Node
	if cadable(nl) {
		nodl = *nl
	} else {
		if nr != nil && !cadable(nr) && !isConstString {
			return false
		}
		if nr == nil || isConstString || nl.Ullman >= nr.Ullman {
			Igen(nl, &nodl, nil)
			defer Regfree(&nodl)
		}
	}
	lbase := nodl.Xoffset

	// Special case: zeroing.
	var nodr Node
	if nr == nil {
		// When zeroing, prepare a register containing zero.
		// TODO(rsc): Check that this is actually generating the best code.
		if Thearch.REGZERO != 0 {
			// cpu has a dedicated zero register
			Nodreg(&nodr, Types[TUINT], Thearch.REGZERO)
		} else {
			// no dedicated zero register
			var zero Node
			Nodconst(&zero, nl.Type, 0)
			Regalloc(&nodr, Types[TUINT], nil)
			Thearch.Gmove(&zero, &nodr)
			defer Regfree(&nodr)
		}

		emitVardef()
		visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
			nodl.Type = t
			nodl.Xoffset = lbase + offset
			nodr.Type = t
			if t.IsFloat() {
				// TODO(rsc): Cache zero register like we do for integers?
				Clearslim(&nodl)
			} else {
				Thearch.Gmove(&nodr, &nodl)
			}
			return true
		})
		return true
	}

	// Special case: assignment of string constant.
	if isConstString {
		emitVardef()

		// base
		nodl.Type = Ptrto(Types[TUINT8])
		Regalloc(&nodr, Types[Tptr], nil)
		p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &nodr)
		Datastring(nr.Val().U.(string), &p.From)
		p.From.Type = obj.TYPE_ADDR
		Thearch.Gmove(&nodr, &nodl)
		Regfree(&nodr)

		// length
		nodl.Type = Types[Simtype[TUINT]]
		nodl.Xoffset += int64(Array_nel) - int64(Array_array)
		Nodconst(&nodr, nodl.Type, int64(len(nr.Val().U.(string))))
		Thearch.Gmove(&nodr, &nodl)
		return true
	}

	// General case: copy nl = nr.
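	// Make both sides addressable (through Igen when they are not already
	// cadable) and move the value component by component. With write barriers
	// enabled, the lone pointer component is deferred and written last via cgen_wbptr.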
	nodr = *nr
	if !cadable(nr) {
		if nr.Ullman >= UINF && nodl.Op == OINDREG {
			Fatalf("miscompile")
		}
		Igen(nr, &nodr, nil)
		defer Regfree(&nodr)
	}
	rbase := nodr.Xoffset

	if nodl.Op == 0 {
		Igen(nl, &nodl, nil)
		defer Regfree(&nodl)
		lbase = nodl.Xoffset
	}

	emitVardef()
	var (
		ptrType   *Type
		ptrOffset int64
	)
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		if wb && Simtype[t.Etype] == Tptr && t != itable {
			if ptrType != nil {
				Fatalf("componentgen_wb %v", Tconv(nl.Type, 0))
			}
			ptrType = t
			ptrOffset = offset
			return true
		}
		nodl.Type = t
		nodl.Xoffset = lbase + offset
		nodr.Type = t
		nodr.Xoffset = rbase + offset
		Thearch.Gmove(&nodr, &nodl)
		return true
	})
	if ptrType != nil {
		nodl.Type = ptrType
		nodl.Xoffset = lbase + ptrOffset
		nodr.Type = ptrType
		nodr.Xoffset = rbase + ptrOffset
		cgen_wbptr(&nodr, &nodl)
	}
	return true
}

// visitComponents walks the individual components of the type t,
// walking into array elements, struct fields, the real and imaginary
// parts of complex numbers, and on 32-bit systems the high and
// low halves of 64-bit integers.
// It calls f for each such component, passing the component (aka element)
// type and memory offset, assuming t starts at startOffset.
// If f ever returns false, visitComponents returns false without any more
// calls to f. Otherwise visitComponents returns true.
func visitComponents(t *Type, startOffset int64, f func(elem *Type, elemOffset int64) bool) bool {
	switch t.Etype {
	case TINT64:
		if Widthreg == 8 {
			break
		}
		// NOTE: Assuming little endian (signed top half at offset 4).
		// We don't have any 32-bit big-endian systems.
		if !Thearch.LinkArch.InFamily(sys.ARM, sys.I386) {
			Fatalf("unknown 32-bit architecture")
		}
		return f(Types[TUINT32], startOffset) &&
			f(Types[TINT32], startOffset+4)

	case TUINT64:
		if Widthreg == 8 {
			break
		}
		return f(Types[TUINT32], startOffset) &&
			f(Types[TUINT32], startOffset+4)

	case TCOMPLEX64:
		return f(Types[TFLOAT32], startOffset) &&
			f(Types[TFLOAT32], startOffset+4)

	case TCOMPLEX128:
		return f(Types[TFLOAT64], startOffset) &&
			f(Types[TFLOAT64], startOffset+8)

	case TINTER:
		return f(itable, startOffset) &&
			f(Ptrto(Types[TUINT8]), startOffset+int64(Widthptr))

	case TSTRING:
		return f(Ptrto(Types[TUINT8]), startOffset) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Widthptr))

	case TSLICE:
		return f(Ptrto(t.Elem()), startOffset+int64(Array_array)) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Array_nel)) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Array_cap))

	case TARRAY:
		// Short-circuit [1e6]struct{}.
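		// A zero-width element has no components of its own, so the whole
		// array can be skipped without iterating over its (possibly huge) length.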
		if t.Elem().Width == 0 {
			return true
		}

		for i := int64(0); i < t.NumElem(); i++ {
			if !visitComponents(t.Elem(), startOffset+i*t.Elem().Width, f) {
				return false
			}
		}
		return true

	case TSTRUCT:
		for _, field := range t.Fields().Slice() {
			if !visitComponents(field.Type, startOffset+field.Offset, f) {
				return false
			}
		}
		return true
	}
	return f(t, startOffset)
}

func cadable(n *Node) bool {
	// Note: Not sure why you can have n.Op == ONAME without n.Addable, but you can.
	return n.Addable && n.Op == ONAME
}