github.com/mh-cbon/go@v0.0.0-20160603070303-9e112a3fe4c0/src/cmd/compile/internal/gc/gen.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Portable half of code generator; mainly statements and control flow.

package gc

import (
	"cmd/internal/obj"
	"cmd/internal/sys"
	"fmt"
)

// TODO: labellist should become part of a "compilation state" for functions.
var labellist []*Label

func Sysfunc(name string) *Node {
	n := newname(Pkglookup(name, Runtimepkg))
	n.Class = PFUNC
	return n
}

// addrescapes tags node n as having had its address taken
// by "increasing" the "value" of n.Esc to EscHeap.
// Storage is allocated as necessary to allow the address
// to be taken.
func addrescapes(n *Node) {
	switch n.Op {
	// probably a type error already.
	// dump("addrescapes", n);
	default:
		break

	case ONAME:
		if n == nodfp {
			break
		}

		// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
		// on PPARAM it means something different.
		if n.Class == PAUTO && n.Esc == EscNever {
			break
		}

		// If a closure reference escapes, mark the outer variable as escaping.
		if n.isClosureVar() {
			addrescapes(n.Name.Defn)
			break
		}

		if n.Class != PPARAM && n.Class != PPARAMOUT && n.Class != PAUTO {
			break
		}

		// This is a plain parameter or local variable that needs to move to the heap,
		// but possibly for the function outside the one we're compiling.
		// That is, if we have:
		//
		//	func f(x int) {
		//		func() {
		//			global = &x
		//		}
		//	}
		//
		// then we're analyzing the inner closure but we need to move x to the
		// heap in f, not in the inner closure. Flip over to f before calling moveToHeap.
		oldfn := Curfn
		Curfn = n.Name.Curfn
		if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
			Curfn = Curfn.Func.Closure
		}
		ln := lineno
		lineno = Curfn.Lineno
		moveToHeap(n)
		Curfn = oldfn
		lineno = ln

	case OIND, ODOTPTR:
		break

	// ODOTPTR has already been introduced,
	// so these are the non-pointer ODOT and OINDEX.
	// In &x[0], if x is a slice, then x does not
	// escape--the pointer inside x does, but that
	// is always a heap pointer anyway.
	case ODOT, OINDEX, OPAREN, OCONVNOP:
		if !n.Left.Type.IsSlice() {
			addrescapes(n.Left)
		}
	}
}

// isParamStackCopy reports whether this is the on-stack copy of a
// function parameter that moved to the heap.
func (n *Node) isParamStackCopy() bool {
	return n.Op == ONAME && (n.Class == PPARAM || n.Class == PPARAMOUT) && n.Name.Heapaddr != nil
}

// isParamHeapCopy reports whether this is the on-heap copy of
// a function parameter that moved to the heap.
func (n *Node) isParamHeapCopy() bool {
	return n.Op == ONAME && n.Class == PAUTOHEAP && n.Name.Param.Stackcopy != nil
}

// paramClass reports the parameter class (PPARAM or PPARAMOUT)
// of the node, which may be an unmoved on-stack parameter
// or the on-heap or on-stack copy of a parameter that moved to the heap.
// If the node is not a parameter, paramClass returns Pxxx.
func (n *Node) paramClass() Class {
	if n.Op != ONAME {
		return Pxxx
	}
	if n.Class == PPARAM || n.Class == PPARAMOUT {
		return n.Class
	}
	if n.isParamHeapCopy() {
		return n.Name.Param.Stackcopy.Class
	}
	return Pxxx
}

// moveToHeap records the parameter or local variable n as moved to the heap.
func moveToHeap(n *Node) {
	if Debug['r'] != 0 {
		Dump("MOVE", n)
	}
	if compiling_runtime {
		Yyerror("%v escapes to heap, not allowed in runtime.", n)
	}
	if n.Class == PAUTOHEAP {
		Dump("n", n)
		Fatalf("double move to heap")
	}

	// Allocate a local stack variable to hold the pointer to the heap copy.
	// temp will add it to the function declaration list automatically.
	heapaddr := temp(Ptrto(n.Type))
	heapaddr.Sym = Lookup("&" + n.Sym.Name)
	heapaddr.Orig.Sym = heapaddr.Sym

	// Parameters have a local stack copy used at function start/end
	// in addition to the copy in the heap that may live longer than
	// the function.
	if n.Class == PPARAM || n.Class == PPARAMOUT {
		if n.Xoffset == BADWIDTH {
			Fatalf("addrescapes before param assignment")
		}

		// We rewrite n below to be a heap variable (indirection of heapaddr).
		// Preserve a copy so we can still write code referring to the original,
		// and substitute that copy into the function declaration list
		// so that analyses of the local (on-stack) variables use it.
		stackcopy := Nod(ONAME, nil, nil)
		stackcopy.Sym = n.Sym
		stackcopy.Type = n.Type
		stackcopy.Xoffset = n.Xoffset
		stackcopy.Class = n.Class
		stackcopy.Name.Heapaddr = heapaddr
		if n.Class == PPARAM {
			stackcopy.SetNotLiveAtEnd(true)
		}
		n.Name.Param.Stackcopy = stackcopy

		// Substitute the stackcopy into the function variable list so that
		// liveness and other analyses use the underlying stack slot
		// and not the now-pseudo-variable n.
		found := false
		for i, d := range Curfn.Func.Dcl {
			if d == n {
				Curfn.Func.Dcl[i] = stackcopy
				found = true
				break
			}
			// Parameters are before locals, so can stop early.
			// This limits the search even in functions with many local variables.
			if d.Class == PAUTO {
				break
			}
		}
		if !found {
			Fatalf("cannot find %v in local variable list", n)
		}
		Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
	}

	// Modify n in place so that uses of n now mean indirection of the heapaddr.
	n.Class = PAUTOHEAP
	n.Ullman = 2
	n.Xoffset = 0
	n.Name.Heapaddr = heapaddr
	n.Esc = EscHeap
	if Debug['m'] != 0 {
		fmt.Printf("%v: moved to heap: %v\n", n.Line(), n)
	}
}

func clearlabels() {
	for _, l := range labellist {
		l.Sym.Label = nil
	}
	labellist = labellist[:0]
}

func newlab(n *Node) *Label {
	s := n.Left.Sym
	lab := s.Label
	if lab == nil {
		lab = new(Label)
		lab.Sym = s
		s.Label = lab
		labellist = append(labellist, lab)
	}

	if n.Op == OLABEL {
		if lab.Def != nil {
			Yyerror("label %v already defined at %v", s, lab.Def.Line())
		} else {
			lab.Def = n
		}
	} else {
		lab.Use = append(lab.Use, n)
	}

	return lab
}

// There is a copy of checkgoto in the new SSA backend.
// Please keep them in sync.
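//
// As a rough illustration (example code, not from this file), checkgoto is
// what rejects jumps like these:
//
//	goto L // error: goto L jumps over declaration of x
//	x := 1
//	L:
//	_ = x
//
//	goto M // error: goto M jumps into block
//	{
//	M:
//	}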
func checkgoto(from *Node, to *Node) {
	if from.Sym == to.Sym {
		return
	}

	nf := 0
	for fs := from.Sym; fs != nil; fs = fs.Link {
		nf++
	}
	nt := 0
	for fs := to.Sym; fs != nil; fs = fs.Link {
		nt++
	}
	fs := from.Sym
	for ; nf > nt; nf-- {
		fs = fs.Link
	}
	if fs != to.Sym {
		lno := lineno
		setlineno(from)

		// decide what to complain about.
		// prefer to complain about 'into block' over declarations,
		// so scan backward to find most recent block or else dcl.
		var block *Sym

		var dcl *Sym
		ts := to.Sym
		for ; nt > nf; nt-- {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
		}

		for ts != fs {
			if ts.Pkg == nil {
				block = ts
			} else {
				dcl = ts
			}
			ts = ts.Link
			fs = fs.Link
		}

		if block != nil {
			Yyerror("goto %v jumps into block starting at %v", from.Left.Sym, linestr(block.Lastlineno))
		} else {
			Yyerror("goto %v jumps over declaration of %v at %v", from.Left.Sym, dcl, linestr(dcl.Lastlineno))
		}
		lineno = lno
	}
}

func stmtlabel(n *Node) *Label {
	if n.Sym != nil {
		lab := n.Sym.Label
		if lab != nil {
			if lab.Def != nil {
				if lab.Def.Name.Defn == n {
					return lab
				}
			}
		}
	}
	return nil
}

// compile statements
func Genlist(l Nodes) {
	for _, n := range l.Slice() {
		gen(n)
	}
}

// generate code to start new proc running call n.
func cgen_proc(n *Node, proc int) {
	switch n.Left.Op {
	default:
		Fatalf("cgen_proc: unknown call %v", n.Left.Op)

	case OCALLMETH:
		cgen_callmeth(n.Left, proc)

	case OCALLINTER:
		cgen_callinter(n.Left, nil, proc)

	case OCALLFUNC:
		cgen_call(n.Left, proc)
	}
}

// generate declaration.
// have to allocate heap copy
// for escaped variables.
func cgen_dcl(n *Node) {
	if Debug['g'] != 0 {
		Dump("\ncgen-dcl", n)
	}
	if n.Op != ONAME {
		Dump("cgen_dcl", n)
		Fatalf("cgen_dcl")
	}

	if n.Class == PAUTOHEAP {
		Fatalf("cgen_dcl %v", n)
	}
}

// generate discard of value
func cgen_discard(nr *Node) {
	if nr == nil {
		return
	}

	switch nr.Op {
	case ONAME:
		if nr.Class != PAUTOHEAP && nr.Class != PEXTERN && nr.Class != PFUNC {
			gused(nr)
		}

	// binary
	case OADD,
		OAND,
		ODIV,
		OEQ,
		OGE,
		OGT,
		OLE,
		OLSH,
		OLT,
		OMOD,
		OMUL,
		ONE,
		OOR,
		ORSH,
		OSUB,
		OXOR:
		cgen_discard(nr.Left)

		cgen_discard(nr.Right)

	// unary
	case OCAP,
		OCOM,
		OLEN,
		OMINUS,
		ONOT,
		OPLUS:
		cgen_discard(nr.Left)

	case OIND:
		Cgen_checknil(nr.Left)

	// special enough to just evaluate
	default:
		var tmp Node
		Tempname(&tmp, nr.Type)

		Cgen_as(&tmp, nr)
		gused(&tmp)
	}
}

// clearslim generates code to zero a slim node.
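// A "slim" node, as used here, is a scalar-like value: booleans, integers,
// floating-point and complex numbers, and pointer-shaped types such as
// pointers, channels, and maps (see the switch below). Fat values (structs,
// arrays, strings, slices, interfaces) are instead zeroed by Thearch.Clearfat;
// see Cgen_as_wb later in this file.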
func Clearslim(n *Node) {
	var z Node
	z.Op = OLITERAL
	z.Type = n.Type
	z.Addable = true

	switch Simtype[n.Type.Etype] {
	case TCOMPLEX64, TCOMPLEX128:
		z.SetVal(Val{new(Mpcplx)})
		z.Val().U.(*Mpcplx).Real.SetFloat64(0.0)
		z.Val().U.(*Mpcplx).Imag.SetFloat64(0.0)

	case TFLOAT32, TFLOAT64:
		var zero Mpflt
		zero.SetFloat64(0.0)
		z.SetVal(Val{&zero})

	case TPTR32, TPTR64, TCHAN, TMAP:
		z.SetVal(Val{new(NilVal)})

	case TBOOL:
		z.SetVal(Val{false})

	case TINT8,
		TINT16,
		TINT32,
		TINT64,
		TUINT8,
		TUINT16,
		TUINT32,
		TUINT64:
		z.SetVal(Val{new(Mpint)})
		z.Val().U.(*Mpint).SetInt64(0)

	default:
		Fatalf("clearslim called on type %v", n.Type)
	}

	ullmancalc(&z)
	Cgen(&z, n)
}

// generate:
//	res = iface{typ, data}
// n->left is typ
// n->right is data
func Cgen_eface(n *Node, res *Node) {
	// The right node of an eface may contain function calls that use res as an
	// argument, so it's important that it be evaluated first.

	tmp := temp(Types[Tptr])
	Cgen(n.Right, tmp)

	Gvardef(res)

	dst := *res
	dst.Type = Types[Tptr]
	dst.Xoffset += int64(Widthptr)
	Cgen(tmp, &dst)

	dst.Xoffset -= int64(Widthptr)
	Cgen(n.Left, &dst)
}

// generate one of:
//	res, resok = x.(T)
//	res = x.(T) (when resok == nil)
// n.Left is x
// n.Type is T
func cgen_dottype(n *Node, res, resok *Node, wb bool) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	}
	// iface := n.Left
	// r1 := iword(iface)
	// if n.Left is non-empty interface {
	//	r1 = *r1
	// }
	// if r1 == T {
	//	res = idata(iface)
	//	resok = true
	// } else {
	//	assert[EI]2T(x, T, nil) // (when resok == nil; does not return)
	//	resok = false           // (when resok != nil)
	// }
	//
	var iface Node
	Igen(n.Left, &iface, res)
	var r1, r2 Node
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, nil)
	iface.Type = byteptr
	Cgen(&iface, &r1)
	if !n.Left.Type.IsEmptyInterface() {
		// Holding itab, want concrete type in second word.
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2 = r1
		r2.Op = OINDREG
		r2.Xoffset = int64(Widthptr)
		Cgen(&r2, &r1)
		Patch(p, Pc)
	}
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	Regfree(&r2) // not needed for success path; reclaimed on one failure path
	iface.Xoffset += int64(Widthptr)
	Cgen(&iface, &r1)
	Regfree(&iface)

	if resok == nil {
		r1.Type = res.Type
		cgen_wb(&r1, res, wb)
		q := Gbranch(obj.AJMP, nil, 0)
		Patch(p, Pc)
		Regrealloc(&r2) // reclaim from above, for this failure path
		fn := syslook("panicdottype")
		dowidth(fn.Type)
		call := Nod(OCALLFUNC, fn, nil)
		r1.Type = byteptr
		r2.Type = byteptr
		call.List.Set([]*Node{&r1, &r2, typename(n.Left.Type)})
		call.List.Set(ascompatte(OCALLFUNC, call, false, fn.Type.Params(), call.List.Slice(), 0, nil))
		gen(call)
		Regfree(&r1)
		Regfree(&r2)
		Thearch.Gins(obj.AUNDEF, nil, nil)
		Patch(q, Pc)
	} else {
		// This half is handling the res, resok = x.(T) case,
		// which is called from gen, not cgen, and is consequently fussier
		// about blank assignments. We have to avoid calling cgen for those.
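		// On the success path, r1 holds the interface's data word: store it
		// into res and set resok to true, skipping either store if the
		// destination is the blank identifier.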
		r1.Type = res.Type
		if !isblank(res) {
			cgen_wb(&r1, res, wb)
		}
		Regfree(&r1)
		if !isblank(resok) {
			Cgen(Nodbool(true), resok)
		}
		q := Gbranch(obj.AJMP, nil, 0)
		Patch(p, Pc)
		if !isblank(res) {
			n := nodnil()
			n.Type = res.Type
			Cgen(n, res)
		}
		if !isblank(resok) {
			Cgen(Nodbool(false), resok)
		}
		Patch(q, Pc)
	}
}

// generate:
//	res, resok = x.(T)
// n.Left is x
// n.Type is T
func Cgen_As2dottype(n, res, resok *Node) {
	if Debug_typeassert > 0 {
		Warn("type assertion inlined")
	}
	// iface := n.Left
	// r1 := iword(iface)
	// if n.Left is non-empty interface {
	//	r1 = *r1
	// }
	// if r1 == T {
	//	res = idata(iface)
	//	resok = true
	// } else {
	//	res = nil
	//	resok = false
	// }
	//
	var iface Node
	Igen(n.Left, &iface, nil)
	var r1, r2 Node
	byteptr := Ptrto(Types[TUINT8]) // type used in runtime prototypes for runtime type (*byte)
	Regalloc(&r1, byteptr, res)
	iface.Type = byteptr
	Cgen(&iface, &r1)
	if !n.Left.Type.IsEmptyInterface() {
		// Holding itab, want concrete type in second word.
		p := Thearch.Ginscmp(OEQ, byteptr, &r1, Nodintconst(0), -1)
		r2 = r1
		r2.Op = OINDREG
		r2.Xoffset = int64(Widthptr)
		Cgen(&r2, &r1)
		Patch(p, Pc)
	}
	Regalloc(&r2, byteptr, nil)
	Cgen(typename(n.Type), &r2)
	p := Thearch.Ginscmp(ONE, byteptr, &r1, &r2, -1)
	iface.Type = n.Type
	iface.Xoffset += int64(Widthptr)
	Cgen(&iface, &r1)
	if iface.Op != 0 {
		Regfree(&iface)
	}
	Cgen(&r1, res)
	q := Gbranch(obj.AJMP, nil, 0)
	Patch(p, Pc)

	fn := syslook("panicdottype")
	dowidth(fn.Type)
	call := Nod(OCALLFUNC, fn, nil)
	call.List.Set([]*Node{&r1, &r2, typename(n.Left.Type)})
	call.List.Set(ascompatte(OCALLFUNC, call, false, fn.Type.Params(), call.List.Slice(), 0, nil))
	gen(call)
	Regfree(&r1)
	Regfree(&r2)
	Thearch.Gins(obj.AUNDEF, nil, nil)
	Patch(q, Pc)
}

// gather series of offsets
// >=0 is direct addressed field
// <0 is pointer to next field (+1)
func Dotoffset(n *Node, oary []int64, nn **Node) int {
	var i int

	switch n.Op {
	case ODOT:
		if n.Xoffset == BADWIDTH {
			Dump("bad width in dotoffset", n)
			Fatalf("bad width in dotoffset")
		}

		i = Dotoffset(n.Left, oary, nn)
		if i > 0 {
			if oary[i-1] >= 0 {
				oary[i-1] += n.Xoffset
			} else {
				oary[i-1] -= n.Xoffset
			}
			break
		}

		if i < 10 {
			oary[i] = n.Xoffset
			i++
		}

	case ODOTPTR:
		if n.Xoffset == BADWIDTH {
			Dump("bad width in dotoffset", n)
			Fatalf("bad width in dotoffset")
		}

		i = Dotoffset(n.Left, oary, nn)
		if i < 10 {
			oary[i] = -(n.Xoffset + 1)
			i++
		}

	default:
		*nn = n
		return 0
	}

	if i >= 10 {
		*nn = nil
	}
	return i
}

// make a new temporary, off the books
func Tempname(nn *Node, t *Type) {
	if Curfn == nil {
		Fatalf("no curfn for tempname")
	}
	if Curfn.Func.Closure != nil && Curfn.Op == OCLOSURE {
		Dump("Tempname", Curfn)
		Fatalf("adding tempname to wrong closure function")
	}

	if t == nil {
		Yyerror("tempname called with nil type")
		t = Types[TINT32]
	}

	// give each tmp a different name so that there
	// is a chance to registerize them
	s := LookupN("autotmp_", statuniqgen)
	statuniqgen++
	n := Nod(ONAME, nil, nil)
	n.Sym = s
	s.Def = n
	n.Type = t
	n.Class = PAUTO
	n.Addable = true
	n.Ullman = 1
	n.Esc = EscNever
	n.Name.Curfn = Curfn
	Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)

	dowidth(t)
	n.Xoffset = 0
	*nn = *n
}

func temp(t *Type) *Node {
	var n Node
	Tempname(&n, t)
	n.Sym.Def.Used = true
	return n.Orig
}

func gen(n *Node) {
	//dump("gen", n);

	lno := setlineno(n)

	wasregalloc := Anyregalloc()

	if n == nil {
		goto ret
	}

	if n.Ninit.Len() > 0 {
		Genlist(n.Ninit)
	}

	setlineno(n)

	switch n.Op {
	default:
		Fatalf("gen: unknown op %v", Nconv(n, FmtShort|FmtSign))

	case OCASE,
		OFALL,
		OXCASE,
		OXFALL,
		ODCLCONST,
		ODCLFUNC,
		ODCLTYPE:
		break

	case OEMPTY:
		break

	case OBLOCK:
		Genlist(n.List)

	case OLABEL:
		if isblanksym(n.Left.Sym) {
			break
		}

		lab := newlab(n)

		// if there are pending gotos, resolve them all to the current pc.
		var p2 *obj.Prog
		for p1 := lab.Gotopc; p1 != nil; p1 = p2 {
			p2 = unpatch(p1)
			Patch(p1, Pc)
		}

		lab.Gotopc = nil
		if lab.Labelpc == nil {
			lab.Labelpc = Pc
		}

		if n.Name.Defn != nil {
			switch n.Name.Defn.Op {
			// so stmtlabel can find the label
			case OFOR, OSWITCH, OSELECT:
				n.Name.Defn.Sym = lab.Sym
			}
		}

	// if label is defined, emit jump to it.
	// otherwise save list of pending gotos in lab->gotopc.
	// the list is linked through the normal jump target field
	// to avoid a second list. (the jumps are actually still
	// valid code, since they're just going to another goto
	// to the same label. we'll unwind it when we learn the pc
	// of the label in the OLABEL case above.)
	case OGOTO:
		lab := newlab(n)

		if lab.Labelpc != nil {
			gjmp(lab.Labelpc)
		} else {
			lab.Gotopc = gjmp(lab.Gotopc)
		}

	case OBREAK:
		if n.Left != nil {
			lab := n.Left.Sym.Label
			if lab == nil {
				Yyerror("break label not defined: %v", n.Left.Sym)
				break
			}

			lab.Used = true
			if lab.Breakpc == nil {
				Yyerror("invalid break label %v", n.Left.Sym)
				break
			}

			gjmp(lab.Breakpc)
			break
		}

		if breakpc == nil {
			Yyerror("break is not in a loop")
			break
		}

		gjmp(breakpc)

	case OCONTINUE:
		if n.Left != nil {
			lab := n.Left.Sym.Label
			if lab == nil {
				Yyerror("continue label not defined: %v", n.Left.Sym)
				break
			}

			lab.Used = true
			if lab.Continpc == nil {
				Yyerror("invalid continue label %v", n.Left.Sym)
				break
			}

			gjmp(lab.Continpc)
			break
		}

		if continpc == nil {
			Yyerror("continue is not in a loop")
			break
		}

		gjmp(continpc)

	case OFOR:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break: goto done
		scontin := continpc
		continpc = Pc

		// define break and continue labels
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
			lab.Continpc = continpc
		}

		gen(n.Right)                     // contin: incr
		Patch(p1, Pc)                    // test:
		Bgen(n.Left, false, -1, breakpc) // if(!test) goto break
		Genlist(n.Nbody)                 // body
		gjmp(continpc)
		Patch(breakpc, Pc) // done:
		continpc = scontin
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
			lab.Continpc = nil
		}

	case OIF:
		p1 := gjmp(nil) // goto test
		p2 := gjmp(nil) // p2: goto else
		Patch(p1, Pc)   // test:
		Bgen(n.Left, false, int(-n.Likely), p2) // if(!test) goto p2
		Genlist(n.Nbody)                        // then
		p3 := gjmp(nil)                         // goto done
		Patch(p2, Pc)                           // else:
		Genlist(n.Rlist)                        // else
		Patch(p3, Pc)                           // done:

	case OSWITCH:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break: goto done

		// define break label
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
		}

		Patch(p1, Pc)      // test:
		Genlist(n.Nbody)   // switch(test) body
		Patch(breakpc, Pc) // done:
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
		}

	case OSELECT:
		sbreak := breakpc
		p1 := gjmp(nil)     // goto test
		breakpc = gjmp(nil) // break: goto done

		// define break label
		lab := stmtlabel(n)
		if lab != nil {
			lab.Breakpc = breakpc
		}

		Patch(p1, Pc)      // test:
		Genlist(n.Nbody)   // select() body
		Patch(breakpc, Pc) // done:
		breakpc = sbreak
		if lab != nil {
			lab.Breakpc = nil
		}

	case ODCL:
		cgen_dcl(n.Left)

	case OAS:
		if gen_as_init(n, false) {
			break
		}
		Cgen_as(n.Left, n.Right)

	case OASWB:
		Cgen_as_wb(n.Left, n.Right, true)

	case OAS2DOTTYPE:
		cgen_dottype(n.Rlist.First(), n.List.First(), n.List.Second(), needwritebarrier(n.List.First(), n.Rlist.First()))

	case OCALLMETH:
		cgen_callmeth(n, 0)

	case OCALLINTER:
		cgen_callinter(n, nil, 0)

	case OCALLFUNC:
		cgen_call(n, 0)

	case OPROC:
		cgen_proc(n, 1)

	case ODEFER:
		cgen_proc(n, 2)

	case ORETURN, ORETJMP:
		cgen_ret(n)

	// Function calls turned into compiler intrinsics.
	// At top level, can just ignore the call and make sure to preserve side effects in the argument, if any.
	case OGETG:
		// nothing
	case OSQRT:
		cgen_discard(n.Left)

	case OCHECKNIL:
		Cgen_checknil(n.Left)

	case OVARKILL:
		Gvarkill(n.Left)

	case OVARLIVE:
		Gvarlive(n.Left)
	}

ret:
	if Anyregalloc() != wasregalloc {
		Dump("node", n)
		Fatalf("registers left allocated")
	}

	lineno = lno
}

func Cgen_as(nl, nr *Node) {
	Cgen_as_wb(nl, nr, false)
}

func Cgen_as_wb(nl, nr *Node, wb bool) {
	if Debug['g'] != 0 {
		op := "cgen_as"
		if wb {
			op = "cgen_as_wb"
		}
		Dump(op, nl)
		Dump(op+" = ", nr)
	}

	for nr != nil && nr.Op == OCONVNOP {
		nr = nr.Left
	}

	if nl == nil || isblank(nl) {
		cgen_discard(nr)
		return
	}

	if nr == nil || iszero(nr) {
		tl := nl.Type
		if tl == nil {
			return
		}
		if Isfat(tl) {
			if nl.Op == ONAME {
				Gvardef(nl)
			}
			Thearch.Clearfat(nl)
			return
		}

		Clearslim(nl)
		return
	}

	tl := nl.Type
	if tl == nil {
		return
	}

	cgen_wb(nr, nl, wb)
}

func cgen_callmeth(n *Node, proc int) {
	// generate a rewrite in n2 for the method call
	// (p.f)(...) goes to (f)(p,...)

	l := n.Left

	if l.Op != ODOTMETH {
		Fatalf("cgen_callmeth: not dotmethod: %v", l)
	}

	n2 := *n
	n2.Op = OCALLFUNC
	n2.Left = newname(l.Sym)
	n2.Left.Type = l.Type

	if n2.Left.Op == ONAME {
		n2.Left.Class = PFUNC
	}
	cgen_call(&n2, proc)
}

// CgenTemp creates a temporary node, assigns n to it, and returns it.
func CgenTemp(n *Node) *Node {
	var tmp Node
	Tempname(&tmp, n.Type)
	Cgen(n, &tmp)
	return &tmp
}

func checklabels() {
	for _, lab := range labellist {
		if lab.Def == nil {
			for _, n := range lab.Use {
				yyerrorl(n.Lineno, "label %v not defined", lab.Sym)
			}
			continue
		}

		if lab.Use == nil && !lab.Used {
			yyerrorl(lab.Def.Lineno, "label %v defined and not used", lab.Sym)
			continue
		}

		if lab.Gotopc != nil {
			Fatalf("label %v never resolved", lab.Sym)
		}
		for _, n := range lab.Use {
			checkgoto(n, lab.Def)
		}
	}
}

// Componentgen copies a composite value by moving its individual components.
// Slices, strings and interfaces are supported. Small structs or arrays with
// elements of basic type are also supported.
// nr is nil when assigning a zero value.
func Componentgen(nr, nl *Node) bool {
	return componentgen_wb(nr, nl, false)
}

// componentgen_wb is like componentgen but if wb==true emits write barriers for pointer updates.
func componentgen_wb(nr, nl *Node, wb bool) bool {
	// Don't generate any code for complete copy of a variable into itself.
	// It's useless, and the VARDEF will incorrectly mark the old value as dead.
	// (This check assumes that the arguments passed to componentgen did not
	// themselves come from Igen, or else we could have Op==ONAME but
	// with a Type and Xoffset describing an individual field, not the entire
	// variable.)
	if nl.Op == ONAME && nl == nr {
		return true
	}

	// Count number of moves required to move components.
	// If using write barrier, can only emit one pointer.
	// TODO(rsc): Allow more pointers, for reflect.Value.
	const maxMoves = 8
	n := 0
	numPtr := 0
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		n++
		if Simtype[t.Etype] == Tptr && t != itable {
			numPtr++
		}
		return n <= maxMoves && (!wb || numPtr <= 1)
	})
	if n > maxMoves || wb && numPtr > 1 {
		return false
	}

	// Must call emitVardef after evaluating rhs but before writing to lhs.
	emitVardef := func() {
		// Emit vardef if needed.
		if nl.Op == ONAME {
			switch nl.Type.Etype {
			case TARRAY, TSLICE, TSTRING, TINTER, TSTRUCT:
				Gvardef(nl)
			}
		}
	}

	isConstString := Isconst(nr, CTSTR)

	if !cadable(nl) && nr != nil && !cadable(nr) && !isConstString {
		return false
	}

	var nodl Node
	if cadable(nl) {
		nodl = *nl
	} else {
		if nr != nil && !cadable(nr) && !isConstString {
			return false
		}
		if nr == nil || isConstString || nl.Ullman >= nr.Ullman {
			Igen(nl, &nodl, nil)
			defer Regfree(&nodl)
		}
	}
	lbase := nodl.Xoffset

	// Special case: zeroing.
	var nodr Node
	if nr == nil {
		// When zeroing, prepare a register containing zero.
		// TODO(rsc): Check that this is actually generating the best code.
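		// Architectures with a hardwired zero register (Thearch.REGZERO != 0)
		// can read zero from it directly; otherwise a zero constant is
		// materialized into a freshly allocated register and reused for every
		// non-float component below.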
		if Thearch.REGZERO != 0 {
			// cpu has a dedicated zero register
			Nodreg(&nodr, Types[TUINT], Thearch.REGZERO)
		} else {
			// no dedicated zero register
			var zero Node
			Nodconst(&zero, nl.Type, 0)
			Regalloc(&nodr, Types[TUINT], nil)
			Thearch.Gmove(&zero, &nodr)
			defer Regfree(&nodr)
		}

		emitVardef()
		visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
			nodl.Type = t
			nodl.Xoffset = lbase + offset
			nodr.Type = t
			if t.IsFloat() {
				// TODO(rsc): Cache zero register like we do for integers?
				Clearslim(&nodl)
			} else {
				Thearch.Gmove(&nodr, &nodl)
			}
			return true
		})
		return true
	}

	// Special case: assignment of string constant.
	if isConstString {
		emitVardef()

		// base
		nodl.Type = Ptrto(Types[TUINT8])
		Regalloc(&nodr, Types[Tptr], nil)
		p := Thearch.Gins(Thearch.Optoas(OAS, Types[Tptr]), nil, &nodr)
		Datastring(nr.Val().U.(string), &p.From)
		p.From.Type = obj.TYPE_ADDR
		Thearch.Gmove(&nodr, &nodl)
		Regfree(&nodr)

		// length
		nodl.Type = Types[Simtype[TUINT]]
		nodl.Xoffset += int64(Array_nel) - int64(Array_array)
		Nodconst(&nodr, nodl.Type, int64(len(nr.Val().U.(string))))
		Thearch.Gmove(&nodr, &nodl)
		return true
	}

	// General case: copy nl = nr.
	nodr = *nr
	if !cadable(nr) {
		if nr.Ullman >= UINF && nodl.Op == OINDREG {
			Fatalf("miscompile")
		}
		Igen(nr, &nodr, nil)
		defer Regfree(&nodr)
	}
	rbase := nodr.Xoffset

	if nodl.Op == 0 {
		Igen(nl, &nodl, nil)
		defer Regfree(&nodl)
		lbase = nodl.Xoffset
	}

	emitVardef()
	var (
		ptrType   *Type
		ptrOffset int64
	)
	visitComponents(nl.Type, 0, func(t *Type, offset int64) bool {
		if wb && Simtype[t.Etype] == Tptr && t != itable {
			if ptrType != nil {
				Fatalf("componentgen_wb %v", Tconv(nl.Type, 0))
			}
			ptrType = t
			ptrOffset = offset
			return true
		}
		nodl.Type = t
		nodl.Xoffset = lbase + offset
		nodr.Type = t
		nodr.Xoffset = rbase + offset
		Thearch.Gmove(&nodr, &nodl)
		return true
	})
	if ptrType != nil {
		nodl.Type = ptrType
		nodl.Xoffset = lbase + ptrOffset
		nodr.Type = ptrType
		nodr.Xoffset = rbase + ptrOffset
		cgen_wbptr(&nodr, &nodl)
	}
	return true
}

// visitComponents walks the individual components of the type t,
// walking into array elements, struct fields, the real and imaginary
// parts of complex numbers, and on 32-bit systems the high and
// low halves of 64-bit integers.
// It calls f for each such component, passing the component (aka element)
// type and memory offset, assuming t starts at startOffset.
// If f ever returns false, visitComponents returns false without any more
// calls to f. Otherwise visitComponents returns true.
func visitComponents(t *Type, startOffset int64, f func(elem *Type, elemOffset int64) bool) bool {
	switch t.Etype {
	case TINT64:
		if Widthreg == 8 {
			break
		}
		// NOTE: Assuming little endian (signed top half at offset 4).
		// We don't have any 32-bit big-endian systems.
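		// For example, an int64 component at offset 0 on a 32-bit system is
		// visited as two pieces: a uint32 at offset 0 (low half) and an int32
		// at offset 4 (the signed high half).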
		if !Thearch.LinkArch.InFamily(sys.ARM, sys.I386) {
			Fatalf("unknown 32-bit architecture")
		}
		return f(Types[TUINT32], startOffset) &&
			f(Types[TINT32], startOffset+4)

	case TUINT64:
		if Widthreg == 8 {
			break
		}
		return f(Types[TUINT32], startOffset) &&
			f(Types[TUINT32], startOffset+4)

	case TCOMPLEX64:
		return f(Types[TFLOAT32], startOffset) &&
			f(Types[TFLOAT32], startOffset+4)

	case TCOMPLEX128:
		return f(Types[TFLOAT64], startOffset) &&
			f(Types[TFLOAT64], startOffset+8)

	case TINTER:
		return f(itable, startOffset) &&
			f(Ptrto(Types[TUINT8]), startOffset+int64(Widthptr))

	case TSTRING:
		return f(Ptrto(Types[TUINT8]), startOffset) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Widthptr))

	case TSLICE:
		return f(Ptrto(t.Elem()), startOffset+int64(Array_array)) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Array_nel)) &&
			f(Types[Simtype[TUINT]], startOffset+int64(Array_cap))

	case TARRAY:
		// Short-circuit [1e6]struct{}.
		if t.Elem().Width == 0 {
			return true
		}

		for i := int64(0); i < t.NumElem(); i++ {
			if !visitComponents(t.Elem(), startOffset+i*t.Elem().Width, f) {
				return false
			}
		}
		return true

	case TSTRUCT:
		for _, field := range t.Fields().Slice() {
			if !visitComponents(field.Type, startOffset+field.Offset, f) {
				return false
			}
		}
		return true
	}
	return f(t, startOffset)
}

func cadable(n *Node) bool {
	// Note: Not sure why you can have n.Op == ONAME without n.Addable, but you can.
	return n.Addable && n.Op == ONAME
}