github.com/jd-ly/tools@v0.5.7/go/ssa/builder.go (about) 1 // Copyright 2013 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package ssa 6 7 // This file implements the BUILD phase of SSA construction. 8 // 9 // SSA construction has two phases, CREATE and BUILD. In the CREATE phase 10 // (create.go), all packages are constructed and type-checked and 11 // definitions of all package members are created, method-sets are 12 // computed, and wrapper methods are synthesized. 13 // ssa.Packages are created in arbitrary order. 14 // 15 // In the BUILD phase (builder.go), the builder traverses the AST of 16 // each Go source function and generates SSA instructions for the 17 // function body. Initializer expressions for package-level variables 18 // are emitted to the package's init() function in the order specified 19 // by go/types.Info.InitOrder, then code for each function in the 20 // package is generated in lexical order. 21 // The BUILD phases for distinct packages are independent and are 22 // executed in parallel. 23 // 24 // TODO(adonovan): indeed, building functions is now embarrassingly parallel. 25 // Audit for concurrency then benchmark using more goroutines. 26 // 27 // The builder's and Program's indices (maps) are populated and 28 // mutated during the CREATE phase, but during the BUILD phase they 29 // remain constant. The sole exception is Prog.methodSets and its 30 // related maps, which are protected by a dedicated mutex. 31 32 import ( 33 "fmt" 34 "go/ast" 35 "go/constant" 36 "go/token" 37 "go/types" 38 "os" 39 "sync" 40 ) 41 42 type opaqueType struct { 43 types.Type 44 name string 45 } 46 47 func (t *opaqueType) String() string { return t.name } 48 49 var ( 50 varOk = newVar("ok", tBool) 51 varIndex = newVar("index", tInt) 52 53 // Type constants. 54 tBool = types.Typ[types.Bool] 55 tByte = types.Typ[types.Byte] 56 tInt = types.Typ[types.Int] 57 tInvalid = types.Typ[types.Invalid] 58 tString = types.Typ[types.String] 59 tUntypedNil = types.Typ[types.UntypedNil] 60 tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators 61 tEface = types.NewInterface(nil, nil).Complete() 62 63 // SSA Value constants. 64 vZero = intConst(0) 65 vOne = intConst(1) 66 vTrue = NewConst(constant.MakeBool(true), tBool) 67 ) 68 69 // builder holds state associated with the package currently being built. 70 // Its methods contain all the logic for AST-to-SSA conversion. 71 type builder struct{} 72 73 // cond emits to fn code to evaluate boolean condition e and jump 74 // to t or f depending on its value, performing various simplifications. 75 // 76 // Postcondition: fn.currentBlock is nil. 77 // 78 func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { 79 switch e := e.(type) { 80 case *ast.ParenExpr: 81 b.cond(fn, e.X, t, f) 82 return 83 84 case *ast.BinaryExpr: 85 switch e.Op { 86 case token.LAND: 87 ltrue := fn.newBasicBlock("cond.true") 88 b.cond(fn, e.X, ltrue, f) 89 fn.currentBlock = ltrue 90 b.cond(fn, e.Y, t, f) 91 return 92 93 case token.LOR: 94 lfalse := fn.newBasicBlock("cond.false") 95 b.cond(fn, e.X, t, lfalse) 96 fn.currentBlock = lfalse 97 b.cond(fn, e.Y, t, f) 98 return 99 } 100 101 case *ast.UnaryExpr: 102 if e.Op == token.NOT { 103 b.cond(fn, e.X, f, t) 104 return 105 } 106 } 107 108 // A traditional compiler would simplify "if false" (etc) here 109 // but we do not, for better fidelity to the source code. 
110 // 111 // The value of a constant condition may be platform-specific, 112 // and may cause blocks that are reachable in some configuration 113 // to be hidden from subsequent analyses such as bug-finding tools. 114 emitIf(fn, b.expr(fn, e), t, f) 115 } 116 117 // logicalBinop emits code to fn to evaluate e, a &&- or 118 // ||-expression whose reified boolean value is wanted. 119 // The value is returned. 120 // 121 func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { 122 rhs := fn.newBasicBlock("binop.rhs") 123 done := fn.newBasicBlock("binop.done") 124 125 // T(e) = T(e.X) = T(e.Y) after untyped constants have been 126 // eliminated. 127 // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. 128 t := fn.Pkg.typeOf(e) 129 130 var short Value // value of the short-circuit path 131 switch e.Op { 132 case token.LAND: 133 b.cond(fn, e.X, rhs, done) 134 short = NewConst(constant.MakeBool(false), t) 135 136 case token.LOR: 137 b.cond(fn, e.X, done, rhs) 138 short = NewConst(constant.MakeBool(true), t) 139 } 140 141 // Is rhs unreachable? 142 if rhs.Preds == nil { 143 // Simplify false&&y to false, true||y to true. 144 fn.currentBlock = done 145 return short 146 } 147 148 // Is done unreachable? 149 if done.Preds == nil { 150 // Simplify true&&y (or false||y) to y. 151 fn.currentBlock = rhs 152 return b.expr(fn, e.Y) 153 } 154 155 // All edges from e.X to done carry the short-circuit value. 156 var edges []Value 157 for range done.Preds { 158 edges = append(edges, short) 159 } 160 161 // The edge from e.Y to done carries the value of e.Y. 162 fn.currentBlock = rhs 163 edges = append(edges, b.expr(fn, e.Y)) 164 emitJump(fn, done) 165 fn.currentBlock = done 166 167 phi := &Phi{Edges: edges, Comment: e.Op.String()} 168 phi.pos = e.OpPos 169 phi.typ = t 170 return done.emit(phi) 171 } 172 173 // exprN lowers a multi-result expression e to SSA form, emitting code 174 // to fn and returning a single Value whose type is a *types.Tuple. 175 // The caller must access the components via Extract. 176 // 177 // Multi-result expressions include CallExprs in a multi-value 178 // assignment or return statement, and "value,ok" uses of 179 // TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op 180 // is token.ARROW). 181 // 182 func (b *builder) exprN(fn *Function, e ast.Expr) Value { 183 typ := fn.Pkg.typeOf(e).(*types.Tuple) 184 switch e := e.(type) { 185 case *ast.ParenExpr: 186 return b.exprN(fn, e.X) 187 188 case *ast.CallExpr: 189 // Currently, no built-in function nor type conversion 190 // has multiple results, so we can avoid some of the 191 // cases for single-valued CallExpr. 
192 var c Call 193 b.setCall(fn, e, &c.Call) 194 c.typ = typ 195 return fn.emit(&c) 196 197 case *ast.IndexExpr: 198 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) 199 lookup := &Lookup{ 200 X: b.expr(fn, e.X), 201 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 202 CommaOk: true, 203 } 204 lookup.setType(typ) 205 lookup.setPos(e.Lbrack) 206 return fn.emit(lookup) 207 208 case *ast.TypeAssertExpr: 209 return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) 210 211 case *ast.UnaryExpr: // must be receive <- 212 unop := &UnOp{ 213 Op: token.ARROW, 214 X: b.expr(fn, e.X), 215 CommaOk: true, 216 } 217 unop.setType(typ) 218 unop.setPos(e.OpPos) 219 return fn.emit(unop) 220 } 221 panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) 222 } 223 224 // builtin emits to fn SSA instructions to implement a call to the 225 // built-in function obj with the specified arguments 226 // and return type. It returns the value defined by the result. 227 // 228 // The result is nil if no special handling was required; in this case 229 // the caller should treat this like an ordinary library function 230 // call. 231 // 232 func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { 233 switch obj.Name() { 234 case "make": 235 switch typ.Underlying().(type) { 236 case *types.Slice: 237 n := b.expr(fn, args[1]) 238 m := n 239 if len(args) == 3 { 240 m = b.expr(fn, args[2]) 241 } 242 if m, ok := m.(*Const); ok { 243 // treat make([]T, n, m) as new([m]T)[:n] 244 cap := m.Int64() 245 at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) 246 alloc := emitNew(fn, at, pos) 247 alloc.Comment = "makeslice" 248 v := &Slice{ 249 X: alloc, 250 High: n, 251 } 252 v.setPos(pos) 253 v.setType(typ) 254 return fn.emit(v) 255 } 256 v := &MakeSlice{ 257 Len: n, 258 Cap: m, 259 } 260 v.setPos(pos) 261 v.setType(typ) 262 return fn.emit(v) 263 264 case *types.Map: 265 var res Value 266 if len(args) == 2 { 267 res = b.expr(fn, args[1]) 268 } 269 v := &MakeMap{Reserve: res} 270 v.setPos(pos) 271 v.setType(typ) 272 return fn.emit(v) 273 274 case *types.Chan: 275 var sz Value = vZero 276 if len(args) == 2 { 277 sz = b.expr(fn, args[1]) 278 } 279 v := &MakeChan{Size: sz} 280 v.setPos(pos) 281 v.setType(typ) 282 return fn.emit(v) 283 } 284 285 case "new": 286 alloc := emitNew(fn, deref(typ), pos) 287 alloc.Comment = "new" 288 return alloc 289 290 case "len", "cap": 291 // Special case: len or cap of an array or *array is 292 // based on the type, not the value which may be nil. 293 // We must still evaluate the value, though. (If it 294 // was side-effect free, the whole call would have 295 // been constant-folded.) 296 t := deref(fn.Pkg.typeOf(args[0])).Underlying() 297 if at, ok := t.(*types.Array); ok { 298 b.expr(fn, args[0]) // for effects only 299 return intConst(at.Len()) 300 } 301 // Otherwise treat as normal. 302 303 case "panic": 304 fn.emit(&Panic{ 305 X: emitConv(fn, b.expr(fn, args[0]), tEface), 306 pos: pos, 307 }) 308 fn.currentBlock = fn.newBasicBlock("unreachable") 309 return vTrue // any non-nil Value will do 310 } 311 return nil // treat all others as a regular function call 312 } 313 314 // addr lowers a single-result addressable expression e to SSA form, 315 // emitting code to fn and returning the location (an lvalue) defined 316 // by the expression. 317 // 318 // If escaping is true, addr marks the base variable of the 319 // addressable expression e as being a potentially escaping pointer 320 // value. 
For example, in this code: 321 // 322 // a := A{ 323 // b: [1]B{B{c: 1}} 324 // } 325 // return &a.b[0].c 326 // 327 // the application of & causes a.b[0].c to have its address taken, 328 // which means that ultimately the local variable a must be 329 // heap-allocated. This is a simple but very conservative escape 330 // analysis. 331 // 332 // Operations forming potentially escaping pointers include: 333 // - &x, including when implicit in method call or composite literals. 334 // - a[:] iff a is an array (not *array) 335 // - references to variables in lexically enclosing functions. 336 // 337 func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { 338 switch e := e.(type) { 339 case *ast.Ident: 340 if isBlankIdent(e) { 341 return blank{} 342 } 343 obj := fn.Pkg.objectOf(e) 344 v := fn.Prog.packageLevelValue(obj) // var (address) 345 if v == nil { 346 v = fn.lookup(obj, escaping) 347 } 348 return &address{addr: v, pos: e.Pos(), expr: e} 349 350 case *ast.CompositeLit: 351 t := deref(fn.Pkg.typeOf(e)) 352 var v *Alloc 353 if escaping { 354 v = emitNew(fn, t, e.Lbrace) 355 } else { 356 v = fn.addLocal(t, e.Lbrace) 357 } 358 v.Comment = "complit" 359 var sb storebuf 360 b.compLit(fn, v, e, true, &sb) 361 sb.emit(fn) 362 return &address{addr: v, pos: e.Lbrace, expr: e} 363 364 case *ast.ParenExpr: 365 return b.addr(fn, e.X, escaping) 366 367 case *ast.SelectorExpr: 368 sel, ok := fn.Pkg.info.Selections[e] 369 if !ok { 370 // qualified identifier 371 return b.addr(fn, e.Sel, escaping) 372 } 373 if sel.Kind() != types.FieldVal { 374 panic(sel) 375 } 376 wantAddr := true 377 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 378 last := len(sel.Index()) - 1 379 return &address{ 380 addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), 381 pos: e.Sel.Pos(), 382 expr: e.Sel, 383 } 384 385 case *ast.IndexExpr: 386 var x Value 387 var et types.Type 388 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { 389 case *types.Array: 390 x = b.addr(fn, e.X, escaping).address(fn) 391 et = types.NewPointer(t.Elem()) 392 case *types.Pointer: // *array 393 x = b.expr(fn, e.X) 394 et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) 395 case *types.Slice: 396 x = b.expr(fn, e.X) 397 et = types.NewPointer(t.Elem()) 398 case *types.Map: 399 return &element{ 400 m: b.expr(fn, e.X), 401 k: emitConv(fn, b.expr(fn, e.Index), t.Key()), 402 t: t.Elem(), 403 pos: e.Lbrack, 404 } 405 default: 406 panic("unexpected container type in IndexExpr: " + t.String()) 407 } 408 v := &IndexAddr{ 409 X: x, 410 Index: emitConv(fn, b.expr(fn, e.Index), tInt), 411 } 412 v.setPos(e.Lbrack) 413 v.setType(et) 414 return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} 415 416 case *ast.StarExpr: 417 return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} 418 } 419 420 panic(fmt.Sprintf("unexpected address expression: %T", e)) 421 } 422 423 type store struct { 424 lhs lvalue 425 rhs Value 426 } 427 428 type storebuf struct{ stores []store } 429 430 func (sb *storebuf) store(lhs lvalue, rhs Value) { 431 sb.stores = append(sb.stores, store{lhs, rhs}) 432 } 433 434 func (sb *storebuf) emit(fn *Function) { 435 for _, s := range sb.stores { 436 s.lhs.store(fn, s.rhs) 437 } 438 } 439 440 // assign emits to fn code to initialize the lvalue loc with the value 441 // of expression e. If isZero is true, assign assumes that loc holds 442 // the zero value for its type. 
443 // 444 // This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate 445 // better code in some cases, e.g., for composite literals in an 446 // addressable location. 447 // 448 // If sb is not nil, assign generates code to evaluate expression e, but 449 // not to update loc. Instead, the necessary stores are appended to the 450 // storebuf sb so that they can be executed later. This allows correct 451 // in-place update of existing variables when the RHS is a composite 452 // literal that may reference parts of the LHS. 453 // 454 func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { 455 // Can we initialize it in place? 456 if e, ok := unparen(e).(*ast.CompositeLit); ok { 457 // A CompositeLit never evaluates to a pointer, 458 // so if the type of the location is a pointer, 459 // an &-operation is implied. 460 if _, ok := loc.(blank); !ok { // avoid calling blank.typ() 461 if isPointer(loc.typ()) { 462 ptr := b.addr(fn, e, true).address(fn) 463 // copy address 464 if sb != nil { 465 sb.store(loc, ptr) 466 } else { 467 loc.store(fn, ptr) 468 } 469 return 470 } 471 } 472 473 if _, ok := loc.(*address); ok { 474 if isInterface(loc.typ()) { 475 // e.g. var x interface{} = T{...} 476 // Can't in-place initialize an interface value. 477 // Fall back to copying. 478 } else { 479 // x = T{...} or x := T{...} 480 addr := loc.address(fn) 481 if sb != nil { 482 b.compLit(fn, addr, e, isZero, sb) 483 } else { 484 var sb storebuf 485 b.compLit(fn, addr, e, isZero, &sb) 486 sb.emit(fn) 487 } 488 489 // Subtle: emit debug ref for aggregate types only; 490 // slice and map are handled by store ops in compLit. 491 switch loc.typ().Underlying().(type) { 492 case *types.Struct, *types.Array: 493 emitDebugRef(fn, e, addr, true) 494 } 495 496 return 497 } 498 } 499 } 500 501 // simple case: just copy 502 rhs := b.expr(fn, e) 503 if sb != nil { 504 sb.store(loc, rhs) 505 } else { 506 loc.store(fn, rhs) 507 } 508 } 509 510 // expr lowers a single-result expression e to SSA form, emitting code 511 // to fn and returning the Value defined by the expression. 512 // 513 func (b *builder) expr(fn *Function, e ast.Expr) Value { 514 e = unparen(e) 515 516 tv := fn.Pkg.info.Types[e] 517 518 // Is expression a constant? 519 if tv.Value != nil { 520 return NewConst(tv.Value, tv.Type) 521 } 522 523 var v Value 524 if tv.Addressable() { 525 // Prefer pointer arithmetic ({Index,Field}Addr) followed 526 // by Load over subelement extraction (e.g. Index, Field), 527 // to avoid large copies. 
528 v = b.addr(fn, e, false).load(fn) 529 } else { 530 v = b.expr0(fn, e, tv) 531 } 532 if fn.debugInfo() { 533 emitDebugRef(fn, e, v, false) 534 } 535 return v 536 } 537 538 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { 539 switch e := e.(type) { 540 case *ast.BasicLit: 541 panic("non-constant BasicLit") // unreachable 542 543 case *ast.FuncLit: 544 fn2 := &Function{ 545 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), 546 Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), 547 pos: e.Type.Func, 548 parent: fn, 549 Pkg: fn.Pkg, 550 Prog: fn.Prog, 551 syntax: e, 552 } 553 fn.AnonFuncs = append(fn.AnonFuncs, fn2) 554 b.buildFunction(fn2) 555 if fn2.FreeVars == nil { 556 return fn2 557 } 558 v := &MakeClosure{Fn: fn2} 559 v.setType(tv.Type) 560 for _, fv := range fn2.FreeVars { 561 v.Bindings = append(v.Bindings, fv.outer) 562 fv.outer = nil 563 } 564 return fn.emit(v) 565 566 case *ast.TypeAssertExpr: // single-result form only 567 return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) 568 569 case *ast.CallExpr: 570 if fn.Pkg.info.Types[e.Fun].IsType() { 571 // Explicit type conversion, e.g. string(x) or big.Int(x) 572 x := b.expr(fn, e.Args[0]) 573 y := emitConv(fn, x, tv.Type) 574 if y != x { 575 switch y := y.(type) { 576 case *Convert: 577 y.pos = e.Lparen 578 case *ChangeType: 579 y.pos = e.Lparen 580 case *MakeInterface: 581 y.pos = e.Lparen 582 } 583 } 584 return y 585 } 586 // Call to "intrinsic" built-ins, e.g. new, make, panic. 587 if id, ok := unparen(e.Fun).(*ast.Ident); ok { 588 if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { 589 if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { 590 return v 591 } 592 } 593 } 594 // Regular function call. 595 var v Call 596 b.setCall(fn, e, &v.Call) 597 v.setType(tv.Type) 598 return fn.emit(&v) 599 600 case *ast.UnaryExpr: 601 switch e.Op { 602 case token.AND: // &X --- potentially escaping. 603 addr := b.addr(fn, e.X, true) 604 if _, ok := unparen(e.X).(*ast.StarExpr); ok { 605 // &*p must panic if p is nil (http://golang.org/s/go12nil). 606 // For simplicity, we'll just (suboptimally) rely 607 // on the side effects of a load. 608 // TODO(adonovan): emit dedicated nilcheck. 609 addr.load(fn) 610 } 611 return addr.address(fn) 612 case token.ADD: 613 return b.expr(fn, e.X) 614 case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ 615 v := &UnOp{ 616 Op: e.Op, 617 X: b.expr(fn, e.X), 618 } 619 v.setPos(e.OpPos) 620 v.setType(tv.Type) 621 return fn.emit(v) 622 default: 623 panic(e.Op) 624 } 625 626 case *ast.BinaryExpr: 627 switch e.Op { 628 case token.LAND, token.LOR: 629 return b.logicalBinop(fn, e) 630 case token.SHL, token.SHR: 631 fallthrough 632 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: 633 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) 634 635 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: 636 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) 637 // The type of x==y may be UntypedBool. 638 return emitConv(fn, cmp, types.Default(tv.Type)) 639 default: 640 panic("illegal op in BinaryExpr: " + e.Op.String()) 641 } 642 643 case *ast.SliceExpr: 644 var low, high, max Value 645 var x Value 646 switch fn.Pkg.typeOf(e.X).Underlying().(type) { 647 case *types.Array: 648 // Potentially escaping. 
649 x = b.addr(fn, e.X, true).address(fn) 650 case *types.Basic, *types.Slice, *types.Pointer: // *array 651 x = b.expr(fn, e.X) 652 default: 653 panic("unreachable") 654 } 655 if e.High != nil { 656 high = b.expr(fn, e.High) 657 } 658 if e.Low != nil { 659 low = b.expr(fn, e.Low) 660 } 661 if e.Slice3 { 662 max = b.expr(fn, e.Max) 663 } 664 v := &Slice{ 665 X: x, 666 Low: low, 667 High: high, 668 Max: max, 669 } 670 v.setPos(e.Lbrack) 671 v.setType(tv.Type) 672 return fn.emit(v) 673 674 case *ast.Ident: 675 obj := fn.Pkg.info.Uses[e] 676 // Universal built-in or nil? 677 switch obj := obj.(type) { 678 case *types.Builtin: 679 return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} 680 case *types.Nil: 681 return nilConst(tv.Type) 682 } 683 // Package-level func or var? 684 if v := fn.Prog.packageLevelValue(obj); v != nil { 685 if _, ok := obj.(*types.Var); ok { 686 return emitLoad(fn, v) // var (address) 687 } 688 return v // (func) 689 } 690 // Local var. 691 return emitLoad(fn, fn.lookup(obj, false)) // var (address) 692 693 case *ast.SelectorExpr: 694 sel, ok := fn.Pkg.info.Selections[e] 695 if !ok { 696 // qualified identifier 697 return b.expr(fn, e.Sel) 698 } 699 switch sel.Kind() { 700 case types.MethodExpr: 701 // (*T).f or T.f, the method f from the method-set of type T. 702 // The result is a "thunk". 703 return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) 704 705 case types.MethodVal: 706 // e.f where e is an expression and f is a method. 707 // The result is a "bound". 708 obj := sel.Obj().(*types.Func) 709 rt := recvType(obj) 710 wantAddr := isPointer(rt) 711 escaping := true 712 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 713 if isInterface(rt) { 714 // If v has interface type I, 715 // we must emit a check that v is non-nil. 716 // We use: typeassert v.(I). 717 emitTypeAssert(fn, v, rt, token.NoPos) 718 } 719 c := &MakeClosure{ 720 Fn: makeBound(fn.Prog, obj), 721 Bindings: []Value{v}, 722 } 723 c.setPos(e.Sel.Pos()) 724 c.setType(tv.Type) 725 return fn.emit(c) 726 727 case types.FieldVal: 728 indices := sel.Index() 729 last := len(indices) - 1 730 v := b.expr(fn, e.X) 731 v = emitImplicitSelections(fn, v, indices[:last]) 732 v = emitFieldSelection(fn, v, indices[last], false, e.Sel) 733 return v 734 } 735 736 panic("unexpected expression-relative selector") 737 738 case *ast.IndexExpr: 739 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { 740 case *types.Array: 741 // Non-addressable array (in a register). 742 v := &Index{ 743 X: b.expr(fn, e.X), 744 Index: emitConv(fn, b.expr(fn, e.Index), tInt), 745 } 746 v.setPos(e.Lbrack) 747 v.setType(t.Elem()) 748 return fn.emit(v) 749 750 case *types.Map: 751 // Maps are not addressable. 752 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) 753 v := &Lookup{ 754 X: b.expr(fn, e.X), 755 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 756 } 757 v.setPos(e.Lbrack) 758 v.setType(mapt.Elem()) 759 return fn.emit(v) 760 761 case *types.Basic: // => string 762 // Strings are not addressable. 763 v := &Lookup{ 764 X: b.expr(fn, e.X), 765 Index: b.expr(fn, e.Index), 766 } 767 v.setPos(e.Lbrack) 768 v.setType(tByte) 769 return fn.emit(v) 770 771 case *types.Slice, *types.Pointer: // *array 772 // Addressable slice/array; use IndexAddr and Load. 
773 return b.addr(fn, e, false).load(fn) 774 775 default: 776 panic("unexpected container type in IndexExpr: " + t.String()) 777 } 778 779 case *ast.CompositeLit, *ast.StarExpr: 780 // Addressable types (lvalues) 781 return b.addr(fn, e, false).load(fn) 782 } 783 784 panic(fmt.Sprintf("unexpected expr: %T", e)) 785 } 786 787 // stmtList emits to fn code for all statements in list. 788 func (b *builder) stmtList(fn *Function, list []ast.Stmt) { 789 for _, s := range list { 790 b.stmt(fn, s) 791 } 792 } 793 794 // receiver emits to fn code for expression e in the "receiver" 795 // position of selection e.f (where f may be a field or a method) and 796 // returns the effective receiver after applying the implicit field 797 // selections of sel. 798 // 799 // wantAddr requests that the result is an an address. If 800 // !sel.Indirect(), this may require that e be built in addr() mode; it 801 // must thus be addressable. 802 // 803 // escaping is defined as per builder.addr(). 804 // 805 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { 806 var v Value 807 if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { 808 v = b.addr(fn, e, escaping).address(fn) 809 } else { 810 v = b.expr(fn, e) 811 } 812 813 last := len(sel.Index()) - 1 814 v = emitImplicitSelections(fn, v, sel.Index()[:last]) 815 if !wantAddr && isPointer(v.Type()) { 816 v = emitLoad(fn, v) 817 } 818 return v 819 } 820 821 // setCallFunc populates the function parts of a CallCommon structure 822 // (Func, Method, Recv, Args[0]) based on the kind of invocation 823 // occurring in e. 824 // 825 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { 826 c.pos = e.Lparen 827 828 // Is this a method call? 829 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { 830 sel, ok := fn.Pkg.info.Selections[selector] 831 if ok && sel.Kind() == types.MethodVal { 832 obj := sel.Obj().(*types.Func) 833 recv := recvType(obj) 834 wantAddr := isPointer(recv) 835 escaping := true 836 v := b.receiver(fn, selector.X, wantAddr, escaping, sel) 837 if isInterface(recv) { 838 // Invoke-mode call. 839 c.Value = v 840 c.Method = obj 841 } else { 842 // "Call"-mode call. 843 c.Value = fn.Prog.declaredFunc(obj) 844 c.Args = append(c.Args, v) 845 } 846 return 847 } 848 849 // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): 850 // a statically dispatched call to the method f in the 851 // method-set of T or *T. T may be an interface. 852 // 853 // e.Fun would evaluate to a concrete method, interface 854 // wrapper function, or promotion wrapper. 855 // 856 // For now, we evaluate it in the usual way. 857 // 858 // TODO(adonovan): opt: inline expr() here, to make the 859 // call static and to avoid generation of wrappers. 860 // It's somewhat tricky as it may consume the first 861 // actual parameter if the call is "invoke" mode. 862 // 863 // Examples: 864 // type T struct{}; func (T) f() {} // "call" mode 865 // type T interface { f() } // "invoke" mode 866 // 867 // type S struct{ T } 868 // 869 // var s S 870 // S.f(s) 871 // (*S).f(&s) 872 // 873 // Suggested approach: 874 // - consume the first actual parameter expression 875 // and build it with b.expr(). 876 // - apply implicit field selections. 877 // - use MethodVal logic to populate fields of c. 878 } 879 880 // Evaluate the function operand in the usual way. 
881 c.Value = b.expr(fn, e.Fun) 882 } 883 884 // emitCallArgs emits to f code for the actual parameters of call e to 885 // a (possibly built-in) function of effective type sig. 886 // The argument values are appended to args, which is then returned. 887 // 888 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { 889 // f(x, y, z...): pass slice z straight through. 890 if e.Ellipsis != 0 { 891 for i, arg := range e.Args { 892 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) 893 args = append(args, v) 894 } 895 return args 896 } 897 898 offset := len(args) // 1 if call has receiver, 0 otherwise 899 900 // Evaluate actual parameter expressions. 901 // 902 // If this is a chained call of the form f(g()) where g has 903 // multiple return values (MRV), they are flattened out into 904 // args; a suffix of them may end up in a varargs slice. 905 for _, arg := range e.Args { 906 v := b.expr(fn, arg) 907 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain 908 for i, n := 0, ttuple.Len(); i < n; i++ { 909 args = append(args, emitExtract(fn, v, i)) 910 } 911 } else { 912 args = append(args, v) 913 } 914 } 915 916 // Actual->formal assignability conversions for normal parameters. 917 np := sig.Params().Len() // number of normal parameters 918 if sig.Variadic() { 919 np-- 920 } 921 for i := 0; i < np; i++ { 922 args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) 923 } 924 925 // Actual->formal assignability conversions for variadic parameter, 926 // and construction of slice. 927 if sig.Variadic() { 928 varargs := args[offset+np:] 929 st := sig.Params().At(np).Type().(*types.Slice) 930 vt := st.Elem() 931 if len(varargs) == 0 { 932 args = append(args, nilConst(st)) 933 } else { 934 // Replace a suffix of args with a slice containing it. 935 at := types.NewArray(vt, int64(len(varargs))) 936 a := emitNew(fn, at, token.NoPos) 937 a.setPos(e.Rparen) 938 a.Comment = "varargs" 939 for i, arg := range varargs { 940 iaddr := &IndexAddr{ 941 X: a, 942 Index: intConst(int64(i)), 943 } 944 iaddr.setType(types.NewPointer(vt)) 945 fn.emit(iaddr) 946 emitStore(fn, iaddr, arg, arg.Pos()) 947 } 948 s := &Slice{X: a} 949 s.setType(st) 950 args[offset+np] = fn.emit(s) 951 args = args[:offset+np+1] 952 } 953 } 954 return args 955 } 956 957 // setCall emits to fn code to evaluate all the parameters of a function 958 // call e, and populates *c with those values. 959 // 960 func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { 961 // First deal with the f(...) part and optional receiver. 962 b.setCallFunc(fn, e, c) 963 964 // Then append the other actual parameters. 965 sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) 966 if sig == nil { 967 panic(fmt.Sprintf("no signature for call of %s", e.Fun)) 968 } 969 c.Args = b.emitCallArgs(fn, sig, e, c.Args) 970 } 971 972 // assignOp emits to fn code to perform loc <op>= val. 973 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { 974 oldv := loc.load(fn) 975 loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos)) 976 } 977 978 // localValueSpec emits to fn code to define all of the vars in the 979 // function-local ValueSpec, spec. 980 // 981 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { 982 switch { 983 case len(spec.Values) == len(spec.Names): 984 // e.g. 
var x, y = 0, 1 985 // 1:1 assignment 986 for i, id := range spec.Names { 987 if !isBlankIdent(id) { 988 fn.addLocalForIdent(id) 989 } 990 lval := b.addr(fn, id, false) // non-escaping 991 b.assign(fn, lval, spec.Values[i], true, nil) 992 } 993 994 case len(spec.Values) == 0: 995 // e.g. var x, y int 996 // Locals are implicitly zero-initialized. 997 for _, id := range spec.Names { 998 if !isBlankIdent(id) { 999 lhs := fn.addLocalForIdent(id) 1000 if fn.debugInfo() { 1001 emitDebugRef(fn, id, lhs, true) 1002 } 1003 } 1004 } 1005 1006 default: 1007 // e.g. var x, y = pos() 1008 tuple := b.exprN(fn, spec.Values[0]) 1009 for i, id := range spec.Names { 1010 if !isBlankIdent(id) { 1011 fn.addLocalForIdent(id) 1012 lhs := b.addr(fn, id, false) // non-escaping 1013 lhs.store(fn, emitExtract(fn, tuple, i)) 1014 } 1015 } 1016 } 1017 } 1018 1019 // assignStmt emits code to fn for a parallel assignment of rhss to lhss. 1020 // isDef is true if this is a short variable declaration (:=). 1021 // 1022 // Note the similarity with localValueSpec. 1023 // 1024 func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { 1025 // Side effects of all LHSs and RHSs must occur in left-to-right order. 1026 lvals := make([]lvalue, len(lhss)) 1027 isZero := make([]bool, len(lhss)) 1028 for i, lhs := range lhss { 1029 var lval lvalue = blank{} 1030 if !isBlankIdent(lhs) { 1031 if isDef { 1032 if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { 1033 fn.addNamedLocal(obj) 1034 isZero[i] = true 1035 } 1036 } 1037 lval = b.addr(fn, lhs, false) // non-escaping 1038 } 1039 lvals[i] = lval 1040 } 1041 if len(lhss) == len(rhss) { 1042 // Simple assignment: x = f() (!isDef) 1043 // Parallel assignment: x, y = f(), g() (!isDef) 1044 // or short var decl: x, y := f(), g() (isDef) 1045 // 1046 // In all cases, the RHSs may refer to the LHSs, 1047 // so we need a storebuf. 1048 var sb storebuf 1049 for i := range rhss { 1050 b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) 1051 } 1052 sb.emit(fn) 1053 } else { 1054 // e.g. x, y = pos() 1055 tuple := b.exprN(fn, rhss[0]) 1056 emitDebugRef(fn, rhss[0], tuple, false) 1057 for i, lval := range lvals { 1058 lval.store(fn, emitExtract(fn, tuple, i)) 1059 } 1060 } 1061 } 1062 1063 // arrayLen returns the length of the array whose composite literal elements are elts. 1064 func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { 1065 var max int64 = -1 1066 var i int64 = -1 1067 for _, e := range elts { 1068 if kv, ok := e.(*ast.KeyValueExpr); ok { 1069 i = b.expr(fn, kv.Key).(*Const).Int64() 1070 } else { 1071 i++ 1072 } 1073 if i > max { 1074 max = i 1075 } 1076 } 1077 return max + 1 1078 } 1079 1080 // compLit emits to fn code to initialize a composite literal e at 1081 // address addr with type typ. 1082 // 1083 // Nested composite literals are recursively initialized in place 1084 // where possible. If isZero is true, compLit assumes that addr 1085 // holds the zero value for typ. 1086 // 1087 // Because the elements of a composite literal may refer to the 1088 // variables being updated, as in the second line below, 1089 // x := T{a: 1} 1090 // x = T{a: x.a} 1091 // all the reads must occur before all the writes. Thus all stores to 1092 // loc are emitted to the storebuf sb for later execution. 1093 // 1094 // A CompositeLit may have pointer type only in the recursive (nested) 1095 // case when the type name is implicit. e.g. in []*T{{}}, the inner 1096 // literal has type *T behaves like &T{}. 1097 // In that case, addr must hold a T, not a *T. 
1098 // 1099 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { 1100 typ := deref(fn.Pkg.typeOf(e)) 1101 switch t := typ.Underlying().(type) { 1102 case *types.Struct: 1103 if !isZero && len(e.Elts) != t.NumFields() { 1104 // memclear 1105 sb.store(&address{addr, e.Lbrace, nil}, 1106 zeroValue(fn, deref(addr.Type()))) 1107 isZero = true 1108 } 1109 for i, e := range e.Elts { 1110 fieldIndex := i 1111 pos := e.Pos() 1112 if kv, ok := e.(*ast.KeyValueExpr); ok { 1113 fname := kv.Key.(*ast.Ident).Name 1114 for i, n := 0, t.NumFields(); i < n; i++ { 1115 sf := t.Field(i) 1116 if sf.Name() == fname { 1117 fieldIndex = i 1118 pos = kv.Colon 1119 e = kv.Value 1120 break 1121 } 1122 } 1123 } 1124 sf := t.Field(fieldIndex) 1125 faddr := &FieldAddr{ 1126 X: addr, 1127 Field: fieldIndex, 1128 } 1129 faddr.setType(types.NewPointer(sf.Type())) 1130 fn.emit(faddr) 1131 b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) 1132 } 1133 1134 case *types.Array, *types.Slice: 1135 var at *types.Array 1136 var array Value 1137 switch t := t.(type) { 1138 case *types.Slice: 1139 at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) 1140 alloc := emitNew(fn, at, e.Lbrace) 1141 alloc.Comment = "slicelit" 1142 array = alloc 1143 case *types.Array: 1144 at = t 1145 array = addr 1146 1147 if !isZero && int64(len(e.Elts)) != at.Len() { 1148 // memclear 1149 sb.store(&address{array, e.Lbrace, nil}, 1150 zeroValue(fn, deref(array.Type()))) 1151 } 1152 } 1153 1154 var idx *Const 1155 for _, e := range e.Elts { 1156 pos := e.Pos() 1157 if kv, ok := e.(*ast.KeyValueExpr); ok { 1158 idx = b.expr(fn, kv.Key).(*Const) 1159 pos = kv.Colon 1160 e = kv.Value 1161 } else { 1162 var idxval int64 1163 if idx != nil { 1164 idxval = idx.Int64() + 1 1165 } 1166 idx = intConst(idxval) 1167 } 1168 iaddr := &IndexAddr{ 1169 X: array, 1170 Index: idx, 1171 } 1172 iaddr.setType(types.NewPointer(at.Elem())) 1173 fn.emit(iaddr) 1174 if t != at { // slice 1175 // backing array is unaliased => storebuf not needed. 1176 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) 1177 } else { 1178 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) 1179 } 1180 } 1181 1182 if t != at { // slice 1183 s := &Slice{X: array} 1184 s.setPos(e.Lbrace) 1185 s.setType(typ) 1186 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) 1187 } 1188 1189 case *types.Map: 1190 m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} 1191 m.setPos(e.Lbrace) 1192 m.setType(typ) 1193 fn.emit(m) 1194 for _, e := range e.Elts { 1195 e := e.(*ast.KeyValueExpr) 1196 1197 // If a key expression in a map literal is itself a 1198 // composite literal, the type may be omitted. 1199 // For example: 1200 // map[*struct{}]bool{{}: true} 1201 // An &-operation may be implied: 1202 // map[*struct{}]bool{&struct{}{}: true} 1203 var key Value 1204 if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { 1205 // A CompositeLit never evaluates to a pointer, 1206 // so if the type of the location is a pointer, 1207 // an &-operation is implied. 1208 key = b.addr(fn, e.Key, true).address(fn) 1209 } else { 1210 key = b.expr(fn, e.Key) 1211 } 1212 1213 loc := element{ 1214 m: m, 1215 k: emitConv(fn, key, t.Key()), 1216 t: t.Elem(), 1217 pos: e.Colon, 1218 } 1219 1220 // We call assign() only because it takes care 1221 // of any &-operation required in the recursive 1222 // case, e.g., 1223 // map[int]*struct{}{0: {}} implies &struct{}{}. 
1224 // In-place update is of course impossible, 1225 // and no storebuf is needed. 1226 b.assign(fn, &loc, e.Value, true, nil) 1227 } 1228 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) 1229 1230 default: 1231 panic("unexpected CompositeLit type: " + t.String()) 1232 } 1233 } 1234 1235 // switchStmt emits to fn code for the switch statement s, optionally 1236 // labelled by label. 1237 // 1238 func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { 1239 // We treat SwitchStmt like a sequential if-else chain. 1240 // Multiway dispatch can be recovered later by ssautil.Switches() 1241 // to those cases that are free of side effects. 1242 if s.Init != nil { 1243 b.stmt(fn, s.Init) 1244 } 1245 var tag Value = vTrue 1246 if s.Tag != nil { 1247 tag = b.expr(fn, s.Tag) 1248 } 1249 done := fn.newBasicBlock("switch.done") 1250 if label != nil { 1251 label._break = done 1252 } 1253 // We pull the default case (if present) down to the end. 1254 // But each fallthrough label must point to the next 1255 // body block in source order, so we preallocate a 1256 // body block (fallthru) for the next case. 1257 // Unfortunately this makes for a confusing block order. 1258 var dfltBody *[]ast.Stmt 1259 var dfltFallthrough *BasicBlock 1260 var fallthru, dfltBlock *BasicBlock 1261 ncases := len(s.Body.List) 1262 for i, clause := range s.Body.List { 1263 body := fallthru 1264 if body == nil { 1265 body = fn.newBasicBlock("switch.body") // first case only 1266 } 1267 1268 // Preallocate body block for the next case. 1269 fallthru = done 1270 if i+1 < ncases { 1271 fallthru = fn.newBasicBlock("switch.body") 1272 } 1273 1274 cc := clause.(*ast.CaseClause) 1275 if cc.List == nil { 1276 // Default case. 1277 dfltBody = &cc.Body 1278 dfltFallthrough = fallthru 1279 dfltBlock = body 1280 continue 1281 } 1282 1283 var nextCond *BasicBlock 1284 for _, cond := range cc.List { 1285 nextCond = fn.newBasicBlock("switch.next") 1286 // TODO(adonovan): opt: when tag==vTrue, we'd 1287 // get better code if we use b.cond(cond) 1288 // instead of BinOp(EQL, tag, b.expr(cond)) 1289 // followed by If. Don't forget conversions 1290 // though. 1291 cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) 1292 emitIf(fn, cond, body, nextCond) 1293 fn.currentBlock = nextCond 1294 } 1295 fn.currentBlock = body 1296 fn.targets = &targets{ 1297 tail: fn.targets, 1298 _break: done, 1299 _fallthrough: fallthru, 1300 } 1301 b.stmtList(fn, cc.Body) 1302 fn.targets = fn.targets.tail 1303 emitJump(fn, done) 1304 fn.currentBlock = nextCond 1305 } 1306 if dfltBlock != nil { 1307 emitJump(fn, dfltBlock) 1308 fn.currentBlock = dfltBlock 1309 fn.targets = &targets{ 1310 tail: fn.targets, 1311 _break: done, 1312 _fallthrough: dfltFallthrough, 1313 } 1314 b.stmtList(fn, *dfltBody) 1315 fn.targets = fn.targets.tail 1316 } 1317 emitJump(fn, done) 1318 fn.currentBlock = done 1319 } 1320 1321 // typeSwitchStmt emits to fn code for the type switch statement s, optionally 1322 // labelled by label. 1323 // 1324 func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { 1325 // We treat TypeSwitchStmt like a sequential if-else chain. 1326 // Multiway dispatch can be recovered later by ssautil.Switches(). 
1327 1328 // Typeswitch lowering: 1329 // 1330 // var x X 1331 // switch y := x.(type) { 1332 // case T1, T2: S1 // >1 (y := x) 1333 // case nil: SN // nil (y := x) 1334 // default: SD // 0 types (y := x) 1335 // case T3: S3 // 1 type (y := x.(T3)) 1336 // } 1337 // 1338 // ...s.Init... 1339 // x := eval x 1340 // .caseT1: 1341 // t1, ok1 := typeswitch,ok x <T1> 1342 // if ok1 then goto S1 else goto .caseT2 1343 // .caseT2: 1344 // t2, ok2 := typeswitch,ok x <T2> 1345 // if ok2 then goto S1 else goto .caseNil 1346 // .S1: 1347 // y := x 1348 // ...S1... 1349 // goto done 1350 // .caseNil: 1351 // if t2, ok2 := typeswitch,ok x <T2> 1352 // if x == nil then goto SN else goto .caseT3 1353 // .SN: 1354 // y := x 1355 // ...SN... 1356 // goto done 1357 // .caseT3: 1358 // t3, ok3 := typeswitch,ok x <T3> 1359 // if ok3 then goto S3 else goto default 1360 // .S3: 1361 // y := t3 1362 // ...S3... 1363 // goto done 1364 // .default: 1365 // y := x 1366 // ...SD... 1367 // goto done 1368 // .done: 1369 1370 if s.Init != nil { 1371 b.stmt(fn, s.Init) 1372 } 1373 1374 var x Value 1375 switch ass := s.Assign.(type) { 1376 case *ast.ExprStmt: // x.(type) 1377 x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) 1378 case *ast.AssignStmt: // y := x.(type) 1379 x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) 1380 } 1381 1382 done := fn.newBasicBlock("typeswitch.done") 1383 if label != nil { 1384 label._break = done 1385 } 1386 var default_ *ast.CaseClause 1387 for _, clause := range s.Body.List { 1388 cc := clause.(*ast.CaseClause) 1389 if cc.List == nil { 1390 default_ = cc 1391 continue 1392 } 1393 body := fn.newBasicBlock("typeswitch.body") 1394 var next *BasicBlock 1395 var casetype types.Type 1396 var ti Value // ti, ok := typeassert,ok x <Ti> 1397 for _, cond := range cc.List { 1398 next = fn.newBasicBlock("typeswitch.next") 1399 casetype = fn.Pkg.typeOf(cond) 1400 var condv Value 1401 if casetype == tUntypedNil { 1402 condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) 1403 ti = x 1404 } else { 1405 yok := emitTypeTest(fn, x, casetype, cc.Case) 1406 ti = emitExtract(fn, yok, 0) 1407 condv = emitExtract(fn, yok, 1) 1408 } 1409 emitIf(fn, condv, body, next) 1410 fn.currentBlock = next 1411 } 1412 if len(cc.List) != 1 { 1413 ti = x 1414 } 1415 fn.currentBlock = body 1416 b.typeCaseBody(fn, cc, ti, done) 1417 fn.currentBlock = next 1418 } 1419 if default_ != nil { 1420 b.typeCaseBody(fn, default_, x, done) 1421 } else { 1422 emitJump(fn, done) 1423 } 1424 fn.currentBlock = done 1425 } 1426 1427 func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { 1428 if obj := fn.Pkg.info.Implicits[cc]; obj != nil { 1429 // In a switch y := x.(type), each case clause 1430 // implicitly declares a distinct object y. 1431 // In a single-type case, y has that type. 1432 // In multi-type cases, 'case nil' and default, 1433 // y has the same type as the interface operand. 1434 emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) 1435 } 1436 fn.targets = &targets{ 1437 tail: fn.targets, 1438 _break: done, 1439 } 1440 b.stmtList(fn, cc.Body) 1441 fn.targets = fn.targets.tail 1442 emitJump(fn, done) 1443 } 1444 1445 // selectStmt emits to fn code for the select statement s, optionally 1446 // labelled by label. 1447 // 1448 func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { 1449 // A blocking select of a single case degenerates to a 1450 // simple send or receive. 
1451 // TODO(adonovan): opt: is this optimization worth its weight? 1452 if len(s.Body.List) == 1 { 1453 clause := s.Body.List[0].(*ast.CommClause) 1454 if clause.Comm != nil { 1455 b.stmt(fn, clause.Comm) 1456 done := fn.newBasicBlock("select.done") 1457 if label != nil { 1458 label._break = done 1459 } 1460 fn.targets = &targets{ 1461 tail: fn.targets, 1462 _break: done, 1463 } 1464 b.stmtList(fn, clause.Body) 1465 fn.targets = fn.targets.tail 1466 emitJump(fn, done) 1467 fn.currentBlock = done 1468 return 1469 } 1470 } 1471 1472 // First evaluate all channels in all cases, and find 1473 // the directions of each state. 1474 var states []*SelectState 1475 blocking := true 1476 debugInfo := fn.debugInfo() 1477 for _, clause := range s.Body.List { 1478 var st *SelectState 1479 switch comm := clause.(*ast.CommClause).Comm.(type) { 1480 case nil: // default case 1481 blocking = false 1482 continue 1483 1484 case *ast.SendStmt: // ch<- i 1485 ch := b.expr(fn, comm.Chan) 1486 st = &SelectState{ 1487 Dir: types.SendOnly, 1488 Chan: ch, 1489 Send: emitConv(fn, b.expr(fn, comm.Value), 1490 ch.Type().Underlying().(*types.Chan).Elem()), 1491 Pos: comm.Arrow, 1492 } 1493 if debugInfo { 1494 st.DebugNode = comm 1495 } 1496 1497 case *ast.AssignStmt: // x := <-ch 1498 recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) 1499 st = &SelectState{ 1500 Dir: types.RecvOnly, 1501 Chan: b.expr(fn, recv.X), 1502 Pos: recv.OpPos, 1503 } 1504 if debugInfo { 1505 st.DebugNode = recv 1506 } 1507 1508 case *ast.ExprStmt: // <-ch 1509 recv := unparen(comm.X).(*ast.UnaryExpr) 1510 st = &SelectState{ 1511 Dir: types.RecvOnly, 1512 Chan: b.expr(fn, recv.X), 1513 Pos: recv.OpPos, 1514 } 1515 if debugInfo { 1516 st.DebugNode = recv 1517 } 1518 } 1519 states = append(states, st) 1520 } 1521 1522 // We dispatch on the (fair) result of Select using a 1523 // sequential if-else chain, in effect: 1524 // 1525 // idx, recvOk, r0...r_n-1 := select(...) 1526 // if idx == 0 { // receive on channel 0 (first receive => r0) 1527 // x, ok := r0, recvOk 1528 // ...state0... 1529 // } else if v == 1 { // send on channel 1 1530 // ...state1... 1531 // } else { 1532 // ...default... 
1533 // } 1534 sel := &Select{ 1535 States: states, 1536 Blocking: blocking, 1537 } 1538 sel.setPos(s.Select) 1539 var vars []*types.Var 1540 vars = append(vars, varIndex, varOk) 1541 for _, st := range states { 1542 if st.Dir == types.RecvOnly { 1543 tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() 1544 vars = append(vars, anonVar(tElem)) 1545 } 1546 } 1547 sel.setType(types.NewTuple(vars...)) 1548 1549 fn.emit(sel) 1550 idx := emitExtract(fn, sel, 0) 1551 1552 done := fn.newBasicBlock("select.done") 1553 if label != nil { 1554 label._break = done 1555 } 1556 1557 var defaultBody *[]ast.Stmt 1558 state := 0 1559 r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV 1560 for _, cc := range s.Body.List { 1561 clause := cc.(*ast.CommClause) 1562 if clause.Comm == nil { 1563 defaultBody = &clause.Body 1564 continue 1565 } 1566 body := fn.newBasicBlock("select.body") 1567 next := fn.newBasicBlock("select.next") 1568 emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) 1569 fn.currentBlock = body 1570 fn.targets = &targets{ 1571 tail: fn.targets, 1572 _break: done, 1573 } 1574 switch comm := clause.Comm.(type) { 1575 case *ast.ExprStmt: // <-ch 1576 if debugInfo { 1577 v := emitExtract(fn, sel, r) 1578 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1579 } 1580 r++ 1581 1582 case *ast.AssignStmt: // x := <-states[state].Chan 1583 if comm.Tok == token.DEFINE { 1584 fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) 1585 } 1586 x := b.addr(fn, comm.Lhs[0], false) // non-escaping 1587 v := emitExtract(fn, sel, r) 1588 if debugInfo { 1589 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1590 } 1591 x.store(fn, v) 1592 1593 if len(comm.Lhs) == 2 { // x, ok := ... 1594 if comm.Tok == token.DEFINE { 1595 fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) 1596 } 1597 ok := b.addr(fn, comm.Lhs[1], false) // non-escaping 1598 ok.store(fn, emitExtract(fn, sel, 1)) 1599 } 1600 r++ 1601 } 1602 b.stmtList(fn, clause.Body) 1603 fn.targets = fn.targets.tail 1604 emitJump(fn, done) 1605 fn.currentBlock = next 1606 state++ 1607 } 1608 if defaultBody != nil { 1609 fn.targets = &targets{ 1610 tail: fn.targets, 1611 _break: done, 1612 } 1613 b.stmtList(fn, *defaultBody) 1614 fn.targets = fn.targets.tail 1615 } else { 1616 // A blocking select must match some case. 1617 // (This should really be a runtime.errorString, not a string.) 1618 fn.emit(&Panic{ 1619 X: emitConv(fn, stringConst("blocking select matched no case"), tEface), 1620 }) 1621 fn.currentBlock = fn.newBasicBlock("unreachable") 1622 } 1623 emitJump(fn, done) 1624 fn.currentBlock = done 1625 } 1626 1627 // forStmt emits to fn code for the for statement s, optionally 1628 // labelled by label. 1629 // 1630 func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { 1631 // ...init... 1632 // jump loop 1633 // loop: 1634 // if cond goto body else done 1635 // body: 1636 // ...body... 1637 // jump post 1638 // post: (target of continue) 1639 // ...post... 
1640 // jump loop 1641 // done: (target of break) 1642 if s.Init != nil { 1643 b.stmt(fn, s.Init) 1644 } 1645 body := fn.newBasicBlock("for.body") 1646 done := fn.newBasicBlock("for.done") // target of 'break' 1647 loop := body // target of back-edge 1648 if s.Cond != nil { 1649 loop = fn.newBasicBlock("for.loop") 1650 } 1651 cont := loop // target of 'continue' 1652 if s.Post != nil { 1653 cont = fn.newBasicBlock("for.post") 1654 } 1655 if label != nil { 1656 label._break = done 1657 label._continue = cont 1658 } 1659 emitJump(fn, loop) 1660 fn.currentBlock = loop 1661 if loop != body { 1662 b.cond(fn, s.Cond, body, done) 1663 fn.currentBlock = body 1664 } 1665 fn.targets = &targets{ 1666 tail: fn.targets, 1667 _break: done, 1668 _continue: cont, 1669 } 1670 b.stmt(fn, s.Body) 1671 fn.targets = fn.targets.tail 1672 emitJump(fn, cont) 1673 1674 if s.Post != nil { 1675 fn.currentBlock = cont 1676 b.stmt(fn, s.Post) 1677 emitJump(fn, loop) // back-edge 1678 } 1679 fn.currentBlock = done 1680 } 1681 1682 // rangeIndexed emits to fn the header for an integer-indexed loop 1683 // over array, *array or slice value x. 1684 // The v result is defined only if tv is non-nil. 1685 // forPos is the position of the "for" token. 1686 // 1687 func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1688 // 1689 // length = len(x) 1690 // index = -1 1691 // loop: (target of continue) 1692 // index++ 1693 // if index < length goto body else done 1694 // body: 1695 // k = index 1696 // v = x[index] 1697 // ...body... 1698 // jump loop 1699 // done: (target of break) 1700 1701 // Determine number of iterations. 1702 var length Value 1703 if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { 1704 // For array or *array, the number of iterations is 1705 // known statically thanks to the type. We avoid a 1706 // data dependence upon x, permitting later dead-code 1707 // elimination if x is pure, static unrolling, etc. 1708 // Ranging over a nil *array may have >0 iterations. 1709 // We still generate code for x, in case it has effects. 1710 length = intConst(arr.Len()) 1711 } else { 1712 // length = len(x). 
1713 var c Call 1714 c.Call.Value = makeLen(x.Type()) 1715 c.Call.Args = []Value{x} 1716 c.setType(tInt) 1717 length = fn.emit(&c) 1718 } 1719 1720 index := fn.addLocal(tInt, token.NoPos) 1721 emitStore(fn, index, intConst(-1), pos) 1722 1723 loop = fn.newBasicBlock("rangeindex.loop") 1724 emitJump(fn, loop) 1725 fn.currentBlock = loop 1726 1727 incr := &BinOp{ 1728 Op: token.ADD, 1729 X: emitLoad(fn, index), 1730 Y: vOne, 1731 } 1732 incr.setType(tInt) 1733 emitStore(fn, index, fn.emit(incr), pos) 1734 1735 body := fn.newBasicBlock("rangeindex.body") 1736 done = fn.newBasicBlock("rangeindex.done") 1737 emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) 1738 fn.currentBlock = body 1739 1740 k = emitLoad(fn, index) 1741 if tv != nil { 1742 switch t := x.Type().Underlying().(type) { 1743 case *types.Array: 1744 instr := &Index{ 1745 X: x, 1746 Index: k, 1747 } 1748 instr.setType(t.Elem()) 1749 instr.setPos(x.Pos()) 1750 v = fn.emit(instr) 1751 1752 case *types.Pointer: // *array 1753 instr := &IndexAddr{ 1754 X: x, 1755 Index: k, 1756 } 1757 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) 1758 instr.setPos(x.Pos()) 1759 v = emitLoad(fn, fn.emit(instr)) 1760 1761 case *types.Slice: 1762 instr := &IndexAddr{ 1763 X: x, 1764 Index: k, 1765 } 1766 instr.setType(types.NewPointer(t.Elem())) 1767 instr.setPos(x.Pos()) 1768 v = emitLoad(fn, fn.emit(instr)) 1769 1770 default: 1771 panic("rangeIndexed x:" + t.String()) 1772 } 1773 } 1774 return 1775 } 1776 1777 // rangeIter emits to fn the header for a loop using 1778 // Range/Next/Extract to iterate over map or string value x. 1779 // tk and tv are the types of the key/value results k and v, or nil 1780 // if the respective component is not wanted. 1781 // 1782 func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1783 // 1784 // it = range x 1785 // loop: (target of continue) 1786 // okv = next it (ok, key, value) 1787 // ok = extract okv #0 1788 // if ok goto body else done 1789 // body: 1790 // k = extract okv #1 1791 // v = extract okv #2 1792 // ...body... 1793 // jump loop 1794 // done: (target of break) 1795 // 1796 1797 if tk == nil { 1798 tk = tInvalid 1799 } 1800 if tv == nil { 1801 tv = tInvalid 1802 } 1803 1804 rng := &Range{X: x} 1805 rng.setPos(pos) 1806 rng.setType(tRangeIter) 1807 it := fn.emit(rng) 1808 1809 loop = fn.newBasicBlock("rangeiter.loop") 1810 emitJump(fn, loop) 1811 fn.currentBlock = loop 1812 1813 _, isString := x.Type().Underlying().(*types.Basic) 1814 1815 okv := &Next{ 1816 Iter: it, 1817 IsString: isString, 1818 } 1819 okv.setType(types.NewTuple( 1820 varOk, 1821 newVar("k", tk), 1822 newVar("v", tv), 1823 )) 1824 fn.emit(okv) 1825 1826 body := fn.newBasicBlock("rangeiter.body") 1827 done = fn.newBasicBlock("rangeiter.done") 1828 emitIf(fn, emitExtract(fn, okv, 0), body, done) 1829 fn.currentBlock = body 1830 1831 if tk != tInvalid { 1832 k = emitExtract(fn, okv, 1) 1833 } 1834 if tv != tInvalid { 1835 v = emitExtract(fn, okv, 2) 1836 } 1837 return 1838 } 1839 1840 // rangeChan emits to fn the header for a loop that receives from 1841 // channel x until it fails. 1842 // tk is the channel's element type, or nil if the k result is 1843 // not wanted 1844 // pos is the position of the '=' or ':=' token. 
1845 // 1846 func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { 1847 // 1848 // loop: (target of continue) 1849 // ko = <-x (key, ok) 1850 // ok = extract ko #1 1851 // if ok goto body else done 1852 // body: 1853 // k = extract ko #0 1854 // ... 1855 // goto loop 1856 // done: (target of break) 1857 1858 loop = fn.newBasicBlock("rangechan.loop") 1859 emitJump(fn, loop) 1860 fn.currentBlock = loop 1861 recv := &UnOp{ 1862 Op: token.ARROW, 1863 X: x, 1864 CommaOk: true, 1865 } 1866 recv.setPos(pos) 1867 recv.setType(types.NewTuple( 1868 newVar("k", x.Type().Underlying().(*types.Chan).Elem()), 1869 varOk, 1870 )) 1871 ko := fn.emit(recv) 1872 body := fn.newBasicBlock("rangechan.body") 1873 done = fn.newBasicBlock("rangechan.done") 1874 emitIf(fn, emitExtract(fn, ko, 1), body, done) 1875 fn.currentBlock = body 1876 if tk != nil { 1877 k = emitExtract(fn, ko, 0) 1878 } 1879 return 1880 } 1881 1882 // rangeStmt emits to fn code for the range statement s, optionally 1883 // labelled by label. 1884 // 1885 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { 1886 var tk, tv types.Type 1887 if s.Key != nil && !isBlankIdent(s.Key) { 1888 tk = fn.Pkg.typeOf(s.Key) 1889 } 1890 if s.Value != nil && !isBlankIdent(s.Value) { 1891 tv = fn.Pkg.typeOf(s.Value) 1892 } 1893 1894 // If iteration variables are defined (:=), this 1895 // occurs once outside the loop. 1896 // 1897 // Unlike a short variable declaration, a RangeStmt 1898 // using := never redeclares an existing variable; it 1899 // always creates a new one. 1900 if s.Tok == token.DEFINE { 1901 if tk != nil { 1902 fn.addLocalForIdent(s.Key.(*ast.Ident)) 1903 } 1904 if tv != nil { 1905 fn.addLocalForIdent(s.Value.(*ast.Ident)) 1906 } 1907 } 1908 1909 x := b.expr(fn, s.X) 1910 1911 var k, v Value 1912 var loop, done *BasicBlock 1913 switch rt := x.Type().Underlying().(type) { 1914 case *types.Slice, *types.Array, *types.Pointer: // *array 1915 k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) 1916 1917 case *types.Chan: 1918 k, loop, done = b.rangeChan(fn, x, tk, s.For) 1919 1920 case *types.Map, *types.Basic: // string 1921 k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) 1922 1923 default: 1924 panic("Cannot range over: " + rt.String()) 1925 } 1926 1927 // Evaluate both LHS expressions before we update either. 1928 var kl, vl lvalue 1929 if tk != nil { 1930 kl = b.addr(fn, s.Key, false) // non-escaping 1931 } 1932 if tv != nil { 1933 vl = b.addr(fn, s.Value, false) // non-escaping 1934 } 1935 if tk != nil { 1936 kl.store(fn, k) 1937 } 1938 if tv != nil { 1939 vl.store(fn, v) 1940 } 1941 1942 if label != nil { 1943 label._break = done 1944 label._continue = loop 1945 } 1946 1947 fn.targets = &targets{ 1948 tail: fn.targets, 1949 _break: done, 1950 _continue: loop, 1951 } 1952 b.stmt(fn, s.Body) 1953 fn.targets = fn.targets.tail 1954 emitJump(fn, loop) // back-edge 1955 fn.currentBlock = done 1956 } 1957 1958 // stmt lowers statement s to SSA form, emitting code to fn. 1959 func (b *builder) stmt(fn *Function, _s ast.Stmt) { 1960 // The label of the current statement. If non-nil, its _goto 1961 // target is always set; its _break and _continue are set only 1962 // within the body of switch/typeswitch/select/for/range. 1963 // It is effectively an additional default-nil parameter of stmt(). 1964 var label *lblock 1965 start: 1966 switch s := _s.(type) { 1967 case *ast.EmptyStmt: 1968 // ignore. (Usually removed by gofmt.) 

	case *ast.DeclStmt: // Con, Var or Typ
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if vs, ok := spec.(*ast.ValueSpec); ok {
					b.localValueSpec(fn, vs)
				}
			}
		}

	case *ast.LabeledStmt:
		label = fn.labelledBlock(s.Label)
		emitJump(fn, label._goto)
		fn.currentBlock = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(fn, s.Stmt, label)

	case *ast.ExprStmt:
		b.expr(fn, s.X)

	case *ast.SendStmt:
		fn.emit(&Send{
			Chan: b.expr(fn, s.Chan),
			X: emitConv(fn, b.expr(fn, s.Value),
				fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
			pos: s.Arrow,
		})

	case *ast.IncDecStmt:
		op := token.ADD
		if s.Tok == token.DEC {
			op = token.SUB
		}
		loc := b.addr(fn, s.X, false)
		b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos())

	case *ast.AssignStmt:
		switch s.Tok {
		case token.ASSIGN, token.DEFINE:
			b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)

		default: // +=, etc.
			op := s.Tok + token.ADD - token.ADD_ASSIGN
			b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos())
		}

	case *ast.GoStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Go{pos: s.Go}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v)

	case *ast.DeferStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Defer{pos: s.Defer}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v)

		// A deferred call can cause recovery from panic,
		// and control resumes at the Recover block.
		createRecoverBlock(fn)

	case *ast.ReturnStmt:
		var results []Value
		if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
			// Return of one expression in a multi-valued function.
			tuple := b.exprN(fn, s.Results[0])
			ttuple := tuple.Type().(*types.Tuple)
			for i, n := 0, ttuple.Len(); i < n; i++ {
				results = append(results,
					emitConv(fn, emitExtract(fn, tuple, i),
						fn.Signature.Results().At(i).Type()))
			}
		} else {
			// 1:1 return, or no-arg return in non-void function.
			for i, r := range s.Results {
				v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
				results = append(results, v)
			}
		}
		if fn.namedResults != nil {
			// Function has named result parameters (NRPs).
			// Perform parallel assignment of return operands to NRPs.
			for i, r := range results {
				emitStore(fn, fn.namedResults[i], r, s.Return)
			}
		}
		// Run function calls deferred in this
		// function when explicitly returning from it.
		fn.emit(new(RunDefers))
		if fn.namedResults != nil {
			// Reload NRPs to form the result tuple.
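			// (The reload is needed because a deferred call run
			// by RunDefers may itself assign to the named result
			// parameters; the values now stored in the NRPs, not
			// the operands computed above, are what the function
			// actually returns.)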
			results = results[:0]
			for _, r := range fn.namedResults {
				results = append(results, emitLoad(fn, r))
			}
		}
		fn.emit(&Return{Results: results, pos: s.Return})
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BranchStmt:
		var block *BasicBlock
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._break
			} else {
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._continue
			} else {
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := fn.targets; t != nil && block == nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			block = fn.labelledBlock(s.Label)._goto
		}
		emitJump(fn, block)
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BlockStmt:
		b.stmtList(fn, s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(fn, s.Init)
		}
		then := fn.newBasicBlock("if.then")
		done := fn.newBasicBlock("if.done")
		els := done
		if s.Else != nil {
			els = fn.newBasicBlock("if.else")
		}
		b.cond(fn, s.Cond, then, els)
		fn.currentBlock = then
		b.stmt(fn, s.Body)
		emitJump(fn, done)

		if s.Else != nil {
			fn.currentBlock = els
			b.stmt(fn, s.Else)
			emitJump(fn, done)
		}

		fn.currentBlock = done

	case *ast.SwitchStmt:
		b.switchStmt(fn, s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(fn, s, label)

	case *ast.SelectStmt:
		b.selectStmt(fn, s, label)

	case *ast.ForStmt:
		b.forStmt(fn, s, label)

	case *ast.RangeStmt:
		b.rangeStmt(fn, s, label)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}

// buildFunction builds SSA code for the body of function fn. Idempotent.
func (b *builder) buildFunction(fn *Function) {
	if fn.Blocks != nil {
		return // building already started
	}

	var recvField *ast.FieldList
	var body *ast.BlockStmt
	var functype *ast.FuncType
	switch n := fn.syntax.(type) {
	case nil:
		return // not a Go source function. (Synthetic, or from object file.)
	case *ast.FuncDecl:
		functype = n.Type
		recvField = n.Recv
		body = n.Body
	case *ast.FuncLit:
		functype = n.Type
		body = n.Body
	default:
		panic(n)
	}

	if body == nil {
		// External function.
		if fn.Params == nil {
			// This condition ensures we add a non-empty
			// params list once only, but we may attempt
			// the degenerate empty case repeatedly.
			// TODO(adonovan): opt: don't do that.

			// We set Function.Params even though there is no body
			// code to reference them. This simplifies clients.
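			// (A body-less declaration is typically a function
			// implemented in assembly or supplied by the runtime,
			// so only its parameter objects can be modelled here.)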
			if recv := fn.Signature.Recv(); recv != nil {
				fn.addParamObj(recv)
			}
			params := fn.Signature.Params()
			for i, n := 0, params.Len(); i < n; i++ {
				fn.addParamObj(params.At(i))
			}
		}
		return
	}
	if fn.Prog.mode&LogSource != 0 {
		defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
	}
	fn.startBody()
	fn.createSyntacticParams(recvField, functype)
	b.stmt(fn, body)
	if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
		// Control fell off the end of the function's body block.
		//
		// Block optimizations eliminate the current block, if
		// unreachable. It is a builder invariant that
		// if this no-arg return is ill-typed for
		// fn.Signature.Results, this block must be
		// unreachable. The sanity checker checks this.
		fn.emit(new(RunDefers))
		fn.emit(new(Return))
	}
	fn.finishBody()
}

// buildFuncDecl builds SSA code for the function or method declared
// by decl in package pkg.
//
func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
	id := decl.Name
	if isBlankIdent(id) {
		return // discard
	}
	fn := pkg.values[pkg.info.Defs[id]].(*Function)
	if decl.Recv == nil && id.Name == "init" {
		var v Call
		v.Call.Value = fn
		v.setType(types.NewTuple())
		pkg.init.emit(&v)
	}
	b.buildFunction(fn)
}

// Build calls Package.Build for each package in prog.
// Building occurs in parallel unless the BuildSerially mode flag was set.
//
// Build is intended for whole-program analysis; a typical compiler
// need only build a single package.
//
// Build is idempotent and thread-safe.
//
func (prog *Program) Build() {
	var wg sync.WaitGroup
	for _, p := range prog.packages {
		if prog.mode&BuildSerially != 0 {
			p.Build()
		} else {
			wg.Add(1)
			go func(p *Package) {
				p.Build()
				wg.Done()
			}(p)
		}
	}
	wg.Wait()
}

// Build builds SSA code for all functions and vars in package p.
//
// Precondition: CreatePackage must have been called for all of p's
// direct imports (and hence its direct imports must have been
// error-free).
//
// Build is idempotent and thread-safe.
//
func (p *Package) Build() { p.buildOnce.Do(p.build) }

func (p *Package) build() {
	if p.info == nil {
		return // synthetic package, e.g. "testmain"
	}

	// Ensure we have runtime type info for all exported members.
	// TODO(adonovan): ideally belongs in memberFromObject, but
	// that would require package creation in topological order.
	for name, mem := range p.Members {
		if ast.IsExported(name) {
			p.Prog.needMethodsOf(mem.Type())
		}
	}
	if p.Prog.mode&LogSource != 0 {
		defer logStack("build %s", p)()
	}
	init := p.init
	init.startBody()

	var done *BasicBlock

	if p.Prog.mode&BareInits == 0 {
		// Make init() skip if package is already initialized.
		initguard := p.Var("init$guard")
		doinit := init.newBasicBlock("init.start")
		done = init.newBasicBlock("init.done")
		emitIf(init, emitLoad(init, initguard), done, doinit)
		init.currentBlock = doinit
		emitStore(init, initguard, vTrue, token.NoPos)

		// Call the init() function of each package we import.
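		// (Each callee's own init$guard makes these calls idempotent,
		// so packages shared by several importers are initialized
		// only once even though every importer's init calls them.)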
		for _, pkg := range p.Pkg.Imports() {
			prereq := p.Prog.packages[pkg]
			if prereq == nil {
				panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
			}
			var v Call
			v.Call.Value = prereq.init
			v.Call.pos = init.pos
			v.setType(types.NewTuple())
			init.emit(&v)
		}
	}

	var b builder

	// Initialize package-level vars in correct order.
	for _, varinit := range p.info.InitOrder {
		if init.Prog.mode&LogSource != 0 {
			fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
				varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
		}
		if len(varinit.Lhs) == 1 {
			// 1:1 initialization: var x, y = a(), b()
			var lval lvalue
			if v := varinit.Lhs[0]; v.Name() != "_" {
				lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
			} else {
				lval = blank{}
			}
			b.assign(init, lval, varinit.Rhs, true, nil)
		} else {
			// n:1 initialization: var x, y = f()
			tuple := b.exprN(init, varinit.Rhs)
			for i, v := range varinit.Lhs {
				if v.Name() == "_" {
					continue
				}
				emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
			}
		}
	}

	// Build all package-level functions, init functions
	// and methods, including unreachable/blank ones.
	// We build them in source order, but it's not significant.
	for _, file := range p.files {
		for _, decl := range file.Decls {
			if decl, ok := decl.(*ast.FuncDecl); ok {
				b.buildFuncDecl(p, decl)
			}
		}
	}

	// Finish up init().
	if p.Prog.mode&BareInits == 0 {
		emitJump(init, done)
		init.currentBlock = done
	}
	init.emit(new(Return))
	init.finishBody()

	p.info = nil // We no longer need ASTs or go/types deductions.

	if p.Prog.mode&SanityCheckFunctions != 0 {
		sanityCheckPackage(p)
	}
}

// Like ObjectOf, but panics instead of returning nil.
// Only valid during p's create and build phases.
func (p *Package) objectOf(id *ast.Ident) types.Object {
	if o := p.info.ObjectOf(id); o != nil {
		return o
	}
	panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
		id.Name, p.Prog.Fset.Position(id.Pos())))
}

// Like TypeOf, but panics instead of returning nil.
// Only valid during p's create and build phases.
func (p *Package) typeOf(e ast.Expr) types.Type {
	if T := p.info.TypeOf(e); T != nil {
		return T
	}
	panic(fmt.Sprintf("no type for %T @ %s",
		e, p.Prog.Fset.Position(e.Pos())))
}
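
// A typical whole-program client drives the two phases in order: CREATE
// each package, then call Build once. A minimal sketch (the loader that
// produces fset, files, pkg and info, and the variable names below, are
// assumptions, not part of this package):
//
//	prog := NewProgram(fset, SanityCheckFunctions)
//	for _, p := range typeCheckedPackages {
//		prog.CreatePackage(p.Pkg, p.Files, p.Info, true)
//	}
//	prog.Build() // builds every package, in parallel unless BuildSerially is set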