github.com/llvm-mirror/llgo@v0.0.0-20190322182713-bf6f0a60fce1/third_party/gotools/go/ssa/builder.go

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

// This file implements the BUILD phase of SSA construction.
//
// SSA construction has two phases, CREATE and BUILD. In the CREATE phase
// (create.go), all packages are constructed and type-checked and
// definitions of all package members are created, method-sets are
// computed, and wrapper methods are synthesized.
// ssa.Packages are created in arbitrary order.
//
// In the BUILD phase (builder.go), the builder traverses the AST of
// each Go source function and generates SSA instructions for the
// function body. Initializer expressions for package-level variables
// are emitted to the package's init() function in the order specified
// by go/types.Info.InitOrder, then code for each function in the
// package is generated in lexical order.
// The BUILD phases for distinct packages are independent and are
// executed in parallel.
//
// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
// Audit for concurrency then benchmark using more goroutines.
//
// The builder's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
// remain constant. The sole exception is Prog.methodSets and its
// related maps, which are protected by a dedicated mutex.

import (
	"fmt"
	"go/ast"
	"go/token"
	"os"
	"sync"
	"sync/atomic"

	"llvm.org/llgo/third_party/gotools/go/exact"
	"llvm.org/llgo/third_party/gotools/go/types"
)

type opaqueType struct {
	types.Type
	name string
}

func (t *opaqueType) String() string { return t.name }

var (
	varOk    = newVar("ok", tBool)
	varIndex = newVar("index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tByte       = types.Typ[types.Byte]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tString     = types.Typ[types.String]
	tUntypedNil = types.Typ[types.UntypedNil]
	tRangeIter  = &opaqueType{nil, "iter"} // the type of all "range" iterators
	tEface      = new(types.Interface)

	// SSA Value constants.
	vZero = intConst(0)
	vOne  = intConst(1)
	vTrue = NewConst(exact.MakeBool(true), tBool)
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-SSA conversion.
type builder struct{}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
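//
// For example (an illustrative sketch, not verbatim SSA output),
// lowering the condition "a && b" with targets t and f proceeds as:
//
//	if a goto cond.true else f
//	cond.true:
//		if b goto t else f
//
// so no boolean value for the && expression is materialized here;
// logicalBinop (below) is used when the reified value is wanted.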
79 // 80 func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { 81 switch e := e.(type) { 82 case *ast.ParenExpr: 83 b.cond(fn, e.X, t, f) 84 return 85 86 case *ast.BinaryExpr: 87 switch e.Op { 88 case token.LAND: 89 ltrue := fn.newBasicBlock("cond.true") 90 b.cond(fn, e.X, ltrue, f) 91 fn.currentBlock = ltrue 92 b.cond(fn, e.Y, t, f) 93 return 94 95 case token.LOR: 96 lfalse := fn.newBasicBlock("cond.false") 97 b.cond(fn, e.X, t, lfalse) 98 fn.currentBlock = lfalse 99 b.cond(fn, e.Y, t, f) 100 return 101 } 102 103 case *ast.UnaryExpr: 104 if e.Op == token.NOT { 105 b.cond(fn, e.X, f, t) 106 return 107 } 108 } 109 110 // A traditional compiler would simplify "if false" (etc) here 111 // but we do not, for better fidelity to the source code. 112 // 113 // The value of a constant condition may be platform-specific, 114 // and may cause blocks that are reachable in some configuration 115 // to be hidden from subsequent analyses such as bug-finding tools. 116 emitIf(fn, b.expr(fn, e), t, f) 117 } 118 119 // logicalBinop emits code to fn to evaluate e, a &&- or 120 // ||-expression whose reified boolean value is wanted. 121 // The value is returned. 122 // 123 func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { 124 rhs := fn.newBasicBlock("binop.rhs") 125 done := fn.newBasicBlock("binop.done") 126 127 // T(e) = T(e.X) = T(e.Y) after untyped constants have been 128 // eliminated. 129 // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. 130 t := fn.Pkg.typeOf(e) 131 132 var short Value // value of the short-circuit path 133 switch e.Op { 134 case token.LAND: 135 b.cond(fn, e.X, rhs, done) 136 short = NewConst(exact.MakeBool(false), t) 137 138 case token.LOR: 139 b.cond(fn, e.X, done, rhs) 140 short = NewConst(exact.MakeBool(true), t) 141 } 142 143 // Is rhs unreachable? 144 if rhs.Preds == nil { 145 // Simplify false&&y to false, true||y to true. 146 fn.currentBlock = done 147 return short 148 } 149 150 // Is done unreachable? 151 if done.Preds == nil { 152 // Simplify true&&y (or false||y) to y. 153 fn.currentBlock = rhs 154 return b.expr(fn, e.Y) 155 } 156 157 // All edges from e.X to done carry the short-circuit value. 158 var edges []Value 159 for _ = range done.Preds { 160 edges = append(edges, short) 161 } 162 163 // The edge from e.Y to done carries the value of e.Y. 164 fn.currentBlock = rhs 165 edges = append(edges, b.expr(fn, e.Y)) 166 emitJump(fn, done) 167 fn.currentBlock = done 168 169 phi := &Phi{Edges: edges, Comment: e.Op.String()} 170 phi.pos = e.OpPos 171 phi.typ = t 172 return done.emit(phi) 173 } 174 175 // exprN lowers a multi-result expression e to SSA form, emitting code 176 // to fn and returning a single Value whose type is a *types.Tuple. 177 // The caller must access the components via Extract. 178 // 179 // Multi-result expressions include CallExprs in a multi-value 180 // assignment or return statement, and "value,ok" uses of 181 // TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op 182 // is token.ARROW). 183 // 184 func (b *builder) exprN(fn *Function, e ast.Expr) Value { 185 typ := fn.Pkg.typeOf(e).(*types.Tuple) 186 switch e := e.(type) { 187 case *ast.ParenExpr: 188 return b.exprN(fn, e.X) 189 190 case *ast.CallExpr: 191 // Currently, no built-in function nor type conversion 192 // has multiple results, so we can avoid some of the 193 // cases for single-valued CallExpr. 
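// Illustrative example (the call below is assumed, not taken from
// this package): for
//	n, err := strconv.Atoi(s)
// exprN emits a single Call whose type is the tuple (int, error);
// assignStmt later splits it with Extract into n and err.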
194 var c Call 195 b.setCall(fn, e, &c.Call) 196 c.typ = typ 197 return fn.emit(&c) 198 199 case *ast.IndexExpr: 200 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) 201 lookup := &Lookup{ 202 X: b.expr(fn, e.X), 203 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 204 CommaOk: true, 205 } 206 lookup.setType(typ) 207 lookup.setPos(e.Lbrack) 208 return fn.emit(lookup) 209 210 case *ast.TypeAssertExpr: 211 return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) 212 213 case *ast.UnaryExpr: // must be receive <- 214 unop := &UnOp{ 215 Op: token.ARROW, 216 X: b.expr(fn, e.X), 217 CommaOk: true, 218 } 219 unop.setType(typ) 220 unop.setPos(e.OpPos) 221 return fn.emit(unop) 222 } 223 panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) 224 } 225 226 // builtin emits to fn SSA instructions to implement a call to the 227 // built-in function obj with the specified arguments 228 // and return type. It returns the value defined by the result. 229 // 230 // The result is nil if no special handling was required; in this case 231 // the caller should treat this like an ordinary library function 232 // call. 233 // 234 func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { 235 switch obj.Name() { 236 case "make": 237 switch typ.Underlying().(type) { 238 case *types.Slice: 239 n := b.expr(fn, args[1]) 240 m := n 241 if len(args) == 3 { 242 m = b.expr(fn, args[2]) 243 } 244 if m, ok := m.(*Const); ok { 245 // treat make([]T, n, m) as new([m]T)[:n] 246 cap, _ := exact.Int64Val(m.Value) 247 at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) 248 alloc := emitNew(fn, at, pos) 249 alloc.Comment = "makeslice" 250 v := &Slice{ 251 X: alloc, 252 High: n, 253 } 254 v.setPos(pos) 255 v.setType(typ) 256 return fn.emit(v) 257 } 258 v := &MakeSlice{ 259 Len: n, 260 Cap: m, 261 } 262 v.setPos(pos) 263 v.setType(typ) 264 return fn.emit(v) 265 266 case *types.Map: 267 var res Value 268 if len(args) == 2 { 269 res = b.expr(fn, args[1]) 270 } 271 v := &MakeMap{Reserve: res} 272 v.setPos(pos) 273 v.setType(typ) 274 return fn.emit(v) 275 276 case *types.Chan: 277 var sz Value = vZero 278 if len(args) == 2 { 279 sz = b.expr(fn, args[1]) 280 } 281 v := &MakeChan{Size: sz} 282 v.setPos(pos) 283 v.setType(typ) 284 return fn.emit(v) 285 } 286 287 case "new": 288 alloc := emitNew(fn, deref(typ), pos) 289 alloc.Comment = "new" 290 return alloc 291 292 case "len", "cap": 293 // Special case: len or cap of an array or *array is 294 // based on the type, not the value which may be nil. 295 // We must still evaluate the value, though. (If it 296 // was side-effect free, the whole call would have 297 // been constant-folded.) 298 t := deref(fn.Pkg.typeOf(args[0])).Underlying() 299 if at, ok := t.(*types.Array); ok { 300 b.expr(fn, args[0]) // for effects only 301 return intConst(at.Len()) 302 } 303 // Otherwise treat as normal. 304 305 case "panic": 306 fn.emit(&Panic{ 307 X: emitConv(fn, b.expr(fn, args[0]), tEface), 308 pos: pos, 309 }) 310 fn.currentBlock = fn.newBasicBlock("unreachable") 311 return vTrue // any non-nil Value will do 312 } 313 return nil // treat all others as a regular function call 314 } 315 316 // addr lowers a single-result addressable expression e to SSA form, 317 // emitting code to fn and returning the location (an lvalue) defined 318 // by the expression. 319 // 320 // If escaping is true, addr marks the base variable of the 321 // addressable expression e as being a potentially escaping pointer 322 // value. 
For example, in this code: 323 // 324 // a := A{ 325 // b: [1]B{B{c: 1}} 326 // } 327 // return &a.b[0].c 328 // 329 // the application of & causes a.b[0].c to have its address taken, 330 // which means that ultimately the local variable a must be 331 // heap-allocated. This is a simple but very conservative escape 332 // analysis. 333 // 334 // Operations forming potentially escaping pointers include: 335 // - &x, including when implicit in method call or composite literals. 336 // - a[:] iff a is an array (not *array) 337 // - references to variables in lexically enclosing functions. 338 // 339 func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { 340 switch e := e.(type) { 341 case *ast.Ident: 342 if isBlankIdent(e) { 343 return blank{} 344 } 345 obj := fn.Pkg.objectOf(e) 346 v := fn.Prog.packageLevelValue(obj) // var (address) 347 if v == nil { 348 v = fn.lookup(obj, escaping) 349 } 350 return &address{addr: v, pos: e.Pos(), expr: e} 351 352 case *ast.CompositeLit: 353 t := deref(fn.Pkg.typeOf(e)) 354 var v *Alloc 355 if escaping { 356 v = emitNew(fn, t, e.Lbrace) 357 } else { 358 v = fn.addLocal(t, e.Lbrace) 359 } 360 v.Comment = "complit" 361 var sb storebuf 362 b.compLit(fn, v, e, true, &sb) 363 sb.emit(fn) 364 return &address{addr: v, pos: e.Lbrace, expr: e} 365 366 case *ast.ParenExpr: 367 return b.addr(fn, e.X, escaping) 368 369 case *ast.SelectorExpr: 370 sel, ok := fn.Pkg.info.Selections[e] 371 if !ok { 372 // qualified identifier 373 return b.addr(fn, e.Sel, escaping) 374 } 375 if sel.Kind() != types.FieldVal { 376 panic(sel) 377 } 378 wantAddr := true 379 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 380 last := len(sel.Index()) - 1 381 return &address{ 382 addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), 383 pos: e.Sel.Pos(), 384 expr: e.Sel, 385 } 386 387 case *ast.IndexExpr: 388 var x Value 389 var et types.Type 390 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { 391 case *types.Array: 392 x = b.addr(fn, e.X, escaping).address(fn) 393 et = types.NewPointer(t.Elem()) 394 case *types.Pointer: // *array 395 x = b.expr(fn, e.X) 396 et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) 397 case *types.Slice: 398 x = b.expr(fn, e.X) 399 et = types.NewPointer(t.Elem()) 400 case *types.Map: 401 return &element{ 402 m: b.expr(fn, e.X), 403 k: emitConv(fn, b.expr(fn, e.Index), t.Key()), 404 t: t.Elem(), 405 pos: e.Lbrack, 406 } 407 default: 408 panic("unexpected container type in IndexExpr: " + t.String()) 409 } 410 v := &IndexAddr{ 411 X: x, 412 Index: emitConv(fn, b.expr(fn, e.Index), tInt), 413 } 414 v.setPos(e.Lbrack) 415 v.setType(et) 416 return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} 417 418 case *ast.StarExpr: 419 return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} 420 } 421 422 panic(fmt.Sprintf("unexpected address expression: %T", e)) 423 } 424 425 type store struct { 426 lhs lvalue 427 rhs Value 428 } 429 430 type storebuf struct{ stores []store } 431 432 func (sb *storebuf) store(lhs lvalue, rhs Value) { 433 sb.stores = append(sb.stores, store{lhs, rhs}) 434 } 435 436 func (sb *storebuf) emit(fn *Function) { 437 for _, s := range sb.stores { 438 s.lhs.store(fn, s.rhs) 439 } 440 } 441 442 // assign emits to fn code to initialize the lvalue loc with the value 443 // of expression e. If isZero is true, assign assumes that loc holds 444 // the zero value for its type. 
445 // 446 // This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate 447 // better code in some cases, e.g., for composite literals in an 448 // addressable location. 449 // 450 // If sb is not nil, assign generates code to evaluate expression e, but 451 // not to update loc. Instead, the necessary stores are appended to the 452 // storebuf sb so that they can be executed later. This allows correct 453 // in-place update of existing variables when the RHS is a composite 454 // literal that may reference parts of the LHS. 455 // 456 func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { 457 // Can we initialize it in place? 458 if e, ok := unparen(e).(*ast.CompositeLit); ok { 459 // A CompositeLit never evaluates to a pointer, 460 // so if the type of the location is a pointer, 461 // an &-operation is implied. 462 if _, ok := loc.(blank); !ok { // avoid calling blank.typ() 463 if isPointer(loc.typ()) { 464 ptr := b.addr(fn, e, true).address(fn) 465 // copy address 466 if sb != nil { 467 sb.store(loc, ptr) 468 } else { 469 loc.store(fn, ptr) 470 } 471 return 472 } 473 } 474 475 if _, ok := loc.(*address); ok { 476 if isInterface(loc.typ()) { 477 // e.g. var x interface{} = T{...} 478 // Can't in-place initialize an interface value. 479 // Fall back to copying. 480 } else { 481 // x = T{...} or x := T{...} 482 addr := loc.address(fn) 483 if sb != nil { 484 b.compLit(fn, addr, e, isZero, sb) 485 } else { 486 var sb storebuf 487 b.compLit(fn, addr, e, isZero, &sb) 488 sb.emit(fn) 489 } 490 491 // Subtle: emit debug ref for aggregate types only; 492 // slice and map are handled by store ops in compLit. 493 switch loc.typ().Underlying().(type) { 494 case *types.Struct, *types.Array: 495 emitDebugRef(fn, e, addr, true) 496 } 497 498 return 499 } 500 } 501 } 502 503 // simple case: just copy 504 rhs := b.expr(fn, e) 505 if sb != nil { 506 sb.store(loc, rhs) 507 } else { 508 loc.store(fn, rhs) 509 } 510 } 511 512 // expr lowers a single-result expression e to SSA form, emitting code 513 // to fn and returning the Value defined by the expression. 514 // 515 func (b *builder) expr(fn *Function, e ast.Expr) Value { 516 e = unparen(e) 517 518 tv := fn.Pkg.info.Types[e] 519 520 // Is expression a constant? 521 if tv.Value != nil { 522 return NewConst(tv.Value, tv.Type) 523 } 524 525 var v Value 526 if tv.Addressable() { 527 // Prefer pointer arithmetic ({Index,Field}Addr) followed 528 // by Load over subelement extraction (e.g. Index, Field), 529 // to avoid large copies. 
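// e.g. (sketch) for an addressable x.f.g this emits
//	t0 = &x.f   (FieldAddr)
//	t1 = &t0.g  (FieldAddr)
//	t2 = *t1    (Load)
// instead of copying all of x (or x.f) into a register and then
// extracting g from the copy.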
530 v = b.addr(fn, e, false).load(fn) 531 } else { 532 v = b.expr0(fn, e, tv) 533 } 534 if fn.debugInfo() { 535 emitDebugRef(fn, e, v, false) 536 } 537 return v 538 } 539 540 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { 541 switch e := e.(type) { 542 case *ast.BasicLit: 543 panic("non-constant BasicLit") // unreachable 544 545 case *ast.FuncLit: 546 fn2 := &Function{ 547 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), 548 Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), 549 pos: e.Type.Func, 550 parent: fn, 551 Pkg: fn.Pkg, 552 Prog: fn.Prog, 553 syntax: e, 554 } 555 fn.AnonFuncs = append(fn.AnonFuncs, fn2) 556 b.buildFunction(fn2) 557 if fn2.FreeVars == nil { 558 return fn2 559 } 560 v := &MakeClosure{Fn: fn2} 561 v.setType(tv.Type) 562 for _, fv := range fn2.FreeVars { 563 v.Bindings = append(v.Bindings, fv.outer) 564 fv.outer = nil 565 } 566 return fn.emit(v) 567 568 case *ast.TypeAssertExpr: // single-result form only 569 return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) 570 571 case *ast.CallExpr: 572 if fn.Pkg.info.Types[e.Fun].IsType() { 573 // Explicit type conversion, e.g. string(x) or big.Int(x) 574 x := b.expr(fn, e.Args[0]) 575 y := emitConv(fn, x, tv.Type) 576 if y != x { 577 switch y := y.(type) { 578 case *Convert: 579 y.pos = e.Lparen 580 case *ChangeType: 581 y.pos = e.Lparen 582 case *MakeInterface: 583 y.pos = e.Lparen 584 } 585 } 586 return y 587 } 588 // Call to "intrinsic" built-ins, e.g. new, make, panic. 589 if id, ok := unparen(e.Fun).(*ast.Ident); ok { 590 if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { 591 if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { 592 return v 593 } 594 } 595 } 596 // Regular function call. 597 var v Call 598 b.setCall(fn, e, &v.Call) 599 v.setType(tv.Type) 600 return fn.emit(&v) 601 602 case *ast.UnaryExpr: 603 switch e.Op { 604 case token.AND: // &X --- potentially escaping. 605 addr := b.addr(fn, e.X, true) 606 if _, ok := unparen(e.X).(*ast.StarExpr); ok { 607 // &*p must panic if p is nil (http://golang.org/s/go12nil). 608 // For simplicity, we'll just (suboptimally) rely 609 // on the side effects of a load. 610 // TODO(adonovan): emit dedicated nilcheck. 611 addr.load(fn) 612 } 613 return addr.address(fn) 614 case token.ADD: 615 return b.expr(fn, e.X) 616 case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ 617 v := &UnOp{ 618 Op: e.Op, 619 X: b.expr(fn, e.X), 620 } 621 v.setPos(e.OpPos) 622 v.setType(tv.Type) 623 return fn.emit(v) 624 default: 625 panic(e.Op) 626 } 627 628 case *ast.BinaryExpr: 629 switch e.Op { 630 case token.LAND, token.LOR: 631 return b.logicalBinop(fn, e) 632 case token.SHL, token.SHR: 633 fallthrough 634 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: 635 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) 636 637 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: 638 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) 639 // The type of x==y may be UntypedBool. 640 return emitConv(fn, cmp, DefaultType(tv.Type)) 641 default: 642 panic("illegal op in BinaryExpr: " + e.Op.String()) 643 } 644 645 case *ast.SliceExpr: 646 var low, high, max Value 647 var x Value 648 switch fn.Pkg.typeOf(e.X).Underlying().(type) { 649 case *types.Array: 650 // Potentially escaping. 
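// e.g. in
//	var a [4]int
//	s := a[:]
// the slice must alias a's storage, so a's address is taken with
// escaping=true and a may end up heap-allocated (see addr above).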
651 x = b.addr(fn, e.X, true).address(fn) 652 case *types.Basic, *types.Slice, *types.Pointer: // *array 653 x = b.expr(fn, e.X) 654 default: 655 panic("unreachable") 656 } 657 if e.High != nil { 658 high = b.expr(fn, e.High) 659 } 660 if e.Low != nil { 661 low = b.expr(fn, e.Low) 662 } 663 if e.Slice3 { 664 max = b.expr(fn, e.Max) 665 } 666 v := &Slice{ 667 X: x, 668 Low: low, 669 High: high, 670 Max: max, 671 } 672 v.setPos(e.Lbrack) 673 v.setType(tv.Type) 674 return fn.emit(v) 675 676 case *ast.Ident: 677 obj := fn.Pkg.info.Uses[e] 678 // Universal built-in or nil? 679 switch obj := obj.(type) { 680 case *types.Builtin: 681 return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} 682 case *types.Nil: 683 return nilConst(tv.Type) 684 } 685 // Package-level func or var? 686 if v := fn.Prog.packageLevelValue(obj); v != nil { 687 if _, ok := obj.(*types.Var); ok { 688 return emitLoad(fn, v) // var (address) 689 } 690 return v // (func) 691 } 692 // Local var. 693 return emitLoad(fn, fn.lookup(obj, false)) // var (address) 694 695 case *ast.SelectorExpr: 696 sel, ok := fn.Pkg.info.Selections[e] 697 if !ok { 698 // qualified identifier 699 return b.expr(fn, e.Sel) 700 } 701 switch sel.Kind() { 702 case types.MethodExpr: 703 // (*T).f or T.f, the method f from the method-set of type T. 704 // The result is a "thunk". 705 return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) 706 707 case types.MethodVal: 708 // e.f where e is an expression and f is a method. 709 // The result is a "bound". 710 obj := sel.Obj().(*types.Func) 711 rt := recvType(obj) 712 wantAddr := isPointer(rt) 713 escaping := true 714 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 715 if isInterface(rt) { 716 // If v has interface type I, 717 // we must emit a check that v is non-nil. 718 // We use: typeassert v.(I). 719 emitTypeAssert(fn, v, rt, token.NoPos) 720 } 721 c := &MakeClosure{ 722 Fn: makeBound(fn.Prog, obj), 723 Bindings: []Value{v}, 724 } 725 c.setPos(e.Sel.Pos()) 726 c.setType(tv.Type) 727 return fn.emit(c) 728 729 case types.FieldVal: 730 indices := sel.Index() 731 last := len(indices) - 1 732 v := b.expr(fn, e.X) 733 v = emitImplicitSelections(fn, v, indices[:last]) 734 v = emitFieldSelection(fn, v, indices[last], false, e.Sel) 735 return v 736 } 737 738 panic("unexpected expression-relative selector") 739 740 case *ast.IndexExpr: 741 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { 742 case *types.Array: 743 // Non-addressable array (in a register). 744 v := &Index{ 745 X: b.expr(fn, e.X), 746 Index: emitConv(fn, b.expr(fn, e.Index), tInt), 747 } 748 v.setPos(e.Lbrack) 749 v.setType(t.Elem()) 750 return fn.emit(v) 751 752 case *types.Map: 753 // Maps are not addressable. 754 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) 755 v := &Lookup{ 756 X: b.expr(fn, e.X), 757 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 758 } 759 v.setPos(e.Lbrack) 760 v.setType(mapt.Elem()) 761 return fn.emit(v) 762 763 case *types.Basic: // => string 764 // Strings are not addressable. 765 v := &Lookup{ 766 X: b.expr(fn, e.X), 767 Index: b.expr(fn, e.Index), 768 } 769 v.setPos(e.Lbrack) 770 v.setType(tByte) 771 return fn.emit(v) 772 773 case *types.Slice, *types.Pointer: // *array 774 // Addressable slice/array; use IndexAddr and Load. 
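// e.g. (sketch) s[i] for a slice s is lowered as
//	t0 = &s[i]  (IndexAddr)
//	t1 = *t0    (Load)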
775 return b.addr(fn, e, false).load(fn) 776 777 default: 778 panic("unexpected container type in IndexExpr: " + t.String()) 779 } 780 781 case *ast.CompositeLit, *ast.StarExpr: 782 // Addressable types (lvalues) 783 return b.addr(fn, e, false).load(fn) 784 } 785 786 panic(fmt.Sprintf("unexpected expr: %T", e)) 787 } 788 789 // stmtList emits to fn code for all statements in list. 790 func (b *builder) stmtList(fn *Function, list []ast.Stmt) { 791 for _, s := range list { 792 b.stmt(fn, s) 793 } 794 } 795 796 // receiver emits to fn code for expression e in the "receiver" 797 // position of selection e.f (where f may be a field or a method) and 798 // returns the effective receiver after applying the implicit field 799 // selections of sel. 800 // 801 // wantAddr requests that the result is an an address. If 802 // !sel.Indirect(), this may require that e be built in addr() mode; it 803 // must thus be addressable. 804 // 805 // escaping is defined as per builder.addr(). 806 // 807 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { 808 var v Value 809 if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { 810 v = b.addr(fn, e, escaping).address(fn) 811 } else { 812 v = b.expr(fn, e) 813 } 814 815 last := len(sel.Index()) - 1 816 v = emitImplicitSelections(fn, v, sel.Index()[:last]) 817 if !wantAddr && isPointer(v.Type()) { 818 v = emitLoad(fn, v) 819 } 820 return v 821 } 822 823 // setCallFunc populates the function parts of a CallCommon structure 824 // (Func, Method, Recv, Args[0]) based on the kind of invocation 825 // occurring in e. 826 // 827 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { 828 c.pos = e.Lparen 829 830 // Is this a method call? 831 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { 832 sel, ok := fn.Pkg.info.Selections[selector] 833 if ok && sel.Kind() == types.MethodVal { 834 obj := sel.Obj().(*types.Func) 835 recv := recvType(obj) 836 wantAddr := isPointer(recv) 837 escaping := true 838 v := b.receiver(fn, selector.X, wantAddr, escaping, sel) 839 if isInterface(recv) { 840 // Invoke-mode call. 841 c.Value = v 842 c.Method = obj 843 } else { 844 // "Call"-mode call. 845 c.Value = fn.Prog.declaredFunc(obj) 846 c.Args = append(c.Args, v) 847 } 848 return 849 } 850 851 // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): 852 // a statically dispatched call to the method f in the 853 // method-set of T or *T. T may be an interface. 854 // 855 // e.Fun would evaluate to a concrete method, interface 856 // wrapper function, or promotion wrapper. 857 // 858 // For now, we evaluate it in the usual way. 859 // 860 // TODO(adonovan): opt: inline expr() here, to make the 861 // call static and to avoid generation of wrappers. 862 // It's somewhat tricky as it may consume the first 863 // actual parameter if the call is "invoke" mode. 864 // 865 // Examples: 866 // type T struct{}; func (T) f() {} // "call" mode 867 // type T interface { f() } // "invoke" mode 868 // 869 // type S struct{ T } 870 // 871 // var s S 872 // S.f(s) 873 // (*S).f(&s) 874 // 875 // Suggested approach: 876 // - consume the first actual parameter expression 877 // and build it with b.expr(). 878 // - apply implicit field selections. 879 // - use MethodVal logic to populate fields of c. 880 } 881 882 // Evaluate the function operand in the usual way. 
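// Illustrative summary (assumed example, using io.Writer and
// bytes.Buffer):
//	var w io.Writer; w.Write(p)    // invoke mode: c.Value = w, c.Method = Write
//	var b bytes.Buffer; b.Write(p) // call mode:   c.Value = (*bytes.Buffer).Write, c.Args = [&b]
// Function values, method expressions and closures all take the
// fallback path below: e.Fun is evaluated as an ordinary value.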
883 c.Value = b.expr(fn, e.Fun) 884 } 885 886 // emitCallArgs emits to f code for the actual parameters of call e to 887 // a (possibly built-in) function of effective type sig. 888 // The argument values are appended to args, which is then returned. 889 // 890 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { 891 // f(x, y, z...): pass slice z straight through. 892 if e.Ellipsis != 0 { 893 for i, arg := range e.Args { 894 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) 895 args = append(args, v) 896 } 897 return args 898 } 899 900 offset := len(args) // 1 if call has receiver, 0 otherwise 901 902 // Evaluate actual parameter expressions. 903 // 904 // If this is a chained call of the form f(g()) where g has 905 // multiple return values (MRV), they are flattened out into 906 // args; a suffix of them may end up in a varargs slice. 907 for _, arg := range e.Args { 908 v := b.expr(fn, arg) 909 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain 910 for i, n := 0, ttuple.Len(); i < n; i++ { 911 args = append(args, emitExtract(fn, v, i)) 912 } 913 } else { 914 args = append(args, v) 915 } 916 } 917 918 // Actual->formal assignability conversions for normal parameters. 919 np := sig.Params().Len() // number of normal parameters 920 if sig.Variadic() { 921 np-- 922 } 923 for i := 0; i < np; i++ { 924 args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) 925 } 926 927 // Actual->formal assignability conversions for variadic parameter, 928 // and construction of slice. 929 if sig.Variadic() { 930 varargs := args[offset+np:] 931 st := sig.Params().At(np).Type().(*types.Slice) 932 vt := st.Elem() 933 if len(varargs) == 0 { 934 args = append(args, nilConst(st)) 935 } else { 936 // Replace a suffix of args with a slice containing it. 937 at := types.NewArray(vt, int64(len(varargs))) 938 a := emitNew(fn, at, token.NoPos) 939 a.setPos(e.Rparen) 940 a.Comment = "varargs" 941 for i, arg := range varargs { 942 iaddr := &IndexAddr{ 943 X: a, 944 Index: intConst(int64(i)), 945 } 946 iaddr.setType(types.NewPointer(vt)) 947 fn.emit(iaddr) 948 emitStore(fn, iaddr, arg, arg.Pos()) 949 } 950 s := &Slice{X: a} 951 s.setType(st) 952 args[offset+np] = fn.emit(s) 953 args = args[:offset+np+1] 954 } 955 } 956 return args 957 } 958 959 // setCall emits to fn code to evaluate all the parameters of a function 960 // call e, and populates *c with those values. 961 // 962 func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { 963 // First deal with the f(...) part and optional receiver. 964 b.setCallFunc(fn, e, c) 965 966 // Then append the other actual parameters. 967 sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) 968 if sig == nil { 969 panic(fmt.Sprintf("no signature for call of %s", e.Fun)) 970 } 971 c.Args = b.emitCallArgs(fn, sig, e, c.Args) 972 } 973 974 // assignOp emits to fn code to perform loc += incr or loc -= incr. 975 func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token) { 976 oldv := loc.load(fn) 977 loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), token.NoPos)) 978 } 979 980 // localValueSpec emits to fn code to define all of the vars in the 981 // function-local ValueSpec, spec. 982 // 983 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { 984 switch { 985 case len(spec.Values) == len(spec.Names): 986 // e.g. 
var x, y = 0, 1 987 // 1:1 assignment 988 for i, id := range spec.Names { 989 if !isBlankIdent(id) { 990 fn.addLocalForIdent(id) 991 } 992 lval := b.addr(fn, id, false) // non-escaping 993 b.assign(fn, lval, spec.Values[i], true, nil) 994 } 995 996 case len(spec.Values) == 0: 997 // e.g. var x, y int 998 // Locals are implicitly zero-initialized. 999 for _, id := range spec.Names { 1000 if !isBlankIdent(id) { 1001 lhs := fn.addLocalForIdent(id) 1002 if fn.debugInfo() { 1003 emitDebugRef(fn, id, lhs, true) 1004 } 1005 } 1006 } 1007 1008 default: 1009 // e.g. var x, y = pos() 1010 tuple := b.exprN(fn, spec.Values[0]) 1011 for i, id := range spec.Names { 1012 if !isBlankIdent(id) { 1013 fn.addLocalForIdent(id) 1014 lhs := b.addr(fn, id, false) // non-escaping 1015 lhs.store(fn, emitExtract(fn, tuple, i)) 1016 } 1017 } 1018 } 1019 } 1020 1021 // assignStmt emits code to fn for a parallel assignment of rhss to lhss. 1022 // isDef is true if this is a short variable declaration (:=). 1023 // 1024 // Note the similarity with localValueSpec. 1025 // 1026 func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { 1027 // Side effects of all LHSs and RHSs must occur in left-to-right order. 1028 lvals := make([]lvalue, len(lhss)) 1029 isZero := make([]bool, len(lhss)) 1030 for i, lhs := range lhss { 1031 var lval lvalue = blank{} 1032 if !isBlankIdent(lhs) { 1033 if isDef { 1034 if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { 1035 fn.addNamedLocal(obj) 1036 isZero[i] = true 1037 } 1038 } 1039 lval = b.addr(fn, lhs, false) // non-escaping 1040 } 1041 lvals[i] = lval 1042 } 1043 if len(lhss) == len(rhss) { 1044 // Simple assignment: x = f() (!isDef) 1045 // Parallel assignment: x, y = f(), g() (!isDef) 1046 // or short var decl: x, y := f(), g() (isDef) 1047 // 1048 // In all cases, the RHSs may refer to the LHSs, 1049 // so we need a storebuf. 1050 var sb storebuf 1051 for i := range rhss { 1052 b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) 1053 } 1054 sb.emit(fn) 1055 } else { 1056 // e.g. x, y = pos() 1057 tuple := b.exprN(fn, rhss[0]) 1058 for i, lval := range lvals { 1059 lval.store(fn, emitExtract(fn, tuple, i)) 1060 } 1061 } 1062 } 1063 1064 // arrayLen returns the length of the array whose composite literal elements are elts. 1065 func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { 1066 var max int64 = -1 1067 var i int64 = -1 1068 for _, e := range elts { 1069 if kv, ok := e.(*ast.KeyValueExpr); ok { 1070 i = b.expr(fn, kv.Key).(*Const).Int64() 1071 } else { 1072 i++ 1073 } 1074 if i > max { 1075 max = i 1076 } 1077 } 1078 return max + 1 1079 } 1080 1081 // compLit emits to fn code to initialize a composite literal e at 1082 // address addr with type typ. 1083 // 1084 // Nested composite literals are recursively initialized in place 1085 // where possible. If isZero is true, compLit assumes that addr 1086 // holds the zero value for typ. 1087 // 1088 // Because the elements of a composite literal may refer to the 1089 // variables being updated, as in the second line below, 1090 // x := T{a: 1} 1091 // x = T{a: x.a} 1092 // all the reads must occur before all the writes. Thus all stores to 1093 // loc are emitted to the storebuf sb for later execution. 1094 // 1095 // A CompositeLit may have pointer type only in the recursive (nested) 1096 // case when the type name is implicit. e.g. in []*T{{}}, the inner 1097 // literal has type *T behaves like &T{}. 1098 // In that case, addr must hold a T, not a *T. 
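//
// Illustrative sketch (T is an assumed struct type with two fields
// of the same type): in
//	x = T{a: x.b, b: x.a}
// compLit computes &x.a and &x.b and evaluates x.b and x.a
// immediately, but the two stores are buffered in sb and emitted
// only by sb.emit, after every read of x has occurred.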
1099 // 1100 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { 1101 typ := deref(fn.Pkg.typeOf(e)) 1102 switch t := typ.Underlying().(type) { 1103 case *types.Struct: 1104 if !isZero && len(e.Elts) != t.NumFields() { 1105 // memclear 1106 sb.store(&address{addr, e.Lbrace, nil}, 1107 zeroValue(fn, deref(addr.Type()))) 1108 isZero = true 1109 } 1110 for i, e := range e.Elts { 1111 fieldIndex := i 1112 pos := e.Pos() 1113 if kv, ok := e.(*ast.KeyValueExpr); ok { 1114 fname := kv.Key.(*ast.Ident).Name 1115 for i, n := 0, t.NumFields(); i < n; i++ { 1116 sf := t.Field(i) 1117 if sf.Name() == fname { 1118 fieldIndex = i 1119 pos = kv.Colon 1120 e = kv.Value 1121 break 1122 } 1123 } 1124 } 1125 sf := t.Field(fieldIndex) 1126 faddr := &FieldAddr{ 1127 X: addr, 1128 Field: fieldIndex, 1129 } 1130 faddr.setType(types.NewPointer(sf.Type())) 1131 fn.emit(faddr) 1132 b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) 1133 } 1134 1135 case *types.Array, *types.Slice: 1136 var at *types.Array 1137 var array Value 1138 switch t := t.(type) { 1139 case *types.Slice: 1140 at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) 1141 alloc := emitNew(fn, at, e.Lbrace) 1142 alloc.Comment = "slicelit" 1143 array = alloc 1144 case *types.Array: 1145 at = t 1146 array = addr 1147 1148 if !isZero && int64(len(e.Elts)) != at.Len() { 1149 // memclear 1150 sb.store(&address{array, e.Lbrace, nil}, 1151 zeroValue(fn, deref(array.Type()))) 1152 } 1153 } 1154 1155 var idx *Const 1156 for _, e := range e.Elts { 1157 pos := e.Pos() 1158 if kv, ok := e.(*ast.KeyValueExpr); ok { 1159 idx = b.expr(fn, kv.Key).(*Const) 1160 pos = kv.Colon 1161 e = kv.Value 1162 } else { 1163 var idxval int64 1164 if idx != nil { 1165 idxval = idx.Int64() + 1 1166 } 1167 idx = intConst(idxval) 1168 } 1169 iaddr := &IndexAddr{ 1170 X: array, 1171 Index: idx, 1172 } 1173 iaddr.setType(types.NewPointer(at.Elem())) 1174 fn.emit(iaddr) 1175 if t != at { // slice 1176 // backing array is unaliased => storebuf not needed. 1177 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) 1178 } else { 1179 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) 1180 } 1181 } 1182 1183 if t != at { // slice 1184 s := &Slice{X: array} 1185 s.setPos(e.Lbrace) 1186 s.setType(typ) 1187 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) 1188 } 1189 1190 case *types.Map: 1191 m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} 1192 m.setPos(e.Lbrace) 1193 m.setType(typ) 1194 fn.emit(m) 1195 for _, e := range e.Elts { 1196 e := e.(*ast.KeyValueExpr) 1197 loc := element{ 1198 m: m, 1199 k: emitConv(fn, b.expr(fn, e.Key), t.Key()), 1200 t: t.Elem(), 1201 pos: e.Colon, 1202 } 1203 1204 // We call assign() only because it takes care 1205 // of any &-operation required in the recursive 1206 // case, e.g., 1207 // map[int]*struct{}{0: {}} implies &struct{}{}. 1208 // In-place update is of course impossible, 1209 // and no storebuf is needed. 1210 b.assign(fn, &loc, e.Value, true, nil) 1211 } 1212 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) 1213 1214 default: 1215 panic("unexpected CompositeLit type: " + t.String()) 1216 } 1217 } 1218 1219 // switchStmt emits to fn code for the switch statement s, optionally 1220 // labelled by label. 1221 // 1222 func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { 1223 // We treat SwitchStmt like a sequential if-else chain. 
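// For example (sketch),
//	switch x { case 1, 2: A; default: D; case 3: B }
// is emitted as the chain
//	if x == 1 goto A; else if x == 2 goto A; else if x == 3 goto B; else goto D
// with the default body pulled down to the end and every body
// jumping to switch.done.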
1224 // Multiway dispatch can be recovered later by ssautil.Switches() 1225 // to those cases that are free of side effects. 1226 if s.Init != nil { 1227 b.stmt(fn, s.Init) 1228 } 1229 var tag Value = vTrue 1230 if s.Tag != nil { 1231 tag = b.expr(fn, s.Tag) 1232 } 1233 done := fn.newBasicBlock("switch.done") 1234 if label != nil { 1235 label._break = done 1236 } 1237 // We pull the default case (if present) down to the end. 1238 // But each fallthrough label must point to the next 1239 // body block in source order, so we preallocate a 1240 // body block (fallthru) for the next case. 1241 // Unfortunately this makes for a confusing block order. 1242 var dfltBody *[]ast.Stmt 1243 var dfltFallthrough *BasicBlock 1244 var fallthru, dfltBlock *BasicBlock 1245 ncases := len(s.Body.List) 1246 for i, clause := range s.Body.List { 1247 body := fallthru 1248 if body == nil { 1249 body = fn.newBasicBlock("switch.body") // first case only 1250 } 1251 1252 // Preallocate body block for the next case. 1253 fallthru = done 1254 if i+1 < ncases { 1255 fallthru = fn.newBasicBlock("switch.body") 1256 } 1257 1258 cc := clause.(*ast.CaseClause) 1259 if cc.List == nil { 1260 // Default case. 1261 dfltBody = &cc.Body 1262 dfltFallthrough = fallthru 1263 dfltBlock = body 1264 continue 1265 } 1266 1267 var nextCond *BasicBlock 1268 for _, cond := range cc.List { 1269 nextCond = fn.newBasicBlock("switch.next") 1270 // TODO(adonovan): opt: when tag==vTrue, we'd 1271 // get better code if we use b.cond(cond) 1272 // instead of BinOp(EQL, tag, b.expr(cond)) 1273 // followed by If. Don't forget conversions 1274 // though. 1275 cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) 1276 emitIf(fn, cond, body, nextCond) 1277 fn.currentBlock = nextCond 1278 } 1279 fn.currentBlock = body 1280 fn.targets = &targets{ 1281 tail: fn.targets, 1282 _break: done, 1283 _fallthrough: fallthru, 1284 } 1285 b.stmtList(fn, cc.Body) 1286 fn.targets = fn.targets.tail 1287 emitJump(fn, done) 1288 fn.currentBlock = nextCond 1289 } 1290 if dfltBlock != nil { 1291 emitJump(fn, dfltBlock) 1292 fn.currentBlock = dfltBlock 1293 fn.targets = &targets{ 1294 tail: fn.targets, 1295 _break: done, 1296 _fallthrough: dfltFallthrough, 1297 } 1298 b.stmtList(fn, *dfltBody) 1299 fn.targets = fn.targets.tail 1300 } 1301 emitJump(fn, done) 1302 fn.currentBlock = done 1303 } 1304 1305 // typeSwitchStmt emits to fn code for the type switch statement s, optionally 1306 // labelled by label. 1307 // 1308 func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { 1309 // We treat TypeSwitchStmt like a sequential if-else chain. 1310 // Multiway dispatch can be recovered later by ssautil.Switches(). 1311 1312 // Typeswitch lowering: 1313 // 1314 // var x X 1315 // switch y := x.(type) { 1316 // case T1, T2: S1 // >1 (y := x) 1317 // case nil: SN // nil (y := x) 1318 // default: SD // 0 types (y := x) 1319 // case T3: S3 // 1 type (y := x.(T3)) 1320 // } 1321 // 1322 // ...s.Init... 1323 // x := eval x 1324 // .caseT1: 1325 // t1, ok1 := typeswitch,ok x <T1> 1326 // if ok1 then goto S1 else goto .caseT2 1327 // .caseT2: 1328 // t2, ok2 := typeswitch,ok x <T2> 1329 // if ok2 then goto S1 else goto .caseNil 1330 // .S1: 1331 // y := x 1332 // ...S1... 1333 // goto done 1334 // .caseNil: 1335 // if t2, ok2 := typeswitch,ok x <T2> 1336 // if x == nil then goto SN else goto .caseT3 1337 // .SN: 1338 // y := x 1339 // ...SN... 
1340 // goto done 1341 // .caseT3: 1342 // t3, ok3 := typeswitch,ok x <T3> 1343 // if ok3 then goto S3 else goto default 1344 // .S3: 1345 // y := t3 1346 // ...S3... 1347 // goto done 1348 // .default: 1349 // y := x 1350 // ...SD... 1351 // goto done 1352 // .done: 1353 1354 if s.Init != nil { 1355 b.stmt(fn, s.Init) 1356 } 1357 1358 var x Value 1359 switch ass := s.Assign.(type) { 1360 case *ast.ExprStmt: // x.(type) 1361 x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) 1362 case *ast.AssignStmt: // y := x.(type) 1363 x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) 1364 } 1365 1366 done := fn.newBasicBlock("typeswitch.done") 1367 if label != nil { 1368 label._break = done 1369 } 1370 var default_ *ast.CaseClause 1371 for _, clause := range s.Body.List { 1372 cc := clause.(*ast.CaseClause) 1373 if cc.List == nil { 1374 default_ = cc 1375 continue 1376 } 1377 body := fn.newBasicBlock("typeswitch.body") 1378 var next *BasicBlock 1379 var casetype types.Type 1380 var ti Value // ti, ok := typeassert,ok x <Ti> 1381 for _, cond := range cc.List { 1382 next = fn.newBasicBlock("typeswitch.next") 1383 casetype = fn.Pkg.typeOf(cond) 1384 var condv Value 1385 if casetype == tUntypedNil { 1386 condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) 1387 ti = x 1388 } else { 1389 yok := emitTypeTest(fn, x, casetype, cc.Case) 1390 ti = emitExtract(fn, yok, 0) 1391 condv = emitExtract(fn, yok, 1) 1392 } 1393 emitIf(fn, condv, body, next) 1394 fn.currentBlock = next 1395 } 1396 if len(cc.List) != 1 { 1397 ti = x 1398 } 1399 fn.currentBlock = body 1400 b.typeCaseBody(fn, cc, ti, done) 1401 fn.currentBlock = next 1402 } 1403 if default_ != nil { 1404 b.typeCaseBody(fn, default_, x, done) 1405 } else { 1406 emitJump(fn, done) 1407 } 1408 fn.currentBlock = done 1409 } 1410 1411 func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { 1412 if obj := fn.Pkg.info.Implicits[cc]; obj != nil { 1413 // In a switch y := x.(type), each case clause 1414 // implicitly declares a distinct object y. 1415 // In a single-type case, y has that type. 1416 // In multi-type cases, 'case nil' and default, 1417 // y has the same type as the interface operand. 1418 emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) 1419 } 1420 fn.targets = &targets{ 1421 tail: fn.targets, 1422 _break: done, 1423 } 1424 b.stmtList(fn, cc.Body) 1425 fn.targets = fn.targets.tail 1426 emitJump(fn, done) 1427 } 1428 1429 // selectStmt emits to fn code for the select statement s, optionally 1430 // labelled by label. 1431 // 1432 func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { 1433 // A blocking select of a single case degenerates to a 1434 // simple send or receive. 1435 // TODO(adonovan): opt: is this optimization worth its weight? 1436 if len(s.Body.List) == 1 { 1437 clause := s.Body.List[0].(*ast.CommClause) 1438 if clause.Comm != nil { 1439 b.stmt(fn, clause.Comm) 1440 done := fn.newBasicBlock("select.done") 1441 if label != nil { 1442 label._break = done 1443 } 1444 fn.targets = &targets{ 1445 tail: fn.targets, 1446 _break: done, 1447 } 1448 b.stmtList(fn, clause.Body) 1449 fn.targets = fn.targets.tail 1450 emitJump(fn, done) 1451 fn.currentBlock = done 1452 return 1453 } 1454 } 1455 1456 // First evaluate all channels in all cases, and find 1457 // the directions of each state. 
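// For example (an illustrative sketch; ch, v, done, x and ok are
// assumed locals):
//	select {
//	case ch <- v:
//	case x, ok = <-done:
//	default:
//	}
// yields one SendOnly state (Chan: ch, Send: v) and one RecvOnly
// state (Chan: done), with blocking set to false by the default
// case. Only channel and sent-value operands are evaluated here;
// received values are bound after the Select instruction picks a case.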
1458 var states []*SelectState 1459 blocking := true 1460 debugInfo := fn.debugInfo() 1461 for _, clause := range s.Body.List { 1462 var st *SelectState 1463 switch comm := clause.(*ast.CommClause).Comm.(type) { 1464 case nil: // default case 1465 blocking = false 1466 continue 1467 1468 case *ast.SendStmt: // ch<- i 1469 ch := b.expr(fn, comm.Chan) 1470 st = &SelectState{ 1471 Dir: types.SendOnly, 1472 Chan: ch, 1473 Send: emitConv(fn, b.expr(fn, comm.Value), 1474 ch.Type().Underlying().(*types.Chan).Elem()), 1475 Pos: comm.Arrow, 1476 } 1477 if debugInfo { 1478 st.DebugNode = comm 1479 } 1480 1481 case *ast.AssignStmt: // x := <-ch 1482 recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) 1483 st = &SelectState{ 1484 Dir: types.RecvOnly, 1485 Chan: b.expr(fn, recv.X), 1486 Pos: recv.OpPos, 1487 } 1488 if debugInfo { 1489 st.DebugNode = recv 1490 } 1491 1492 case *ast.ExprStmt: // <-ch 1493 recv := unparen(comm.X).(*ast.UnaryExpr) 1494 st = &SelectState{ 1495 Dir: types.RecvOnly, 1496 Chan: b.expr(fn, recv.X), 1497 Pos: recv.OpPos, 1498 } 1499 if debugInfo { 1500 st.DebugNode = recv 1501 } 1502 } 1503 states = append(states, st) 1504 } 1505 1506 // We dispatch on the (fair) result of Select using a 1507 // sequential if-else chain, in effect: 1508 // 1509 // idx, recvOk, r0...r_n-1 := select(...) 1510 // if idx == 0 { // receive on channel 0 (first receive => r0) 1511 // x, ok := r0, recvOk 1512 // ...state0... 1513 // } else if v == 1 { // send on channel 1 1514 // ...state1... 1515 // } else { 1516 // ...default... 1517 // } 1518 sel := &Select{ 1519 States: states, 1520 Blocking: blocking, 1521 } 1522 sel.setPos(s.Select) 1523 var vars []*types.Var 1524 vars = append(vars, varIndex, varOk) 1525 for _, st := range states { 1526 if st.Dir == types.RecvOnly { 1527 tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() 1528 vars = append(vars, anonVar(tElem)) 1529 } 1530 } 1531 sel.setType(types.NewTuple(vars...)) 1532 1533 fn.emit(sel) 1534 idx := emitExtract(fn, sel, 0) 1535 1536 done := fn.newBasicBlock("select.done") 1537 if label != nil { 1538 label._break = done 1539 } 1540 1541 var defaultBody *[]ast.Stmt 1542 state := 0 1543 r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV 1544 for _, cc := range s.Body.List { 1545 clause := cc.(*ast.CommClause) 1546 if clause.Comm == nil { 1547 defaultBody = &clause.Body 1548 continue 1549 } 1550 body := fn.newBasicBlock("select.body") 1551 next := fn.newBasicBlock("select.next") 1552 emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) 1553 fn.currentBlock = body 1554 fn.targets = &targets{ 1555 tail: fn.targets, 1556 _break: done, 1557 } 1558 switch comm := clause.Comm.(type) { 1559 case *ast.ExprStmt: // <-ch 1560 if debugInfo { 1561 v := emitExtract(fn, sel, r) 1562 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1563 } 1564 r++ 1565 1566 case *ast.AssignStmt: // x := <-states[state].Chan 1567 if comm.Tok == token.DEFINE { 1568 fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) 1569 } 1570 x := b.addr(fn, comm.Lhs[0], false) // non-escaping 1571 v := emitExtract(fn, sel, r) 1572 if debugInfo { 1573 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1574 } 1575 x.store(fn, v) 1576 1577 if len(comm.Lhs) == 2 { // x, ok := ... 
1578 if comm.Tok == token.DEFINE { 1579 fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) 1580 } 1581 ok := b.addr(fn, comm.Lhs[1], false) // non-escaping 1582 ok.store(fn, emitExtract(fn, sel, 1)) 1583 } 1584 r++ 1585 } 1586 b.stmtList(fn, clause.Body) 1587 fn.targets = fn.targets.tail 1588 emitJump(fn, done) 1589 fn.currentBlock = next 1590 state++ 1591 } 1592 if defaultBody != nil { 1593 fn.targets = &targets{ 1594 tail: fn.targets, 1595 _break: done, 1596 } 1597 b.stmtList(fn, *defaultBody) 1598 fn.targets = fn.targets.tail 1599 } else { 1600 // A blocking select must match some case. 1601 // (This should really be a runtime.errorString, not a string.) 1602 fn.emit(&Panic{ 1603 X: emitConv(fn, stringConst("blocking select matched no case"), tEface), 1604 }) 1605 fn.currentBlock = fn.newBasicBlock("unreachable") 1606 } 1607 emitJump(fn, done) 1608 fn.currentBlock = done 1609 } 1610 1611 // forStmt emits to fn code for the for statement s, optionally 1612 // labelled by label. 1613 // 1614 func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { 1615 // ...init... 1616 // jump loop 1617 // loop: 1618 // if cond goto body else done 1619 // body: 1620 // ...body... 1621 // jump post 1622 // post: (target of continue) 1623 // ...post... 1624 // jump loop 1625 // done: (target of break) 1626 if s.Init != nil { 1627 b.stmt(fn, s.Init) 1628 } 1629 body := fn.newBasicBlock("for.body") 1630 done := fn.newBasicBlock("for.done") // target of 'break' 1631 loop := body // target of back-edge 1632 if s.Cond != nil { 1633 loop = fn.newBasicBlock("for.loop") 1634 } 1635 cont := loop // target of 'continue' 1636 if s.Post != nil { 1637 cont = fn.newBasicBlock("for.post") 1638 } 1639 if label != nil { 1640 label._break = done 1641 label._continue = cont 1642 } 1643 emitJump(fn, loop) 1644 fn.currentBlock = loop 1645 if loop != body { 1646 b.cond(fn, s.Cond, body, done) 1647 fn.currentBlock = body 1648 } 1649 fn.targets = &targets{ 1650 tail: fn.targets, 1651 _break: done, 1652 _continue: cont, 1653 } 1654 b.stmt(fn, s.Body) 1655 fn.targets = fn.targets.tail 1656 emitJump(fn, cont) 1657 1658 if s.Post != nil { 1659 fn.currentBlock = cont 1660 b.stmt(fn, s.Post) 1661 emitJump(fn, loop) // back-edge 1662 } 1663 fn.currentBlock = done 1664 } 1665 1666 // rangeIndexed emits to fn the header for an integer-indexed loop 1667 // over array, *array or slice value x. 1668 // The v result is defined only if tv is non-nil. 1669 // forPos is the position of the "for" token. 1670 // 1671 func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1672 // 1673 // length = len(x) 1674 // index = -1 1675 // loop: (target of continue) 1676 // index++ 1677 // if index < length goto body else done 1678 // body: 1679 // k = index 1680 // v = x[index] 1681 // ...body... 1682 // jump loop 1683 // done: (target of break) 1684 1685 // Determine number of iterations. 1686 var length Value 1687 if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { 1688 // For array or *array, the number of iterations is 1689 // known statically thanks to the type. We avoid a 1690 // data dependence upon x, permitting later dead-code 1691 // elimination if x is pure, static unrolling, etc. 1692 // Ranging over a nil *array may have >0 iterations. 1693 // We still generate code for x, in case it has effects. 1694 length = intConst(arr.Len()) 1695 } else { 1696 // length = len(x). 
1697 var c Call 1698 c.Call.Value = makeLen(x.Type()) 1699 c.Call.Args = []Value{x} 1700 c.setType(tInt) 1701 length = fn.emit(&c) 1702 } 1703 1704 index := fn.addLocal(tInt, token.NoPos) 1705 emitStore(fn, index, intConst(-1), pos) 1706 1707 loop = fn.newBasicBlock("rangeindex.loop") 1708 emitJump(fn, loop) 1709 fn.currentBlock = loop 1710 1711 incr := &BinOp{ 1712 Op: token.ADD, 1713 X: emitLoad(fn, index), 1714 Y: vOne, 1715 } 1716 incr.setType(tInt) 1717 emitStore(fn, index, fn.emit(incr), pos) 1718 1719 body := fn.newBasicBlock("rangeindex.body") 1720 done = fn.newBasicBlock("rangeindex.done") 1721 emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) 1722 fn.currentBlock = body 1723 1724 k = emitLoad(fn, index) 1725 if tv != nil { 1726 switch t := x.Type().Underlying().(type) { 1727 case *types.Array: 1728 instr := &Index{ 1729 X: x, 1730 Index: k, 1731 } 1732 instr.setType(t.Elem()) 1733 v = fn.emit(instr) 1734 1735 case *types.Pointer: // *array 1736 instr := &IndexAddr{ 1737 X: x, 1738 Index: k, 1739 } 1740 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) 1741 v = emitLoad(fn, fn.emit(instr)) 1742 1743 case *types.Slice: 1744 instr := &IndexAddr{ 1745 X: x, 1746 Index: k, 1747 } 1748 instr.setType(types.NewPointer(t.Elem())) 1749 v = emitLoad(fn, fn.emit(instr)) 1750 1751 default: 1752 panic("rangeIndexed x:" + t.String()) 1753 } 1754 } 1755 return 1756 } 1757 1758 // rangeIter emits to fn the header for a loop using 1759 // Range/Next/Extract to iterate over map or string value x. 1760 // tk and tv are the types of the key/value results k and v, or nil 1761 // if the respective component is not wanted. 1762 // 1763 func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1764 // 1765 // it = range x 1766 // loop: (target of continue) 1767 // okv = next it (ok, key, value) 1768 // ok = extract okv #0 1769 // if ok goto body else done 1770 // body: 1771 // k = extract okv #1 1772 // v = extract okv #2 1773 // ...body... 1774 // jump loop 1775 // done: (target of break) 1776 // 1777 1778 if tk == nil { 1779 tk = tInvalid 1780 } 1781 if tv == nil { 1782 tv = tInvalid 1783 } 1784 1785 rng := &Range{X: x} 1786 rng.setPos(pos) 1787 rng.setType(tRangeIter) 1788 it := fn.emit(rng) 1789 1790 loop = fn.newBasicBlock("rangeiter.loop") 1791 emitJump(fn, loop) 1792 fn.currentBlock = loop 1793 1794 _, isString := x.Type().Underlying().(*types.Basic) 1795 1796 okv := &Next{ 1797 Iter: it, 1798 IsString: isString, 1799 } 1800 okv.setType(types.NewTuple( 1801 varOk, 1802 newVar("k", tk), 1803 newVar("v", tv), 1804 )) 1805 fn.emit(okv) 1806 1807 body := fn.newBasicBlock("rangeiter.body") 1808 done = fn.newBasicBlock("rangeiter.done") 1809 emitIf(fn, emitExtract(fn, okv, 0), body, done) 1810 fn.currentBlock = body 1811 1812 if tk != tInvalid { 1813 k = emitExtract(fn, okv, 1) 1814 } 1815 if tv != tInvalid { 1816 v = emitExtract(fn, okv, 2) 1817 } 1818 return 1819 } 1820 1821 // rangeChan emits to fn the header for a loop that receives from 1822 // channel x until it fails. 1823 // tk is the channel's element type, or nil if the k result is 1824 // not wanted 1825 // pos is the position of the '=' or ':=' token. 
1826 // 1827 func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { 1828 // 1829 // loop: (target of continue) 1830 // ko = <-x (key, ok) 1831 // ok = extract ko #1 1832 // if ok goto body else done 1833 // body: 1834 // k = extract ko #0 1835 // ... 1836 // goto loop 1837 // done: (target of break) 1838 1839 loop = fn.newBasicBlock("rangechan.loop") 1840 emitJump(fn, loop) 1841 fn.currentBlock = loop 1842 recv := &UnOp{ 1843 Op: token.ARROW, 1844 X: x, 1845 CommaOk: true, 1846 } 1847 recv.setPos(pos) 1848 recv.setType(types.NewTuple( 1849 newVar("k", x.Type().Underlying().(*types.Chan).Elem()), 1850 varOk, 1851 )) 1852 ko := fn.emit(recv) 1853 body := fn.newBasicBlock("rangechan.body") 1854 done = fn.newBasicBlock("rangechan.done") 1855 emitIf(fn, emitExtract(fn, ko, 1), body, done) 1856 fn.currentBlock = body 1857 if tk != nil { 1858 k = emitExtract(fn, ko, 0) 1859 } 1860 return 1861 } 1862 1863 // rangeStmt emits to fn code for the range statement s, optionally 1864 // labelled by label. 1865 // 1866 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { 1867 var tk, tv types.Type 1868 if s.Key != nil && !isBlankIdent(s.Key) { 1869 tk = fn.Pkg.typeOf(s.Key) 1870 } 1871 if s.Value != nil && !isBlankIdent(s.Value) { 1872 tv = fn.Pkg.typeOf(s.Value) 1873 } 1874 1875 // If iteration variables are defined (:=), this 1876 // occurs once outside the loop. 1877 // 1878 // Unlike a short variable declaration, a RangeStmt 1879 // using := never redeclares an existing variable; it 1880 // always creates a new one. 1881 if s.Tok == token.DEFINE { 1882 if tk != nil { 1883 fn.addLocalForIdent(s.Key.(*ast.Ident)) 1884 } 1885 if tv != nil { 1886 fn.addLocalForIdent(s.Value.(*ast.Ident)) 1887 } 1888 } 1889 1890 x := b.expr(fn, s.X) 1891 1892 var k, v Value 1893 var loop, done *BasicBlock 1894 switch rt := x.Type().Underlying().(type) { 1895 case *types.Slice, *types.Array, *types.Pointer: // *array 1896 k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) 1897 1898 case *types.Chan: 1899 k, loop, done = b.rangeChan(fn, x, tk, s.For) 1900 1901 case *types.Map, *types.Basic: // string 1902 k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) 1903 1904 default: 1905 panic("Cannot range over: " + rt.String()) 1906 } 1907 1908 // Evaluate both LHS expressions before we update either. 1909 var kl, vl lvalue 1910 if tk != nil { 1911 kl = b.addr(fn, s.Key, false) // non-escaping 1912 } 1913 if tv != nil { 1914 vl = b.addr(fn, s.Value, false) // non-escaping 1915 } 1916 if tk != nil { 1917 kl.store(fn, k) 1918 } 1919 if tv != nil { 1920 vl.store(fn, v) 1921 } 1922 1923 if label != nil { 1924 label._break = done 1925 label._continue = loop 1926 } 1927 1928 fn.targets = &targets{ 1929 tail: fn.targets, 1930 _break: done, 1931 _continue: loop, 1932 } 1933 b.stmt(fn, s.Body) 1934 fn.targets = fn.targets.tail 1935 emitJump(fn, loop) // back-edge 1936 fn.currentBlock = done 1937 } 1938 1939 // stmt lowers statement s to SSA form, emitting code to fn. 1940 func (b *builder) stmt(fn *Function, _s ast.Stmt) { 1941 // The label of the current statement. If non-nil, its _goto 1942 // target is always set; its _break and _continue are set only 1943 // within the body of switch/typeswitch/select/for/range. 1944 // It is effectively an additional default-nil parameter of stmt(). 1945 var label *lblock 1946 start: 1947 switch s := _s.(type) { 1948 case *ast.EmptyStmt: 1949 // ignore. (Usually removed by gofmt.) 
1950 1951 case *ast.DeclStmt: // Con, Var or Typ 1952 d := s.Decl.(*ast.GenDecl) 1953 if d.Tok == token.VAR { 1954 for _, spec := range d.Specs { 1955 if vs, ok := spec.(*ast.ValueSpec); ok { 1956 b.localValueSpec(fn, vs) 1957 } 1958 } 1959 } 1960 1961 case *ast.LabeledStmt: 1962 label = fn.labelledBlock(s.Label) 1963 emitJump(fn, label._goto) 1964 fn.currentBlock = label._goto 1965 _s = s.Stmt 1966 goto start // effectively: tailcall stmt(fn, s.Stmt, label) 1967 1968 case *ast.ExprStmt: 1969 b.expr(fn, s.X) 1970 1971 case *ast.SendStmt: 1972 fn.emit(&Send{ 1973 Chan: b.expr(fn, s.Chan), 1974 X: emitConv(fn, b.expr(fn, s.Value), 1975 fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()), 1976 pos: s.Arrow, 1977 }) 1978 1979 case *ast.IncDecStmt: 1980 op := token.ADD 1981 if s.Tok == token.DEC { 1982 op = token.SUB 1983 } 1984 loc := b.addr(fn, s.X, false) 1985 b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op) 1986 1987 case *ast.AssignStmt: 1988 switch s.Tok { 1989 case token.ASSIGN, token.DEFINE: 1990 b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) 1991 1992 default: // +=, etc. 1993 op := s.Tok + token.ADD - token.ADD_ASSIGN 1994 b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op) 1995 } 1996 1997 case *ast.GoStmt: 1998 // The "intrinsics" new/make/len/cap are forbidden here. 1999 // panic is treated like an ordinary function call. 2000 v := Go{pos: s.Go} 2001 b.setCall(fn, s.Call, &v.Call) 2002 fn.emit(&v) 2003 2004 case *ast.DeferStmt: 2005 // The "intrinsics" new/make/len/cap are forbidden here. 2006 // panic is treated like an ordinary function call. 2007 v := Defer{pos: s.Defer} 2008 b.setCall(fn, s.Call, &v.Call) 2009 fn.emit(&v) 2010 2011 // A deferred call can cause recovery from panic, 2012 // and control resumes at the Recover block. 2013 createRecoverBlock(fn) 2014 2015 case *ast.ReturnStmt: 2016 var results []Value 2017 if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { 2018 // Return of one expression in a multi-valued function. 2019 tuple := b.exprN(fn, s.Results[0]) 2020 ttuple := tuple.Type().(*types.Tuple) 2021 for i, n := 0, ttuple.Len(); i < n; i++ { 2022 results = append(results, 2023 emitConv(fn, emitExtract(fn, tuple, i), 2024 fn.Signature.Results().At(i).Type())) 2025 } 2026 } else { 2027 // 1:1 return, or no-arg return in non-void function. 2028 for i, r := range s.Results { 2029 v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) 2030 results = append(results, v) 2031 } 2032 } 2033 if fn.namedResults != nil { 2034 // Function has named result parameters (NRPs). 2035 // Perform parallel assignment of return operands to NRPs. 2036 for i, r := range results { 2037 emitStore(fn, fn.namedResults[i], r, s.Return) 2038 } 2039 } 2040 // Run function calls deferred in this 2041 // function when explicitly returning from it. 2042 fn.emit(new(RunDefers)) 2043 if fn.namedResults != nil { 2044 // Reload NRPs to form the result tuple. 
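// Illustrative sketch (assumed function):
//	func f() (x int) {
//		defer func() { x++ }()
//		return 3
//	}
// Return stores 3 into x, emits RunDefers, then reloads x, so the
// deferred increment is reflected in the returned value (4).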
2045 			results = results[:0]
2046 			for _, r := range fn.namedResults {
2047 				results = append(results, emitLoad(fn, r))
2048 			}
2049 		}
2050 		fn.emit(&Return{Results: results, pos: s.Return})
2051 		fn.currentBlock = fn.newBasicBlock("unreachable")
2052 
2053 	case *ast.BranchStmt:
2054 		var block *BasicBlock
2055 		switch s.Tok {
2056 		case token.BREAK:
2057 			if s.Label != nil {
2058 				block = fn.labelledBlock(s.Label)._break
2059 			} else {
2060 				for t := fn.targets; t != nil && block == nil; t = t.tail {
2061 					block = t._break
2062 				}
2063 			}
2064 
2065 		case token.CONTINUE:
2066 			if s.Label != nil {
2067 				block = fn.labelledBlock(s.Label)._continue
2068 			} else {
2069 				for t := fn.targets; t != nil && block == nil; t = t.tail {
2070 					block = t._continue
2071 				}
2072 			}
2073 
2074 		case token.FALLTHROUGH:
2075 			for t := fn.targets; t != nil && block == nil; t = t.tail {
2076 				block = t._fallthrough
2077 			}
2078 
2079 		case token.GOTO:
2080 			block = fn.labelledBlock(s.Label)._goto
2081 		}
2082 		emitJump(fn, block)
2083 		fn.currentBlock = fn.newBasicBlock("unreachable")
2084 
2085 	case *ast.BlockStmt:
2086 		b.stmtList(fn, s.List)
2087 
2088 	case *ast.IfStmt:
2089 		if s.Init != nil {
2090 			b.stmt(fn, s.Init)
2091 		}
2092 		then := fn.newBasicBlock("if.then")
2093 		done := fn.newBasicBlock("if.done")
2094 		els := done
2095 		if s.Else != nil {
2096 			els = fn.newBasicBlock("if.else")
2097 		}
2098 		b.cond(fn, s.Cond, then, els)
2099 		fn.currentBlock = then
2100 		b.stmt(fn, s.Body)
2101 		emitJump(fn, done)
2102 
2103 		if s.Else != nil {
2104 			fn.currentBlock = els
2105 			b.stmt(fn, s.Else)
2106 			emitJump(fn, done)
2107 		}
2108 
2109 		fn.currentBlock = done
2110 
2111 	case *ast.SwitchStmt:
2112 		b.switchStmt(fn, s, label)
2113 
2114 	case *ast.TypeSwitchStmt:
2115 		b.typeSwitchStmt(fn, s, label)
2116 
2117 	case *ast.SelectStmt:
2118 		b.selectStmt(fn, s, label)
2119 
2120 	case *ast.ForStmt:
2121 		b.forStmt(fn, s, label)
2122 
2123 	case *ast.RangeStmt:
2124 		b.rangeStmt(fn, s, label)
2125 
2126 	default:
2127 		panic(fmt.Sprintf("unexpected statement kind: %T", s))
2128 	}
2129 }
2130 
2131 // buildFunction builds SSA code for the body of function fn. Idempotent.
2132 func (b *builder) buildFunction(fn *Function) {
2133 	if fn.Blocks != nil {
2134 		return // building already started
2135 	}
2136 
2137 	var recvField *ast.FieldList
2138 	var body *ast.BlockStmt
2139 	var functype *ast.FuncType
2140 	switch n := fn.syntax.(type) {
2141 	case nil:
2142 		return // not a Go source function. (Synthetic, or from object file.)
2143 	case *ast.FuncDecl:
2144 		functype = n.Type
2145 		recvField = n.Recv
2146 		body = n.Body
2147 	case *ast.FuncLit:
2148 		functype = n.Type
2149 		body = n.Body
2150 	default:
2151 		panic(n)
2152 	}
2153 
2154 	if body == nil {
2155 		// External function.
2156 		if fn.Params == nil {
2157 			// This condition ensures we add a non-empty
2158 			// params list once only, but we may attempt
2159 			// the degenerate empty case repeatedly.
2160 			// TODO(adonovan): opt: don't do that.
2161 
2162 			// We set Function.Params even though there is no body
2163 			// code to reference them. This simplifies clients.
2164 			if recv := fn.Signature.Recv(); recv != nil {
2165 				fn.addParamObj(recv)
2166 			}
2167 			params := fn.Signature.Params()
2168 			for i, n := 0, params.Len(); i < n; i++ {
2169 				fn.addParamObj(params.At(i))
2170 			}
2171 		}
2172 		return
2173 	}
2174 	if fn.Prog.mode&LogSource != 0 {
2175 		defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
2176 	}
2177 	fn.startBody()
2178 	fn.createSyntacticParams(recvField, functype)
2179 	b.stmt(fn, body)
2180 	if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
2181 		// Control fell off the end of the function's body block.
2182 		//
2183 		// Block optimizations eliminate the current block, if
2184 		// unreachable. It is a builder invariant that
2185 		// if this no-arg return is ill-typed for
2186 		// fn.Signature.Results, this block must be
2187 		// unreachable. The sanity checker checks this.
2188 		fn.emit(new(RunDefers))
2189 		fn.emit(new(Return))
2190 	}
2191 	fn.finishBody()
2192 }
2193 
2194 // buildFuncDecl builds SSA code for the function or method declared
2195 // by decl in package pkg.
2196 //
2197 func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
2198 	id := decl.Name
2199 	if isBlankIdent(id) {
2200 		return // discard
2201 	}
2202 	fn := pkg.values[pkg.info.Defs[id]].(*Function)
2203 	if decl.Recv == nil && id.Name == "init" {
2204 		var v Call
2205 		v.Call.Value = fn
2206 		v.setType(types.NewTuple())
2207 		pkg.init.emit(&v)
2208 	}
2209 	b.buildFunction(fn)
2210 }
2211 
2212 // BuildAll calls Package.Build() for each package in prog.
2213 // Building occurs in parallel unless the BuildSerially mode flag was set.
2214 //
2215 // BuildAll is idempotent and thread-safe.
2216 //
2217 func (prog *Program) BuildAll() {
2218 	var wg sync.WaitGroup
2219 	for _, p := range prog.packages {
2220 		if prog.mode&BuildSerially != 0 {
2221 			p.Build()
2222 		} else {
2223 			wg.Add(1)
2224 			go func(p *Package) {
2225 				p.Build()
2226 				wg.Done()
2227 			}(p)
2228 		}
2229 	}
2230 	wg.Wait()
2231 }
2232 
2233 // Build builds SSA code for all functions and vars in package p.
2234 //
2235 // Precondition: CreatePackage must have been called for all of p's
2236 // direct imports (and hence its direct imports must have been
2237 // error-free).
2238 //
2239 // Build is idempotent and thread-safe.
2240 //
2241 func (p *Package) Build() {
2242 	if !atomic.CompareAndSwapInt32(&p.started, 0, 1) {
2243 		return // already started
2244 	}
2245 	if p.info == nil {
2246 		return // synthetic package, e.g. "testmain"
2247 	}
2248 	if len(p.info.Files) == 0 {
2249 		p.info = nil
2250 		return // package loaded from export data
2251 	}
2252 
2253 	// Ensure we have runtime type info for all exported members.
2254 	// TODO(adonovan): ideally belongs in memberFromObject, but
2255 	// that would require package creation in topological order.
2256 	for name, mem := range p.Members {
2257 		if ast.IsExported(name) {
2258 			p.Prog.needMethodsOf(mem.Type())
2259 		}
2260 	}
2261 	if p.Prog.mode&LogSource != 0 {
2262 		defer logStack("build %s", p)()
2263 	}
2264 	init := p.init
2265 	init.startBody()
2266 
2267 	var done *BasicBlock
2268 
2269 	if p.Prog.mode&BareInits == 0 {
2270 		// Make init() skip if package is already initialized.
2271 		initguard := p.Var("init$guard")
2272 		doinit := init.newBasicBlock("init.start")
2273 		done = init.newBasicBlock("init.done")
2274 		emitIf(init, emitLoad(init, initguard), done, doinit)
2275 		init.currentBlock = doinit
2276 		emitStore(init, initguard, vTrue, token.NoPos)
2277 
2278 		// Call the init() function of each package we import.
2279 		for _, pkg := range p.info.Pkg.Imports() {
2280 			prereq := p.Prog.packages[pkg]
2281 			if prereq == nil {
2282 				panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Object.Path(), pkg.Path()))
2283 			}
2284 			var v Call
2285 			v.Call.Value = prereq.init
2286 			v.Call.pos = init.pos
2287 			v.setType(types.NewTuple())
2288 			init.emit(&v)
2289 		}
2290 	}
2291 
2292 	var b builder
2293 
2294 	// Initialize package-level vars in correct order.
2295 	for _, varinit := range p.info.InitOrder {
2296 		if init.Prog.mode&LogSource != 0 {
2297 			fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
2298 				varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
2299 		}
2300 		if len(varinit.Lhs) == 1 {
2301 			// 1:1 initialization: var x, y = a(), b()
2302 			var lval lvalue
2303 			if v := varinit.Lhs[0]; v.Name() != "_" {
2304 				lval = &address{addr: p.values[v].(*Global), pos: v.Pos()}
2305 			} else {
2306 				lval = blank{}
2307 			}
2308 			b.assign(init, lval, varinit.Rhs, true, nil)
2309 		} else {
2310 			// n:1 initialization: var x, y = f()
2311 			tuple := b.exprN(init, varinit.Rhs)
2312 			for i, v := range varinit.Lhs {
2313 				if v.Name() == "_" {
2314 					continue
2315 				}
2316 				emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos())
2317 			}
2318 		}
2319 	}
2320 
2321 	// Build all package-level functions, init functions
2322 	// and methods, including unreachable/blank ones.
2323 	// We build them in source order, but it's not significant.
2324 	for _, file := range p.info.Files {
2325 		for _, decl := range file.Decls {
2326 			if decl, ok := decl.(*ast.FuncDecl); ok {
2327 				b.buildFuncDecl(p, decl)
2328 			}
2329 		}
2330 	}
2331 
2332 	// Finish up init().
2333 	if p.Prog.mode&BareInits == 0 {
2334 		emitJump(init, done)
2335 		init.currentBlock = done
2336 	}
2337 	init.emit(new(Return))
2338 	init.finishBody()
2339 
2340 	p.info = nil // We no longer need ASTs or go/types deductions.
2341 
2342 	if p.Prog.mode&SanityCheckFunctions != 0 {
2343 		sanityCheckPackage(p)
2344 	}
2345 }
2346 
2347 // Like ObjectOf, but panics instead of returning nil.
2348 // Only valid during p's create and build phases.
2349 func (p *Package) objectOf(id *ast.Ident) types.Object {
2350 	if o := p.info.ObjectOf(id); o != nil {
2351 		return o
2352 	}
2353 	panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
2354 		id.Name, p.Prog.Fset.Position(id.Pos())))
2355 }
2356 
2357 // Like TypeOf, but panics instead of returning nil.
2358 // Only valid during p's create and build phases.
2359 func (p *Package) typeOf(e ast.Expr) types.Type {
2360 	if T := p.info.TypeOf(e); T != nil {
2361 		return T
2362 	}
2363 	panic(fmt.Sprintf("no type for %T @ %s",
2364 		e, p.Prog.Fset.Position(e.Pos())))
2365 }
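Editor's note: the sketches below are illustrations added for this document; they are not part of builder.go.

First, a minimal, self-contained example of the kind of loop rangeChan lowers. The comments sketch the rangechan.loop/body/done block structure described in rangeChan's header comment above; the function and variable names are invented for the example.

package main

import "fmt"

// sum ranges over a channel; the builder lowers the loop roughly as:
//
//	rangechan.loop: ko = <-ch (comma-ok receive); if ok goto body else done
//	rangechan.body: k = extract ko #0; total += k; goto loop
//	rangechan.done: (target of break)
func sum(ch <-chan int) int {
	total := 0
	for k := range ch {
		total += k
	}
	return total
}

func main() {
	ch := make(chan int, 3)
	ch <- 1
	ch <- 2
	ch <- 3
	close(ch)
	fmt.Println(sum(ch)) // prints 6
}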
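The compound-assignment case in stmt (op := s.Tok + token.ADD - token.ADD_ASSIGN, line 1993) relies on go/token declaring ADD_ASSIGN through AND_NOT_ASSIGN in the same order as ADD through AND_NOT, so subtracting the offset between the two runs recovers the underlying binary operator. A small stand-alone check of that correspondence:

package main

import (
	"fmt"
	"go/token"
)

func main() {
	// Mirror the builder's computation: each assignment operator maps to
	// its underlying binary operator (e.g. *= -> *, <<= -> <<).
	for _, tok := range []token.Token{
		token.ADD_ASSIGN, token.MUL_ASSIGN, token.SHL_ASSIGN, token.AND_NOT_ASSIGN,
	} {
		op := tok + token.ADD - token.ADD_ASSIGN
		fmt.Printf("%s -> %s\n", tok, op)
	}
}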
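Package.Build emits package-level initializers in the order given by go/types.Info.InitOrder, which is dependency order rather than source order. For a package like the illustrative sketch below, b's initializer is emitted into init() before a's, after the init$guard check and the calls to imported packages' init functions.

package example

// Although a is declared first, it depends on b, so InitOrder yields
// b = f() before a = b + 1, and init() stores the globals in that order.
var a = b + 1
var b = f()

func f() int { return 41 }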
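Finally, a hedged sketch of how a client drives the BUILD phase once the CREATE phase (create.go) has produced a Program. Only BuildAll, Build, and the BuilderMode flags mentioned in their comments appear in this file; the client package and function names are assumptions for the example.

package client

import "llvm.org/llgo/third_party/gotools/go/ssa"

// buildProgram runs the BUILD phase for every package in prog.
// BuildAll is idempotent and thread-safe; it builds packages in
// parallel unless prog was created with the BuildSerially mode flag.
func buildProgram(prog *ssa.Program) {
	prog.BuildAll()
}

// buildPackage builds a single package on demand. Package.Build is
// also idempotent, so calling it before or after BuildAll is harmless.
func buildPackage(p *ssa.Package) {
	p.Build()
}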