github.com/jhump/golang-x-tools@v0.0.0-20220218190644-4958d6d39439/go/ssa/builder.go

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

// This file implements the BUILD phase of SSA construction.
//
// SSA construction has two phases, CREATE and BUILD. In the CREATE phase
// (create.go), all packages are constructed and type-checked and
// definitions of all package members are created, method-sets are
// computed, and wrapper methods are synthesized.
// ssa.Packages are created in arbitrary order.
//
// In the BUILD phase (builder.go), the builder traverses the AST of
// each Go source function and generates SSA instructions for the
// function body. Initializer expressions for package-level variables
// are emitted to the package's init() function in the order specified
// by go/types.Info.InitOrder, then code for each function in the
// package is generated in lexical order.
// The BUILD phases for distinct packages are independent and are
// executed in parallel.
//
// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
// Audit for concurrency then benchmark using more goroutines.
//
// The builder's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
// remain constant. The sole exception is Prog.methodSets and its
// related maps, which are protected by a dedicated mutex.

import (
	"fmt"
	"go/ast"
	"go/constant"
	"go/token"
	"go/types"
	"os"
	"sync"
)

type opaqueType struct {
	types.Type
	name string
}

func (t *opaqueType) String() string { return t.name }

var (
	varOk    = newVar("ok", tBool)
	varIndex = newVar("index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tByte       = types.Typ[types.Byte]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tString     = types.Typ[types.String]
	tUntypedNil = types.Typ[types.UntypedNil]
	tRangeIter  = &opaqueType{nil, "iter"} // the type of all "range" iterators
	tEface      = types.NewInterface(nil, nil).Complete()

	// SSA Value constants.
	vZero = intConst(0)
	vOne  = intConst(1)
	vTrue = NewConst(constant.MakeBool(true), tBool)
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-SSA conversion.
type builder struct{}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
//
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
	switch e := e.(type) {
	case *ast.ParenExpr:
		b.cond(fn, e.X, t, f)
		return

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND:
			ltrue := fn.newBasicBlock("cond.true")
			b.cond(fn, e.X, ltrue, f)
			fn.currentBlock = ltrue
			b.cond(fn, e.Y, t, f)
			return

		case token.LOR:
			lfalse := fn.newBasicBlock("cond.false")
			b.cond(fn, e.X, t, lfalse)
			fn.currentBlock = lfalse
			b.cond(fn, e.Y, t, f)
			return
		}

	case *ast.UnaryExpr:
		if e.Op == token.NOT {
			b.cond(fn, e.X, f, t)
			return
		}
	}

	// A traditional compiler would simplify "if false" (etc) here
	// but we do not, for better fidelity to the source code.
110 // 111 // The value of a constant condition may be platform-specific, 112 // and may cause blocks that are reachable in some configuration 113 // to be hidden from subsequent analyses such as bug-finding tools. 114 emitIf(fn, b.expr(fn, e), t, f) 115 } 116 117 // logicalBinop emits code to fn to evaluate e, a &&- or 118 // ||-expression whose reified boolean value is wanted. 119 // The value is returned. 120 // 121 func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { 122 rhs := fn.newBasicBlock("binop.rhs") 123 done := fn.newBasicBlock("binop.done") 124 125 // T(e) = T(e.X) = T(e.Y) after untyped constants have been 126 // eliminated. 127 // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. 128 t := fn.Pkg.typeOf(e) 129 130 var short Value // value of the short-circuit path 131 switch e.Op { 132 case token.LAND: 133 b.cond(fn, e.X, rhs, done) 134 short = NewConst(constant.MakeBool(false), t) 135 136 case token.LOR: 137 b.cond(fn, e.X, done, rhs) 138 short = NewConst(constant.MakeBool(true), t) 139 } 140 141 // Is rhs unreachable? 142 if rhs.Preds == nil { 143 // Simplify false&&y to false, true||y to true. 144 fn.currentBlock = done 145 return short 146 } 147 148 // Is done unreachable? 149 if done.Preds == nil { 150 // Simplify true&&y (or false||y) to y. 151 fn.currentBlock = rhs 152 return b.expr(fn, e.Y) 153 } 154 155 // All edges from e.X to done carry the short-circuit value. 156 var edges []Value 157 for range done.Preds { 158 edges = append(edges, short) 159 } 160 161 // The edge from e.Y to done carries the value of e.Y. 162 fn.currentBlock = rhs 163 edges = append(edges, b.expr(fn, e.Y)) 164 emitJump(fn, done) 165 fn.currentBlock = done 166 167 phi := &Phi{Edges: edges, Comment: e.Op.String()} 168 phi.pos = e.OpPos 169 phi.typ = t 170 return done.emit(phi) 171 } 172 173 // exprN lowers a multi-result expression e to SSA form, emitting code 174 // to fn and returning a single Value whose type is a *types.Tuple. 175 // The caller must access the components via Extract. 176 // 177 // Multi-result expressions include CallExprs in a multi-value 178 // assignment or return statement, and "value,ok" uses of 179 // TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op 180 // is token.ARROW). 181 // 182 func (b *builder) exprN(fn *Function, e ast.Expr) Value { 183 typ := fn.Pkg.typeOf(e).(*types.Tuple) 184 switch e := e.(type) { 185 case *ast.ParenExpr: 186 return b.exprN(fn, e.X) 187 188 case *ast.CallExpr: 189 // Currently, no built-in function nor type conversion 190 // has multiple results, so we can avoid some of the 191 // cases for single-valued CallExpr. 
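		// An illustrative sketch (assumed source, not emitted verbatim):
		// for
		//
		//	x, err := f() // f returns (int, error)
		//
		// exprN emits a single Call whose type is the 2-tuple
		// (int, error); the caller then splits it with Extract #0 and
		// Extract #1, as described in the doc comment above.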
192 var c Call 193 b.setCall(fn, e, &c.Call) 194 c.typ = typ 195 return fn.emit(&c) 196 197 case *ast.IndexExpr: 198 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) 199 lookup := &Lookup{ 200 X: b.expr(fn, e.X), 201 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 202 CommaOk: true, 203 } 204 lookup.setType(typ) 205 lookup.setPos(e.Lbrack) 206 return fn.emit(lookup) 207 208 case *ast.TypeAssertExpr: 209 return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) 210 211 case *ast.UnaryExpr: // must be receive <- 212 unop := &UnOp{ 213 Op: token.ARROW, 214 X: b.expr(fn, e.X), 215 CommaOk: true, 216 } 217 unop.setType(typ) 218 unop.setPos(e.OpPos) 219 return fn.emit(unop) 220 } 221 panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) 222 } 223 224 // builtin emits to fn SSA instructions to implement a call to the 225 // built-in function obj with the specified arguments 226 // and return type. It returns the value defined by the result. 227 // 228 // The result is nil if no special handling was required; in this case 229 // the caller should treat this like an ordinary library function 230 // call. 231 // 232 func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { 233 switch obj.Name() { 234 case "make": 235 switch typ.Underlying().(type) { 236 case *types.Slice: 237 n := b.expr(fn, args[1]) 238 m := n 239 if len(args) == 3 { 240 m = b.expr(fn, args[2]) 241 } 242 if m, ok := m.(*Const); ok { 243 // treat make([]T, n, m) as new([m]T)[:n] 244 cap := m.Int64() 245 at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) 246 alloc := emitNew(fn, at, pos) 247 alloc.Comment = "makeslice" 248 v := &Slice{ 249 X: alloc, 250 High: n, 251 } 252 v.setPos(pos) 253 v.setType(typ) 254 return fn.emit(v) 255 } 256 v := &MakeSlice{ 257 Len: n, 258 Cap: m, 259 } 260 v.setPos(pos) 261 v.setType(typ) 262 return fn.emit(v) 263 264 case *types.Map: 265 var res Value 266 if len(args) == 2 { 267 res = b.expr(fn, args[1]) 268 } 269 v := &MakeMap{Reserve: res} 270 v.setPos(pos) 271 v.setType(typ) 272 return fn.emit(v) 273 274 case *types.Chan: 275 var sz Value = vZero 276 if len(args) == 2 { 277 sz = b.expr(fn, args[1]) 278 } 279 v := &MakeChan{Size: sz} 280 v.setPos(pos) 281 v.setType(typ) 282 return fn.emit(v) 283 } 284 285 case "new": 286 alloc := emitNew(fn, deref(typ), pos) 287 alloc.Comment = "new" 288 return alloc 289 290 case "len", "cap": 291 // Special case: len or cap of an array or *array is 292 // based on the type, not the value which may be nil. 293 // We must still evaluate the value, though. (If it 294 // was side-effect free, the whole call would have 295 // been constant-folded.) 296 t := deref(fn.Pkg.typeOf(args[0])).Underlying() 297 if at, ok := t.(*types.Array); ok { 298 b.expr(fn, args[0]) // for effects only 299 return intConst(at.Len()) 300 } 301 // Otherwise treat as normal. 302 303 case "panic": 304 fn.emit(&Panic{ 305 X: emitConv(fn, b.expr(fn, args[0]), tEface), 306 pos: pos, 307 }) 308 fn.currentBlock = fn.newBasicBlock("unreachable") 309 return vTrue // any non-nil Value will do 310 } 311 return nil // treat all others as a regular function call 312 } 313 314 // addr lowers a single-result addressable expression e to SSA form, 315 // emitting code to fn and returning the location (an lvalue) defined 316 // by the expression. 317 // 318 // If escaping is true, addr marks the base variable of the 319 // addressable expression e as being a potentially escaping pointer 320 // value. 
For example, in this code: 321 // 322 // a := A{ 323 // b: [1]B{B{c: 1}} 324 // } 325 // return &a.b[0].c 326 // 327 // the application of & causes a.b[0].c to have its address taken, 328 // which means that ultimately the local variable a must be 329 // heap-allocated. This is a simple but very conservative escape 330 // analysis. 331 // 332 // Operations forming potentially escaping pointers include: 333 // - &x, including when implicit in method call or composite literals. 334 // - a[:] iff a is an array (not *array) 335 // - references to variables in lexically enclosing functions. 336 // 337 func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { 338 switch e := e.(type) { 339 case *ast.Ident: 340 if isBlankIdent(e) { 341 return blank{} 342 } 343 obj := fn.Pkg.objectOf(e) 344 v := fn.Prog.packageLevelValue(obj) // var (address) 345 if v == nil { 346 v = fn.lookup(obj, escaping) 347 } 348 return &address{addr: v, pos: e.Pos(), expr: e} 349 350 case *ast.CompositeLit: 351 t := deref(fn.Pkg.typeOf(e)) 352 var v *Alloc 353 if escaping { 354 v = emitNew(fn, t, e.Lbrace) 355 } else { 356 v = fn.addLocal(t, e.Lbrace) 357 } 358 v.Comment = "complit" 359 var sb storebuf 360 b.compLit(fn, v, e, true, &sb) 361 sb.emit(fn) 362 return &address{addr: v, pos: e.Lbrace, expr: e} 363 364 case *ast.ParenExpr: 365 return b.addr(fn, e.X, escaping) 366 367 case *ast.SelectorExpr: 368 sel, ok := fn.Pkg.info.Selections[e] 369 if !ok { 370 // qualified identifier 371 return b.addr(fn, e.Sel, escaping) 372 } 373 if sel.Kind() != types.FieldVal { 374 panic(sel) 375 } 376 wantAddr := true 377 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 378 last := len(sel.Index()) - 1 379 return &address{ 380 addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), 381 pos: e.Sel.Pos(), 382 expr: e.Sel, 383 } 384 385 case *ast.IndexExpr: 386 var x Value 387 var et types.Type 388 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { 389 case *types.Array: 390 x = b.addr(fn, e.X, escaping).address(fn) 391 et = types.NewPointer(t.Elem()) 392 case *types.Pointer: // *array 393 x = b.expr(fn, e.X) 394 et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) 395 case *types.Slice: 396 x = b.expr(fn, e.X) 397 et = types.NewPointer(t.Elem()) 398 case *types.Map: 399 return &element{ 400 m: b.expr(fn, e.X), 401 k: emitConv(fn, b.expr(fn, e.Index), t.Key()), 402 t: t.Elem(), 403 pos: e.Lbrack, 404 } 405 default: 406 panic("unexpected container type in IndexExpr: " + t.String()) 407 } 408 v := &IndexAddr{ 409 X: x, 410 Index: emitConv(fn, b.expr(fn, e.Index), tInt), 411 } 412 v.setPos(e.Lbrack) 413 v.setType(et) 414 return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} 415 416 case *ast.StarExpr: 417 return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} 418 } 419 420 panic(fmt.Sprintf("unexpected address expression: %T", e)) 421 } 422 423 type store struct { 424 lhs lvalue 425 rhs Value 426 } 427 428 type storebuf struct{ stores []store } 429 430 func (sb *storebuf) store(lhs lvalue, rhs Value) { 431 sb.stores = append(sb.stores, store{lhs, rhs}) 432 } 433 434 func (sb *storebuf) emit(fn *Function) { 435 for _, s := range sb.stores { 436 s.lhs.store(fn, s.rhs) 437 } 438 } 439 440 // assign emits to fn code to initialize the lvalue loc with the value 441 // of expression e. If isZero is true, assign assumes that loc holds 442 // the zero value for its type. 
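// For example (an illustrative note): a local created by addLocal starts
// out holding the zero value of its type, so its initializer can be built
// with isZero=true, letting compLit skip the initial "memclear" store.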
443 // 444 // This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate 445 // better code in some cases, e.g., for composite literals in an 446 // addressable location. 447 // 448 // If sb is not nil, assign generates code to evaluate expression e, but 449 // not to update loc. Instead, the necessary stores are appended to the 450 // storebuf sb so that they can be executed later. This allows correct 451 // in-place update of existing variables when the RHS is a composite 452 // literal that may reference parts of the LHS. 453 // 454 func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { 455 // Can we initialize it in place? 456 if e, ok := unparen(e).(*ast.CompositeLit); ok { 457 // A CompositeLit never evaluates to a pointer, 458 // so if the type of the location is a pointer, 459 // an &-operation is implied. 460 if _, ok := loc.(blank); !ok { // avoid calling blank.typ() 461 if isPointer(loc.typ()) { 462 ptr := b.addr(fn, e, true).address(fn) 463 // copy address 464 if sb != nil { 465 sb.store(loc, ptr) 466 } else { 467 loc.store(fn, ptr) 468 } 469 return 470 } 471 } 472 473 if _, ok := loc.(*address); ok { 474 if isInterface(loc.typ()) { 475 // e.g. var x interface{} = T{...} 476 // Can't in-place initialize an interface value. 477 // Fall back to copying. 478 } else { 479 // x = T{...} or x := T{...} 480 addr := loc.address(fn) 481 if sb != nil { 482 b.compLit(fn, addr, e, isZero, sb) 483 } else { 484 var sb storebuf 485 b.compLit(fn, addr, e, isZero, &sb) 486 sb.emit(fn) 487 } 488 489 // Subtle: emit debug ref for aggregate types only; 490 // slice and map are handled by store ops in compLit. 491 switch loc.typ().Underlying().(type) { 492 case *types.Struct, *types.Array: 493 emitDebugRef(fn, e, addr, true) 494 } 495 496 return 497 } 498 } 499 } 500 501 // simple case: just copy 502 rhs := b.expr(fn, e) 503 if sb != nil { 504 sb.store(loc, rhs) 505 } else { 506 loc.store(fn, rhs) 507 } 508 } 509 510 // expr lowers a single-result expression e to SSA form, emitting code 511 // to fn and returning the Value defined by the expression. 512 // 513 func (b *builder) expr(fn *Function, e ast.Expr) Value { 514 e = unparen(e) 515 516 tv := fn.Pkg.info.Types[e] 517 518 // Is expression a constant? 519 if tv.Value != nil { 520 return NewConst(tv.Value, tv.Type) 521 } 522 523 var v Value 524 if tv.Addressable() { 525 // Prefer pointer arithmetic ({Index,Field}Addr) followed 526 // by Load over subelement extraction (e.g. Index, Field), 527 // to avoid large copies. 
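		// For example (illustrative): for an addressable array a, the
		// element a[i] is computed as IndexAddr followed by a single Load,
		// rather than loading all of a and extracting the element (Index)
		// from the copy.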
528 v = b.addr(fn, e, false).load(fn) 529 } else { 530 v = b.expr0(fn, e, tv) 531 } 532 if fn.debugInfo() { 533 emitDebugRef(fn, e, v, false) 534 } 535 return v 536 } 537 538 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { 539 switch e := e.(type) { 540 case *ast.BasicLit: 541 panic("non-constant BasicLit") // unreachable 542 543 case *ast.FuncLit: 544 fn2 := &Function{ 545 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), 546 Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), 547 pos: e.Type.Func, 548 parent: fn, 549 Pkg: fn.Pkg, 550 Prog: fn.Prog, 551 syntax: e, 552 } 553 fn.AnonFuncs = append(fn.AnonFuncs, fn2) 554 b.buildFunction(fn2) 555 if fn2.FreeVars == nil { 556 return fn2 557 } 558 v := &MakeClosure{Fn: fn2} 559 v.setType(tv.Type) 560 for _, fv := range fn2.FreeVars { 561 v.Bindings = append(v.Bindings, fv.outer) 562 fv.outer = nil 563 } 564 return fn.emit(v) 565 566 case *ast.TypeAssertExpr: // single-result form only 567 return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) 568 569 case *ast.CallExpr: 570 if fn.Pkg.info.Types[e.Fun].IsType() { 571 // Explicit type conversion, e.g. string(x) or big.Int(x) 572 x := b.expr(fn, e.Args[0]) 573 y := emitConv(fn, x, tv.Type) 574 if y != x { 575 switch y := y.(type) { 576 case *Convert: 577 y.pos = e.Lparen 578 case *ChangeType: 579 y.pos = e.Lparen 580 case *MakeInterface: 581 y.pos = e.Lparen 582 case *SliceToArrayPointer: 583 y.pos = e.Lparen 584 } 585 } 586 return y 587 } 588 // Call to "intrinsic" built-ins, e.g. new, make, panic. 589 if id, ok := unparen(e.Fun).(*ast.Ident); ok { 590 if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { 591 if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { 592 return v 593 } 594 } 595 } 596 // Regular function call. 597 var v Call 598 b.setCall(fn, e, &v.Call) 599 v.setType(tv.Type) 600 return fn.emit(&v) 601 602 case *ast.UnaryExpr: 603 switch e.Op { 604 case token.AND: // &X --- potentially escaping. 605 addr := b.addr(fn, e.X, true) 606 if _, ok := unparen(e.X).(*ast.StarExpr); ok { 607 // &*p must panic if p is nil (http://golang.org/s/go12nil). 608 // For simplicity, we'll just (suboptimally) rely 609 // on the side effects of a load. 610 // TODO(adonovan): emit dedicated nilcheck. 611 addr.load(fn) 612 } 613 return addr.address(fn) 614 case token.ADD: 615 return b.expr(fn, e.X) 616 case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ 617 v := &UnOp{ 618 Op: e.Op, 619 X: b.expr(fn, e.X), 620 } 621 v.setPos(e.OpPos) 622 v.setType(tv.Type) 623 return fn.emit(v) 624 default: 625 panic(e.Op) 626 } 627 628 case *ast.BinaryExpr: 629 switch e.Op { 630 case token.LAND, token.LOR: 631 return b.logicalBinop(fn, e) 632 case token.SHL, token.SHR: 633 fallthrough 634 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: 635 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) 636 637 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: 638 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) 639 // The type of x==y may be UntypedBool. 640 return emitConv(fn, cmp, types.Default(tv.Type)) 641 default: 642 panic("illegal op in BinaryExpr: " + e.Op.String()) 643 } 644 645 case *ast.SliceExpr: 646 var low, high, max Value 647 var x Value 648 switch fn.Pkg.typeOf(e.X).Underlying().(type) { 649 case *types.Array: 650 // Potentially escaping. 
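			// e.g. (an illustrative sketch) in "buf[2:6]" where buf is a
			// local [8]byte array, the operand is built in addr() mode
			// with escaping=true, so buf's Alloc may be marked as
			// heap-allocated.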
651 x = b.addr(fn, e.X, true).address(fn) 652 case *types.Basic, *types.Slice, *types.Pointer: // *array 653 x = b.expr(fn, e.X) 654 default: 655 panic("unreachable") 656 } 657 if e.High != nil { 658 high = b.expr(fn, e.High) 659 } 660 if e.Low != nil { 661 low = b.expr(fn, e.Low) 662 } 663 if e.Slice3 { 664 max = b.expr(fn, e.Max) 665 } 666 v := &Slice{ 667 X: x, 668 Low: low, 669 High: high, 670 Max: max, 671 } 672 v.setPos(e.Lbrack) 673 v.setType(tv.Type) 674 return fn.emit(v) 675 676 case *ast.Ident: 677 obj := fn.Pkg.info.Uses[e] 678 // Universal built-in or nil? 679 switch obj := obj.(type) { 680 case *types.Builtin: 681 return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} 682 case *types.Nil: 683 return nilConst(tv.Type) 684 } 685 // Package-level func or var? 686 if v := fn.Prog.packageLevelValue(obj); v != nil { 687 if _, ok := obj.(*types.Var); ok { 688 return emitLoad(fn, v) // var (address) 689 } 690 return v // (func) 691 } 692 // Local var. 693 return emitLoad(fn, fn.lookup(obj, false)) // var (address) 694 695 case *ast.SelectorExpr: 696 sel, ok := fn.Pkg.info.Selections[e] 697 if !ok { 698 // builtin unsafe.{Add,Slice} 699 if obj, ok := fn.Pkg.info.Uses[e.Sel].(*types.Builtin); ok { 700 return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} 701 } 702 // qualified identifier 703 return b.expr(fn, e.Sel) 704 } 705 switch sel.Kind() { 706 case types.MethodExpr: 707 // (*T).f or T.f, the method f from the method-set of type T. 708 // The result is a "thunk". 709 return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) 710 711 case types.MethodVal: 712 // e.f where e is an expression and f is a method. 713 // The result is a "bound". 714 obj := sel.Obj().(*types.Func) 715 rt := recvType(obj) 716 wantAddr := isPointer(rt) 717 escaping := true 718 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 719 if isInterface(rt) { 720 // If v has interface type I, 721 // we must emit a check that v is non-nil. 722 // We use: typeassert v.(I). 723 emitTypeAssert(fn, v, rt, token.NoPos) 724 } 725 c := &MakeClosure{ 726 Fn: makeBound(fn.Prog, obj), 727 Bindings: []Value{v}, 728 } 729 c.setPos(e.Sel.Pos()) 730 c.setType(tv.Type) 731 return fn.emit(c) 732 733 case types.FieldVal: 734 indices := sel.Index() 735 last := len(indices) - 1 736 v := b.expr(fn, e.X) 737 v = emitImplicitSelections(fn, v, indices[:last]) 738 v = emitFieldSelection(fn, v, indices[last], false, e.Sel) 739 return v 740 } 741 742 panic("unexpected expression-relative selector") 743 744 case *ast.IndexExpr: 745 switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { 746 case *types.Array: 747 // Non-addressable array (in a register). 748 v := &Index{ 749 X: b.expr(fn, e.X), 750 Index: emitConv(fn, b.expr(fn, e.Index), tInt), 751 } 752 v.setPos(e.Lbrack) 753 v.setType(t.Elem()) 754 return fn.emit(v) 755 756 case *types.Map: 757 // Maps are not addressable. 758 mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) 759 v := &Lookup{ 760 X: b.expr(fn, e.X), 761 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 762 } 763 v.setPos(e.Lbrack) 764 v.setType(mapt.Elem()) 765 return fn.emit(v) 766 767 case *types.Basic: // => string 768 // Strings are not addressable. 769 v := &Lookup{ 770 X: b.expr(fn, e.X), 771 Index: b.expr(fn, e.Index), 772 } 773 v.setPos(e.Lbrack) 774 v.setType(tByte) 775 return fn.emit(v) 776 777 case *types.Slice, *types.Pointer: // *array 778 // Addressable slice/array; use IndexAddr and Load. 
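			// An illustrative sketch: "s[i]" lowers to roughly
			//
			//	t0 = &s[i]   (IndexAddr)
			//	t1 = *t0     (Load)
			//
			// rather than copying the element by value.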
779 return b.addr(fn, e, false).load(fn) 780 781 default: 782 panic("unexpected container type in IndexExpr: " + t.String()) 783 } 784 785 case *ast.CompositeLit, *ast.StarExpr: 786 // Addressable types (lvalues) 787 return b.addr(fn, e, false).load(fn) 788 } 789 790 panic(fmt.Sprintf("unexpected expr: %T", e)) 791 } 792 793 // stmtList emits to fn code for all statements in list. 794 func (b *builder) stmtList(fn *Function, list []ast.Stmt) { 795 for _, s := range list { 796 b.stmt(fn, s) 797 } 798 } 799 800 // receiver emits to fn code for expression e in the "receiver" 801 // position of selection e.f (where f may be a field or a method) and 802 // returns the effective receiver after applying the implicit field 803 // selections of sel. 804 // 805 // wantAddr requests that the result is an an address. If 806 // !sel.Indirect(), this may require that e be built in addr() mode; it 807 // must thus be addressable. 808 // 809 // escaping is defined as per builder.addr(). 810 // 811 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { 812 var v Value 813 if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { 814 v = b.addr(fn, e, escaping).address(fn) 815 } else { 816 v = b.expr(fn, e) 817 } 818 819 last := len(sel.Index()) - 1 820 v = emitImplicitSelections(fn, v, sel.Index()[:last]) 821 if !wantAddr && isPointer(v.Type()) { 822 v = emitLoad(fn, v) 823 } 824 return v 825 } 826 827 // setCallFunc populates the function parts of a CallCommon structure 828 // (Func, Method, Recv, Args[0]) based on the kind of invocation 829 // occurring in e. 830 // 831 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { 832 c.pos = e.Lparen 833 834 // Is this a method call? 835 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { 836 sel, ok := fn.Pkg.info.Selections[selector] 837 if ok && sel.Kind() == types.MethodVal { 838 obj := sel.Obj().(*types.Func) 839 recv := recvType(obj) 840 wantAddr := isPointer(recv) 841 escaping := true 842 v := b.receiver(fn, selector.X, wantAddr, escaping, sel) 843 if isInterface(recv) { 844 // Invoke-mode call. 845 c.Value = v 846 c.Method = obj 847 } else { 848 // "Call"-mode call. 849 c.Value = fn.Prog.declaredFunc(obj) 850 c.Args = append(c.Args, v) 851 } 852 return 853 } 854 855 // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): 856 // a statically dispatched call to the method f in the 857 // method-set of T or *T. T may be an interface. 858 // 859 // e.Fun would evaluate to a concrete method, interface 860 // wrapper function, or promotion wrapper. 861 // 862 // For now, we evaluate it in the usual way. 863 // 864 // TODO(adonovan): opt: inline expr() here, to make the 865 // call static and to avoid generation of wrappers. 866 // It's somewhat tricky as it may consume the first 867 // actual parameter if the call is "invoke" mode. 868 // 869 // Examples: 870 // type T struct{}; func (T) f() {} // "call" mode 871 // type T interface { f() } // "invoke" mode 872 // 873 // type S struct{ T } 874 // 875 // var s S 876 // S.f(s) 877 // (*S).f(&s) 878 // 879 // Suggested approach: 880 // - consume the first actual parameter expression 881 // and build it with b.expr(). 882 // - apply implicit field selections. 883 // - use MethodVal logic to populate fields of c. 884 } 885 886 // Evaluate the function operand in the usual way. 
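	// (Illustrative note: ordinary calls such as f(x), where f is a
	// package-level function or a func value, and qualified calls such as
	// fmt.Println(x) all reach this point, as do the T.f(x)
	// method-expression calls discussed in the comment above.)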
887 c.Value = b.expr(fn, e.Fun) 888 } 889 890 // emitCallArgs emits to f code for the actual parameters of call e to 891 // a (possibly built-in) function of effective type sig. 892 // The argument values are appended to args, which is then returned. 893 // 894 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { 895 // f(x, y, z...): pass slice z straight through. 896 if e.Ellipsis != 0 { 897 for i, arg := range e.Args { 898 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) 899 args = append(args, v) 900 } 901 return args 902 } 903 904 offset := len(args) // 1 if call has receiver, 0 otherwise 905 906 // Evaluate actual parameter expressions. 907 // 908 // If this is a chained call of the form f(g()) where g has 909 // multiple return values (MRV), they are flattened out into 910 // args; a suffix of them may end up in a varargs slice. 911 for _, arg := range e.Args { 912 v := b.expr(fn, arg) 913 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain 914 for i, n := 0, ttuple.Len(); i < n; i++ { 915 args = append(args, emitExtract(fn, v, i)) 916 } 917 } else { 918 args = append(args, v) 919 } 920 } 921 922 // Actual->formal assignability conversions for normal parameters. 923 np := sig.Params().Len() // number of normal parameters 924 if sig.Variadic() { 925 np-- 926 } 927 for i := 0; i < np; i++ { 928 args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) 929 } 930 931 // Actual->formal assignability conversions for variadic parameter, 932 // and construction of slice. 933 if sig.Variadic() { 934 varargs := args[offset+np:] 935 st := sig.Params().At(np).Type().(*types.Slice) 936 vt := st.Elem() 937 if len(varargs) == 0 { 938 args = append(args, nilConst(st)) 939 } else { 940 // Replace a suffix of args with a slice containing it. 941 at := types.NewArray(vt, int64(len(varargs))) 942 a := emitNew(fn, at, token.NoPos) 943 a.setPos(e.Rparen) 944 a.Comment = "varargs" 945 for i, arg := range varargs { 946 iaddr := &IndexAddr{ 947 X: a, 948 Index: intConst(int64(i)), 949 } 950 iaddr.setType(types.NewPointer(vt)) 951 fn.emit(iaddr) 952 emitStore(fn, iaddr, arg, arg.Pos()) 953 } 954 s := &Slice{X: a} 955 s.setType(st) 956 args[offset+np] = fn.emit(s) 957 args = args[:offset+np+1] 958 } 959 } 960 return args 961 } 962 963 // setCall emits to fn code to evaluate all the parameters of a function 964 // call e, and populates *c with those values. 965 // 966 func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { 967 // First deal with the f(...) part and optional receiver. 968 b.setCallFunc(fn, e, c) 969 970 // Then append the other actual parameters. 971 sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) 972 if sig == nil { 973 panic(fmt.Sprintf("no signature for call of %s", e.Fun)) 974 } 975 c.Args = b.emitCallArgs(fn, sig, e, c.Args) 976 } 977 978 // assignOp emits to fn code to perform loc <op>= val. 979 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { 980 oldv := loc.load(fn) 981 loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos)) 982 } 983 984 // localValueSpec emits to fn code to define all of the vars in the 985 // function-local ValueSpec, spec. 986 // 987 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { 988 switch { 989 case len(spec.Values) == len(spec.Names): 990 // e.g. 
var x, y = 0, 1 991 // 1:1 assignment 992 for i, id := range spec.Names { 993 if !isBlankIdent(id) { 994 fn.addLocalForIdent(id) 995 } 996 lval := b.addr(fn, id, false) // non-escaping 997 b.assign(fn, lval, spec.Values[i], true, nil) 998 } 999 1000 case len(spec.Values) == 0: 1001 // e.g. var x, y int 1002 // Locals are implicitly zero-initialized. 1003 for _, id := range spec.Names { 1004 if !isBlankIdent(id) { 1005 lhs := fn.addLocalForIdent(id) 1006 if fn.debugInfo() { 1007 emitDebugRef(fn, id, lhs, true) 1008 } 1009 } 1010 } 1011 1012 default: 1013 // e.g. var x, y = pos() 1014 tuple := b.exprN(fn, spec.Values[0]) 1015 for i, id := range spec.Names { 1016 if !isBlankIdent(id) { 1017 fn.addLocalForIdent(id) 1018 lhs := b.addr(fn, id, false) // non-escaping 1019 lhs.store(fn, emitExtract(fn, tuple, i)) 1020 } 1021 } 1022 } 1023 } 1024 1025 // assignStmt emits code to fn for a parallel assignment of rhss to lhss. 1026 // isDef is true if this is a short variable declaration (:=). 1027 // 1028 // Note the similarity with localValueSpec. 1029 // 1030 func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { 1031 // Side effects of all LHSs and RHSs must occur in left-to-right order. 1032 lvals := make([]lvalue, len(lhss)) 1033 isZero := make([]bool, len(lhss)) 1034 for i, lhs := range lhss { 1035 var lval lvalue = blank{} 1036 if !isBlankIdent(lhs) { 1037 if isDef { 1038 if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { 1039 fn.addNamedLocal(obj) 1040 isZero[i] = true 1041 } 1042 } 1043 lval = b.addr(fn, lhs, false) // non-escaping 1044 } 1045 lvals[i] = lval 1046 } 1047 if len(lhss) == len(rhss) { 1048 // Simple assignment: x = f() (!isDef) 1049 // Parallel assignment: x, y = f(), g() (!isDef) 1050 // or short var decl: x, y := f(), g() (isDef) 1051 // 1052 // In all cases, the RHSs may refer to the LHSs, 1053 // so we need a storebuf. 1054 var sb storebuf 1055 for i := range rhss { 1056 b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) 1057 } 1058 sb.emit(fn) 1059 } else { 1060 // e.g. x, y = pos() 1061 tuple := b.exprN(fn, rhss[0]) 1062 emitDebugRef(fn, rhss[0], tuple, false) 1063 for i, lval := range lvals { 1064 lval.store(fn, emitExtract(fn, tuple, i)) 1065 } 1066 } 1067 } 1068 1069 // arrayLen returns the length of the array whose composite literal elements are elts. 1070 func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { 1071 var max int64 = -1 1072 var i int64 = -1 1073 for _, e := range elts { 1074 if kv, ok := e.(*ast.KeyValueExpr); ok { 1075 i = b.expr(fn, kv.Key).(*Const).Int64() 1076 } else { 1077 i++ 1078 } 1079 if i > max { 1080 max = i 1081 } 1082 } 1083 return max + 1 1084 } 1085 1086 // compLit emits to fn code to initialize a composite literal e at 1087 // address addr with type typ. 1088 // 1089 // Nested composite literals are recursively initialized in place 1090 // where possible. If isZero is true, compLit assumes that addr 1091 // holds the zero value for typ. 1092 // 1093 // Because the elements of a composite literal may refer to the 1094 // variables being updated, as in the second line below, 1095 // x := T{a: 1} 1096 // x = T{a: x.a} 1097 // all the reads must occur before all the writes. Thus all stores to 1098 // loc are emitted to the storebuf sb for later execution. 1099 // 1100 // A CompositeLit may have pointer type only in the recursive (nested) 1101 // case when the type name is implicit. e.g. in []*T{{}}, the inner 1102 // literal has type *T behaves like &T{}. 
1103 // In that case, addr must hold a T, not a *T. 1104 // 1105 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { 1106 typ := deref(fn.Pkg.typeOf(e)) 1107 switch t := typ.Underlying().(type) { 1108 case *types.Struct: 1109 if !isZero && len(e.Elts) != t.NumFields() { 1110 // memclear 1111 sb.store(&address{addr, e.Lbrace, nil}, 1112 zeroValue(fn, deref(addr.Type()))) 1113 isZero = true 1114 } 1115 for i, e := range e.Elts { 1116 fieldIndex := i 1117 pos := e.Pos() 1118 if kv, ok := e.(*ast.KeyValueExpr); ok { 1119 fname := kv.Key.(*ast.Ident).Name 1120 for i, n := 0, t.NumFields(); i < n; i++ { 1121 sf := t.Field(i) 1122 if sf.Name() == fname { 1123 fieldIndex = i 1124 pos = kv.Colon 1125 e = kv.Value 1126 break 1127 } 1128 } 1129 } 1130 sf := t.Field(fieldIndex) 1131 faddr := &FieldAddr{ 1132 X: addr, 1133 Field: fieldIndex, 1134 } 1135 faddr.setType(types.NewPointer(sf.Type())) 1136 fn.emit(faddr) 1137 b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) 1138 } 1139 1140 case *types.Array, *types.Slice: 1141 var at *types.Array 1142 var array Value 1143 switch t := t.(type) { 1144 case *types.Slice: 1145 at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) 1146 alloc := emitNew(fn, at, e.Lbrace) 1147 alloc.Comment = "slicelit" 1148 array = alloc 1149 case *types.Array: 1150 at = t 1151 array = addr 1152 1153 if !isZero && int64(len(e.Elts)) != at.Len() { 1154 // memclear 1155 sb.store(&address{array, e.Lbrace, nil}, 1156 zeroValue(fn, deref(array.Type()))) 1157 } 1158 } 1159 1160 var idx *Const 1161 for _, e := range e.Elts { 1162 pos := e.Pos() 1163 if kv, ok := e.(*ast.KeyValueExpr); ok { 1164 idx = b.expr(fn, kv.Key).(*Const) 1165 pos = kv.Colon 1166 e = kv.Value 1167 } else { 1168 var idxval int64 1169 if idx != nil { 1170 idxval = idx.Int64() + 1 1171 } 1172 idx = intConst(idxval) 1173 } 1174 iaddr := &IndexAddr{ 1175 X: array, 1176 Index: idx, 1177 } 1178 iaddr.setType(types.NewPointer(at.Elem())) 1179 fn.emit(iaddr) 1180 if t != at { // slice 1181 // backing array is unaliased => storebuf not needed. 1182 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) 1183 } else { 1184 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) 1185 } 1186 } 1187 1188 if t != at { // slice 1189 s := &Slice{X: array} 1190 s.setPos(e.Lbrace) 1191 s.setType(typ) 1192 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) 1193 } 1194 1195 case *types.Map: 1196 m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} 1197 m.setPos(e.Lbrace) 1198 m.setType(typ) 1199 fn.emit(m) 1200 for _, e := range e.Elts { 1201 e := e.(*ast.KeyValueExpr) 1202 1203 // If a key expression in a map literal is itself a 1204 // composite literal, the type may be omitted. 1205 // For example: 1206 // map[*struct{}]bool{{}: true} 1207 // An &-operation may be implied: 1208 // map[*struct{}]bool{&struct{}{}: true} 1209 var key Value 1210 if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { 1211 // A CompositeLit never evaluates to a pointer, 1212 // so if the type of the location is a pointer, 1213 // an &-operation is implied. 
1214 key = b.addr(fn, e.Key, true).address(fn) 1215 } else { 1216 key = b.expr(fn, e.Key) 1217 } 1218 1219 loc := element{ 1220 m: m, 1221 k: emitConv(fn, key, t.Key()), 1222 t: t.Elem(), 1223 pos: e.Colon, 1224 } 1225 1226 // We call assign() only because it takes care 1227 // of any &-operation required in the recursive 1228 // case, e.g., 1229 // map[int]*struct{}{0: {}} implies &struct{}{}. 1230 // In-place update is of course impossible, 1231 // and no storebuf is needed. 1232 b.assign(fn, &loc, e.Value, true, nil) 1233 } 1234 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) 1235 1236 default: 1237 panic("unexpected CompositeLit type: " + t.String()) 1238 } 1239 } 1240 1241 // switchStmt emits to fn code for the switch statement s, optionally 1242 // labelled by label. 1243 // 1244 func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { 1245 // We treat SwitchStmt like a sequential if-else chain. 1246 // Multiway dispatch can be recovered later by ssautil.Switches() 1247 // to those cases that are free of side effects. 1248 if s.Init != nil { 1249 b.stmt(fn, s.Init) 1250 } 1251 var tag Value = vTrue 1252 if s.Tag != nil { 1253 tag = b.expr(fn, s.Tag) 1254 } 1255 done := fn.newBasicBlock("switch.done") 1256 if label != nil { 1257 label._break = done 1258 } 1259 // We pull the default case (if present) down to the end. 1260 // But each fallthrough label must point to the next 1261 // body block in source order, so we preallocate a 1262 // body block (fallthru) for the next case. 1263 // Unfortunately this makes for a confusing block order. 1264 var dfltBody *[]ast.Stmt 1265 var dfltFallthrough *BasicBlock 1266 var fallthru, dfltBlock *BasicBlock 1267 ncases := len(s.Body.List) 1268 for i, clause := range s.Body.List { 1269 body := fallthru 1270 if body == nil { 1271 body = fn.newBasicBlock("switch.body") // first case only 1272 } 1273 1274 // Preallocate body block for the next case. 1275 fallthru = done 1276 if i+1 < ncases { 1277 fallthru = fn.newBasicBlock("switch.body") 1278 } 1279 1280 cc := clause.(*ast.CaseClause) 1281 if cc.List == nil { 1282 // Default case. 1283 dfltBody = &cc.Body 1284 dfltFallthrough = fallthru 1285 dfltBlock = body 1286 continue 1287 } 1288 1289 var nextCond *BasicBlock 1290 for _, cond := range cc.List { 1291 nextCond = fn.newBasicBlock("switch.next") 1292 // TODO(adonovan): opt: when tag==vTrue, we'd 1293 // get better code if we use b.cond(cond) 1294 // instead of BinOp(EQL, tag, b.expr(cond)) 1295 // followed by If. Don't forget conversions 1296 // though. 1297 cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) 1298 emitIf(fn, cond, body, nextCond) 1299 fn.currentBlock = nextCond 1300 } 1301 fn.currentBlock = body 1302 fn.targets = &targets{ 1303 tail: fn.targets, 1304 _break: done, 1305 _fallthrough: fallthru, 1306 } 1307 b.stmtList(fn, cc.Body) 1308 fn.targets = fn.targets.tail 1309 emitJump(fn, done) 1310 fn.currentBlock = nextCond 1311 } 1312 if dfltBlock != nil { 1313 emitJump(fn, dfltBlock) 1314 fn.currentBlock = dfltBlock 1315 fn.targets = &targets{ 1316 tail: fn.targets, 1317 _break: done, 1318 _fallthrough: dfltFallthrough, 1319 } 1320 b.stmtList(fn, *dfltBody) 1321 fn.targets = fn.targets.tail 1322 } 1323 emitJump(fn, done) 1324 fn.currentBlock = done 1325 } 1326 1327 // typeSwitchStmt emits to fn code for the type switch statement s, optionally 1328 // labelled by label. 
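// For example, in "switch y := x.(type) { case int: ... }" the single-type
// case binds y to the asserted int value, while multi-type, nil, and
// default cases bind y to x itself (with its interface type), as sketched
// in the lowering below.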
1329 // 1330 func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { 1331 // We treat TypeSwitchStmt like a sequential if-else chain. 1332 // Multiway dispatch can be recovered later by ssautil.Switches(). 1333 1334 // Typeswitch lowering: 1335 // 1336 // var x X 1337 // switch y := x.(type) { 1338 // case T1, T2: S1 // >1 (y := x) 1339 // case nil: SN // nil (y := x) 1340 // default: SD // 0 types (y := x) 1341 // case T3: S3 // 1 type (y := x.(T3)) 1342 // } 1343 // 1344 // ...s.Init... 1345 // x := eval x 1346 // .caseT1: 1347 // t1, ok1 := typeswitch,ok x <T1> 1348 // if ok1 then goto S1 else goto .caseT2 1349 // .caseT2: 1350 // t2, ok2 := typeswitch,ok x <T2> 1351 // if ok2 then goto S1 else goto .caseNil 1352 // .S1: 1353 // y := x 1354 // ...S1... 1355 // goto done 1356 // .caseNil: 1357 // if t2, ok2 := typeswitch,ok x <T2> 1358 // if x == nil then goto SN else goto .caseT3 1359 // .SN: 1360 // y := x 1361 // ...SN... 1362 // goto done 1363 // .caseT3: 1364 // t3, ok3 := typeswitch,ok x <T3> 1365 // if ok3 then goto S3 else goto default 1366 // .S3: 1367 // y := t3 1368 // ...S3... 1369 // goto done 1370 // .default: 1371 // y := x 1372 // ...SD... 1373 // goto done 1374 // .done: 1375 1376 if s.Init != nil { 1377 b.stmt(fn, s.Init) 1378 } 1379 1380 var x Value 1381 switch ass := s.Assign.(type) { 1382 case *ast.ExprStmt: // x.(type) 1383 x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) 1384 case *ast.AssignStmt: // y := x.(type) 1385 x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) 1386 } 1387 1388 done := fn.newBasicBlock("typeswitch.done") 1389 if label != nil { 1390 label._break = done 1391 } 1392 var default_ *ast.CaseClause 1393 for _, clause := range s.Body.List { 1394 cc := clause.(*ast.CaseClause) 1395 if cc.List == nil { 1396 default_ = cc 1397 continue 1398 } 1399 body := fn.newBasicBlock("typeswitch.body") 1400 var next *BasicBlock 1401 var casetype types.Type 1402 var ti Value // ti, ok := typeassert,ok x <Ti> 1403 for _, cond := range cc.List { 1404 next = fn.newBasicBlock("typeswitch.next") 1405 casetype = fn.Pkg.typeOf(cond) 1406 var condv Value 1407 if casetype == tUntypedNil { 1408 condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) 1409 ti = x 1410 } else { 1411 yok := emitTypeTest(fn, x, casetype, cc.Case) 1412 ti = emitExtract(fn, yok, 0) 1413 condv = emitExtract(fn, yok, 1) 1414 } 1415 emitIf(fn, condv, body, next) 1416 fn.currentBlock = next 1417 } 1418 if len(cc.List) != 1 { 1419 ti = x 1420 } 1421 fn.currentBlock = body 1422 b.typeCaseBody(fn, cc, ti, done) 1423 fn.currentBlock = next 1424 } 1425 if default_ != nil { 1426 b.typeCaseBody(fn, default_, x, done) 1427 } else { 1428 emitJump(fn, done) 1429 } 1430 fn.currentBlock = done 1431 } 1432 1433 func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { 1434 if obj := fn.Pkg.info.Implicits[cc]; obj != nil { 1435 // In a switch y := x.(type), each case clause 1436 // implicitly declares a distinct object y. 1437 // In a single-type case, y has that type. 1438 // In multi-type cases, 'case nil' and default, 1439 // y has the same type as the interface operand. 1440 emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) 1441 } 1442 fn.targets = &targets{ 1443 tail: fn.targets, 1444 _break: done, 1445 } 1446 b.stmtList(fn, cc.Body) 1447 fn.targets = fn.targets.tail 1448 emitJump(fn, done) 1449 } 1450 1451 // selectStmt emits to fn code for the select statement s, optionally 1452 // labelled by label. 
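// (For example, a blocking single-case select such as
// "select { case ch <- v: S }" degenerates to a plain send followed by S;
// see the fast path at the top of the function body below.)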
1453 // 1454 func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { 1455 // A blocking select of a single case degenerates to a 1456 // simple send or receive. 1457 // TODO(adonovan): opt: is this optimization worth its weight? 1458 if len(s.Body.List) == 1 { 1459 clause := s.Body.List[0].(*ast.CommClause) 1460 if clause.Comm != nil { 1461 b.stmt(fn, clause.Comm) 1462 done := fn.newBasicBlock("select.done") 1463 if label != nil { 1464 label._break = done 1465 } 1466 fn.targets = &targets{ 1467 tail: fn.targets, 1468 _break: done, 1469 } 1470 b.stmtList(fn, clause.Body) 1471 fn.targets = fn.targets.tail 1472 emitJump(fn, done) 1473 fn.currentBlock = done 1474 return 1475 } 1476 } 1477 1478 // First evaluate all channels in all cases, and find 1479 // the directions of each state. 1480 var states []*SelectState 1481 blocking := true 1482 debugInfo := fn.debugInfo() 1483 for _, clause := range s.Body.List { 1484 var st *SelectState 1485 switch comm := clause.(*ast.CommClause).Comm.(type) { 1486 case nil: // default case 1487 blocking = false 1488 continue 1489 1490 case *ast.SendStmt: // ch<- i 1491 ch := b.expr(fn, comm.Chan) 1492 st = &SelectState{ 1493 Dir: types.SendOnly, 1494 Chan: ch, 1495 Send: emitConv(fn, b.expr(fn, comm.Value), 1496 ch.Type().Underlying().(*types.Chan).Elem()), 1497 Pos: comm.Arrow, 1498 } 1499 if debugInfo { 1500 st.DebugNode = comm 1501 } 1502 1503 case *ast.AssignStmt: // x := <-ch 1504 recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) 1505 st = &SelectState{ 1506 Dir: types.RecvOnly, 1507 Chan: b.expr(fn, recv.X), 1508 Pos: recv.OpPos, 1509 } 1510 if debugInfo { 1511 st.DebugNode = recv 1512 } 1513 1514 case *ast.ExprStmt: // <-ch 1515 recv := unparen(comm.X).(*ast.UnaryExpr) 1516 st = &SelectState{ 1517 Dir: types.RecvOnly, 1518 Chan: b.expr(fn, recv.X), 1519 Pos: recv.OpPos, 1520 } 1521 if debugInfo { 1522 st.DebugNode = recv 1523 } 1524 } 1525 states = append(states, st) 1526 } 1527 1528 // We dispatch on the (fair) result of Select using a 1529 // sequential if-else chain, in effect: 1530 // 1531 // idx, recvOk, r0...r_n-1 := select(...) 1532 // if idx == 0 { // receive on channel 0 (first receive => r0) 1533 // x, ok := r0, recvOk 1534 // ...state0... 1535 // } else if v == 1 { // send on channel 1 1536 // ...state1... 1537 // } else { 1538 // ...default... 
1539 // } 1540 sel := &Select{ 1541 States: states, 1542 Blocking: blocking, 1543 } 1544 sel.setPos(s.Select) 1545 var vars []*types.Var 1546 vars = append(vars, varIndex, varOk) 1547 for _, st := range states { 1548 if st.Dir == types.RecvOnly { 1549 tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() 1550 vars = append(vars, anonVar(tElem)) 1551 } 1552 } 1553 sel.setType(types.NewTuple(vars...)) 1554 1555 fn.emit(sel) 1556 idx := emitExtract(fn, sel, 0) 1557 1558 done := fn.newBasicBlock("select.done") 1559 if label != nil { 1560 label._break = done 1561 } 1562 1563 var defaultBody *[]ast.Stmt 1564 state := 0 1565 r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV 1566 for _, cc := range s.Body.List { 1567 clause := cc.(*ast.CommClause) 1568 if clause.Comm == nil { 1569 defaultBody = &clause.Body 1570 continue 1571 } 1572 body := fn.newBasicBlock("select.body") 1573 next := fn.newBasicBlock("select.next") 1574 emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) 1575 fn.currentBlock = body 1576 fn.targets = &targets{ 1577 tail: fn.targets, 1578 _break: done, 1579 } 1580 switch comm := clause.Comm.(type) { 1581 case *ast.ExprStmt: // <-ch 1582 if debugInfo { 1583 v := emitExtract(fn, sel, r) 1584 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1585 } 1586 r++ 1587 1588 case *ast.AssignStmt: // x := <-states[state].Chan 1589 if comm.Tok == token.DEFINE { 1590 fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) 1591 } 1592 x := b.addr(fn, comm.Lhs[0], false) // non-escaping 1593 v := emitExtract(fn, sel, r) 1594 if debugInfo { 1595 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1596 } 1597 x.store(fn, v) 1598 1599 if len(comm.Lhs) == 2 { // x, ok := ... 1600 if comm.Tok == token.DEFINE { 1601 fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) 1602 } 1603 ok := b.addr(fn, comm.Lhs[1], false) // non-escaping 1604 ok.store(fn, emitExtract(fn, sel, 1)) 1605 } 1606 r++ 1607 } 1608 b.stmtList(fn, clause.Body) 1609 fn.targets = fn.targets.tail 1610 emitJump(fn, done) 1611 fn.currentBlock = next 1612 state++ 1613 } 1614 if defaultBody != nil { 1615 fn.targets = &targets{ 1616 tail: fn.targets, 1617 _break: done, 1618 } 1619 b.stmtList(fn, *defaultBody) 1620 fn.targets = fn.targets.tail 1621 } else { 1622 // A blocking select must match some case. 1623 // (This should really be a runtime.errorString, not a string.) 1624 fn.emit(&Panic{ 1625 X: emitConv(fn, stringConst("blocking select matched no case"), tEface), 1626 }) 1627 fn.currentBlock = fn.newBasicBlock("unreachable") 1628 } 1629 emitJump(fn, done) 1630 fn.currentBlock = done 1631 } 1632 1633 // forStmt emits to fn code for the for statement s, optionally 1634 // labelled by label. 1635 // 1636 func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { 1637 // ...init... 1638 // jump loop 1639 // loop: 1640 // if cond goto body else done 1641 // body: 1642 // ...body... 1643 // jump post 1644 // post: (target of continue) 1645 // ...post... 
1646 // jump loop 1647 // done: (target of break) 1648 if s.Init != nil { 1649 b.stmt(fn, s.Init) 1650 } 1651 body := fn.newBasicBlock("for.body") 1652 done := fn.newBasicBlock("for.done") // target of 'break' 1653 loop := body // target of back-edge 1654 if s.Cond != nil { 1655 loop = fn.newBasicBlock("for.loop") 1656 } 1657 cont := loop // target of 'continue' 1658 if s.Post != nil { 1659 cont = fn.newBasicBlock("for.post") 1660 } 1661 if label != nil { 1662 label._break = done 1663 label._continue = cont 1664 } 1665 emitJump(fn, loop) 1666 fn.currentBlock = loop 1667 if loop != body { 1668 b.cond(fn, s.Cond, body, done) 1669 fn.currentBlock = body 1670 } 1671 fn.targets = &targets{ 1672 tail: fn.targets, 1673 _break: done, 1674 _continue: cont, 1675 } 1676 b.stmt(fn, s.Body) 1677 fn.targets = fn.targets.tail 1678 emitJump(fn, cont) 1679 1680 if s.Post != nil { 1681 fn.currentBlock = cont 1682 b.stmt(fn, s.Post) 1683 emitJump(fn, loop) // back-edge 1684 } 1685 fn.currentBlock = done 1686 } 1687 1688 // rangeIndexed emits to fn the header for an integer-indexed loop 1689 // over array, *array or slice value x. 1690 // The v result is defined only if tv is non-nil. 1691 // forPos is the position of the "for" token. 1692 // 1693 func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1694 // 1695 // length = len(x) 1696 // index = -1 1697 // loop: (target of continue) 1698 // index++ 1699 // if index < length goto body else done 1700 // body: 1701 // k = index 1702 // v = x[index] 1703 // ...body... 1704 // jump loop 1705 // done: (target of break) 1706 1707 // Determine number of iterations. 1708 var length Value 1709 if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { 1710 // For array or *array, the number of iterations is 1711 // known statically thanks to the type. We avoid a 1712 // data dependence upon x, permitting later dead-code 1713 // elimination if x is pure, static unrolling, etc. 1714 // Ranging over a nil *array may have >0 iterations. 1715 // We still generate code for x, in case it has effects. 1716 length = intConst(arr.Len()) 1717 } else { 1718 // length = len(x). 
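		// An illustrative sketch: for a slice s this emits roughly
		//
		//	t0 = len(s)
		//
		// in contrast to the constant bound used for arrays above.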
1719 var c Call 1720 c.Call.Value = makeLen(x.Type()) 1721 c.Call.Args = []Value{x} 1722 c.setType(tInt) 1723 length = fn.emit(&c) 1724 } 1725 1726 index := fn.addLocal(tInt, token.NoPos) 1727 emitStore(fn, index, intConst(-1), pos) 1728 1729 loop = fn.newBasicBlock("rangeindex.loop") 1730 emitJump(fn, loop) 1731 fn.currentBlock = loop 1732 1733 incr := &BinOp{ 1734 Op: token.ADD, 1735 X: emitLoad(fn, index), 1736 Y: vOne, 1737 } 1738 incr.setType(tInt) 1739 emitStore(fn, index, fn.emit(incr), pos) 1740 1741 body := fn.newBasicBlock("rangeindex.body") 1742 done = fn.newBasicBlock("rangeindex.done") 1743 emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) 1744 fn.currentBlock = body 1745 1746 k = emitLoad(fn, index) 1747 if tv != nil { 1748 switch t := x.Type().Underlying().(type) { 1749 case *types.Array: 1750 instr := &Index{ 1751 X: x, 1752 Index: k, 1753 } 1754 instr.setType(t.Elem()) 1755 instr.setPos(x.Pos()) 1756 v = fn.emit(instr) 1757 1758 case *types.Pointer: // *array 1759 instr := &IndexAddr{ 1760 X: x, 1761 Index: k, 1762 } 1763 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) 1764 instr.setPos(x.Pos()) 1765 v = emitLoad(fn, fn.emit(instr)) 1766 1767 case *types.Slice: 1768 instr := &IndexAddr{ 1769 X: x, 1770 Index: k, 1771 } 1772 instr.setType(types.NewPointer(t.Elem())) 1773 instr.setPos(x.Pos()) 1774 v = emitLoad(fn, fn.emit(instr)) 1775 1776 default: 1777 panic("rangeIndexed x:" + t.String()) 1778 } 1779 } 1780 return 1781 } 1782 1783 // rangeIter emits to fn the header for a loop using 1784 // Range/Next/Extract to iterate over map or string value x. 1785 // tk and tv are the types of the key/value results k and v, or nil 1786 // if the respective component is not wanted. 1787 // 1788 func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1789 // 1790 // it = range x 1791 // loop: (target of continue) 1792 // okv = next it (ok, key, value) 1793 // ok = extract okv #0 1794 // if ok goto body else done 1795 // body: 1796 // k = extract okv #1 1797 // v = extract okv #2 1798 // ...body... 1799 // jump loop 1800 // done: (target of break) 1801 // 1802 1803 if tk == nil { 1804 tk = tInvalid 1805 } 1806 if tv == nil { 1807 tv = tInvalid 1808 } 1809 1810 rng := &Range{X: x} 1811 rng.setPos(pos) 1812 rng.setType(tRangeIter) 1813 it := fn.emit(rng) 1814 1815 loop = fn.newBasicBlock("rangeiter.loop") 1816 emitJump(fn, loop) 1817 fn.currentBlock = loop 1818 1819 _, isString := x.Type().Underlying().(*types.Basic) 1820 1821 okv := &Next{ 1822 Iter: it, 1823 IsString: isString, 1824 } 1825 okv.setType(types.NewTuple( 1826 varOk, 1827 newVar("k", tk), 1828 newVar("v", tv), 1829 )) 1830 fn.emit(okv) 1831 1832 body := fn.newBasicBlock("rangeiter.body") 1833 done = fn.newBasicBlock("rangeiter.done") 1834 emitIf(fn, emitExtract(fn, okv, 0), body, done) 1835 fn.currentBlock = body 1836 1837 if tk != tInvalid { 1838 k = emitExtract(fn, okv, 1) 1839 } 1840 if tv != tInvalid { 1841 v = emitExtract(fn, okv, 2) 1842 } 1843 return 1844 } 1845 1846 // rangeChan emits to fn the header for a loop that receives from 1847 // channel x until it fails. 1848 // tk is the channel's element type, or nil if the k result is 1849 // not wanted 1850 // pos is the position of the '=' or ':=' token. 
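// For example, "for v := range ch" compiles to a loop whose header performs
// a comma-ok receive: extract #1 is the loop condition and extract #0 is
// stored into v when the key result is wanted.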
1851 // 1852 func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { 1853 // 1854 // loop: (target of continue) 1855 // ko = <-x (key, ok) 1856 // ok = extract ko #1 1857 // if ok goto body else done 1858 // body: 1859 // k = extract ko #0 1860 // ... 1861 // goto loop 1862 // done: (target of break) 1863 1864 loop = fn.newBasicBlock("rangechan.loop") 1865 emitJump(fn, loop) 1866 fn.currentBlock = loop 1867 recv := &UnOp{ 1868 Op: token.ARROW, 1869 X: x, 1870 CommaOk: true, 1871 } 1872 recv.setPos(pos) 1873 recv.setType(types.NewTuple( 1874 newVar("k", x.Type().Underlying().(*types.Chan).Elem()), 1875 varOk, 1876 )) 1877 ko := fn.emit(recv) 1878 body := fn.newBasicBlock("rangechan.body") 1879 done = fn.newBasicBlock("rangechan.done") 1880 emitIf(fn, emitExtract(fn, ko, 1), body, done) 1881 fn.currentBlock = body 1882 if tk != nil { 1883 k = emitExtract(fn, ko, 0) 1884 } 1885 return 1886 } 1887 1888 // rangeStmt emits to fn code for the range statement s, optionally 1889 // labelled by label. 1890 // 1891 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { 1892 var tk, tv types.Type 1893 if s.Key != nil && !isBlankIdent(s.Key) { 1894 tk = fn.Pkg.typeOf(s.Key) 1895 } 1896 if s.Value != nil && !isBlankIdent(s.Value) { 1897 tv = fn.Pkg.typeOf(s.Value) 1898 } 1899 1900 // If iteration variables are defined (:=), this 1901 // occurs once outside the loop. 1902 // 1903 // Unlike a short variable declaration, a RangeStmt 1904 // using := never redeclares an existing variable; it 1905 // always creates a new one. 1906 if s.Tok == token.DEFINE { 1907 if tk != nil { 1908 fn.addLocalForIdent(s.Key.(*ast.Ident)) 1909 } 1910 if tv != nil { 1911 fn.addLocalForIdent(s.Value.(*ast.Ident)) 1912 } 1913 } 1914 1915 x := b.expr(fn, s.X) 1916 1917 var k, v Value 1918 var loop, done *BasicBlock 1919 switch rt := x.Type().Underlying().(type) { 1920 case *types.Slice, *types.Array, *types.Pointer: // *array 1921 k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) 1922 1923 case *types.Chan: 1924 k, loop, done = b.rangeChan(fn, x, tk, s.For) 1925 1926 case *types.Map, *types.Basic: // string 1927 k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) 1928 1929 default: 1930 panic("Cannot range over: " + rt.String()) 1931 } 1932 1933 // Evaluate both LHS expressions before we update either. 1934 var kl, vl lvalue 1935 if tk != nil { 1936 kl = b.addr(fn, s.Key, false) // non-escaping 1937 } 1938 if tv != nil { 1939 vl = b.addr(fn, s.Value, false) // non-escaping 1940 } 1941 if tk != nil { 1942 kl.store(fn, k) 1943 } 1944 if tv != nil { 1945 vl.store(fn, v) 1946 } 1947 1948 if label != nil { 1949 label._break = done 1950 label._continue = loop 1951 } 1952 1953 fn.targets = &targets{ 1954 tail: fn.targets, 1955 _break: done, 1956 _continue: loop, 1957 } 1958 b.stmt(fn, s.Body) 1959 fn.targets = fn.targets.tail 1960 emitJump(fn, loop) // back-edge 1961 fn.currentBlock = done 1962 } 1963 1964 // stmt lowers statement s to SSA form, emitting code to fn. 1965 func (b *builder) stmt(fn *Function, _s ast.Stmt) { 1966 // The label of the current statement. If non-nil, its _goto 1967 // target is always set; its _break and _continue are set only 1968 // within the body of switch/typeswitch/select/for/range. 1969 // It is effectively an additional default-nil parameter of stmt(). 1970 var label *lblock 1971 start: 1972 switch s := _s.(type) { 1973 case *ast.EmptyStmt: 1974 // ignore. (Usually removed by gofmt.) 
1975 1976 case *ast.DeclStmt: // Con, Var or Typ 1977 d := s.Decl.(*ast.GenDecl) 1978 if d.Tok == token.VAR { 1979 for _, spec := range d.Specs { 1980 if vs, ok := spec.(*ast.ValueSpec); ok { 1981 b.localValueSpec(fn, vs) 1982 } 1983 } 1984 } 1985 1986 case *ast.LabeledStmt: 1987 label = fn.labelledBlock(s.Label) 1988 emitJump(fn, label._goto) 1989 fn.currentBlock = label._goto 1990 _s = s.Stmt 1991 goto start // effectively: tailcall stmt(fn, s.Stmt, label) 1992 1993 case *ast.ExprStmt: 1994 b.expr(fn, s.X) 1995 1996 case *ast.SendStmt: 1997 fn.emit(&Send{ 1998 Chan: b.expr(fn, s.Chan), 1999 X: emitConv(fn, b.expr(fn, s.Value), 2000 fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()), 2001 pos: s.Arrow, 2002 }) 2003 2004 case *ast.IncDecStmt: 2005 op := token.ADD 2006 if s.Tok == token.DEC { 2007 op = token.SUB 2008 } 2009 loc := b.addr(fn, s.X, false) 2010 b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) 2011 2012 case *ast.AssignStmt: 2013 switch s.Tok { 2014 case token.ASSIGN, token.DEFINE: 2015 b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) 2016 2017 default: // +=, etc. 2018 op := s.Tok + token.ADD - token.ADD_ASSIGN 2019 b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) 2020 } 2021 2022 case *ast.GoStmt: 2023 // The "intrinsics" new/make/len/cap are forbidden here. 2024 // panic is treated like an ordinary function call. 2025 v := Go{pos: s.Go} 2026 b.setCall(fn, s.Call, &v.Call) 2027 fn.emit(&v) 2028 2029 case *ast.DeferStmt: 2030 // The "intrinsics" new/make/len/cap are forbidden here. 2031 // panic is treated like an ordinary function call. 2032 v := Defer{pos: s.Defer} 2033 b.setCall(fn, s.Call, &v.Call) 2034 fn.emit(&v) 2035 2036 // A deferred call can cause recovery from panic, 2037 // and control resumes at the Recover block. 2038 createRecoverBlock(fn) 2039 2040 case *ast.ReturnStmt: 2041 var results []Value 2042 if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { 2043 // Return of one expression in a multi-valued function. 2044 tuple := b.exprN(fn, s.Results[0]) 2045 ttuple := tuple.Type().(*types.Tuple) 2046 for i, n := 0, ttuple.Len(); i < n; i++ { 2047 results = append(results, 2048 emitConv(fn, emitExtract(fn, tuple, i), 2049 fn.Signature.Results().At(i).Type())) 2050 } 2051 } else { 2052 // 1:1 return, or no-arg return in non-void function. 2053 for i, r := range s.Results { 2054 v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) 2055 results = append(results, v) 2056 } 2057 } 2058 if fn.namedResults != nil { 2059 // Function has named result parameters (NRPs). 2060 // Perform parallel assignment of return operands to NRPs. 2061 for i, r := range results { 2062 emitStore(fn, fn.namedResults[i], r, s.Return) 2063 } 2064 } 2065 // Run function calls deferred in this 2066 // function when explicitly returning from it. 2067 fn.emit(new(RunDefers)) 2068 if fn.namedResults != nil { 2069 // Reload NRPs to form the result tuple. 
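 			// (A deferred call executed by the RunDefers above may modify the
 			// named results -- for example, with "func f() (x int) { defer
 			// func() { x++ }(); return 0 }" the deferred closure increments x
 			// after 0 has been stored -- so the final Return must load the
 			// NRPs' current values rather than reuse the operands stored
 			// earlier.)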
2070 results = results[:0] 2071 for _, r := range fn.namedResults { 2072 results = append(results, emitLoad(fn, r)) 2073 } 2074 } 2075 fn.emit(&Return{Results: results, pos: s.Return}) 2076 fn.currentBlock = fn.newBasicBlock("unreachable") 2077 2078 case *ast.BranchStmt: 2079 var block *BasicBlock 2080 switch s.Tok { 2081 case token.BREAK: 2082 if s.Label != nil { 2083 block = fn.labelledBlock(s.Label)._break 2084 } else { 2085 for t := fn.targets; t != nil && block == nil; t = t.tail { 2086 block = t._break 2087 } 2088 } 2089 2090 case token.CONTINUE: 2091 if s.Label != nil { 2092 block = fn.labelledBlock(s.Label)._continue 2093 } else { 2094 for t := fn.targets; t != nil && block == nil; t = t.tail { 2095 block = t._continue 2096 } 2097 } 2098 2099 case token.FALLTHROUGH: 2100 for t := fn.targets; t != nil && block == nil; t = t.tail { 2101 block = t._fallthrough 2102 } 2103 2104 case token.GOTO: 2105 block = fn.labelledBlock(s.Label)._goto 2106 } 2107 emitJump(fn, block) 2108 fn.currentBlock = fn.newBasicBlock("unreachable") 2109 2110 case *ast.BlockStmt: 2111 b.stmtList(fn, s.List) 2112 2113 case *ast.IfStmt: 2114 if s.Init != nil { 2115 b.stmt(fn, s.Init) 2116 } 2117 then := fn.newBasicBlock("if.then") 2118 done := fn.newBasicBlock("if.done") 2119 els := done 2120 if s.Else != nil { 2121 els = fn.newBasicBlock("if.else") 2122 } 2123 b.cond(fn, s.Cond, then, els) 2124 fn.currentBlock = then 2125 b.stmt(fn, s.Body) 2126 emitJump(fn, done) 2127 2128 if s.Else != nil { 2129 fn.currentBlock = els 2130 b.stmt(fn, s.Else) 2131 emitJump(fn, done) 2132 } 2133 2134 fn.currentBlock = done 2135 2136 case *ast.SwitchStmt: 2137 b.switchStmt(fn, s, label) 2138 2139 case *ast.TypeSwitchStmt: 2140 b.typeSwitchStmt(fn, s, label) 2141 2142 case *ast.SelectStmt: 2143 b.selectStmt(fn, s, label) 2144 2145 case *ast.ForStmt: 2146 b.forStmt(fn, s, label) 2147 2148 case *ast.RangeStmt: 2149 b.rangeStmt(fn, s, label) 2150 2151 default: 2152 panic(fmt.Sprintf("unexpected statement kind: %T", s)) 2153 } 2154 } 2155 2156 // buildFunction builds SSA code for the body of function fn. Idempotent. 2157 func (b *builder) buildFunction(fn *Function) { 2158 if fn.Blocks != nil { 2159 return // building already started 2160 } 2161 2162 var recvField *ast.FieldList 2163 var body *ast.BlockStmt 2164 var functype *ast.FuncType 2165 switch n := fn.syntax.(type) { 2166 case nil: 2167 return // not a Go source function. (Synthetic, or from object file.) 2168 case *ast.FuncDecl: 2169 functype = n.Type 2170 recvField = n.Recv 2171 body = n.Body 2172 case *ast.FuncLit: 2173 functype = n.Type 2174 body = n.Body 2175 default: 2176 panic(n) 2177 } 2178 2179 if body == nil { 2180 // External function. 2181 if fn.Params == nil { 2182 // This condition ensures we add a non-empty 2183 // params list once only, but we may attempt 2184 // the degenerate empty case repeatedly. 2185 // TODO(adonovan): opt: don't do that. 2186 2187 // We set Function.Params even though there is no body 2188 // code to reference them. This simplifies clients. 
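 			// ("External" here means the declaration has no Go body in the
 			// source -- e.g. a function implemented in assembly or supplied
 			// via //go:linkname -- so fn.Blocks stays nil, but Params still
 			// mirrors the signature.)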
2189 if recv := fn.Signature.Recv(); recv != nil { 2190 fn.addParamObj(recv) 2191 } 2192 params := fn.Signature.Params() 2193 for i, n := 0, params.Len(); i < n; i++ { 2194 fn.addParamObj(params.At(i)) 2195 } 2196 } 2197 return 2198 } 2199 if fn.Prog.mode&LogSource != 0 { 2200 defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() 2201 } 2202 fn.startBody() 2203 fn.createSyntacticParams(recvField, functype) 2204 b.stmt(fn, body) 2205 if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { 2206 // Control fell off the end of the function's body block. 2207 // 2208 // Block optimizations eliminate the current block, if 2209 // unreachable. It is a builder invariant that 2210 // if this no-arg return is ill-typed for 2211 // fn.Signature.Results, this block must be 2212 // unreachable. The sanity checker checks this. 2213 fn.emit(new(RunDefers)) 2214 fn.emit(new(Return)) 2215 } 2216 fn.finishBody() 2217 } 2218 2219 // buildFuncDecl builds SSA code for the function or method declared 2220 // by decl in package pkg. 2221 // 2222 func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { 2223 id := decl.Name 2224 if isBlankIdent(id) { 2225 return // discard 2226 } 2227 fn := pkg.values[pkg.info.Defs[id]].(*Function) 2228 if decl.Recv == nil && id.Name == "init" { 2229 var v Call 2230 v.Call.Value = fn 2231 v.setType(types.NewTuple()) 2232 pkg.init.emit(&v) 2233 } 2234 b.buildFunction(fn) 2235 } 2236 2237 // Build calls Package.Build for each package in prog. 2238 // Building occurs in parallel unless the BuildSerially mode flag was set. 2239 // 2240 // Build is intended for whole-program analysis; a typical compiler 2241 // need only build a single package. 2242 // 2243 // Build is idempotent and thread-safe. 2244 // 2245 func (prog *Program) Build() { 2246 var wg sync.WaitGroup 2247 for _, p := range prog.packages { 2248 if prog.mode&BuildSerially != 0 { 2249 p.Build() 2250 } else { 2251 wg.Add(1) 2252 go func(p *Package) { 2253 p.Build() 2254 wg.Done() 2255 }(p) 2256 } 2257 } 2258 wg.Wait() 2259 } 2260 2261 // Build builds SSA code for all functions and vars in package p. 2262 // 2263 // Precondition: CreatePackage must have been called for all of p's 2264 // direct imports (and hence its direct imports must have been 2265 // error-free). 2266 // 2267 // Build is idempotent and thread-safe. 2268 // 2269 func (p *Package) Build() { p.buildOnce.Do(p.build) } 2270 2271 func (p *Package) build() { 2272 if p.info == nil { 2273 return // synthetic package, e.g. "testmain" 2274 } 2275 2276 // Ensure we have runtime type info for all exported members. 2277 // TODO(adonovan): ideally belongs in memberFromObject, but 2278 // that would require package creation in topological order. 2279 for name, mem := range p.Members { 2280 if ast.IsExported(name) { 2281 p.Prog.needMethodsOf(mem.Type()) 2282 } 2283 } 2284 if p.Prog.mode&LogSource != 0 { 2285 defer logStack("build %s", p)() 2286 } 2287 init := p.init 2288 init.startBody() 2289 2290 var done *BasicBlock 2291 2292 if p.Prog.mode&BareInits == 0 { 2293 // Make init() skip if package is already initialized. 2294 initguard := p.Var("init$guard") 2295 doinit := init.newBasicBlock("init.start") 2296 done = init.newBasicBlock("init.done") 2297 emitIf(init, emitLoad(init, initguard), done, doinit) 2298 init.currentBlock = doinit 2299 emitStore(init, initguard, vTrue, token.NoPos) 2300 2301 // Call the init() function of each package we import. 
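 		// (Illustrative shape of the finished init, with arbitrary register
 		// names, for a package whose only import is "fmt":
 		//
 		//	0:                                                entry
 		//		t0 = *init$guard
 		//		if t0 goto 2 else 1
 		//	1:                                           init.start
 		//		*init$guard = true:bool
 		//		t1 = fmt.init()
 		//		...package-level initializers in InitOrder...
 		//		jump 2
 		//	2:                                            init.done
 		//		return
 		// )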
2302 for _, pkg := range p.Pkg.Imports() { 2303 prereq := p.Prog.packages[pkg] 2304 if prereq == nil { 2305 panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) 2306 } 2307 var v Call 2308 v.Call.Value = prereq.init 2309 v.Call.pos = init.pos 2310 v.setType(types.NewTuple()) 2311 init.emit(&v) 2312 } 2313 } 2314 2315 var b builder 2316 2317 // Initialize package-level vars in correct order. 2318 for _, varinit := range p.info.InitOrder { 2319 if init.Prog.mode&LogSource != 0 { 2320 fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", 2321 varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) 2322 } 2323 if len(varinit.Lhs) == 1 { 2324 // 1:1 initialization: var x, y = a(), b() 2325 var lval lvalue 2326 if v := varinit.Lhs[0]; v.Name() != "_" { 2327 lval = &address{addr: p.values[v].(*Global), pos: v.Pos()} 2328 } else { 2329 lval = blank{} 2330 } 2331 b.assign(init, lval, varinit.Rhs, true, nil) 2332 } else { 2333 // n:1 initialization: var x, y := f() 2334 tuple := b.exprN(init, varinit.Rhs) 2335 for i, v := range varinit.Lhs { 2336 if v.Name() == "_" { 2337 continue 2338 } 2339 emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos()) 2340 } 2341 } 2342 } 2343 2344 // Build all package-level functions, init functions 2345 // and methods, including unreachable/blank ones. 2346 // We build them in source order, but it's not significant. 2347 for _, file := range p.files { 2348 for _, decl := range file.Decls { 2349 if decl, ok := decl.(*ast.FuncDecl); ok { 2350 b.buildFuncDecl(p, decl) 2351 } 2352 } 2353 } 2354 2355 // Finish up init(). 2356 if p.Prog.mode&BareInits == 0 { 2357 emitJump(init, done) 2358 init.currentBlock = done 2359 } 2360 init.emit(new(Return)) 2361 init.finishBody() 2362 2363 p.info = nil // We no longer need ASTs or go/types deductions. 2364 2365 if p.Prog.mode&SanityCheckFunctions != 0 { 2366 sanityCheckPackage(p) 2367 } 2368 } 2369 2370 // Like ObjectOf, but panics instead of returning nil. 2371 // Only valid during p's create and build phases. 2372 func (p *Package) objectOf(id *ast.Ident) types.Object { 2373 if o := p.info.ObjectOf(id); o != nil { 2374 return o 2375 } 2376 panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", 2377 id.Name, p.Prog.Fset.Position(id.Pos()))) 2378 } 2379 2380 // Like TypeOf, but panics instead of returning nil. 2381 // Only valid during p's create and build phases. 2382 func (p *Package) typeOf(e ast.Expr) types.Type { 2383 if T := p.info.TypeOf(e); T != nil { 2384 return T 2385 } 2386 panic(fmt.Sprintf("no type for %T @ %s", 2387 e, p.Prog.Fset.Position(e.Pos()))) 2388 }
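
A minimal client-side sketch of the CREATE and BUILD phases described above, using the ssautil.BuildPackage convenience helper. The import paths assume the upstream golang.org/x/tools module (substitute github.com/jhump/golang-x-tools/... when building against this fork), and the "hello" source text is purely illustrative. BuildPackage type-checks the single package, creates SSA packages for it and its dependencies, and calls Package.Build, after which the synthesized init (with its init$guard test) and main can be printed:

	package main

	import (
		"fmt"
		"go/ast"
		"go/importer"
		"go/parser"
		"go/token"
		"go/types"
		"os"

		"golang.org/x/tools/go/ssa"
		"golang.org/x/tools/go/ssa/ssautil"
	)

	const src = `
	package hello

	import "fmt"

	var greeting = "Hello, " + "SSA"

	func main() { fmt.Println(greeting) }
	`

	func main() {
		// Parse one synthetic source file.
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "hello.go", src, parser.ParseComments)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			return
		}

		// Type-check the package and build its SSA form. BuildPackage runs
		// both the CREATE and BUILD phases for this one package; its
		// dependencies are created from export data and remain bodiless.
		pkg := types.NewPackage("hello", "")
		hello, _, err := ssautil.BuildPackage(
			&types.Config{Importer: importer.Default()},
			fset, pkg, []*ast.File{f}, ssa.SanityCheckFunctions)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			return
		}

		// The synthesized init contains the init$guard check, the call to
		// fmt's init, and the initializer for greeting (per InitOrder).
		hello.Func("init").WriteTo(os.Stdout)
		hello.Func("main").WriteTo(os.Stdout)
	}

Because Package.Build is guarded by buildOnce, calling it again (or invoking Program.Build over all packages) is a cheap no-op, which is why both are documented above as idempotent and thread-safe.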