// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ir

// This file implements the BUILD phase of IR construction.
//
// IR construction has two phases, CREATE and BUILD. In the CREATE phase
// (create.go), all packages are constructed and type-checked and
// definitions of all package members are created, method-sets are
// computed, and wrapper methods are synthesized.
// ir.Packages are created in arbitrary order.
//
// In the BUILD phase (builder.go), the builder traverses the AST of
// each Go source function and generates IR instructions for the
// function body. Initializer expressions for package-level variables
// are emitted to the package's init() function in the order specified
// by go/types.Info.InitOrder, then code for each function in the
// package is generated in lexical order.
//
// The builder's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
// remain constant. The sole exception is Prog.methodSets and its
// related maps, which are protected by a dedicated mutex.

import (
	"fmt"
	"go/ast"
	"go/constant"
	"go/token"
	"go/types"
	"os"

	"github.com/amarpal/go-tools/analysis/lint"
	"github.com/amarpal/go-tools/go/types/typeutil"

	"golang.org/x/exp/typeparams"
)

var (
	// Pre-built result variables shared by instructions that produce a
	// comma-ok or index result. NOTE(review): their uses are outside this
	// chunk — presumably comma-ok lowering and range loops; confirm.
	varOk    = newVar("ok", tBool)
	varIndex = newVar("index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tString     = types.Typ[types.String]
	tUntypedNil = types.Typ[types.UntypedNil]
	tEface      = types.NewInterfaceType(nil, nil).Complete() // the empty interface, interface{}
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-IR conversion.
type builder struct {
	// printFunc is forwarded to Function.initHTML when anonymous
	// functions are created (see expr0's FuncLit case); presumably the
	// name of a function whose build steps should be rendered as HTML —
	// TODO confirm against the flag that sets it.
	printFunc string

	// blocksets is scratch storage. NOTE(review): its users are outside
	// this chunk; it appears to be a pool of reusable BlockSets to avoid
	// per-pass allocation — confirm.
	blocksets [5]BlockSet
}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If {
	switch e := e.(type) {
	case *ast.ParenExpr:
		return b.cond(fn, e.X, t, f)

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND:
			// x && y: evaluate y only on the true edge of x.
			ltrue := fn.newBasicBlock("cond.true")
			b.cond(fn, e.X, ltrue, f)
			fn.currentBlock = ltrue
			return b.cond(fn, e.Y, t, f)

		case token.LOR:
			// x || y: evaluate y only on the false edge of x.
			lfalse := fn.newBasicBlock("cond.false")
			b.cond(fn, e.X, t, lfalse)
			fn.currentBlock = lfalse
			return b.cond(fn, e.Y, t, f)
		}

	case *ast.UnaryExpr:
		if e.Op == token.NOT {
			// !x: swap the target blocks.
			return b.cond(fn, e.X, f, t)
		}
	}

	// A traditional compiler would simplify "if false" (etc) here
	// but we do not, for better fidelity to the source code.
	//
	// The value of a constant condition may be platform-specific,
	// and may cause blocks that are reachable in some configuration
	// to be hidden from subsequent analyses such as bug-finding tools.
	return emitIf(fn, b.expr(fn, e), t, f, e)
}

// logicalBinop emits code to fn to evaluate e, a &&- or
// ||-expression whose reified boolean value is wanted.
// The value is returned.
func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
	rhs := fn.newBasicBlock("binop.rhs")
	done := fn.newBasicBlock("binop.done")

	// T(e) = T(e.X) = T(e.Y) after untyped constants have been
	// eliminated.
	// TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
	t := fn.Pkg.typeOf(e)

	var short Value // value of the short-circuit path
	switch e.Op {
	case token.LAND:
		b.cond(fn, e.X, rhs, done)
		short = emitConst(fn, NewConst(constant.MakeBool(false), t, e))

	case token.LOR:
		b.cond(fn, e.X, done, rhs)
		short = emitConst(fn, NewConst(constant.MakeBool(true), t, e))
	}

	// Is rhs unreachable?
	if rhs.Preds == nil {
		// Simplify false&&y to false, true||y to true.
		fn.currentBlock = done
		return short
	}

	// Is done unreachable?
	if done.Preds == nil {
		// Simplify true&&y (or false||y) to y.
		fn.currentBlock = rhs
		return b.expr(fn, e.Y)
	}

	// All edges from e.X to done carry the short-circuit value.
	var edges []Value
	for range done.Preds {
		edges = append(edges, short)
	}

	// The edge from e.Y to done carries the value of e.Y.
	fn.currentBlock = rhs
	edges = append(edges, b.expr(fn, e.Y))
	emitJump(fn, done, e)
	fn.currentBlock = done

	// Merge the short-circuit and computed values with a phi.
	// Edge order matches done.Preds: cond() wired the short-circuit
	// edges first, then we appended the e.Y edge above.
	phi := &Phi{Edges: edges}
	phi.typ = t
	return done.emit(phi, e)
}

// exprN lowers a multi-result expression e to IR form, emitting code
// to fn and returning a single Value whose type is a *types.Tuple.
// The caller must access the components via Extract.
//
// Multi-result expressions include CallExprs in a multi-value
// assignment or return statement, and "value,ok" uses of
// TypeAssertExpr, IndexExpr (when X is a map), and Recv.
func (b *builder) exprN(fn *Function, e ast.Expr) Value {
	typ := fn.Pkg.typeOf(e).(*types.Tuple)
	switch e := e.(type) {
	case *ast.ParenExpr:
		return b.exprN(fn, e.X)

	case *ast.CallExpr:
		// Currently, no built-in function nor type conversion
		// has multiple results, so we can avoid some of the
		// cases for single-valued CallExpr.
		var c Call
		b.setCall(fn, e, &c.Call)
		c.typ = typ
		return fn.emit(&c, e)

	case *ast.IndexExpr:
		// v, ok := m[k] — a comma-ok map lookup.
		mapt := typeutil.CoreType(fn.Pkg.typeOf(e.X)).Underlying().(*types.Map)
		lookup := &MapLookup{
			X:       b.expr(fn, e.X),
			Index:   emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e),
			CommaOk: true,
		}
		lookup.setType(typ)
		return fn.emit(lookup, e)

	case *ast.TypeAssertExpr:
		// v, ok := x.(T) — typ.At(0) is the asserted type T.
		return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e)

	case *ast.UnaryExpr: // must be receive <-
		return emitRecv(fn, b.expr(fn, e.X), true, typ, e)
	}
	panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
}

// builtin emits to fn IR instructions to implement a call to the
// built-in function obj with the specified arguments
// and return type. It returns the value defined by the result.
//
// The result is nil if no special handling was required; in this case
// the caller should treat this like an ordinary library function
// call.
func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value {
	switch obj.Name() {
	case "make":
		styp := typ.Underlying()
		if _, ok := typ.Underlying().(*types.Interface); ok {
			// This must be a type parameter with a core type.
			// Set styp to the core type and generate instructions based on it.
			assert(typeparams.IsTypeParam(typ))
			styp = typeutil.CoreType(typ)
			assert(styp != nil)
		}
		switch styp.(type) {
		case *types.Slice:
			n := b.expr(fn, args[1])
			m := n // capacity defaults to the length argument
			if len(args) == 3 {
				m = b.expr(fn, args[2])
			}
			if m, ok := m.(*Const); ok {
				// treat make([]T, n, m) as new([m]T)[:n]
				cap := m.Int64() // NOTE: shadows the cap builtin, intentionally scoped to this block
				at := types.NewArray(styp.Underlying().(*types.Slice).Elem(), cap)
				alloc := emitNew(fn, at, source)
				v := &Slice{
					X:    alloc,
					High: n,
				}
				v.setType(typ)
				return fn.emit(v, source)
			}
			v := &MakeSlice{
				Len: n,
				Cap: m,
			}
			v.setType(typ)
			return fn.emit(v, source)

		case *types.Map:
			var res Value
			if len(args) == 2 {
				res = b.expr(fn, args[1])
			}
			v := &MakeMap{Reserve: res}
			v.setType(typ)
			return fn.emit(v, source)

		case *types.Chan:
			// make(chan T) defaults to an unbuffered channel (size 0).
			var sz Value = emitConst(fn, intConst(0, source))
			if len(args) == 2 {
				sz = b.expr(fn, args[1])
			}
			v := &MakeChan{Size: sz}
			v.setType(typ)
			return fn.emit(v, source)

		default:
			lint.ExhaustiveTypeSwitch(typ.Underlying())
		}

	case "new":
		alloc := emitNew(fn, deref(typ), source)
		return alloc

	case "len", "cap":
		// Special case: len or cap of an array or *array is based on the type, not the value which may be nil. We must
		// still evaluate the value, though. (If it was side-effect free, the whole call would have been
		// constant-folded.)
		//
		// For example, for len(gen()), we need to evaluate gen() for its side-effects, but don't need the returned
		// value to determine the length of the array, which is constant.
		//
		// This never applies to type parameters. Even if the constraint has a structural type, len/cap on a type
		// parameter aren't constant.
		t := deref(fn.Pkg.typeOf(args[0])).Underlying()
		if at, ok := t.(*types.Array); ok {
			b.expr(fn, args[0]) // for effects only
			return emitConst(fn, intConst(at.Len(), args[0]))
		}
		// Otherwise treat as normal.

	case "panic":
		fn.emit(&Panic{
			X: emitConv(fn, b.expr(fn, args[0]), tEface, source),
		}, source)
		// panic never returns: route control to the exit block and park
		// subsequent emission in a fresh unreachable block.
		addEdge(fn.currentBlock, fn.Exit)
		fn.currentBlock = fn.newBasicBlock("unreachable")
		return emitConst(fn, NewConst(constant.MakeBool(true), tBool, nil)) // any non-nil Value will do
	}
	return nil // treat all others as a regular function call
}

// addr lowers a single-result addressable expression e to IR form,
// emitting code to fn and returning the location (an lvalue) defined
// by the expression.
//
// If escaping is true, addr marks the base variable of the
// addressable expression e as being a potentially escaping pointer
// value. For example, in this code:
//
//	a := A{
//	  b: [1]B{B{c: 1}}
//	}
//	return &a.b[0].c
//
// the application of & causes a.b[0].c to have its address taken,
// which means that ultimately the local variable a must be
// heap-allocated. This is a simple but very conservative escape
// analysis.
//
// Operations forming potentially escaping pointers include:
//   - &x, including when implicit in method call or composite literals.
//   - a[:] iff a is an array (not *array)
//   - references to variables in lexically enclosing functions.
func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) (RET lvalue) {
	switch e := e.(type) {
	case *ast.Ident:
		if isBlankIdent(e) {
			return blank{}
		}
		obj := fn.Pkg.objectOf(e)
		v := fn.Prog.packageLevelValue(obj) // var (address)
		if v == nil {
			// Not a package-level var: a local or free variable.
			v = fn.lookup(obj, escaping)
		}
		return &address{addr: v, expr: e}

	case *ast.CompositeLit:
		t := deref(fn.Pkg.typeOf(e))
		var v *Alloc
		if escaping {
			v = emitNew(fn, t, e) // heap-allocated
		} else {
			v = fn.addLocal(t, e) // stack-allocated
		}
		var sb storebuf
		b.compLit(fn, v, e, true, &sb)
		sb.emit(fn)
		return &address{addr: v, expr: e}

	case *ast.ParenExpr:
		return b.addr(fn, e.X, escaping)

	case *ast.SelectorExpr:
		sel, ok := fn.Pkg.info.Selections[e]
		if !ok {
			// qualified identifier
			return b.addr(fn, e.Sel, escaping)
		}
		if sel.Kind() != types.FieldVal {
			panic(sel)
		}
		wantAddr := true
		v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
		index := sel.Index()[len(sel.Index())-1]
		vut := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct)
		fld := vut.Field(index)
		// Due to the two phases of resolving AssignStmt, a panic from x.f = p()
		// when x is nil is required to come after the side-effects of
		// evaluating x and p().
		emit := func(fn *Function) Value {
			return emitFieldSelection(fn, v, index, true, e.Sel)
		}
		return &lazyAddress{addr: emit, t: fld.Type(), expr: e.Sel}

	case *ast.IndexExpr:
		var x Value
		var et types.Type
		xt := fn.Pkg.typeOf(e.X)

		// Indexing doesn't need a core type, it only requires all types to be similar enough. For example, []int64 |
		// [5]int64 can be indexed. The element types do have to match though.

		terms, err := typeparams.NormalTerms(xt)
		if err != nil {
			panic(fmt.Sprintf("unexpected error: %s", err))
		}
		// isArrayLike reports whether any normal term is an array, and if
		// so returns its element type.
		isArrayLike := func() (types.Type, bool) {
			for _, term := range terms {
				arr, ok := term.Type().Underlying().(*types.Array)
				if ok {
					return arr.Elem(), true
				}
			}
			return nil, false
		}

		// isSliceLike reports whether any normal term is a slice or
		// pointer-to-array, and if so returns the element type.
		isSliceLike := func() (types.Type, bool) {
			for _, term := range terms {
				switch t := term.Type().Underlying().(type) {
				case *types.Slice:
					return t.Elem(), true
				case *types.Pointer:
					return t.Elem().Underlying().(*types.Array).Elem(), true
				}
			}
			return nil, false
		}

		if elem, ok := isArrayLike(); ok {
			// array
			x = b.addr(fn, e.X, escaping).address(fn)
			et = types.NewPointer(elem)
		} else if elem, ok := isSliceLike(); ok {
			// slice or *array
			x = b.expr(fn, e.X)
			et = types.NewPointer(elem)
		} else if t, ok := typeutil.CoreType(xt).Underlying().(*types.Map); ok {
			return &element{
				m: b.expr(fn, e.X),
				k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index),
				t: t.Elem(),
			}
		} else {
			panic("unexpected container type in IndexExpr: " + t.String())
		}

		// Due to the two phases of resolving AssignStmt, a panic from x[i] = p()
		// when x is nil or i is out-of-bounds is required to come after the
		// side-effects of evaluating x, i and p().
		index := b.expr(fn, e.Index)
		emit := func(fn *Function) Value {
			v := &IndexAddr{
				X:     x,
				Index: index,
			}
			v.setType(et)
			return fn.emit(v, e)
		}
		return &lazyAddress{addr: emit, t: deref(et), expr: e}

	case *ast.StarExpr:
		return &address{addr: b.expr(fn, e.X), expr: e}
	}

	panic(fmt.Sprintf("unexpected address expression: %T", e))
}

// store is a single deferred store (or DebugRef emission) queued in a
// storebuf.
type store struct {
	lhs    lvalue
	rhs    Value
	source ast.Node

	// if debugRef is set no other fields will be set
	debugRef *DebugRef
}

// storebuf queues stores so that all RHS evaluation can happen before
// any LHS is updated (see assign and assignStmt).
type storebuf struct{ stores []store }

func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) {
	sb.stores = append(sb.stores, store{lhs, rhs, source, nil})
}

func (sb *storebuf) storeDebugRef(ref *DebugRef) {
	sb.stores = append(sb.stores, store{debugRef: ref})
}

// emit flushes the queued stores to fn, in order.
func (sb *storebuf) emit(fn *Function) {
	for _, s := range sb.stores {
		if s.debugRef == nil {
			s.lhs.store(fn, s.rhs, s.source)
		} else {
			fn.emit(s.debugRef, nil)
		}
	}
}

// assign emits to fn code to initialize the lvalue loc with the value
// of expression e. If isZero is true, assign assumes that loc holds
// the zero value for its type.
//
// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
// better code in some cases, e.g., for composite literals in an
// addressable location.
//
// If sb is not nil, assign generates code to evaluate expression e, but
// not to update loc. Instead, the necessary stores are appended to the
// storebuf sb so that they can be executed later. This allows correct
// in-place update of existing variables when the RHS is a composite
// literal that may reference parts of the LHS.
func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) {
	// Can we initialize it in place?
	if e, ok := unparen(e).(*ast.CompositeLit); ok {
		// A CompositeLit never evaluates to a pointer,
		// so if the type of the location is a pointer,
		// an &-operation is implied.
		if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
			if isPointer(loc.typ()) {
				// Example input that hits this code:
				//
				//	type S1 struct{ X int }
				//	x := []*S1{
				//		{1}, // <-- & is implied
				//	}
				//	_ = x
				ptr := b.addr(fn, e, true).address(fn)
				// copy address
				if sb != nil {
					sb.store(loc, ptr, source)
				} else {
					loc.store(fn, ptr, source)
				}
				return
			}
		}

		if _, ok := loc.(*address); ok {
			if isInterface(loc.typ()) && !typeparams.IsTypeParam(loc.typ()) {
				// e.g. var x interface{} = T{...}
				// Can't in-place initialize an interface value.
				// Fall back to copying.
			} else {
				// x = T{...} or x := T{...}
				addr := loc.address(fn)
				if sb != nil {
					b.compLit(fn, addr, e, isZero, sb)
				} else {
					var sb storebuf
					b.compLit(fn, addr, e, isZero, &sb)
					sb.emit(fn)
				}

				// Subtle: emit debug ref for aggregate types only;
				// slice and map are handled by store ops in compLit.
				switch typeutil.CoreType(loc.typ()).Underlying().(type) {
				case *types.Struct, *types.Array:
					if sb != nil {
						// Make sure we don't emit DebugRefs before the store has actually occurred
						if ref := makeDebugRef(fn, e, addr, true); ref != nil {
							sb.storeDebugRef(ref)
						}
					} else {
						emitDebugRef(fn, e, addr, true)
					}
				}

				return
			}
		}
	}

	// simple case: just copy
	rhs := b.expr(fn, e)
	if sb != nil {
		sb.store(loc, rhs, source)
	} else {
		loc.store(fn, rhs, source)
	}
}

// expr lowers a single-result expression e to IR form, emitting code
// to fn and returning the Value defined by the expression.
func (b *builder) expr(fn *Function, e ast.Expr) Value {
	e = unparen(e)

	tv := fn.Pkg.info.Types[e]

	// Is expression a constant?
	if tv.Value != nil {
		return emitConst(fn, NewConst(tv.Value, tv.Type, e))
	}

	var v Value
	if tv.Addressable() {
		// Prefer pointer arithmetic ({Index,Field}Addr) followed
		// by Load over subelement extraction (e.g. Index, Field),
		// to avoid large copies.
		v = b.addr(fn, e, false).load(fn, e)
	} else {
		v = b.expr0(fn, e, tv)
	}
	if fn.debugInfo() {
		emitDebugRef(fn, e, v, false)
	}
	return v
}

// expr0 lowers a single-result, non-constant, non-addressable
// expression; it is the switch-on-kind core of expr.
func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
	switch e := e.(type) {
	case *ast.BasicLit:
		panic("non-constant BasicLit") // unreachable

	case *ast.FuncLit:
		// Build the anonymous function eagerly, then close over any
		// free variables with a MakeClosure.
		fn2 := &Function{
			name:         fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
			Signature:    fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
			parent:       fn,
			Pkg:          fn.Pkg,
			Prog:         fn.Prog,
			functionBody: new(functionBody),
		}
		fn2.source = e
		fn.AnonFuncs = append(fn.AnonFuncs, fn2)
		fn2.initHTML(b.printFunc)
		b.buildFunction(fn2)
		if fn2.FreeVars == nil {
			return fn2
		}
		v := &MakeClosure{Fn: fn2}
		v.setType(tv.Type)
		for _, fv := range fn2.FreeVars {
			v.Bindings = append(v.Bindings, fv.outer)
			fv.outer = nil
		}
		return fn.emit(v, e)

	case *ast.TypeAssertExpr: // single-result form only
		return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e)

	case *ast.CallExpr:
		if fn.Pkg.info.Types[e.Fun].IsType() {
			// Explicit type conversion, e.g. string(x) or big.Int(x)
			x := b.expr(fn, e.Args[0])
			y := emitConv(fn, x, tv.Type, e)
			return y
		}
		// Call to "intrinsic" built-ins, e.g. new, make, panic.
		if id, ok := unparen(e.Fun).(*ast.Ident); ok {
			if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok {
				if v := b.builtin(fn, obj, e.Args, tv.Type, e); v != nil {
					return v
				}
			}
		}
		// Regular function call.
		var v Call
		b.setCall(fn, e, &v.Call)
		v.setType(tv.Type)
		return fn.emit(&v, e)

	case *ast.UnaryExpr:
		switch e.Op {
		case token.AND: // &X --- potentially escaping.
			addr := b.addr(fn, e.X, true)
			if _, ok := unparen(e.X).(*ast.StarExpr); ok {
				// &*p must panic if p is nil (https://golang.org/s/go12nil).
				// For simplicity, we'll just (suboptimally) rely
				// on the side effects of a load.
				// TODO(adonovan): emit dedicated nilcheck.
				addr.load(fn, e)
			}
			return addr.address(fn)
		case token.ADD:
			// Unary + is a no-op.
			return b.expr(fn, e.X)
		case token.NOT, token.SUB, token.XOR: // ! <- - ^
			v := &UnOp{
				Op: e.Op,
				X:  b.expr(fn, e.X),
			}
			v.setType(tv.Type)
			return fn.emit(v, e)
		case token.ARROW:
			return emitRecv(fn, b.expr(fn, e.X), false, tv.Type, e)
		default:
			panic(e.Op)
		}

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND, token.LOR:
			return b.logicalBinop(fn, e)
		case token.SHL, token.SHR:
			fallthrough
		case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
			return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e)

		case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
			cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e)
			// The type of x==y may be UntypedBool.
			return emitConv(fn, cmp, types.Default(tv.Type), e)
		default:
			panic("illegal op in BinaryExpr: " + e.Op.String())
		}

	case *ast.SliceExpr:
		var x Value
		if core := typeutil.CoreType(fn.Pkg.typeOf(e.X)); core != nil {
			switch core.Underlying().(type) {
			case *types.Array:
				// Potentially escaping.
				x = b.addr(fn, e.X, true).address(fn)
			case *types.Basic, *types.Slice, *types.Pointer: // *array
				x = b.expr(fn, e.X)
			default:
				panic("unreachable")
			}
		} else {
			// We're indexing a string | []byte. Note that other combinations such as []byte | [4]byte are currently not
			// allowed by the language.
			x = b.expr(fn, e.X)
		}

		var low, high, max Value
		if e.Low != nil {
			low = b.expr(fn, e.Low)
		}
		if e.High != nil {
			high = b.expr(fn, e.High)
		}
		if e.Slice3 {
			max = b.expr(fn, e.Max)
		}
		v := &Slice{
			X:    x,
			Low:  low,
			High: high,
			Max:  max,
		}
		v.setType(tv.Type)
		return fn.emit(v, e)

	case *ast.Ident:
		obj := fn.Pkg.info.Uses[e]
		// Universal built-in or nil?
		switch obj := obj.(type) {
		case *types.Builtin:
			return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
		case *types.Nil:
			return emitConst(fn, nilConst(tv.Type, e))
		}
		// Package-level func or var?
		if v := fn.Prog.packageLevelValue(obj); v != nil {
			if _, ok := obj.(*types.Var); ok {
				return emitLoad(fn, v, e) // var (address)
			}
			if instance, ok := fn.Pkg.info.Instances[e]; ok {
				// Instantiated generic function
				return makeInstance(fn.Prog, v.(*Function), instance.Type.(*types.Signature), instance.TypeArgs)
			}
			return v // (func)
		}
		// Local var.
		return emitLoad(fn, fn.lookup(obj, false), e) // var (address)

	case *ast.SelectorExpr:
		sel, ok := fn.Pkg.info.Selections[e]
		if !ok {
			// builtin unsafe.{Add,Slice}
			if obj, ok := fn.Pkg.info.Uses[e.Sel].(*types.Builtin); ok {
				return &Builtin{name: "Unsafe" + obj.Name(), sig: tv.Type.(*types.Signature)}
			}
			// qualified identifier
			return b.expr(fn, e.Sel)
		}
		switch sel.Kind() {
		case types.MethodExpr:
			// (*T).f or T.f, the method f from the method-set of type T.
			// The result is a "thunk".
			return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type, e)

		case types.MethodVal:
			// e.f where e is an expression and f is a method.
			// The result is a "bound".
			obj := sel.Obj().(*types.Func)
			rt := recvType(obj)
			wantAddr := isPointer(rt)
			escaping := true
			v := b.receiver(fn, e.X, wantAddr, escaping, sel, e)
			if isInterface(rt) {
				// If v has interface type I,
				// we must emit a check that v is non-nil.
				// We use: typeassert v.(I).
				emitTypeAssert(fn, v, rt, e)
			}
			c := &MakeClosure{
				Fn:       makeBound(fn.Prog, obj),
				Bindings: []Value{v},
			}
			c.source = e.Sel
			c.setType(tv.Type)
			return fn.emit(c, e)

		case types.FieldVal:
			indices := sel.Index()
			last := len(indices) - 1
			v := b.expr(fn, e.X)
			v = emitImplicitSelections(fn, v, indices[:last], e)
			v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
			return v
		}

		panic("unexpected expression-relative selector")

	case *ast.IndexExpr:
		// IndexExpr might either be an actual indexing operation, or an instantiation
		xt := fn.Pkg.typeOf(e.X)

		terms, err := typeparams.NormalTerms(xt)
		if err != nil {
			panic(fmt.Sprintf("unexpected error: %s", err))
		}
		// isNonAddressableIndexable reports whether any normal term is an
		// array or string, whose indexing result is not addressable.
		isNonAddressableIndexable := func() (types.Type, bool) {
			for _, term := range terms {
				switch t := term.Type().Underlying().(type) {
				case *types.Array:
					return t.Elem(), true
				case *types.Basic:
					// a string
					return types.Universe.Lookup("byte").Type(), true
				}
			}
			return nil, false
		}

		// isAddressableIndexable reports whether any normal term is a
		// slice or pointer-to-array, whose indexing result is addressable.
		isAddressableIndexable := func() (types.Type, bool) {
			for _, term := range terms {
				switch t := term.Type().Underlying().(type) {
				case *types.Slice:
					return t.Elem(), true
				case *types.Pointer:
					return t.Elem().Underlying().(*types.Array).Elem(), true
				}
			}
			return nil, false
		}

		if elem, ok := isNonAddressableIndexable(); ok {
			// At least one of the types is non-addressable
			v := &Index{
				X:     b.expr(fn, e.X),
				Index: b.expr(fn, e.Index),
			}
			v.setType(elem)
			return fn.emit(v, e)
		} else if _, ok := isAddressableIndexable(); ok {
			// All types are addressable (otherwise the previous branch would've fired)
			return b.addr(fn, e, false).load(fn, e)
		} else if t, ok := typeutil.CoreType(xt).Underlying().(*types.Map); ok {
			// Maps are not addressable.
			v := &MapLookup{
				X:     b.expr(fn, e.X),
				Index: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index),
			}
			v.setType(t.Elem())
			return fn.emit(v, e)
		} else if _, ok := xt.Underlying().(*types.Signature); ok {
			// Instantiating a generic function
			return b.expr(fn, e.X)
		} else {
			panic("unexpected container type in IndexExpr: " + t.String())
		}

	case *ast.IndexListExpr:
		// Instantiating a generic function
		return b.expr(fn, e.X)

	case *ast.CompositeLit, *ast.StarExpr:
		// Addressable types (lvalues)
		return b.addr(fn, e, false).load(fn, e)
	}

	panic(fmt.Sprintf("unexpected expr: %T", e))
}

// stmtList emits to fn code for all statements in list.
func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
	for _, s := range list {
		b.stmt(fn, s)
	}
}

// receiver emits to fn code for expression e in the "receiver"
// position of selection e.f (where f may be a field or a method) and
// returns the effective receiver after applying the implicit field
// selections of sel.
//
// wantAddr requests that the result is an address. If
// !sel.Indirect(), this may require that e be built in addr() mode; it
// must thus be addressable.
//
// escaping is defined as per builder.addr().
func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value {
	var v Value
	if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
		v = b.addr(fn, e, escaping).address(fn)
	} else {
		v = b.expr(fn, e)
	}

	// Apply the implicit field selections of embedded fields, leaving
	// the final selection (the method/field itself) to the caller.
	last := len(sel.Index()) - 1
	v = emitImplicitSelections(fn, v, sel.Index()[:last], source)
	if !wantAddr && isPointer(v.Type()) {
		v = emitLoad(fn, v, e)
	}
	return v
}

// setCallFunc populates the function parts of a CallCommon structure
// (Func, Method, Recv, Args[0]) based on the kind of invocation
// occurring in e.
func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
	// Is this a method call?
	if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
		sel, ok := fn.Pkg.info.Selections[selector]
		if ok && sel.Kind() == types.MethodVal {
			obj := sel.Obj().(*types.Func)
			recv := recvType(obj)
			wantAddr := isPointer(recv)
			escaping := true
			v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector)
			if isInterface(recv) {
				// Invoke-mode call.

				// Methods in interfaces cannot have their own type parameters, so we needn't do anything for type
				// parameters.
				c.Value = v
				c.Method = obj
			} else {
				// "Call"-mode call.

				// declaredFunc takes care of creating wrappers for functions with type parameters.
				c.Value = fn.Prog.declaredFunc(obj)
				c.Args = append(c.Args, v) // receiver becomes Args[0]
			}
			return
		}

		// sel.Kind()==MethodExpr indicates T.f() or (*T).f():
		// a statically dispatched call to the method f in the
		// method-set of T or *T. T may be an interface.
		//
		// e.Fun would evaluate to a concrete method, interface
		// wrapper function, or promotion wrapper.
		//
		// For now, we evaluate it in the usual way.
		//
		// TODO(adonovan): opt: inline expr() here, to make the
		// call static and to avoid generation of wrappers.
		// It's somewhat tricky as it may consume the first
		// actual parameter if the call is "invoke" mode.
		//
		// Examples:
		//  type T struct{}; func (T) f() {}   // "call" mode
		//  type T interface { f() }           // "invoke" mode
		//
		//  type S struct{ T }
		//
		//  var s S
		//  S.f(s)
		//  (*S).f(&s)
		//
		// Suggested approach:
		// - consume the first actual parameter expression
		//   and build it with b.expr().
		// - apply implicit field selections.
		// - use MethodVal logic to populate fields of c.
	}
	// Evaluate the function operand in the usual way.
	//
	// Code in expr takes care of creating wrappers for functions with type parameters.
	c.Value = b.expr(fn, e.Fun)
}

// emitCallArgs emits to f code for the actual parameters of call e to
// a (possibly built-in) function of effective type sig.
// The argument values are appended to args, which is then returned.
func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
	// f(x, y, z...): pass slice z straight through.
	if e.Ellipsis != 0 {
		for i, arg := range e.Args {
			v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type(), arg)
			args = append(args, v)
		}
		return args
	}

	offset := len(args) // 1 if call has receiver, 0 otherwise

	// Evaluate actual parameter expressions.
	//
	// If this is a chained call of the form f(g()) where g has
	// multiple return values (MRV), they are flattened out into
	// args; a suffix of them may end up in a varargs slice.
	for _, arg := range e.Args {
		v := b.expr(fn, arg)
		if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
			for i, n := 0, ttuple.Len(); i < n; i++ {
				args = append(args, emitExtract(fn, v, i, arg))
			}
		} else {
			args = append(args, v)
		}
	}

	// Actual->formal assignability conversions for normal parameters.
	np := sig.Params().Len() // number of normal parameters
	if sig.Variadic() {
		np--
	}
	for i := 0; i < np; i++ {
		args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type(), args[offset+i].Source())
	}

	// Actual->formal assignability conversions for variadic parameter,
	// and construction of slice.
	if sig.Variadic() {
		varargs := args[offset+np:]
		st := sig.Params().At(np).Type().(*types.Slice)
		vt := st.Elem()
		if len(varargs) == 0 {
			// No variadic actuals: pass a nil slice.
			args = append(args, emitConst(fn, nilConst(st, nil)))
		} else {
			// Replace a suffix of args with a slice containing it.
			at := types.NewArray(vt, int64(len(varargs)))
			a := emitNew(fn, at, e)
			a.source = e
			for i, arg := range varargs {
				iaddr := &IndexAddr{
					X:     a,
					Index: emitConst(fn, intConst(int64(i), nil)),
				}
				iaddr.setType(types.NewPointer(vt))
				fn.emit(iaddr, e)
				emitStore(fn, iaddr, arg, arg.Source())
			}
			s := &Slice{X: a}
			s.setType(st)
			args[offset+np] = fn.emit(s, args[offset+np].Source())
			args = args[:offset+np+1]
		}
	}
	return args
}

// setCall emits to fn code to evaluate all the parameters of a function
// call e, and populates *c with those values.
func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
	// First deal with the f(...) part and optional receiver.
	b.setCallFunc(fn, e, c)

	// Then append the other actual parameters.
1033 sig, _ := typeutil.CoreType(fn.Pkg.typeOf(e.Fun)).(*types.Signature) 1034 if sig == nil { 1035 panic(fmt.Sprintf("no signature for call of %s", e.Fun)) 1036 } 1037 c.Args = b.emitCallArgs(fn, sig, e, c.Args) 1038 } 1039 1040 // assignOp emits to fn code to perform loc <op>= val. 1041 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) { 1042 loc.store(fn, emitArith(fn, op, loc.load(fn, source), val, loc.typ(), source), source) 1043 } 1044 1045 // localValueSpec emits to fn code to define all of the vars in the 1046 // function-local ValueSpec, spec. 1047 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { 1048 switch { 1049 case len(spec.Values) == len(spec.Names): 1050 // e.g. var x, y = 0, 1 1051 // 1:1 assignment 1052 for i, id := range spec.Names { 1053 if !isBlankIdent(id) { 1054 fn.addLocalForIdent(id) 1055 } 1056 lval := b.addr(fn, id, false) // non-escaping 1057 b.assign(fn, lval, spec.Values[i], true, nil, spec) 1058 } 1059 1060 case len(spec.Values) == 0: 1061 // e.g. var x, y int 1062 // Locals are implicitly zero-initialized. 1063 for _, id := range spec.Names { 1064 if !isBlankIdent(id) { 1065 lhs := fn.addLocalForIdent(id) 1066 if fn.debugInfo() { 1067 emitDebugRef(fn, id, lhs, true) 1068 } 1069 } 1070 } 1071 1072 default: 1073 // e.g. var x, y = pos() 1074 tuple := b.exprN(fn, spec.Values[0]) 1075 for i, id := range spec.Names { 1076 if !isBlankIdent(id) { 1077 fn.addLocalForIdent(id) 1078 lhs := b.addr(fn, id, false) // non-escaping 1079 lhs.store(fn, emitExtract(fn, tuple, i, id), id) 1080 } 1081 } 1082 } 1083 } 1084 1085 // assignStmt emits code to fn for a parallel assignment of rhss to lhss. 1086 // isDef is true if this is a short variable declaration (:=). 1087 // 1088 // Note the similarity with localValueSpec. 
func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) {
	// Side effects of all LHSs and RHSs must occur in left-to-right order.
	lvals := make([]lvalue, len(lhss))
	isZero := make([]bool, len(lhss))
	for i, lhs := range lhss {
		var lval lvalue = blank{}
		if !isBlankIdent(lhs) {
			if isDef {
				// A short variable declaration may introduce a new
				// local; only then is its old value known to be zero.
				if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil {
					fn.addNamedLocal(obj, lhs)
					isZero[i] = true
				}
			}
			lval = b.addr(fn, lhs, false) // non-escaping
		}
		lvals[i] = lval
	}
	if len(lhss) == len(rhss) {
		// Simple assignment:   x     = f()        (!isDef)
		// Parallel assignment: x, y  = f(), g()   (!isDef)
		// or short var decl:   x, y := f(), g()   (isDef)
		//
		// In all cases, the RHSs may refer to the LHSs,
		// so we need a storebuf.
		var sb storebuf
		for i := range rhss {
			b.assign(fn, lvals[i], rhss[i], isZero[i], &sb, source)
		}
		sb.emit(fn)
	} else {
		// e.g. x, y = pos()
		tuple := b.exprN(fn, rhss[0])
		emitDebugRef(fn, rhss[0], tuple, false)
		for i, lval := range lvals {
			lval.store(fn, emitExtract(fn, tuple, i, source), source)
		}
	}
}

// arrayLen returns the length of the array whose composite literal elements are elts.
func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
	var max int64 = -1
	var i int64 = -1
	for _, e := range elts {
		if kv, ok := e.(*ast.KeyValueExpr); ok {
			// Keyed element: jump to the constant index.
			i = b.expr(fn, kv.Key).(*Const).Int64()
		} else {
			i++
		}
		if i > max {
			max = i
		}
	}
	return max + 1
}

// compLit emits to fn code to initialize a composite literal e at
// address addr with type typ.
//
// Nested composite literals are recursively initialized in place
// where possible. If isZero is true, compLit assumes that addr
// holds the zero value for typ.
//
// Because the elements of a composite literal may refer to the
// variables being updated, as in the second line below,
//
//	x := T{a: 1}
//	x = T{a: x.a}
//
// all the reads must occur before all the writes. This is implicitly handled by the write buffering effected by
// compositeElement and explicitly by the storebuf for when we don't use CompositeValue.
//
// A CompositeLit may have pointer type only in the recursive (nested)
// case when the type name is implicit. e.g. in []*T{{}}, the inner
// literal has type *T behaves like &T{}.
// In that case, addr must hold a T, not a *T.
func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
	typ := deref(fn.Pkg.typeOf(e))
	switch t := typeutil.CoreType(typ).(type) {
	case *types.Struct:
		lvalue := &address{addr: addr, expr: e}
		if len(e.Elts) == 0 {
			if !isZero {
				sb.store(lvalue, zeroValue(fn, deref(addr.Type()), e), e)
			}
		} else {
			v := &CompositeValue{
				Values: make([]Value, t.NumFields()),
			}
			for i := 0; i < t.NumFields(); i++ {
				v.Values[i] = emitConst(fn, zeroConst(t.Field(i).Type(), e))
			}
			v.setType(typ)

			for i, e := range e.Elts {
				fieldIndex := i
				if kv, ok := e.(*ast.KeyValueExpr); ok {
					// Keyed field: resolve the field name to its index.
					fname := kv.Key.(*ast.Ident).Name
					for i, n := 0, t.NumFields(); i < n; i++ {
						sf := t.Field(i)
						if sf.Name() == fname {
							fieldIndex = i
							e = kv.Value
							break
						}
					}
				}

				ce := &compositeElement{
					cv:   v,
					idx:  fieldIndex,
					t:    t.Field(fieldIndex).Type(),
					expr: e,
				}
				b.assign(fn, ce, e, isZero, sb, e)
				v.Bitmap.SetBit(&v.Bitmap, fieldIndex, 1)
				v.NumSet++
			}
			fn.emit(v, e)
			sb.store(lvalue, v, e)
		}

	case *types.Array, *types.Slice:
		var at *types.Array
		var array Value
		switch t := t.(type) {
		case *types.Slice:
			at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
			alloc := emitNew(fn, at, e)
			array = alloc
		case *types.Array:
			at = t
			array = addr
		}

		var final Value
		if len(e.Elts) == 0 {
			if !isZero {
				zc := emitConst(fn, zeroConst(at, e))
				final = zc
			}
		} else {
			if at.Len() == int64(len(e.Elts)) {
				// The literal specifies all elements, so we can use a composite value
				v := &CompositeValue{
					Values: make([]Value, at.Len()),
				}
				zc := emitConst(fn, zeroConst(at.Elem(), e))
				for i := range v.Values {
					v.Values[i] = zc
				}
				v.setType(at)

				var idx *Const
				for _, e := range e.Elts {
					if kv, ok := e.(*ast.KeyValueExpr); ok {
						idx = b.expr(fn, kv.Key).(*Const)
						e = kv.Value
					} else {
						// Unkeyed element: index is previous index + 1.
						var idxval int64
						if idx != nil {
							idxval = idx.Int64() + 1
						}
						idx = emitConst(fn, intConst(idxval, e)).(*Const)
					}

					iaddr := &compositeElement{
						cv:   v,
						idx:  int(idx.Int64()),
						t:    at.Elem(),
						expr: e,
					}

					b.assign(fn, iaddr, e, true, sb, e)
					v.Bitmap.SetBit(&v.Bitmap, int(idx.Int64()), 1)
					v.NumSet++
				}
				final = v
				fn.emit(v, e)
			} else {
				// Not all elements are specified. Populate the array with a series of stores, to guard against literals
				// like []int{1<<62: 1}.
				if !isZero {
					// memclear
					sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e)
				}

				var idx *Const
				for _, e := range e.Elts {
					if kv, ok := e.(*ast.KeyValueExpr); ok {
						idx = b.expr(fn, kv.Key).(*Const)
						e = kv.Value
					} else {
						// Unkeyed element: index is previous index + 1.
						var idxval int64
						if idx != nil {
							idxval = idx.Int64() + 1
						}
						idx = emitConst(fn, intConst(idxval, e)).(*Const)
					}
					iaddr := &IndexAddr{
						X:     array,
						Index: idx,
					}
					iaddr.setType(types.NewPointer(at.Elem()))
					fn.emit(iaddr, e)
					if t != at { // slice
						// backing array is unaliased => storebuf not needed.
						b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e)
					} else {
						b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e)
					}
				}
			}
		}
		if t != at { // slice
			if final != nil {
				sb.store(&address{addr: array}, final, e)
			}
			s := &Slice{X: array}
			s.setType(typ)
			sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e)
		} else if final != nil {
			sb.store(&address{addr: array, expr: e}, final, e)
		}

	case *types.Map:
		m := &MakeMap{Reserve: emitConst(fn, intConst(int64(len(e.Elts)), e))}
		m.setType(typ)
		fn.emit(m, e)
		for _, e := range e.Elts {
			e := e.(*ast.KeyValueExpr)

			// If a key expression in a map literal is itself a
			// composite literal, the type may be omitted.
			// For example:
			//	map[*struct{}]bool{{}: true}
			// An &-operation may be implied:
			//	map[*struct{}]bool{&struct{}{}: true}
			var key Value
			if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) {
				// A CompositeLit never evaluates to a pointer,
				// so if the type of the location is a pointer,
				// an &-operation is implied.
				key = b.addr(fn, e.Key, true).address(fn)
			} else {
				key = b.expr(fn, e.Key)
			}

			loc := element{
				m: m,
				k: emitConv(fn, key, t.Key(), e),
				t: t.Elem(),
			}

			// We call assign() only because it takes care
			// of any &-operation required in the recursive
			// case, e.g.,
			// map[int]*struct{}{0: {}} implies &struct{}{}.
			// In-place update is of course impossible,
			// and no storebuf is needed.
			b.assign(fn, &loc, e.Value, true, nil, e)
		}
		sb.store(&address{addr: addr, expr: e}, m, e)

	default:
		panic("unexpected CompositeLit type: " + t.String())
	}
}

// switchStmt emits to fn code for the switch statement s, optionally
// labelled by label. When the tag is absent or any case condition is
// non-constant, it delegates to switchStmtDynamic; otherwise it emits
// a single ConstantSwitch instruction dispatching over the constant
// case conditions.
func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
	if s.Tag == nil {
		b.switchStmtDynamic(fn, s, label)
		return
	}
	dynamic := false
	for _, iclause := range s.Body.List {
		clause := iclause.(*ast.CaseClause)
		for _, cond := range clause.List {
			if fn.Pkg.info.Types[unparen(cond)].Value == nil {
				dynamic = true
				break
			}
		}
	}

	if dynamic {
		b.switchStmtDynamic(fn, s, label)
		return
	}

	if s.Init != nil {
		b.stmt(fn, s.Init)
	}

	entry := fn.currentBlock
	tag := b.expr(fn, s.Tag)

	heads := make([]*BasicBlock, 0, len(s.Body.List))
	bodies := make([]*BasicBlock, len(s.Body.List))
	conds := make([]Value, 0, len(s.Body.List))

	hasDefault := false
	done := fn.newBasicBlock("switch.done")
	if label != nil {
		label._break = done
	}
	for i, stmt := range s.Body.List {
		body := fn.newBasicBlock(fmt.Sprintf("switch.body.%d", i))
		bodies[i] = body
		cas := stmt.(*ast.CaseClause)
		if cas.List == nil {
			// default branch
			hasDefault = true
			head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d", i))
			conds = append(conds, nil)
			heads = append(heads, head)
			fn.currentBlock = head
			emitJump(fn, body, cas)
		}
		for j, cond := range stmt.(*ast.CaseClause).List {
			fn.currentBlock = entry
			head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d.%d", i, j))
			conds = append(conds, b.expr(fn, cond))
			heads = append(heads, head)
			fn.currentBlock = head
			emitJump(fn, body, cond)
		}
	}

	for i, stmt := range s.Body.List {
		clause := stmt.(*ast.CaseClause)
		body := bodies[i]
		fn.currentBlock = body
		fallthru := done
		if i+1 < len(bodies) {
			fallthru = bodies[i+1]
		}
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: fallthru,
		}
		b.stmtList(fn, clause.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, stmt)
	}

	if !hasDefault {
		// Synthesize an implicit default that jumps straight to done.
		head := fn.newBasicBlock("switch.head.implicit-default")
		body := fn.newBasicBlock("switch.body.implicit-default")
		fn.currentBlock = head
		emitJump(fn, body, s)
		fn.currentBlock = body
		emitJump(fn, done, s)
		heads = append(heads, head)
		conds = append(conds, nil)
	}

	if len(heads) != len(conds) {
		panic(fmt.Sprintf("internal error: %d heads for %d conds", len(heads), len(conds)))
	}
	for _, head := range heads {
		addEdge(entry, head)
	}
	fn.currentBlock = entry
	entry.emit(&ConstantSwitch{
		Tag:   tag,
		Conds: conds,
	}, s)
	fn.currentBlock = done
}

// switchStmtDynamic emits to fn code for the switch statement s, optionally
// labelled by label.
func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) {
	// We treat SwitchStmt like a sequential if-else chain.
	// Multiway dispatch can be recovered later by irutil.Switches()
	// to those cases that are free of side effects.
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}
	kTrue := emitConst(fn, NewConst(constant.MakeBool(true), tBool, nil))

	var tagv Value = kTrue
	var tagSource ast.Node = s
	if s.Tag != nil {
		tagv = b.expr(fn, s.Tag)
		tagSource = s.Tag
	}
	// lifting only considers loads and stores, but we want different
	// sigma nodes for the different comparisons. use a temporary and
	// load it in every branch.
	tag := fn.addLocal(tagv.Type(), tagSource)
	emitStore(fn, tag, tagv, tagSource)

	done := fn.newBasicBlock("switch.done")
	if label != nil {
		label._break = done
	}
	// We pull the default case (if present) down to the end.
	// But each fallthrough label must point to the next
	// body block in source order, so we preallocate a
	// body block (fallthru) for the next case.
	// Unfortunately this makes for a confusing block order.
	var dfltBody *[]ast.Stmt
	var dfltFallthrough *BasicBlock
	var fallthru, dfltBlock *BasicBlock
	ncases := len(s.Body.List)
	for i, clause := range s.Body.List {
		body := fallthru
		if body == nil {
			body = fn.newBasicBlock("switch.body") // first case only
		}

		// Preallocate body block for the next case.
		fallthru = done
		if i+1 < ncases {
			fallthru = fn.newBasicBlock("switch.body")
		}

		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			// Default case.
			dfltBody = &cc.Body
			dfltFallthrough = fallthru
			dfltBlock = body
			continue
		}

		var nextCond *BasicBlock
		for _, cond := range cc.List {
			nextCond = fn.newBasicBlock("switch.next")
			if tagv == kTrue {
				// emit a proper if/else chain instead of a comparison
				// of a value against true.
				//
				// NOTE(dh): adonovan had a todo saying "don't forget
				// conversions though". As far as I can tell, there
				// aren't any conversions that we need to take care of
				// here. `case bool(a) && bool(b)` as well as `case
				// bool(a && b)` are being taken care of by b.cond,
				// and `case a` where a is not of type bool is
				// invalid.
				b.cond(fn, cond, body, nextCond)
			} else {
				cond := emitCompare(fn, token.EQL, emitLoad(fn, tag, cond), b.expr(fn, cond), cond)
				emitIf(fn, cond, body, nextCond, cond.Source())
			}

			fn.currentBlock = nextCond
		}
		fn.currentBlock = body
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: fallthru,
		}
		b.stmtList(fn, cc.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, s)
		fn.currentBlock = nextCond
	}
	if dfltBlock != nil {
		// The lack of a Source for the jump doesn't matter, block
		// fusing will get rid of the jump later.

		emitJump(fn, dfltBlock, s)
		fn.currentBlock = dfltBlock
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: dfltFallthrough,
		}
		b.stmtList(fn, *dfltBody)
		fn.targets = fn.targets.tail
	}
	emitJump(fn, done, s)
	fn.currentBlock = done
}

// typeSwitchStmt emits to fn code for the type switch statement s,
// optionally labelled by label. It lowers the statement to a
// TypeSwitch instruction followed by a ConstantSwitch on the index it
// yields.
func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}

	var tag Value
	switch e := s.Assign.(type) {
	case *ast.ExprStmt: // x.(type)
		tag = b.expr(fn, unparen(e.X).(*ast.TypeAssertExpr).X)
	case *ast.AssignStmt: // y := x.(type)
		tag = b.expr(fn, unparen(e.Rhs[0]).(*ast.TypeAssertExpr).X)
	default:
		panic("unreachable")
	}
	tagPtr := fn.addLocal(tag.Type(), tag.Source())
	emitStore(fn, tagPtr, tag, tag.Source())

	// +1 in case there's no explicit default case
	heads := make([]*BasicBlock, 0, len(s.Body.List)+1)

	entry := fn.currentBlock
	done := fn.newBasicBlock("done")
	if label != nil {
		label._break = done
	}

	// set up type switch and constant switch, populate their conditions
	tswtch := &TypeSwitch{
		Tag:   emitLoad(fn, tagPtr, tag.Source()),
		Conds: make([]types.Type, 0, len(s.Body.List)+1),
	}
	cswtch := &ConstantSwitch{
		Conds: make([]Value, 0, len(s.Body.List)+1),
	}

	rets := make([]types.Type, 0, len(s.Body.List)+1)
	index := 0
	var default_ *ast.CaseClause
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
			fn.addNamedLocal(obj, cc)
		}
		if cc.List == nil {
			// default case
			default_ = cc
		} else {
			for _, expr := range cc.List {
				tswtch.Conds = append(tswtch.Conds, fn.Pkg.typeOf(expr))
				cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(index), expr)))
				index++
			}
			if len(cc.List) == 1 {
				rets = append(rets, fn.Pkg.typeOf(cc.List[0]))
			} else {
				for range cc.List {
					rets = append(rets, tag.Type())
				}
			}
		}
	}

	// default branch
	rets = append(rets, tag.Type())

	var vars []*types.Var
	vars = append(vars, varIndex)
	for _, typ := range rets {
		vars = append(vars, anonVar(typ))
	}
	tswtch.setType(types.NewTuple(vars...))
	// default branch
	fn.currentBlock = entry
	fn.emit(tswtch, s)
	cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(-1), nil)))
	// in theory we should add a local and stores/loads for tswtch, to
	// generate sigma nodes in the branches. however, there isn't any
	// useful information we could possibly attach to it.
	cswtch.Tag = emitExtract(fn, tswtch, 0, s)
	fn.emit(cswtch, s)

	// build heads and bodies
	index = 0
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			continue
		}

		body := fn.newBasicBlock("typeswitch.body")
		for _, expr := range cc.List {
			head := fn.newBasicBlock("typeswitch.head")
			heads = append(heads, head)
			fn.currentBlock = head

			if obj := fn.Pkg.info.Implicits[cc]; obj != nil {
				// In a switch y := x.(type), each case clause
				// implicitly declares a distinct object y.
				// In a single-type case, y has that type.
				// In multi-type cases, 'case nil' and default,
				// y has the same type as the interface operand.

				l := fn.objects[obj]
				if rets[index] == tUntypedNil {
					emitStore(fn, l, emitConst(fn, nilConst(tswtch.Tag.Type(), nil)), s.Assign)
				} else {
					x := emitExtract(fn, tswtch, index+1, s.Assign)
					emitStore(fn, l, x, nil)
				}
			}

			emitJump(fn, body, expr)
			index++
		}
		fn.currentBlock = body
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		b.stmtList(fn, cc.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, clause)
	}

	if default_ == nil {
		// implicit default
		heads = append(heads, done)
	} else {
		body := fn.newBasicBlock("typeswitch.default")
		heads = append(heads, body)
		fn.currentBlock = body
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		if obj := fn.Pkg.info.Implicits[default_]; obj != nil {
			l := fn.objects[obj]
			x := emitExtract(fn, tswtch, index+1, s.Assign)
			emitStore(fn, l, x, s)
		}
		b.stmtList(fn, default_.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, s)
	}

	fn.currentBlock = entry
	for _, head := range heads {
		addEdge(entry, head)
	}
	fn.currentBlock = done
}

// selectStmt emits to fn code for the select statement s, optionally
// labelled by label.
func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) {
	// An empty select blocks forever: emit Select then Unreachable.
	if len(s.Body.List) == 0 {
		instr := &Select{Blocking: true}
		instr.setType(types.NewTuple(varIndex, varOk))
		fn.emit(instr, s)
		fn.emit(new(Unreachable), s)
		addEdge(fn.currentBlock, fn.Exit)
		return true
	}

	// A blocking select of a single case degenerates to a
	// simple send or receive.
	// TODO(adonovan): opt: is this optimization worth its weight?
	if len(s.Body.List) == 1 {
		clause := s.Body.List[0].(*ast.CommClause)
		if clause.Comm != nil {
			b.stmt(fn, clause.Comm)
			done := fn.newBasicBlock("select.done")
			if label != nil {
				label._break = done
			}
			fn.targets = &targets{
				tail:   fn.targets,
				_break: done,
			}
			b.stmtList(fn, clause.Body)
			fn.targets = fn.targets.tail
			emitJump(fn, done, clause)
			fn.currentBlock = done
			return false
		}
	}

	// First evaluate all channels in all cases, and find
	// the directions of each state.
	var states []*SelectState
	blocking := true
	debugInfo := fn.debugInfo()
	for _, clause := range s.Body.List {
		var st *SelectState
		switch comm := clause.(*ast.CommClause).Comm.(type) {
		case nil: // default case
			blocking = false
			continue

		case *ast.SendStmt: // ch<- i
			ch := b.expr(fn, comm.Chan)
			st = &SelectState{
				Dir:  types.SendOnly,
				Chan: ch,
				Send: emitConv(fn, b.expr(fn, comm.Value),
					typeutil.CoreType(ch.Type()).Underlying().(*types.Chan).Elem(), comm),
				Pos: comm.Arrow,
			}
			if debugInfo {
				st.DebugNode = comm
			}

		case *ast.AssignStmt: // x := <-ch
			recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  types.RecvOnly,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}

		case *ast.ExprStmt: // <-ch
			recv := unparen(comm.X).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  types.RecvOnly,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}
		}
		states = append(states, st)
	}

	// We dispatch on the (fair) result of Select using a
	// switch on the returned index.
	sel := &Select{
		States:   states,
		Blocking: blocking,
	}
	sel.source = s
	var vars []*types.Var
	vars = append(vars, varIndex, varOk)
	for _, st := range states {
		if st.Dir == types.RecvOnly {
			tElem := typeutil.CoreType(st.Chan.Type()).Underlying().(*types.Chan).Elem()
			vars = append(vars, anonVar(tElem))
		}
	}
	sel.setType(types.NewTuple(vars...))
	fn.emit(sel, s)
	idx := emitExtract(fn, sel, 0, s)

	done := fn.newBasicBlock("select.done")
	if label != nil {
		label._break = done
	}

	entry := fn.currentBlock
	swtch := &ConstantSwitch{
		Tag: idx,
		// one condition per case
		Conds: make([]Value, 0, len(s.Body.List)+1),
	}
	// note that we don't need heads; a select case can only have a single condition
	var bodies []*BasicBlock

	state := 0
	r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
	for _, cc := range s.Body.List {
		clause := cc.(*ast.CommClause)
		if clause.Comm == nil {
			body := fn.newBasicBlock("select.default")
			fn.currentBlock = body
			bodies = append(bodies, body)
			fn.targets = &targets{
				tail:   fn.targets,
				_break: done,
			}
			b.stmtList(fn, clause.Body)
			emitJump(fn, done, s)
			fn.targets = fn.targets.tail
			swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(-1, nil)))
			continue
		}
		swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(int64(state), nil)))
		body := fn.newBasicBlock("select.body")
		fn.currentBlock = body
		bodies = append(bodies, body)
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		switch comm := clause.Comm.(type) {
		case *ast.ExprStmt: // <-ch
			if debugInfo {
				v := emitExtract(fn, sel, r, comm)
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			r++

		case *ast.AssignStmt: // x := <-states[state].Chan
			if comm.Tok == token.DEFINE {
				fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
			}
			x := b.addr(fn, comm.Lhs[0], false) // non-escaping
			v := emitExtract(fn, sel, r, comm)
			if debugInfo {
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			x.store(fn, v, comm)

			if len(comm.Lhs) == 2 { // x, ok := ...
				if comm.Tok == token.DEFINE {
					fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
				}
				ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
				ok.store(fn, emitExtract(fn, sel, 1, comm), comm)
			}
			r++
		}
		b.stmtList(fn, clause.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done, s)
		state++
	}
	fn.currentBlock = entry
	fn.emit(swtch, s)
	for _, body := range bodies {
		addEdge(entry, body)
	}
	fn.currentBlock = done
	return false
}

// forStmt emits to fn code for the for statement s, optionally
// labelled by label.
func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
	// ...init...
	// jump loop
	// loop:
	//      if cond goto body else done
	// body:
	//      ...body...
	//      jump post
	// post:                                 (target of continue)
	//      ...post...
	//      jump loop
	// done:                                 (target of break)
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}
	body := fn.newBasicBlock("for.body")
	done := fn.newBasicBlock("for.done") // target of 'break'
	loop := body                         // target of back-edge
	if s.Cond != nil {
		loop = fn.newBasicBlock("for.loop")
	}
	cont := loop // target of 'continue'
	if s.Post != nil {
		cont = fn.newBasicBlock("for.post")
	}
	if label != nil {
		label._break = done
		label._continue = cont
	}
	emitJump(fn, loop, s)
	fn.currentBlock = loop
	if loop != body {
		b.cond(fn, s.Cond, body, done)
		fn.currentBlock = body
	}
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: cont,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, cont, s)

	if s.Post != nil {
		fn.currentBlock = cont
		b.stmt(fn, s.Post)
		emitJump(fn, loop, s) // back-edge
	}
	fn.currentBlock = done
}

// rangeIndexed emits to fn the header for an integer-indexed loop
// over array, *array or slice value x.
// The v result is defined only if tv is non-nil.
// forPos is the position of the "for" token.
func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
	//
	//     length = len(x)
	//     index = -1
	// loop:                                 (target of continue)
	//     index++
	//     if index < length goto body else done
	// body:
	//     k = index
	//     v = x[index]
	//     ...body...
	//     jump loop
	// done:                                 (target of break)

	// We store in an Alloc and load it on each iteration so that lifting produces the necessary σ nodes
	xAlloc := newVariable(fn, x.Type(), source)
	xAlloc.store(x)

	// Determine number of iterations.
	//
	// We store the length in an Alloc and load it on each iteration so that lifting produces the necessary σ nodes
	length := newVariable(fn, tInt, source)
	if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok && !typeparams.IsTypeParam(x.Type()) {
		// For array or *array, the number of iterations is known statically thanks to the type. We avoid a data
		// dependence upon x, permitting later dead-code elimination if x is pure, static unrolling, etc. Ranging over a
		// nil *array may have >0 iterations. We still generate code for x, in case it has effects.
		//
		// This intentionally misses type parameters with core types, because their length isn't technically constant.
		length.store(emitConst(fn, intConst(arr.Len(), nil)))
	} else {
		// length = len(x).
		var c Call
		c.Call.Value = makeLen(x.Type())
		c.Call.Args = []Value{x}
		c.setType(tInt)
		length.store(fn.emit(&c, source))
	}

	index := fn.addLocal(tInt, source)
	emitStore(fn, index, emitConst(fn, intConst(-1, nil)), source)

	loop = fn.newBasicBlock("rangeindex.loop")
	emitJump(fn, loop, source)
	fn.currentBlock = loop

	incr := &BinOp{
		Op: token.ADD,
		X:  emitLoad(fn, index, source),
		Y:  emitConst(fn, intConst(1, nil)),
	}
	incr.setType(tInt)
	emitStore(fn, index, fn.emit(incr, source), source)

	body := fn.newBasicBlock("rangeindex.body")
	done = fn.newBasicBlock("rangeindex.done")
	emitIf(fn, emitCompare(fn, token.LSS, incr, length.load(), source), body, done, source)
	fn.currentBlock = body

	k = emitLoad(fn, index, source)
	if tv != nil {
		x := xAlloc.load()
		switch t := typeutil.CoreType(x.Type()).Underlying().(type) {
		case *types.Array:
			instr := &Index{
				X:     x,
				Index: k,
			}
			instr.setType(t.Elem())
			v = fn.emit(instr, source)

		case *types.Pointer: // *array
			instr := &IndexAddr{
				X:     x,
				Index: k,
			}
			instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()))
			v = emitLoad(fn, fn.emit(instr, source), source)

		case *types.Slice:
			instr := &IndexAddr{
				X:     x,
				Index: k,
			}
			instr.setType(types.NewPointer(t.Elem()))
			v = emitLoad(fn, fn.emit(instr, source), source)

		default:
			panic("rangeIndexed x:" + t.String())
		}
	}
	return
}

// rangeIter emits to fn the header for a loop using
// Range/Next/Extract to iterate over map or string value x.
// tk and tv are the types of the key/value results k and v, or nil
// if the respective component is not wanted.
func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) {
	//
	//     it = range x
	// loop:                                 (target of continue)
	//     okv = next it                     (ok, key, value)
	//     ok = extract okv #0
	//     if ok goto body else done
	// body:
	//     k = extract okv #1
	//     v = extract okv #2
	//     ...body...
	//     jump loop
	// done:                                 (target of break)
	//

	if tk == nil {
		tk = tInvalid
	}
	if tv == nil {
		tv = tInvalid
	}

	rng := &Range{X: x}
	rng.setType(typeutil.NewIterator(types.NewTuple(
		varOk,
		newVar("k", tk),
		newVar("v", tv),
	)))
	it := newVariable(fn, rng.typ, source)
	it.store(fn.emit(rng, source))

	loop = fn.newBasicBlock("rangeiter.loop")
	emitJump(fn, loop, source)
	fn.currentBlock = loop

	// Go doesn't currently allow ranging over string|[]byte, so isString is decidable.
	_, isString := typeutil.CoreType(x.Type()).Underlying().(*types.Basic)

	okvInstr := &Next{
		Iter:     it.load(),
		IsString: isString,
	}
	okvInstr.setType(rng.typ.(*typeutil.Iterator).Elem())
	fn.emit(okvInstr, source)
	okv := newVariable(fn, okvInstr.Type(), source)
	okv.store(okvInstr)

	body := fn.newBasicBlock("rangeiter.body")
	done = fn.newBasicBlock("rangeiter.done")
	emitIf(fn, emitExtract(fn, okv.load(), 0, source), body, done, source)
	fn.currentBlock = body

	if tk != tInvalid {
		k = emitExtract(fn, okv.load(), 1, source)
	}
	if tv != tInvalid {
		v = emitExtract(fn, okv.load(), 2, source)
	}
	return
}

// rangeChan emits to fn the header for a loop that receives from
// channel x until it fails.
// tk is the channel's element type, or nil if the k result is
// not wanted
// pos is the position of the '=' or ':=' token.
func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) {
	//
	// loop:                                 (target of continue)
	//     ko = <-x                          (key, ok)
	//     ok = extract ko #1
	//     if ok goto body else done
	// body:
	//     k = extract ko #0
	//     ...
	//     goto loop
	// done:                                 (target of break)

	loop = fn.newBasicBlock("rangechan.loop")
	emitJump(fn, loop, source)
	fn.currentBlock = loop

	recv := emitRecv(fn, x, true, types.NewTuple(newVar("k", typeutil.CoreType(x.Type()).Underlying().(*types.Chan).Elem()), varOk), source)
	retv := newVariable(fn, recv.Type(), source)
	retv.store(recv)

	body := fn.newBasicBlock("rangechan.body")
	done = fn.newBasicBlock("rangechan.done")
	emitIf(fn, emitExtract(fn, retv.load(), 1, source), body, done, source)
	fn.currentBlock = body
	if tk != nil {
		k = emitExtract(fn, retv.load(), 0, source)
	}
	return
}

// variable wraps an Alloc-backed local that is stored and loaded
// explicitly, so that later lifting can produce the desired σ nodes.
type variable struct {
	alloc  *Alloc
	fn     *Function
	source ast.Node
}

// newVariable allocates a new local of type typ in fn and registers
// it in fn.Locals.
func newVariable(fn *Function, typ types.Type, source ast.Node) *variable {
	alloc := &Alloc{}
	alloc.setType(types.NewPointer(typ))
	fn.emit(alloc, source)
	fn.Locals = append(fn.Locals, alloc)
	return &variable{
		alloc:  alloc,
		fn:     fn,
		source: source,
	}
}

// store emits a store of sv to the variable's Alloc.
func (v *variable) store(sv Value) {
	emitStore(v.fn, v.alloc, sv, v.source)
}

// load emits a load of the variable's Alloc and returns the value.
func (v *variable) load() Value {
	return emitLoad(v.fn, v.alloc, v.source)
}

// rangeStmt emits to fn code for the range statement s, optionally
// labelled by label.
func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) {
	var tk, tv types.Type
	if s.Key != nil && !isBlankIdent(s.Key) {
		tk = fn.Pkg.typeOf(s.Key)
	}
	if s.Value != nil && !isBlankIdent(s.Value) {
		tv = fn.Pkg.typeOf(s.Value)
	}

	// If iteration variables are defined (:=), this
	// occurs once outside the loop.
	//
	// Unlike a short variable declaration, a RangeStmt
	// using := never redeclares an existing variable; it
	// always creates a new one.
	if s.Tok == token.DEFINE {
		if tk != nil {
			fn.addLocalForIdent(s.Key.(*ast.Ident))
		}
		if tv != nil {
			fn.addLocalForIdent(s.Value.(*ast.Ident))
		}
	}

	x := b.expr(fn, s.X)

	var k, v Value
	var loop, done *BasicBlock
	switch rt := typeutil.CoreType(x.Type()).Underlying().(type) {
	case *types.Slice, *types.Array, *types.Pointer: // *array
		k, v, loop, done = b.rangeIndexed(fn, x, tv, source)

	case *types.Chan:
		k, loop, done = b.rangeChan(fn, x, tk, source)

	case *types.Map, *types.Basic: // string
		k, v, loop, done = b.rangeIter(fn, x, tk, tv, source)

	default:
		panic("Cannot range over: " + rt.String())
	}

	// Evaluate both LHS expressions before we update either.
	var kl, vl lvalue
	if tk != nil {
		kl = b.addr(fn, s.Key, false) // non-escaping
	}
	if tv != nil {
		vl = b.addr(fn, s.Value, false) // non-escaping
	}
	if tk != nil {
		kl.store(fn, k, s)
	}
	if tv != nil {
		vl.store(fn, v, s)
	}

	if label != nil {
		label._break = done
		label._continue = loop
	}

	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: loop,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, loop, source) // back-edge
	fn.currentBlock = done
}

// stmt lowers statement s to IR form, emitting code to fn.
func (b *builder) stmt(fn *Function, _s ast.Stmt) {
	// The label of the current statement. If non-nil, its _goto
	// target is always set; its _break and _continue are set only
	// within the body of switch/typeswitch/select/for/range.
	// It is effectively an additional default-nil parameter of stmt().
	var label *lblock
start:
	switch s := _s.(type) {
	case *ast.EmptyStmt:
		// ignore. (Usually removed by gofmt.)

	case *ast.DeclStmt: // Con, Var or Typ
		// Only var declarations emit code; consts and types are
		// handled during type checking and need no IR.
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if vs, ok := spec.(*ast.ValueSpec); ok {
					b.localValueSpec(fn, vs)
				}
			}
		}

	case *ast.LabeledStmt:
		label = fn.labelledBlock(s.Label)
		emitJump(fn, label._goto, s)
		fn.currentBlock = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(fn, s.Stmt, label)

	case *ast.ExprStmt:
		// Evaluate for side effects; the result is discarded.
		b.expr(fn, s.X)

	case *ast.SendStmt:
		// The sent value is converted to the channel's element type.
		instr := &Send{
			Chan: b.expr(fn, s.Chan),
			X: emitConv(fn, b.expr(fn, s.Value),
				typeutil.CoreType(fn.Pkg.typeOf(s.Chan)).Underlying().(*types.Chan).Elem(), s),
		}
		fn.emit(instr, s)

	case *ast.IncDecStmt:
		// x++ / x-- lowers to x += 1 / x -= 1.
		op := token.ADD
		if s.Tok == token.DEC {
			op = token.SUB
		}
		loc := b.addr(fn, s.X, false)
		b.assignOp(fn, loc, emitConst(fn, NewConst(constant.MakeInt64(1), loc.typ(), s)), op, s)

	case *ast.AssignStmt:
		switch s.Tok {
		case token.ASSIGN, token.DEFINE:
			b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE, _s)

		default: // +=, etc.
			// Token arithmetic maps ADD_ASSIGN..AND_NOT_ASSIGN onto
			// the corresponding binary operator ADD..AND_NOT.
			op := s.Tok + token.ADD - token.ADD_ASSIGN
			b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s)
		}

	case *ast.GoStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Go{}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v, s)

	case *ast.DeferStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Defer{}
		b.setCall(fn, s.Call, &v.Call)
		fn.hasDefer = true
		fn.emit(&v, s)

	case *ast.ReturnStmt:
		// TODO(dh): we could emit tighter position information by
		// using the ith returned expression

		var results []Value
		if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
			// Return of one expression in a multi-valued function.
			tuple := b.exprN(fn, s.Results[0])
			ttuple := tuple.Type().(*types.Tuple)
			for i, n := 0, ttuple.Len(); i < n; i++ {
				results = append(results,
					emitConv(fn, emitExtract(fn, tuple, i, s),
						fn.Signature.Results().At(i).Type(), s))
			}
		} else {
			// 1:1 return, or no-arg return in non-void function.
			for i, r := range s.Results {
				v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type(), s)
				results = append(results, v)
			}
		}

		// Store into the named result slots and jump to the shared
		// exit block rather than emitting a Return directly.
		ret := fn.results()
		for i, r := range results {
			emitStore(fn, ret[i], r, s)
		}

		emitJump(fn, fn.Exit, s)
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BranchStmt:
		var block *BasicBlock
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._break
			} else {
				// Unlabelled break: nearest enclosing target with a
				// _break block.
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._continue
			} else {
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := fn.targets; t != nil && block == nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			block = fn.labelledBlock(s.Label)._goto
		}
		j := emitJump(fn, block, s)
		j.comment = s.Tok.String()
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BlockStmt:
		b.stmtList(fn, s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(fn, s.Init)
		}
		then := fn.newBasicBlock("if.then")
		done := fn.newBasicBlock("if.done")
		// Without an else branch, the false edge goes straight to done.
		els := done
		if s.Else != nil {
			els = fn.newBasicBlock("if.else")
		}
		instr := b.cond(fn, s.Cond, then, els)
		instr.source = s
		fn.currentBlock = then
		b.stmt(fn, s.Body)
		emitJump(fn, done, s)

		if s.Else != nil {
			fn.currentBlock = els
			b.stmt(fn, s.Else)
			emitJump(fn, done, s)
		}

		fn.currentBlock = done

	case *ast.SwitchStmt:
		b.switchStmt(fn, s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(fn, s, label)

	case *ast.SelectStmt:
		if b.selectStmt(fn, s, label) {
			// the select has no cases, it blocks forever
			fn.currentBlock = fn.newBasicBlock("unreachable")
		}

	case *ast.ForStmt:
		b.forStmt(fn, s, label)

	case *ast.RangeStmt:
		b.rangeStmt(fn, s, label, s)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}

// buildFunction builds IR code for the body of function fn. Idempotent.
func (b *builder) buildFunction(fn *Function) {
	if fn.Blocks != nil {
		return // building already started
	}

	// Locate the receiver, body and type from whichever AST node
	// declared the function.
	var recvField *ast.FieldList
	var body *ast.BlockStmt
	var functype *ast.FuncType
	switch n := fn.source.(type) {
	case nil:
		return // not a Go source function. (Synthetic, or from object file.)
	case *ast.FuncDecl:
		functype = n.Type
		recvField = n.Recv
		body = n.Body
	case *ast.FuncLit:
		functype = n.Type
		body = n.Body
	default:
		panic(n)
	}

	if fn.Package().Pkg.Path() == "syscall" && fn.Name() == "Exit" {
		// syscall.Exit is a stub and the way os.Exit terminates the
		// process. Note that there are other functions in the runtime
		// that also terminate or unwind that we cannot analyze.
		// However, they aren't stubs, so buildExits ends up getting
		// called on them, so that's where we handle those special
		// cases.
		fn.NoReturn = AlwaysExits
	}

	if body == nil {
		// External function.
		if fn.Params == nil {
			// This condition ensures we add a non-empty
			// params list once only, but we may attempt
			// the degenerate empty case repeatedly.
			// TODO(adonovan): opt: don't do that.

			// We set Function.Params even though there is no body
			// code to reference them. This simplifies clients.
			if recv := fn.Signature.Recv(); recv != nil {
				// XXX synthesize an ast.Node
				fn.addParamObj(recv, nil)
			}
			params := fn.Signature.Params()
			for i, n := 0, params.Len(); i < n; i++ {
				// XXX synthesize an ast.Node
				fn.addParamObj(params.At(i), nil)
			}
		}
		return
	}
	if fn.Prog.mode&LogSource != 0 {
		defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.Pos()))()
	}
	// Borrow the builder's scratch block sets for the duration of the
	// build, and hand them back afterwards.
	fn.blocksets = b.blocksets
	fn.Blocks = make([]*BasicBlock, 0, avgBlocks)
	fn.startBody()
	fn.createSyntacticParams(recvField, functype)
	fn.exitBlock()
	b.stmt(fn, body)
	if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) {
		// Control fell off the end of the function's body block.
		//
		// Block optimizations eliminate the current block, if
		// unreachable. It is a builder invariant that
		// if this no-arg return is ill-typed for
		// fn.Signature.Results, this block must be
		// unreachable. The sanity checker checks this.
		// fn.emit(new(RunDefers))
		// fn.emit(new(Return))
		emitJump(fn, fn.Exit, nil)
	}
	optimizeBlocks(fn)
	buildFakeExits(fn)
	b.buildExits(fn)
	b.addUnreachables(fn)
	fn.finishBody()
	b.blocksets = fn.blocksets
	// Release the AST and related build-time state.
	fn.functionBody = nil
}

// buildFuncDecl builds IR code for the function or method declared
// by decl in package pkg.
func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
	id := decl.Name
	if isBlankIdent(id) {
		return // discard
	}
	fn := pkg.values[pkg.info.Defs[id]].(*Function)
	if decl.Recv == nil && id.Name == "init" {
		// Package-level init functions are invoked from the synthetic
		// package initializer.
		var v Call
		v.Call.Value = fn
		v.setType(types.NewTuple())
		pkg.init.emit(&v, decl)
	}
	fn.source = decl
	b.buildFunction(fn)
}

// Build calls Package.Build for each package in prog.
//
// Build is intended for whole-program analysis; a typical compiler
// need only build a single package.
//
// Build is idempotent and thread-safe.
func (prog *Program) Build() {
	for _, p := range prog.packages {
		p.Build()
	}
}

// Build builds IR code for all functions and vars in package p.
//
// Precondition: CreatePackage must have been called for all of p's
// direct imports (and hence its direct imports must have been
// error-free).
//
// Build is idempotent and thread-safe.
func (p *Package) Build() { p.buildOnce.Do(p.build) }

func (p *Package) build() {
	if p.info == nil {
		return // synthetic package, e.g. "testmain"
	}

	// Ensure we have runtime type info for all exported members.
	// TODO(adonovan): ideally belongs in memberFromObject, but
	// that would require package creation in topological order.
	for name, mem := range p.Members {
		if ast.IsExported(name) {
			p.Prog.needMethodsOf(mem.Type())
		}
	}
	if p.Prog.mode&LogSource != 0 {
		defer logStack("build %s", p)()
	}
	// Construct the synthetic package initializer p.init.
	init := p.init
	init.startBody()
	init.exitBlock()

	var done *BasicBlock

	// Make init() skip if package is already initialized.
	initguard := p.Var("init$guard")
	doinit := init.newBasicBlock("init.start")
	done = init.Exit
	emitIf(init, emitLoad(init, initguard, nil), done, doinit, nil)
	init.currentBlock = doinit
	// Set the guard before running initializers, so re-entry skips.
	emitStore(init, initguard, emitConst(init, NewConst(constant.MakeBool(true), tBool, nil)), nil)

	// Call the init() function of each package we import.
	for _, pkg := range p.Pkg.Imports() {
		prereq := p.Prog.packages[pkg]
		if prereq == nil {
			panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
		}
		var v Call
		v.Call.Value = prereq.init
		v.setType(types.NewTuple())
		init.emit(&v, nil)
	}

	b := builder{
		printFunc: p.printFunc,
	}

	// Initialize package-level vars in correct order, as computed by
	// the type checker (go/types.Info.InitOrder).
	for _, varinit := range p.info.InitOrder {
		if init.Prog.mode&LogSource != 0 {
			fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
				varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
		}
		if len(varinit.Lhs) == 1 {
			// 1:1 initialization: var x, y = a(), b()
			var lval lvalue
			if v := varinit.Lhs[0]; v.Name() != "_" {
				lval = &address{addr: p.values[v].(*Global)}
			} else {
				lval = blank{}
			}
			// TODO(dh): do emit position information
			b.assign(init, lval, varinit.Rhs, true, nil, nil)
		} else {
			// n:1 initialization: var x, y := f()
			tuple := b.exprN(init, varinit.Rhs)
			for i, v := range varinit.Lhs {
				if v.Name() == "_" {
					continue
				}
				emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i, nil), nil)
			}
		}
	}

	// Build all package-level functions, init functions
	// and methods, including unreachable/blank ones.
	// We build them in source order, but it's not significant.
	for _, file := range p.files {
		for _, decl := range file.Decls {
			if decl, ok := decl.(*ast.FuncDecl); ok {
				b.buildFuncDecl(p, decl)
			}
		}
	}

	// Finish up init().
	emitJump(init, done, nil)
	init.finishBody()

	p.info = nil // We no longer need ASTs or go/types deductions.

	if p.Prog.mode&SanityCheckFunctions != 0 {
		sanityCheckPackage(p)
	}
}

// Like ObjectOf, but panics instead of returning nil.
// Only valid during p's create and build phases.
func (p *Package) objectOf(id *ast.Ident) types.Object {
	if o := p.info.ObjectOf(id); o != nil {
		return o
	}
	panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
		id.Name, p.Prog.Fset.Position(id.Pos())))
}

// Like TypeOf, but panics instead of returning nil.
// Only valid during p's create and build phases.
func (p *Package) typeOf(e ast.Expr) types.Type {
	if T := p.info.TypeOf(e); T != nil {
		return T
	}
	panic(fmt.Sprintf("no type for %T @ %s",
		e, p.Prog.Fset.Position(e.Pos())))
}