golang.org/x/tools@v0.21.1-0.20240520172518-788d39e776b1/go/ssa/builder.go

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

// This file defines the builder, which builds SSA-form IR for function bodies.
//
// SSA construction has two phases, "create" and "build". First, one
// or more packages are created in any order by a sequence of calls to
// CreatePackage, either from syntax or from mere type information.
// Each created package has a complete set of Members (const, var,
// type, func) that can be accessed through methods like
// Program.FuncValue.
//
// It is not necessary to call CreatePackage for all dependencies of
// each syntax package, only for its direct imports. (In future
// perhaps even this restriction may be lifted.)
//
// Second, packages created from syntax are built, by one or more
// calls to Package.Build, which may be concurrent; or by a call to
// Program.Build, which builds all packages in parallel. Building
// traverses the type-annotated syntax tree of each function body and
// creates SSA-form IR, a control-flow graph of instructions,
// populating fields such as Function.Body, .Params, and others.
//
// Building may create additional methods, including:
// - wrapper methods (e.g. for embedding, or implicit &recv)
// - bound method closures (e.g. for use(recv.f))
// - thunks (e.g. for use(I.f) or use(T.f))
// - generic instances (e.g. to produce f[int] from f[any]).
// As these methods are created, they are added to the build queue,
// and then processed in turn, until a fixed point is reached.
// Since these methods might belong to packages that were not
// created (by a call to CreatePackage), their Pkg field is unset.
//
// Instances of generic functions may be either instantiated (f[int]
// is a copy of f[T] with substitutions) or wrapped (f[int] delegates
// to f[T]), depending on the availability of generic syntax and the
// InstantiateGenerics mode flag.
//
// Each package has an initializer function named "init" that calls
// the initializer functions of each direct import, computes and
// assigns the initial value of each global variable, and calls each
// source-level function named "init". (These generate SSA functions
// named "init#1", "init#2", etc.)
//
// Runtime types
//
// Each MakeInterface operation is a conversion from a non-interface
// type to an interface type. The semantics of this operation requires
// a runtime type descriptor, which is the type portion of an
// interface, and the value abstracted by reflect.Type.
//
// The program accumulates all non-parameterized types that are
// encountered as MakeInterface operands, along with all types that
// may be derived from them using reflection. This set is available as
// Program.RuntimeTypes, and the methods of these types may be
// reachable via interface calls or reflection even if they are never
// referenced from the SSA IR. (In practice, algorithms such as RTA
// that compute reachability from package main perform their own
// tracking of runtime types at a finer grain, so this feature is not
// very useful.)
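//
// For example (an illustrative sketch, not code from this package):
//
//	type myInt int              // a defined non-interface type
//	var x myInt = 1
//	var i interface{} = x       // a MakeInterface operation
//
// The conversion on the last line adds myInt, and the types derivable
// from it by reflection, to Program.RuntimeTypes, so methods of myInt
// stay reachable via interface calls or reflection even if the IR
// never refers to them directly.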
//
// Function literals
//
// Anonymous functions must be built as soon as they are encountered,
// as building them may affect locals of the enclosing function, but
// they are not marked 'built' until the end of the outermost
// enclosing function.
// (Among other things, this causes them to be logged in top-down order.)
//
// The Function.build field determines the algorithm for building the
// function body. It is cleared to mark that building is complete.

import (
	"fmt"
	"go/ast"
	"go/constant"
	"go/token"
	"go/types"
	"os"
	"runtime"
	"sync"

	"golang.org/x/tools/internal/aliases"
	"golang.org/x/tools/internal/typeparams"
	"golang.org/x/tools/internal/versions"
)

type opaqueType struct{ name string }

func (t *opaqueType) String() string         { return t.name }
func (t *opaqueType) Underlying() types.Type { return t }

var (
	varOk    = newVar("ok", tBool)
	varIndex = newVar("index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tByte       = types.Typ[types.Byte]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tString     = types.Typ[types.String]
	tUntypedNil = types.Typ[types.UntypedNil]

	tRangeIter  = &opaqueType{"iter"}                         // the type of all "range" iterators
	tDeferStack = types.NewPointer(&opaqueType{"deferStack"}) // the type of a "deferStack" from ssa:deferstack()
	tEface      = types.NewInterfaceType(nil, nil).Complete()

	// SSA Value constants.
	vZero  = intConst(0)
	vOne   = intConst(1)
	vTrue  = NewConst(constant.MakeBool(true), tBool)
	vFalse = NewConst(constant.MakeBool(false), tBool)

	jReady = intConst(0)  // range-over-func jump is READY
	jBusy  = intConst(-1) // range-over-func jump is BUSY
	jDone  = intConst(-2) // range-over-func jump is DONE

	// The ssa:deferstack intrinsic returns the current function's defer stack.
	vDeferStack = &Builtin{
		name: "ssa:deferstack",
		sig:  types.NewSignatureType(nil, nil, nil, nil, types.NewTuple(anonVar(tDeferStack)), false),
	}
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-SSA conversion.
type builder struct {
	// Invariant: 0 <= rtypes <= finished <= created.Len()
	created  *creator // functions created during building
	finished int      // Invariant: create[i].built holds for i in [0,finished)
}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
	switch e := e.(type) {
	case *ast.ParenExpr:
		b.cond(fn, e.X, t, f)
		return

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND:
			ltrue := fn.newBasicBlock("cond.true")
			b.cond(fn, e.X, ltrue, f)
			fn.currentBlock = ltrue
			b.cond(fn, e.Y, t, f)
			return

		case token.LOR:
			lfalse := fn.newBasicBlock("cond.false")
			b.cond(fn, e.X, t, lfalse)
			fn.currentBlock = lfalse
			b.cond(fn, e.Y, t, f)
			return
		}

	case *ast.UnaryExpr:
		if e.Op == token.NOT {
			b.cond(fn, e.X, f, t)
			return
		}
	}

	// A traditional compiler would simplify "if false" (etc) here
	// but we do not, for better fidelity to the source code.
172 // 173 // The value of a constant condition may be platform-specific, 174 // and may cause blocks that are reachable in some configuration 175 // to be hidden from subsequent analyses such as bug-finding tools. 176 emitIf(fn, b.expr(fn, e), t, f) 177 } 178 179 // logicalBinop emits code to fn to evaluate e, a &&- or 180 // ||-expression whose reified boolean value is wanted. 181 // The value is returned. 182 func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { 183 rhs := fn.newBasicBlock("binop.rhs") 184 done := fn.newBasicBlock("binop.done") 185 186 // T(e) = T(e.X) = T(e.Y) after untyped constants have been 187 // eliminated. 188 // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. 189 t := fn.typeOf(e) 190 191 var short Value // value of the short-circuit path 192 switch e.Op { 193 case token.LAND: 194 b.cond(fn, e.X, rhs, done) 195 short = NewConst(constant.MakeBool(false), t) 196 197 case token.LOR: 198 b.cond(fn, e.X, done, rhs) 199 short = NewConst(constant.MakeBool(true), t) 200 } 201 202 // Is rhs unreachable? 203 if rhs.Preds == nil { 204 // Simplify false&&y to false, true||y to true. 205 fn.currentBlock = done 206 return short 207 } 208 209 // Is done unreachable? 210 if done.Preds == nil { 211 // Simplify true&&y (or false||y) to y. 212 fn.currentBlock = rhs 213 return b.expr(fn, e.Y) 214 } 215 216 // All edges from e.X to done carry the short-circuit value. 217 var edges []Value 218 for range done.Preds { 219 edges = append(edges, short) 220 } 221 222 // The edge from e.Y to done carries the value of e.Y. 223 fn.currentBlock = rhs 224 edges = append(edges, b.expr(fn, e.Y)) 225 emitJump(fn, done) 226 fn.currentBlock = done 227 228 phi := &Phi{Edges: edges, Comment: e.Op.String()} 229 phi.pos = e.OpPos 230 phi.typ = t 231 return done.emit(phi) 232 } 233 234 // exprN lowers a multi-result expression e to SSA form, emitting code 235 // to fn and returning a single Value whose type is a *types.Tuple. 236 // The caller must access the components via Extract. 237 // 238 // Multi-result expressions include CallExprs in a multi-value 239 // assignment or return statement, and "value,ok" uses of 240 // TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op 241 // is token.ARROW). 242 func (b *builder) exprN(fn *Function, e ast.Expr) Value { 243 typ := fn.typeOf(e).(*types.Tuple) 244 switch e := e.(type) { 245 case *ast.ParenExpr: 246 return b.exprN(fn, e.X) 247 248 case *ast.CallExpr: 249 // Currently, no built-in function nor type conversion 250 // has multiple results, so we can avoid some of the 251 // cases for single-valued CallExpr. 252 var c Call 253 b.setCall(fn, e, &c.Call) 254 c.typ = typ 255 return fn.emit(&c) 256 257 case *ast.IndexExpr: 258 mapt := typeparams.CoreType(fn.typeOf(e.X)).(*types.Map) // ,ok must be a map. 
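// An illustrative sketch (not from this file) of the comma-ok form
// handled in this case: the two-result map index
//
//	v, ok := m[k]
//
// reaches exprN and becomes a single Lookup instruction with CommaOk
// set; the caller later splits its tuple result (value, ok) with
// Extract instructions.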
259 lookup := &Lookup{ 260 X: b.expr(fn, e.X), 261 Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), 262 CommaOk: true, 263 } 264 lookup.setType(typ) 265 lookup.setPos(e.Lbrack) 266 return fn.emit(lookup) 267 268 case *ast.TypeAssertExpr: 269 return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) 270 271 case *ast.UnaryExpr: // must be receive <- 272 unop := &UnOp{ 273 Op: token.ARROW, 274 X: b.expr(fn, e.X), 275 CommaOk: true, 276 } 277 unop.setType(typ) 278 unop.setPos(e.OpPos) 279 return fn.emit(unop) 280 } 281 panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) 282 } 283 284 // builtin emits to fn SSA instructions to implement a call to the 285 // built-in function obj with the specified arguments 286 // and return type. It returns the value defined by the result. 287 // 288 // The result is nil if no special handling was required; in this case 289 // the caller should treat this like an ordinary library function 290 // call. 291 func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { 292 typ = fn.typ(typ) 293 switch obj.Name() { 294 case "make": 295 switch ct := typeparams.CoreType(typ).(type) { 296 case *types.Slice: 297 n := b.expr(fn, args[1]) 298 m := n 299 if len(args) == 3 { 300 m = b.expr(fn, args[2]) 301 } 302 if m, ok := m.(*Const); ok { 303 // treat make([]T, n, m) as new([m]T)[:n] 304 cap := m.Int64() 305 at := types.NewArray(ct.Elem(), cap) 306 v := &Slice{ 307 X: emitNew(fn, at, pos, "makeslice"), 308 High: n, 309 } 310 v.setPos(pos) 311 v.setType(typ) 312 return fn.emit(v) 313 } 314 v := &MakeSlice{ 315 Len: n, 316 Cap: m, 317 } 318 v.setPos(pos) 319 v.setType(typ) 320 return fn.emit(v) 321 322 case *types.Map: 323 var res Value 324 if len(args) == 2 { 325 res = b.expr(fn, args[1]) 326 } 327 v := &MakeMap{Reserve: res} 328 v.setPos(pos) 329 v.setType(typ) 330 return fn.emit(v) 331 332 case *types.Chan: 333 var sz Value = vZero 334 if len(args) == 2 { 335 sz = b.expr(fn, args[1]) 336 } 337 v := &MakeChan{Size: sz} 338 v.setPos(pos) 339 v.setType(typ) 340 return fn.emit(v) 341 } 342 343 case "new": 344 return emitNew(fn, typeparams.MustDeref(typ), pos, "new") 345 346 case "len", "cap": 347 // Special case: len or cap of an array or *array is 348 // based on the type, not the value which may be nil. 349 // We must still evaluate the value, though. (If it 350 // was side-effect free, the whole call would have 351 // been constant-folded.) 352 t := typeparams.Deref(fn.typeOf(args[0])) 353 if at, ok := typeparams.CoreType(t).(*types.Array); ok { 354 b.expr(fn, args[0]) // for effects only 355 return intConst(at.Len()) 356 } 357 // Otherwise treat as normal. 358 359 case "panic": 360 fn.emit(&Panic{ 361 X: emitConv(fn, b.expr(fn, args[0]), tEface), 362 pos: pos, 363 }) 364 fn.currentBlock = fn.newBasicBlock("unreachable") 365 return vTrue // any non-nil Value will do 366 } 367 return nil // treat all others as a regular function call 368 } 369 370 // addr lowers a single-result addressable expression e to SSA form, 371 // emitting code to fn and returning the location (an lvalue) defined 372 // by the expression. 373 // 374 // If escaping is true, addr marks the base variable of the 375 // addressable expression e as being a potentially escaping pointer 376 // value. 
For example, in this code: 377 // 378 // a := A{ 379 // b: [1]B{B{c: 1}} 380 // } 381 // return &a.b[0].c 382 // 383 // the application of & causes a.b[0].c to have its address taken, 384 // which means that ultimately the local variable a must be 385 // heap-allocated. This is a simple but very conservative escape 386 // analysis. 387 // 388 // Operations forming potentially escaping pointers include: 389 // - &x, including when implicit in method call or composite literals. 390 // - a[:] iff a is an array (not *array) 391 // - references to variables in lexically enclosing functions. 392 func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { 393 switch e := e.(type) { 394 case *ast.Ident: 395 if isBlankIdent(e) { 396 return blank{} 397 } 398 obj := fn.objectOf(e).(*types.Var) 399 var v Value 400 if g := fn.Prog.packageLevelMember(obj); g != nil { 401 v = g.(*Global) // var (address) 402 } else { 403 v = fn.lookup(obj, escaping) 404 } 405 return &address{addr: v, pos: e.Pos(), expr: e} 406 407 case *ast.CompositeLit: 408 typ := typeparams.Deref(fn.typeOf(e)) 409 var v *Alloc 410 if escaping { 411 v = emitNew(fn, typ, e.Lbrace, "complit") 412 } else { 413 v = emitLocal(fn, typ, e.Lbrace, "complit") 414 } 415 var sb storebuf 416 b.compLit(fn, v, e, true, &sb) 417 sb.emit(fn) 418 return &address{addr: v, pos: e.Lbrace, expr: e} 419 420 case *ast.ParenExpr: 421 return b.addr(fn, e.X, escaping) 422 423 case *ast.SelectorExpr: 424 sel := fn.selection(e) 425 if sel == nil { 426 // qualified identifier 427 return b.addr(fn, e.Sel, escaping) 428 } 429 if sel.kind != types.FieldVal { 430 panic(sel) 431 } 432 wantAddr := true 433 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 434 index := sel.index[len(sel.index)-1] 435 fld := fieldOf(typeparams.MustDeref(v.Type()), index) // v is an addr. 436 437 // Due to the two phases of resolving AssignStmt, a panic from x.f = p() 438 // when x is nil is required to come after the side-effects of 439 // evaluating x and p(). 440 emit := func(fn *Function) Value { 441 return emitFieldSelection(fn, v, index, true, e.Sel) 442 } 443 return &lazyAddress{addr: emit, t: fld.Type(), pos: e.Sel.Pos(), expr: e.Sel} 444 445 case *ast.IndexExpr: 446 xt := fn.typeOf(e.X) 447 elem, mode := indexType(xt) 448 var x Value 449 var et types.Type 450 switch mode { 451 case ixArrVar: // array, array|slice, array|*array, or array|*array|slice. 452 x = b.addr(fn, e.X, escaping).address(fn) 453 et = types.NewPointer(elem) 454 case ixVar: // *array, slice, *array|slice 455 x = b.expr(fn, e.X) 456 et = types.NewPointer(elem) 457 case ixMap: 458 mt := typeparams.CoreType(xt).(*types.Map) 459 return &element{ 460 m: b.expr(fn, e.X), 461 k: emitConv(fn, b.expr(fn, e.Index), mt.Key()), 462 t: mt.Elem(), 463 pos: e.Lbrack, 464 } 465 default: 466 panic("unexpected container type in IndexExpr: " + xt.String()) 467 } 468 index := b.expr(fn, e.Index) 469 if isUntyped(index.Type()) { 470 index = emitConv(fn, index, tInt) 471 } 472 // Due to the two phases of resolving AssignStmt, a panic from x[i] = p() 473 // when x is nil or i is out-of-bounds is required to come after the 474 // side-effects of evaluating x, i and p(). 
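// An illustrative sketch (not from this file) of that ordering: in
//
//	a[f()] = g()
//
// the calls f() and g() must run before the index operation can
// panic (nil x or out-of-range i), so the IndexAddr below is wrapped
// in a lazyAddress and emitted only when the store is finally
// performed.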
475 emit := func(fn *Function) Value { 476 v := &IndexAddr{ 477 X: x, 478 Index: index, 479 } 480 v.setPos(e.Lbrack) 481 v.setType(et) 482 return fn.emit(v) 483 } 484 return &lazyAddress{addr: emit, t: typeparams.MustDeref(et), pos: e.Lbrack, expr: e} 485 486 case *ast.StarExpr: 487 return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} 488 } 489 490 panic(fmt.Sprintf("unexpected address expression: %T", e)) 491 } 492 493 type store struct { 494 lhs lvalue 495 rhs Value 496 } 497 498 type storebuf struct{ stores []store } 499 500 func (sb *storebuf) store(lhs lvalue, rhs Value) { 501 sb.stores = append(sb.stores, store{lhs, rhs}) 502 } 503 504 func (sb *storebuf) emit(fn *Function) { 505 for _, s := range sb.stores { 506 s.lhs.store(fn, s.rhs) 507 } 508 } 509 510 // assign emits to fn code to initialize the lvalue loc with the value 511 // of expression e. If isZero is true, assign assumes that loc holds 512 // the zero value for its type. 513 // 514 // This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate 515 // better code in some cases, e.g., for composite literals in an 516 // addressable location. 517 // 518 // If sb is not nil, assign generates code to evaluate expression e, but 519 // not to update loc. Instead, the necessary stores are appended to the 520 // storebuf sb so that they can be executed later. This allows correct 521 // in-place update of existing variables when the RHS is a composite 522 // literal that may reference parts of the LHS. 523 func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { 524 // Can we initialize it in place? 525 if e, ok := unparen(e).(*ast.CompositeLit); ok { 526 // A CompositeLit never evaluates to a pointer, 527 // so if the type of the location is a pointer, 528 // an &-operation is implied. 529 if !is[blank](loc) && isPointerCore(loc.typ()) { // avoid calling blank.typ() 530 ptr := b.addr(fn, e, true).address(fn) 531 // copy address 532 if sb != nil { 533 sb.store(loc, ptr) 534 } else { 535 loc.store(fn, ptr) 536 } 537 return 538 } 539 540 if _, ok := loc.(*address); ok { 541 if isNonTypeParamInterface(loc.typ()) { 542 // e.g. var x interface{} = T{...} 543 // Can't in-place initialize an interface value. 544 // Fall back to copying. 545 } else { 546 // x = T{...} or x := T{...} 547 addr := loc.address(fn) 548 if sb != nil { 549 b.compLit(fn, addr, e, isZero, sb) 550 } else { 551 var sb storebuf 552 b.compLit(fn, addr, e, isZero, &sb) 553 sb.emit(fn) 554 } 555 556 // Subtle: emit debug ref for aggregate types only; 557 // slice and map are handled by store ops in compLit. 558 switch typeparams.CoreType(loc.typ()).(type) { 559 case *types.Struct, *types.Array: 560 emitDebugRef(fn, e, addr, true) 561 } 562 563 return 564 } 565 } 566 } 567 568 // simple case: just copy 569 rhs := b.expr(fn, e) 570 if sb != nil { 571 sb.store(loc, rhs) 572 } else { 573 loc.store(fn, rhs) 574 } 575 } 576 577 // expr lowers a single-result expression e to SSA form, emitting code 578 // to fn and returning the Value defined by the expression. 579 func (b *builder) expr(fn *Function, e ast.Expr) Value { 580 e = unparen(e) 581 582 tv := fn.info.Types[e] 583 584 // Is expression a constant? 585 if tv.Value != nil { 586 return NewConst(tv.Value, fn.typ(tv.Type)) 587 } 588 589 var v Value 590 if tv.Addressable() { 591 // Prefer pointer arithmetic ({Index,Field}Addr) followed 592 // by Load over subelement extraction (e.g. Index, Field), 593 // to avoid large copies. 
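// An illustrative sketch (not from this file) of that preference:
// for an addressable struct value s, the expression s.f is lowered
// roughly as
//
//	t0 = &s.f   // FieldAddr
//	t1 = *t0    // Load of just one field
//
// rather than copying all of s into a register and applying Field.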
594 v = b.addr(fn, e, false).load(fn) 595 } else { 596 v = b.expr0(fn, e, tv) 597 } 598 if fn.debugInfo() { 599 emitDebugRef(fn, e, v, false) 600 } 601 return v 602 } 603 604 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { 605 switch e := e.(type) { 606 case *ast.BasicLit: 607 panic("non-constant BasicLit") // unreachable 608 609 case *ast.FuncLit: 610 /* function literal */ 611 anon := &Function{ 612 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), 613 Signature: fn.typeOf(e.Type).(*types.Signature), 614 pos: e.Type.Func, 615 parent: fn, 616 anonIdx: int32(len(fn.AnonFuncs)), 617 Pkg: fn.Pkg, 618 Prog: fn.Prog, 619 syntax: e, 620 info: fn.info, 621 goversion: fn.goversion, 622 build: (*builder).buildFromSyntax, 623 topLevelOrigin: nil, // use anonIdx to lookup an anon instance's origin. 624 typeparams: fn.typeparams, // share the parent's type parameters. 625 typeargs: fn.typeargs, // share the parent's type arguments. 626 subst: fn.subst, // share the parent's type substitutions. 627 uniq: fn.uniq, // start from parent's unique values 628 } 629 fn.AnonFuncs = append(fn.AnonFuncs, anon) 630 // Build anon immediately, as it may cause fn's locals to escape. 631 // (It is not marked 'built' until the end of the enclosing FuncDecl.) 632 anon.build(b, anon) 633 fn.uniq = anon.uniq // resume after anon's unique values 634 if anon.FreeVars == nil { 635 return anon 636 } 637 v := &MakeClosure{Fn: anon} 638 v.setType(fn.typ(tv.Type)) 639 for _, fv := range anon.FreeVars { 640 v.Bindings = append(v.Bindings, fv.outer) 641 fv.outer = nil 642 } 643 return fn.emit(v) 644 645 case *ast.TypeAssertExpr: // single-result form only 646 return emitTypeAssert(fn, b.expr(fn, e.X), fn.typ(tv.Type), e.Lparen) 647 648 case *ast.CallExpr: 649 if fn.info.Types[e.Fun].IsType() { 650 // Explicit type conversion, e.g. string(x) or big.Int(x) 651 x := b.expr(fn, e.Args[0]) 652 y := emitConv(fn, x, fn.typ(tv.Type)) 653 if y != x { 654 switch y := y.(type) { 655 case *Convert: 656 y.pos = e.Lparen 657 case *ChangeType: 658 y.pos = e.Lparen 659 case *MakeInterface: 660 y.pos = e.Lparen 661 case *SliceToArrayPointer: 662 y.pos = e.Lparen 663 case *UnOp: // conversion from slice to array. 664 y.pos = e.Lparen 665 } 666 } 667 return y 668 } 669 // Call to "intrinsic" built-ins, e.g. new, make, panic. 670 if id, ok := unparen(e.Fun).(*ast.Ident); ok { 671 if obj, ok := fn.info.Uses[id].(*types.Builtin); ok { 672 if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil { 673 return v 674 } 675 } 676 } 677 // Regular function call. 678 var v Call 679 b.setCall(fn, e, &v.Call) 680 v.setType(fn.typ(tv.Type)) 681 return fn.emit(&v) 682 683 case *ast.UnaryExpr: 684 switch e.Op { 685 case token.AND: // &X --- potentially escaping. 686 addr := b.addr(fn, e.X, true) 687 if _, ok := unparen(e.X).(*ast.StarExpr); ok { 688 // &*p must panic if p is nil (http://golang.org/s/go12nil). 689 // For simplicity, we'll just (suboptimally) rely 690 // on the side effects of a load. 691 // TODO(adonovan): emit dedicated nilcheck. 692 addr.load(fn) 693 } 694 return addr.address(fn) 695 case token.ADD: 696 return b.expr(fn, e.X) 697 case token.NOT, token.ARROW, token.SUB, token.XOR: // ! 
<- - ^ 698 v := &UnOp{ 699 Op: e.Op, 700 X: b.expr(fn, e.X), 701 } 702 v.setPos(e.OpPos) 703 v.setType(fn.typ(tv.Type)) 704 return fn.emit(v) 705 default: 706 panic(e.Op) 707 } 708 709 case *ast.BinaryExpr: 710 switch e.Op { 711 case token.LAND, token.LOR: 712 return b.logicalBinop(fn, e) 713 case token.SHL, token.SHR: 714 fallthrough 715 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: 716 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.typ(tv.Type), e.OpPos) 717 718 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: 719 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) 720 // The type of x==y may be UntypedBool. 721 return emitConv(fn, cmp, types.Default(fn.typ(tv.Type))) 722 default: 723 panic("illegal op in BinaryExpr: " + e.Op.String()) 724 } 725 726 case *ast.SliceExpr: 727 var low, high, max Value 728 var x Value 729 xtyp := fn.typeOf(e.X) 730 switch typeparams.CoreType(xtyp).(type) { 731 case *types.Array: 732 // Potentially escaping. 733 x = b.addr(fn, e.X, true).address(fn) 734 case *types.Basic, *types.Slice, *types.Pointer: // *array 735 x = b.expr(fn, e.X) 736 default: 737 // core type exception? 738 if isBytestring(xtyp) { 739 x = b.expr(fn, e.X) // bytestring is handled as string and []byte. 740 } else { 741 panic("unexpected sequence type in SliceExpr") 742 } 743 } 744 if e.Low != nil { 745 low = b.expr(fn, e.Low) 746 } 747 if e.High != nil { 748 high = b.expr(fn, e.High) 749 } 750 if e.Slice3 { 751 max = b.expr(fn, e.Max) 752 } 753 v := &Slice{ 754 X: x, 755 Low: low, 756 High: high, 757 Max: max, 758 } 759 v.setPos(e.Lbrack) 760 v.setType(fn.typ(tv.Type)) 761 return fn.emit(v) 762 763 case *ast.Ident: 764 obj := fn.info.Uses[e] 765 // Universal built-in or nil? 766 switch obj := obj.(type) { 767 case *types.Builtin: 768 return &Builtin{name: obj.Name(), sig: fn.instanceType(e).(*types.Signature)} 769 case *types.Nil: 770 return zeroConst(fn.instanceType(e)) 771 } 772 773 // Package-level func or var? 774 // (obj must belong to same package or a direct import.) 775 if v := fn.Prog.packageLevelMember(obj); v != nil { 776 if g, ok := v.(*Global); ok { 777 return emitLoad(fn, g) // var (address) 778 } 779 callee := v.(*Function) // (func) 780 if callee.typeparams.Len() > 0 { 781 targs := fn.subst.types(instanceArgs(fn.info, e)) 782 callee = callee.instance(targs, b.created) 783 } 784 return callee 785 } 786 // Local var. 787 return emitLoad(fn, fn.lookup(obj.(*types.Var), false)) // var (address) 788 789 case *ast.SelectorExpr: 790 sel := fn.selection(e) 791 if sel == nil { 792 // builtin unsafe.{Add,Slice} 793 if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok { 794 return &Builtin{name: obj.Name(), sig: fn.typ(tv.Type).(*types.Signature)} 795 } 796 // qualified identifier 797 return b.expr(fn, e.Sel) 798 } 799 switch sel.kind { 800 case types.MethodExpr: 801 // (*T).f or T.f, the method f from the method-set of type T. 802 // The result is a "thunk". 803 thunk := createThunk(fn.Prog, sel, b.created) 804 return emitConv(fn, thunk, fn.typ(tv.Type)) 805 806 case types.MethodVal: 807 // e.f where e is an expression and f is a method. 808 // The result is a "bound". 
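// An illustrative sketch (not from this file): given
//
//	var buf bytes.Buffer
//	w := buf.WriteByte // MethodVal
//
// the code below evaluates the receiver (&buf, since WriteByte has a
// pointer receiver) and then emits a MakeClosure over the
// bound-method wrapper from createBound, with that receiver as its
// only free-variable binding.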
809 obj := sel.obj.(*types.Func) 810 rt := fn.typ(recvType(obj)) 811 wantAddr := isPointer(rt) 812 escaping := true 813 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 814 815 if types.IsInterface(rt) { 816 // If v may be an interface type I (after instantiating), 817 // we must emit a check that v is non-nil. 818 if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok { 819 // Emit a nil check if any possible instantiation of the 820 // type parameter is an interface type. 821 if typeSetOf(recv).Len() > 0 { 822 // recv has a concrete term its typeset. 823 // So it cannot be instantiated as an interface. 824 // 825 // Example: 826 // func _[T interface{~int; Foo()}] () { 827 // var v T 828 // _ = v.Foo // <-- MethodVal 829 // } 830 } else { 831 // rt may be instantiated as an interface. 832 // Emit nil check: typeassert (any(v)).(any). 833 emitTypeAssert(fn, emitConv(fn, v, tEface), tEface, token.NoPos) 834 } 835 } else { 836 // non-type param interface 837 // Emit nil check: typeassert v.(I). 838 emitTypeAssert(fn, v, rt, e.Sel.Pos()) 839 } 840 } 841 if targs := receiverTypeArgs(obj); len(targs) > 0 { 842 // obj is generic. 843 obj = fn.Prog.canon.instantiateMethod(obj, fn.subst.types(targs), fn.Prog.ctxt) 844 } 845 c := &MakeClosure{ 846 Fn: createBound(fn.Prog, obj, b.created), 847 Bindings: []Value{v}, 848 } 849 c.setPos(e.Sel.Pos()) 850 c.setType(fn.typ(tv.Type)) 851 return fn.emit(c) 852 853 case types.FieldVal: 854 indices := sel.index 855 last := len(indices) - 1 856 v := b.expr(fn, e.X) 857 v = emitImplicitSelections(fn, v, indices[:last], e.Pos()) 858 v = emitFieldSelection(fn, v, indices[last], false, e.Sel) 859 return v 860 } 861 862 panic("unexpected expression-relative selector") 863 864 case *ast.IndexListExpr: 865 // f[X, Y] must be a generic function 866 if !instance(fn.info, e.X) { 867 panic("unexpected expression-could not match index list to instantiation") 868 } 869 return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. 870 871 case *ast.IndexExpr: 872 if instance(fn.info, e.X) { 873 return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. 874 } 875 // not a generic instantiation. 876 xt := fn.typeOf(e.X) 877 switch et, mode := indexType(xt); mode { 878 case ixVar: 879 // Addressable slice/array; use IndexAddr and Load. 880 return b.addr(fn, e, false).load(fn) 881 882 case ixArrVar, ixValue: 883 // An array in a register, a string or a combined type that contains 884 // either an [_]array (ixArrVar) or string (ixValue). 885 886 // Note: for ixArrVar and CoreType(xt)==nil can be IndexAddr and Load. 887 index := b.expr(fn, e.Index) 888 if isUntyped(index.Type()) { 889 index = emitConv(fn, index, tInt) 890 } 891 v := &Index{ 892 X: b.expr(fn, e.X), 893 Index: index, 894 } 895 v.setPos(e.Lbrack) 896 v.setType(et) 897 return fn.emit(v) 898 899 case ixMap: 900 ct := typeparams.CoreType(xt).(*types.Map) 901 v := &Lookup{ 902 X: b.expr(fn, e.X), 903 Index: emitConv(fn, b.expr(fn, e.Index), ct.Key()), 904 } 905 v.setPos(e.Lbrack) 906 v.setType(ct.Elem()) 907 return fn.emit(v) 908 default: 909 panic("unexpected container type in IndexExpr: " + xt.String()) 910 } 911 912 case *ast.CompositeLit, *ast.StarExpr: 913 // Addressable types (lvalues) 914 return b.addr(fn, e, false).load(fn) 915 } 916 917 panic(fmt.Sprintf("unexpected expr: %T", e)) 918 } 919 920 // stmtList emits to fn code for all statements in list. 
921 func (b *builder) stmtList(fn *Function, list []ast.Stmt) { 922 for _, s := range list { 923 b.stmt(fn, s) 924 } 925 } 926 927 // receiver emits to fn code for expression e in the "receiver" 928 // position of selection e.f (where f may be a field or a method) and 929 // returns the effective receiver after applying the implicit field 930 // selections of sel. 931 // 932 // wantAddr requests that the result is an address. If 933 // !sel.indirect, this may require that e be built in addr() mode; it 934 // must thus be addressable. 935 // 936 // escaping is defined as per builder.addr(). 937 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value { 938 var v Value 939 if wantAddr && !sel.indirect && !isPointerCore(fn.typeOf(e)) { 940 v = b.addr(fn, e, escaping).address(fn) 941 } else { 942 v = b.expr(fn, e) 943 } 944 945 last := len(sel.index) - 1 946 // The position of implicit selection is the position of the inducing receiver expression. 947 v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos()) 948 if types.IsInterface(v.Type()) { 949 // When v is an interface, sel.Kind()==MethodValue and v.f is invoked. 950 // So v is not loaded, even if v has a pointer core type. 951 } else if !wantAddr && isPointerCore(v.Type()) { 952 v = emitLoad(fn, v) 953 } 954 return v 955 } 956 957 // setCallFunc populates the function parts of a CallCommon structure 958 // (Func, Method, Recv, Args[0]) based on the kind of invocation 959 // occurring in e. 960 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { 961 c.pos = e.Lparen 962 963 // Is this a method call? 964 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { 965 sel := fn.selection(selector) 966 if sel != nil && sel.kind == types.MethodVal { 967 obj := sel.obj.(*types.Func) 968 recv := recvType(obj) 969 970 wantAddr := isPointer(recv) 971 escaping := true 972 v := b.receiver(fn, selector.X, wantAddr, escaping, sel) 973 if types.IsInterface(recv) { 974 // Invoke-mode call. 975 c.Value = v // possibly type param 976 c.Method = obj 977 } else { 978 // "Call"-mode call. 979 c.Value = fn.Prog.objectMethod(obj, b.created) 980 c.Args = append(c.Args, v) 981 } 982 return 983 } 984 985 // sel.kind==MethodExpr indicates T.f() or (*T).f(): 986 // a statically dispatched call to the method f in the 987 // method-set of T or *T. T may be an interface. 988 // 989 // e.Fun would evaluate to a concrete method, interface 990 // wrapper function, or promotion wrapper. 991 // 992 // For now, we evaluate it in the usual way. 993 // 994 // TODO(adonovan): opt: inline expr() here, to make the 995 // call static and to avoid generation of wrappers. 996 // It's somewhat tricky as it may consume the first 997 // actual parameter if the call is "invoke" mode. 998 // 999 // Examples: 1000 // type T struct{}; func (T) f() {} // "call" mode 1001 // type T interface { f() } // "invoke" mode 1002 // 1003 // type S struct{ T } 1004 // 1005 // var s S 1006 // S.f(s) 1007 // (*S).f(&s) 1008 // 1009 // Suggested approach: 1010 // - consume the first actual parameter expression 1011 // and build it with b.expr(). 1012 // - apply implicit field selections. 1013 // - use MethodVal logic to populate fields of c. 1014 } 1015 1016 // Evaluate the function operand in the usual way. 1017 c.Value = b.expr(fn, e.Fun) 1018 } 1019 1020 // emitCallArgs emits to f code for the actual parameters of call e to 1021 // a (possibly built-in) function of effective type sig. 
1022 // The argument values are appended to args, which is then returned. 1023 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { 1024 // f(x, y, z...): pass slice z straight through. 1025 if e.Ellipsis != 0 { 1026 for i, arg := range e.Args { 1027 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) 1028 args = append(args, v) 1029 } 1030 return args 1031 } 1032 1033 offset := len(args) // 1 if call has receiver, 0 otherwise 1034 1035 // Evaluate actual parameter expressions. 1036 // 1037 // If this is a chained call of the form f(g()) where g has 1038 // multiple return values (MRV), they are flattened out into 1039 // args; a suffix of them may end up in a varargs slice. 1040 for _, arg := range e.Args { 1041 v := b.expr(fn, arg) 1042 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain 1043 for i, n := 0, ttuple.Len(); i < n; i++ { 1044 args = append(args, emitExtract(fn, v, i)) 1045 } 1046 } else { 1047 args = append(args, v) 1048 } 1049 } 1050 1051 // Actual->formal assignability conversions for normal parameters. 1052 np := sig.Params().Len() // number of normal parameters 1053 if sig.Variadic() { 1054 np-- 1055 } 1056 for i := 0; i < np; i++ { 1057 args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) 1058 } 1059 1060 // Actual->formal assignability conversions for variadic parameter, 1061 // and construction of slice. 1062 if sig.Variadic() { 1063 varargs := args[offset+np:] 1064 st := sig.Params().At(np).Type().(*types.Slice) 1065 vt := st.Elem() 1066 if len(varargs) == 0 { 1067 args = append(args, zeroConst(st)) 1068 } else { 1069 // Replace a suffix of args with a slice containing it. 1070 at := types.NewArray(vt, int64(len(varargs))) 1071 a := emitNew(fn, at, token.NoPos, "varargs") 1072 a.setPos(e.Rparen) 1073 for i, arg := range varargs { 1074 iaddr := &IndexAddr{ 1075 X: a, 1076 Index: intConst(int64(i)), 1077 } 1078 iaddr.setType(types.NewPointer(vt)) 1079 fn.emit(iaddr) 1080 emitStore(fn, iaddr, arg, arg.Pos()) 1081 } 1082 s := &Slice{X: a} 1083 s.setType(st) 1084 args[offset+np] = fn.emit(s) 1085 args = args[:offset+np+1] 1086 } 1087 } 1088 return args 1089 } 1090 1091 // setCall emits to fn code to evaluate all the parameters of a function 1092 // call e, and populates *c with those values. 1093 func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { 1094 // First deal with the f(...) part and optional receiver. 1095 b.setCallFunc(fn, e, c) 1096 1097 // Then append the other actual parameters. 1098 sig, _ := typeparams.CoreType(fn.typeOf(e.Fun)).(*types.Signature) 1099 if sig == nil { 1100 panic(fmt.Sprintf("no signature for call of %s", e.Fun)) 1101 } 1102 c.Args = b.emitCallArgs(fn, sig, e, c.Args) 1103 } 1104 1105 // assignOp emits to fn code to perform loc <op>= val. 1106 func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { 1107 loc.store(fn, emitArith(fn, op, loc.load(fn), val, loc.typ(), pos)) 1108 } 1109 1110 // localValueSpec emits to fn code to define all of the vars in the 1111 // function-local ValueSpec, spec. 1112 func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { 1113 switch { 1114 case len(spec.Values) == len(spec.Names): 1115 // e.g. 
var x, y = 0, 1 1116 // 1:1 assignment 1117 for i, id := range spec.Names { 1118 if !isBlankIdent(id) { 1119 emitLocalVar(fn, identVar(fn, id)) 1120 } 1121 lval := b.addr(fn, id, false) // non-escaping 1122 b.assign(fn, lval, spec.Values[i], true, nil) 1123 } 1124 1125 case len(spec.Values) == 0: 1126 // e.g. var x, y int 1127 // Locals are implicitly zero-initialized. 1128 for _, id := range spec.Names { 1129 if !isBlankIdent(id) { 1130 lhs := emitLocalVar(fn, identVar(fn, id)) 1131 if fn.debugInfo() { 1132 emitDebugRef(fn, id, lhs, true) 1133 } 1134 } 1135 } 1136 1137 default: 1138 // e.g. var x, y = pos() 1139 tuple := b.exprN(fn, spec.Values[0]) 1140 for i, id := range spec.Names { 1141 if !isBlankIdent(id) { 1142 emitLocalVar(fn, identVar(fn, id)) 1143 lhs := b.addr(fn, id, false) // non-escaping 1144 lhs.store(fn, emitExtract(fn, tuple, i)) 1145 } 1146 } 1147 } 1148 } 1149 1150 // assignStmt emits code to fn for a parallel assignment of rhss to lhss. 1151 // isDef is true if this is a short variable declaration (:=). 1152 // 1153 // Note the similarity with localValueSpec. 1154 func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { 1155 // Side effects of all LHSs and RHSs must occur in left-to-right order. 1156 lvals := make([]lvalue, len(lhss)) 1157 isZero := make([]bool, len(lhss)) 1158 for i, lhs := range lhss { 1159 var lval lvalue = blank{} 1160 if !isBlankIdent(lhs) { 1161 if isDef { 1162 if obj, ok := fn.info.Defs[lhs.(*ast.Ident)].(*types.Var); ok { 1163 emitLocalVar(fn, obj) 1164 isZero[i] = true 1165 } 1166 } 1167 lval = b.addr(fn, lhs, false) // non-escaping 1168 } 1169 lvals[i] = lval 1170 } 1171 if len(lhss) == len(rhss) { 1172 // Simple assignment: x = f() (!isDef) 1173 // Parallel assignment: x, y = f(), g() (!isDef) 1174 // or short var decl: x, y := f(), g() (isDef) 1175 // 1176 // In all cases, the RHSs may refer to the LHSs, 1177 // so we need a storebuf. 1178 var sb storebuf 1179 for i := range rhss { 1180 b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) 1181 } 1182 sb.emit(fn) 1183 } else { 1184 // e.g. x, y = pos() 1185 tuple := b.exprN(fn, rhss[0]) 1186 emitDebugRef(fn, rhss[0], tuple, false) 1187 for i, lval := range lvals { 1188 lval.store(fn, emitExtract(fn, tuple, i)) 1189 } 1190 } 1191 } 1192 1193 // arrayLen returns the length of the array whose composite literal elements are elts. 1194 func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { 1195 var max int64 = -1 1196 var i int64 = -1 1197 for _, e := range elts { 1198 if kv, ok := e.(*ast.KeyValueExpr); ok { 1199 i = b.expr(fn, kv.Key).(*Const).Int64() 1200 } else { 1201 i++ 1202 } 1203 if i > max { 1204 max = i 1205 } 1206 } 1207 return max + 1 1208 } 1209 1210 // compLit emits to fn code to initialize a composite literal e at 1211 // address addr with type typ. 1212 // 1213 // Nested composite literals are recursively initialized in place 1214 // where possible. If isZero is true, compLit assumes that addr 1215 // holds the zero value for typ. 1216 // 1217 // Because the elements of a composite literal may refer to the 1218 // variables being updated, as in the second line below, 1219 // 1220 // x := T{a: 1} 1221 // x = T{a: x.a} 1222 // 1223 // all the reads must occur before all the writes. Thus all stores to 1224 // loc are emitted to the storebuf sb for later execution. 1225 // 1226 // A CompositeLit may have pointer type only in the recursive (nested) 1227 // case when the type name is implicit. e.g. 
in []*T{{}}, the inner 1228 // literal has type *T behaves like &T{}. 1229 // In that case, addr must hold a T, not a *T. 1230 func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { 1231 typ := typeparams.Deref(fn.typeOf(e)) // retain the named/alias/param type, if any 1232 switch t := typeparams.CoreType(typ).(type) { 1233 case *types.Struct: 1234 if !isZero && len(e.Elts) != t.NumFields() { 1235 // memclear 1236 zt := typeparams.MustDeref(addr.Type()) 1237 sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt)) 1238 isZero = true 1239 } 1240 for i, e := range e.Elts { 1241 fieldIndex := i 1242 pos := e.Pos() 1243 if kv, ok := e.(*ast.KeyValueExpr); ok { 1244 fname := kv.Key.(*ast.Ident).Name 1245 for i, n := 0, t.NumFields(); i < n; i++ { 1246 sf := t.Field(i) 1247 if sf.Name() == fname { 1248 fieldIndex = i 1249 pos = kv.Colon 1250 e = kv.Value 1251 break 1252 } 1253 } 1254 } 1255 sf := t.Field(fieldIndex) 1256 faddr := &FieldAddr{ 1257 X: addr, 1258 Field: fieldIndex, 1259 } 1260 faddr.setPos(pos) 1261 faddr.setType(types.NewPointer(sf.Type())) 1262 fn.emit(faddr) 1263 b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) 1264 } 1265 1266 case *types.Array, *types.Slice: 1267 var at *types.Array 1268 var array Value 1269 switch t := t.(type) { 1270 case *types.Slice: 1271 at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) 1272 array = emitNew(fn, at, e.Lbrace, "slicelit") 1273 case *types.Array: 1274 at = t 1275 array = addr 1276 1277 if !isZero && int64(len(e.Elts)) != at.Len() { 1278 // memclear 1279 zt := typeparams.MustDeref(array.Type()) 1280 sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt)) 1281 } 1282 } 1283 1284 var idx *Const 1285 for _, e := range e.Elts { 1286 pos := e.Pos() 1287 if kv, ok := e.(*ast.KeyValueExpr); ok { 1288 idx = b.expr(fn, kv.Key).(*Const) 1289 pos = kv.Colon 1290 e = kv.Value 1291 } else { 1292 var idxval int64 1293 if idx != nil { 1294 idxval = idx.Int64() + 1 1295 } 1296 idx = intConst(idxval) 1297 } 1298 iaddr := &IndexAddr{ 1299 X: array, 1300 Index: idx, 1301 } 1302 iaddr.setType(types.NewPointer(at.Elem())) 1303 fn.emit(iaddr) 1304 if t != at { // slice 1305 // backing array is unaliased => storebuf not needed. 1306 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) 1307 } else { 1308 b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) 1309 } 1310 } 1311 1312 if t != at { // slice 1313 s := &Slice{X: array} 1314 s.setPos(e.Lbrace) 1315 s.setType(typ) 1316 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) 1317 } 1318 1319 case *types.Map: 1320 m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} 1321 m.setPos(e.Lbrace) 1322 m.setType(typ) 1323 fn.emit(m) 1324 for _, e := range e.Elts { 1325 e := e.(*ast.KeyValueExpr) 1326 1327 // If a key expression in a map literal is itself a 1328 // composite literal, the type may be omitted. 1329 // For example: 1330 // map[*struct{}]bool{{}: true} 1331 // An &-operation may be implied: 1332 // map[*struct{}]bool{&struct{}{}: true} 1333 wantAddr := false 1334 if _, ok := unparen(e.Key).(*ast.CompositeLit); ok { 1335 wantAddr = isPointerCore(t.Key()) 1336 } 1337 1338 var key Value 1339 if wantAddr { 1340 // A CompositeLit never evaluates to a pointer, 1341 // so if the type of the location is a pointer, 1342 // an &-operation is implied. 
1343 key = b.addr(fn, e.Key, true).address(fn) 1344 } else { 1345 key = b.expr(fn, e.Key) 1346 } 1347 1348 loc := element{ 1349 m: m, 1350 k: emitConv(fn, key, t.Key()), 1351 t: t.Elem(), 1352 pos: e.Colon, 1353 } 1354 1355 // We call assign() only because it takes care 1356 // of any &-operation required in the recursive 1357 // case, e.g., 1358 // map[int]*struct{}{0: {}} implies &struct{}{}. 1359 // In-place update is of course impossible, 1360 // and no storebuf is needed. 1361 b.assign(fn, &loc, e.Value, true, nil) 1362 } 1363 sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) 1364 1365 default: 1366 panic("unexpected CompositeLit type: " + typ.String()) 1367 } 1368 } 1369 1370 // switchStmt emits to fn code for the switch statement s, optionally 1371 // labelled by label. 1372 func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { 1373 // We treat SwitchStmt like a sequential if-else chain. 1374 // Multiway dispatch can be recovered later by ssautil.Switches() 1375 // to those cases that are free of side effects. 1376 if s.Init != nil { 1377 b.stmt(fn, s.Init) 1378 } 1379 var tag Value = vTrue 1380 if s.Tag != nil { 1381 tag = b.expr(fn, s.Tag) 1382 } 1383 done := fn.newBasicBlock("switch.done") 1384 if label != nil { 1385 label._break = done 1386 } 1387 // We pull the default case (if present) down to the end. 1388 // But each fallthrough label must point to the next 1389 // body block in source order, so we preallocate a 1390 // body block (fallthru) for the next case. 1391 // Unfortunately this makes for a confusing block order. 1392 var dfltBody *[]ast.Stmt 1393 var dfltFallthrough *BasicBlock 1394 var fallthru, dfltBlock *BasicBlock 1395 ncases := len(s.Body.List) 1396 for i, clause := range s.Body.List { 1397 body := fallthru 1398 if body == nil { 1399 body = fn.newBasicBlock("switch.body") // first case only 1400 } 1401 1402 // Preallocate body block for the next case. 1403 fallthru = done 1404 if i+1 < ncases { 1405 fallthru = fn.newBasicBlock("switch.body") 1406 } 1407 1408 cc := clause.(*ast.CaseClause) 1409 if cc.List == nil { 1410 // Default case. 1411 dfltBody = &cc.Body 1412 dfltFallthrough = fallthru 1413 dfltBlock = body 1414 continue 1415 } 1416 1417 var nextCond *BasicBlock 1418 for _, cond := range cc.List { 1419 nextCond = fn.newBasicBlock("switch.next") 1420 // TODO(adonovan): opt: when tag==vTrue, we'd 1421 // get better code if we use b.cond(cond) 1422 // instead of BinOp(EQL, tag, b.expr(cond)) 1423 // followed by If. Don't forget conversions 1424 // though. 1425 cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos()) 1426 emitIf(fn, cond, body, nextCond) 1427 fn.currentBlock = nextCond 1428 } 1429 fn.currentBlock = body 1430 fn.targets = &targets{ 1431 tail: fn.targets, 1432 _break: done, 1433 _fallthrough: fallthru, 1434 } 1435 b.stmtList(fn, cc.Body) 1436 fn.targets = fn.targets.tail 1437 emitJump(fn, done) 1438 fn.currentBlock = nextCond 1439 } 1440 if dfltBlock != nil { 1441 emitJump(fn, dfltBlock) 1442 fn.currentBlock = dfltBlock 1443 fn.targets = &targets{ 1444 tail: fn.targets, 1445 _break: done, 1446 _fallthrough: dfltFallthrough, 1447 } 1448 b.stmtList(fn, *dfltBody) 1449 fn.targets = fn.targets.tail 1450 } 1451 emitJump(fn, done) 1452 fn.currentBlock = done 1453 } 1454 1455 // typeSwitchStmt emits to fn code for the type switch statement s, optionally 1456 // labelled by label. 
1457 func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { 1458 // We treat TypeSwitchStmt like a sequential if-else chain. 1459 // Multiway dispatch can be recovered later by ssautil.Switches(). 1460 1461 // Typeswitch lowering: 1462 // 1463 // var x X 1464 // switch y := x.(type) { 1465 // case T1, T2: S1 // >1 (y := x) 1466 // case nil: SN // nil (y := x) 1467 // default: SD // 0 types (y := x) 1468 // case T3: S3 // 1 type (y := x.(T3)) 1469 // } 1470 // 1471 // ...s.Init... 1472 // x := eval x 1473 // .caseT1: 1474 // t1, ok1 := typeswitch,ok x <T1> 1475 // if ok1 then goto S1 else goto .caseT2 1476 // .caseT2: 1477 // t2, ok2 := typeswitch,ok x <T2> 1478 // if ok2 then goto S1 else goto .caseNil 1479 // .S1: 1480 // y := x 1481 // ...S1... 1482 // goto done 1483 // .caseNil: 1484 // if t2, ok2 := typeswitch,ok x <T2> 1485 // if x == nil then goto SN else goto .caseT3 1486 // .SN: 1487 // y := x 1488 // ...SN... 1489 // goto done 1490 // .caseT3: 1491 // t3, ok3 := typeswitch,ok x <T3> 1492 // if ok3 then goto S3 else goto default 1493 // .S3: 1494 // y := t3 1495 // ...S3... 1496 // goto done 1497 // .default: 1498 // y := x 1499 // ...SD... 1500 // goto done 1501 // .done: 1502 if s.Init != nil { 1503 b.stmt(fn, s.Init) 1504 } 1505 1506 var x Value 1507 switch ass := s.Assign.(type) { 1508 case *ast.ExprStmt: // x.(type) 1509 x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) 1510 case *ast.AssignStmt: // y := x.(type) 1511 x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) 1512 } 1513 1514 done := fn.newBasicBlock("typeswitch.done") 1515 if label != nil { 1516 label._break = done 1517 } 1518 var default_ *ast.CaseClause 1519 for _, clause := range s.Body.List { 1520 cc := clause.(*ast.CaseClause) 1521 if cc.List == nil { 1522 default_ = cc 1523 continue 1524 } 1525 body := fn.newBasicBlock("typeswitch.body") 1526 var next *BasicBlock 1527 var casetype types.Type 1528 var ti Value // ti, ok := typeassert,ok x <Ti> 1529 for _, cond := range cc.List { 1530 next = fn.newBasicBlock("typeswitch.next") 1531 casetype = fn.typeOf(cond) 1532 var condv Value 1533 if casetype == tUntypedNil { 1534 condv = emitCompare(fn, token.EQL, x, zeroConst(x.Type()), cond.Pos()) 1535 ti = x 1536 } else { 1537 yok := emitTypeTest(fn, x, casetype, cc.Case) 1538 ti = emitExtract(fn, yok, 0) 1539 condv = emitExtract(fn, yok, 1) 1540 } 1541 emitIf(fn, condv, body, next) 1542 fn.currentBlock = next 1543 } 1544 if len(cc.List) != 1 { 1545 ti = x 1546 } 1547 fn.currentBlock = body 1548 b.typeCaseBody(fn, cc, ti, done) 1549 fn.currentBlock = next 1550 } 1551 if default_ != nil { 1552 b.typeCaseBody(fn, default_, x, done) 1553 } else { 1554 emitJump(fn, done) 1555 } 1556 fn.currentBlock = done 1557 } 1558 1559 func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { 1560 if obj, ok := fn.info.Implicits[cc].(*types.Var); ok { 1561 // In a switch y := x.(type), each case clause 1562 // implicitly declares a distinct object y. 1563 // In a single-type case, y has that type. 1564 // In multi-type cases, 'case nil' and default, 1565 // y has the same type as the interface operand. 1566 emitStore(fn, emitLocalVar(fn, obj), x, obj.Pos()) 1567 } 1568 fn.targets = &targets{ 1569 tail: fn.targets, 1570 _break: done, 1571 } 1572 b.stmtList(fn, cc.Body) 1573 fn.targets = fn.targets.tail 1574 emitJump(fn, done) 1575 } 1576 1577 // selectStmt emits to fn code for the select statement s, optionally 1578 // labelled by label. 
1579 func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { 1580 // A blocking select of a single case degenerates to a 1581 // simple send or receive. 1582 // TODO(adonovan): opt: is this optimization worth its weight? 1583 if len(s.Body.List) == 1 { 1584 clause := s.Body.List[0].(*ast.CommClause) 1585 if clause.Comm != nil { 1586 b.stmt(fn, clause.Comm) 1587 done := fn.newBasicBlock("select.done") 1588 if label != nil { 1589 label._break = done 1590 } 1591 fn.targets = &targets{ 1592 tail: fn.targets, 1593 _break: done, 1594 } 1595 b.stmtList(fn, clause.Body) 1596 fn.targets = fn.targets.tail 1597 emitJump(fn, done) 1598 fn.currentBlock = done 1599 return 1600 } 1601 } 1602 1603 // First evaluate all channels in all cases, and find 1604 // the directions of each state. 1605 var states []*SelectState 1606 blocking := true 1607 debugInfo := fn.debugInfo() 1608 for _, clause := range s.Body.List { 1609 var st *SelectState 1610 switch comm := clause.(*ast.CommClause).Comm.(type) { 1611 case nil: // default case 1612 blocking = false 1613 continue 1614 1615 case *ast.SendStmt: // ch<- i 1616 ch := b.expr(fn, comm.Chan) 1617 chtyp := typeparams.CoreType(fn.typ(ch.Type())).(*types.Chan) 1618 st = &SelectState{ 1619 Dir: types.SendOnly, 1620 Chan: ch, 1621 Send: emitConv(fn, b.expr(fn, comm.Value), chtyp.Elem()), 1622 Pos: comm.Arrow, 1623 } 1624 if debugInfo { 1625 st.DebugNode = comm 1626 } 1627 1628 case *ast.AssignStmt: // x := <-ch 1629 recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) 1630 st = &SelectState{ 1631 Dir: types.RecvOnly, 1632 Chan: b.expr(fn, recv.X), 1633 Pos: recv.OpPos, 1634 } 1635 if debugInfo { 1636 st.DebugNode = recv 1637 } 1638 1639 case *ast.ExprStmt: // <-ch 1640 recv := unparen(comm.X).(*ast.UnaryExpr) 1641 st = &SelectState{ 1642 Dir: types.RecvOnly, 1643 Chan: b.expr(fn, recv.X), 1644 Pos: recv.OpPos, 1645 } 1646 if debugInfo { 1647 st.DebugNode = recv 1648 } 1649 } 1650 states = append(states, st) 1651 } 1652 1653 // We dispatch on the (fair) result of Select using a 1654 // sequential if-else chain, in effect: 1655 // 1656 // idx, recvOk, r0...r_n-1 := select(...) 1657 // if idx == 0 { // receive on channel 0 (first receive => r0) 1658 // x, ok := r0, recvOk 1659 // ...state0... 1660 // } else if v == 1 { // send on channel 1 1661 // ...state1... 1662 // } else { 1663 // ...default... 
1664 // } 1665 sel := &Select{ 1666 States: states, 1667 Blocking: blocking, 1668 } 1669 sel.setPos(s.Select) 1670 var vars []*types.Var 1671 vars = append(vars, varIndex, varOk) 1672 for _, st := range states { 1673 if st.Dir == types.RecvOnly { 1674 chtyp := typeparams.CoreType(fn.typ(st.Chan.Type())).(*types.Chan) 1675 vars = append(vars, anonVar(chtyp.Elem())) 1676 } 1677 } 1678 sel.setType(types.NewTuple(vars...)) 1679 1680 fn.emit(sel) 1681 idx := emitExtract(fn, sel, 0) 1682 1683 done := fn.newBasicBlock("select.done") 1684 if label != nil { 1685 label._break = done 1686 } 1687 1688 var defaultBody *[]ast.Stmt 1689 state := 0 1690 r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV 1691 for _, cc := range s.Body.List { 1692 clause := cc.(*ast.CommClause) 1693 if clause.Comm == nil { 1694 defaultBody = &clause.Body 1695 continue 1696 } 1697 body := fn.newBasicBlock("select.body") 1698 next := fn.newBasicBlock("select.next") 1699 emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) 1700 fn.currentBlock = body 1701 fn.targets = &targets{ 1702 tail: fn.targets, 1703 _break: done, 1704 } 1705 switch comm := clause.Comm.(type) { 1706 case *ast.ExprStmt: // <-ch 1707 if debugInfo { 1708 v := emitExtract(fn, sel, r) 1709 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1710 } 1711 r++ 1712 1713 case *ast.AssignStmt: // x := <-states[state].Chan 1714 if comm.Tok == token.DEFINE { 1715 emitLocalVar(fn, identVar(fn, comm.Lhs[0].(*ast.Ident))) 1716 } 1717 x := b.addr(fn, comm.Lhs[0], false) // non-escaping 1718 v := emitExtract(fn, sel, r) 1719 if debugInfo { 1720 emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) 1721 } 1722 x.store(fn, v) 1723 1724 if len(comm.Lhs) == 2 { // x, ok := ... 1725 if comm.Tok == token.DEFINE { 1726 emitLocalVar(fn, identVar(fn, comm.Lhs[1].(*ast.Ident))) 1727 } 1728 ok := b.addr(fn, comm.Lhs[1], false) // non-escaping 1729 ok.store(fn, emitExtract(fn, sel, 1)) 1730 } 1731 r++ 1732 } 1733 b.stmtList(fn, clause.Body) 1734 fn.targets = fn.targets.tail 1735 emitJump(fn, done) 1736 fn.currentBlock = next 1737 state++ 1738 } 1739 if defaultBody != nil { 1740 fn.targets = &targets{ 1741 tail: fn.targets, 1742 _break: done, 1743 } 1744 b.stmtList(fn, *defaultBody) 1745 fn.targets = fn.targets.tail 1746 } else { 1747 // A blocking select must match some case. 1748 // (This should really be a runtime.errorString, not a string.) 1749 fn.emit(&Panic{ 1750 X: emitConv(fn, stringConst("blocking select matched no case"), tEface), 1751 }) 1752 fn.currentBlock = fn.newBasicBlock("unreachable") 1753 } 1754 emitJump(fn, done) 1755 fn.currentBlock = done 1756 } 1757 1758 // forStmt emits to fn code for the for statement s, optionally 1759 // labelled by label. 1760 func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { 1761 // Use forStmtGo122 instead if it applies. 1762 if s.Init != nil { 1763 if assign, ok := s.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE { 1764 if versions.AtLeast(fn.goversion, versions.Go1_22) { 1765 b.forStmtGo122(fn, s, label) 1766 return 1767 } 1768 } 1769 } 1770 1771 // ...init... 1772 // jump loop 1773 // loop: 1774 // if cond goto body else done 1775 // body: 1776 // ...body... 1777 // jump post 1778 // post: (target of continue) 1779 // ...post... 
1780 // jump loop 1781 // done: (target of break) 1782 if s.Init != nil { 1783 b.stmt(fn, s.Init) 1784 } 1785 1786 body := fn.newBasicBlock("for.body") 1787 done := fn.newBasicBlock("for.done") // target of 'break' 1788 loop := body // target of back-edge 1789 if s.Cond != nil { 1790 loop = fn.newBasicBlock("for.loop") 1791 } 1792 cont := loop // target of 'continue' 1793 if s.Post != nil { 1794 cont = fn.newBasicBlock("for.post") 1795 } 1796 if label != nil { 1797 label._break = done 1798 label._continue = cont 1799 } 1800 emitJump(fn, loop) 1801 fn.currentBlock = loop 1802 if loop != body { 1803 b.cond(fn, s.Cond, body, done) 1804 fn.currentBlock = body 1805 } 1806 fn.targets = &targets{ 1807 tail: fn.targets, 1808 _break: done, 1809 _continue: cont, 1810 } 1811 b.stmt(fn, s.Body) 1812 fn.targets = fn.targets.tail 1813 emitJump(fn, cont) 1814 1815 if s.Post != nil { 1816 fn.currentBlock = cont 1817 b.stmt(fn, s.Post) 1818 emitJump(fn, loop) // back-edge 1819 } 1820 fn.currentBlock = done 1821 } 1822 1823 // forStmtGo122 emits to fn code for the for statement s, optionally 1824 // labelled by label. s must define its variables. 1825 // 1826 // This allocates once per loop iteration. This is only correct in 1827 // GoVersions >= go1.22. 1828 func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { 1829 // i_outer = alloc[T] 1830 // *i_outer = ...init... // under objects[i] = i_outer 1831 // jump loop 1832 // loop: 1833 // i = phi [head: i_outer, loop: i_next] 1834 // ...cond... // under objects[i] = i 1835 // if cond goto body else done 1836 // body: 1837 // ...body... // under objects[i] = i (same as loop) 1838 // jump post 1839 // post: 1840 // tmp = *i 1841 // i_next = alloc[T] 1842 // *i_next = tmp 1843 // ...post... // under objects[i] = i_next 1844 // goto loop 1845 // done: 1846 1847 init := s.Init.(*ast.AssignStmt) 1848 startingBlocks := len(fn.Blocks) 1849 1850 pre := fn.currentBlock // current block before starting 1851 loop := fn.newBasicBlock("for.loop") // target of back-edge 1852 body := fn.newBasicBlock("for.body") 1853 post := fn.newBasicBlock("for.post") // target of 'continue' 1854 done := fn.newBasicBlock("for.done") // target of 'break' 1855 1856 // For each of the n loop variables, we create five SSA values, 1857 // outer, phi, next, load, and store in pre, loop, and post. 1858 // There is no limit on n. 1859 type loopVar struct { 1860 obj *types.Var 1861 outer *Alloc 1862 phi *Phi 1863 load *UnOp 1864 next *Alloc 1865 store *Store 1866 } 1867 vars := make([]loopVar, len(init.Lhs)) 1868 for i, lhs := range init.Lhs { 1869 v := identVar(fn, lhs.(*ast.Ident)) 1870 typ := fn.typ(v.Type()) 1871 1872 fn.currentBlock = pre 1873 outer := emitLocal(fn, typ, v.Pos(), v.Name()) 1874 1875 fn.currentBlock = loop 1876 phi := &Phi{Comment: v.Name()} 1877 phi.pos = v.Pos() 1878 phi.typ = outer.Type() 1879 fn.emit(phi) 1880 1881 fn.currentBlock = post 1882 // If next is local, it reuses the address and zeroes the old value so 1883 // load before allocating next. 1884 load := emitLoad(fn, phi) 1885 next := emitLocal(fn, typ, v.Pos(), v.Name()) 1886 store := emitStore(fn, next, load, token.NoPos) 1887 1888 phi.Edges = []Value{outer, next} // pre edge is emitted before post edge. 1889 1890 vars[i] = loopVar{v, outer, phi, load, next, store} 1891 } 1892 1893 // ...init... 
under fn.objects[v] = i_outer 1894 fn.currentBlock = pre 1895 for _, v := range vars { 1896 fn.vars[v.obj] = v.outer 1897 } 1898 const isDef = false // assign to already-allocated outers 1899 b.assignStmt(fn, init.Lhs, init.Rhs, isDef) 1900 if label != nil { 1901 label._break = done 1902 label._continue = post 1903 } 1904 emitJump(fn, loop) 1905 1906 // ...cond... under fn.objects[v] = i 1907 fn.currentBlock = loop 1908 for _, v := range vars { 1909 fn.vars[v.obj] = v.phi 1910 } 1911 if s.Cond != nil { 1912 b.cond(fn, s.Cond, body, done) 1913 } else { 1914 emitJump(fn, body) 1915 } 1916 1917 // ...body... under fn.objects[v] = i 1918 fn.currentBlock = body 1919 fn.targets = &targets{ 1920 tail: fn.targets, 1921 _break: done, 1922 _continue: post, 1923 } 1924 b.stmt(fn, s.Body) 1925 fn.targets = fn.targets.tail 1926 emitJump(fn, post) 1927 1928 // ...post... under fn.objects[v] = i_next 1929 for _, v := range vars { 1930 fn.vars[v.obj] = v.next 1931 } 1932 fn.currentBlock = post 1933 if s.Post != nil { 1934 b.stmt(fn, s.Post) 1935 } 1936 emitJump(fn, loop) // back-edge 1937 fn.currentBlock = done 1938 1939 // For each loop variable that does not escape, 1940 // (the common case), fuse its next cells into its 1941 // (local) outer cell as they have disjoint live ranges. 1942 // 1943 // It is sufficient to test whether i_next escapes, 1944 // because its Heap flag will be marked true if either 1945 // the cond or post expression causes i to escape 1946 // (because escape distributes over phi). 1947 var nlocals int 1948 for _, v := range vars { 1949 if !v.next.Heap { 1950 nlocals++ 1951 } 1952 } 1953 if nlocals > 0 { 1954 replace := make(map[Value]Value, 2*nlocals) 1955 dead := make(map[Instruction]bool, 4*nlocals) 1956 for _, v := range vars { 1957 if !v.next.Heap { 1958 replace[v.next] = v.outer 1959 replace[v.phi] = v.outer 1960 dead[v.phi], dead[v.next], dead[v.load], dead[v.store] = true, true, true, true 1961 } 1962 } 1963 1964 // Replace all uses of i_next and phi with i_outer. 1965 // Referrers have not been built for fn yet so only update Instruction operands. 1966 // We need only look within the blocks added by the loop. 1967 var operands []*Value // recycle storage 1968 for _, b := range fn.Blocks[startingBlocks:] { 1969 for _, instr := range b.Instrs { 1970 operands = instr.Operands(operands[:0]) 1971 for _, ptr := range operands { 1972 k := *ptr 1973 if v := replace[k]; v != nil { 1974 *ptr = v 1975 } 1976 } 1977 } 1978 } 1979 1980 // Remove instructions for phi, load, and store. 1981 // lift() will remove the unused i_next *Alloc. 1982 isDead := func(i Instruction) bool { return dead[i] } 1983 loop.Instrs = removeInstrsIf(loop.Instrs, isDead) 1984 post.Instrs = removeInstrsIf(post.Instrs, isDead) 1985 } 1986 } 1987 1988 // rangeIndexed emits to fn the header for an integer-indexed loop 1989 // over array, *array or slice value x. 1990 // The v result is defined only if tv is non-nil. 1991 // forPos is the position of the "for" token. 1992 func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 1993 // 1994 // length = len(x) 1995 // index = -1 1996 // loop: (target of continue) 1997 // index++ 1998 // if index < length goto body else done 1999 // body: 2000 // k = index 2001 // v = x[index] 2002 // ...body... 2003 // jump loop 2004 // done: (target of break) 2005 2006 // Determine number of iterations. 
2007 var length Value 2008 dt := typeparams.Deref(x.Type()) 2009 if arr, ok := typeparams.CoreType(dt).(*types.Array); ok { 2010 // For array or *array, the number of iterations is 2011 // known statically thanks to the type. We avoid a 2012 // data dependence upon x, permitting later dead-code 2013 // elimination if x is pure, static unrolling, etc. 2014 // Ranging over a nil *array may have >0 iterations. 2015 // We still generate code for x, in case it has effects. 2016 length = intConst(arr.Len()) 2017 } else { 2018 // length = len(x). 2019 var c Call 2020 c.Call.Value = makeLen(x.Type()) 2021 c.Call.Args = []Value{x} 2022 c.setType(tInt) 2023 length = fn.emit(&c) 2024 } 2025 2026 index := emitLocal(fn, tInt, token.NoPos, "rangeindex") 2027 emitStore(fn, index, intConst(-1), pos) 2028 2029 loop = fn.newBasicBlock("rangeindex.loop") 2030 emitJump(fn, loop) 2031 fn.currentBlock = loop 2032 2033 incr := &BinOp{ 2034 Op: token.ADD, 2035 X: emitLoad(fn, index), 2036 Y: vOne, 2037 } 2038 incr.setType(tInt) 2039 emitStore(fn, index, fn.emit(incr), pos) 2040 2041 body := fn.newBasicBlock("rangeindex.body") 2042 done = fn.newBasicBlock("rangeindex.done") 2043 emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) 2044 fn.currentBlock = body 2045 2046 k = emitLoad(fn, index) 2047 if tv != nil { 2048 switch t := typeparams.CoreType(x.Type()).(type) { 2049 case *types.Array: 2050 instr := &Index{ 2051 X: x, 2052 Index: k, 2053 } 2054 instr.setType(t.Elem()) 2055 instr.setPos(x.Pos()) 2056 v = fn.emit(instr) 2057 2058 case *types.Pointer: // *array 2059 instr := &IndexAddr{ 2060 X: x, 2061 Index: k, 2062 } 2063 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) 2064 instr.setPos(x.Pos()) 2065 v = emitLoad(fn, fn.emit(instr)) 2066 2067 case *types.Slice: 2068 instr := &IndexAddr{ 2069 X: x, 2070 Index: k, 2071 } 2072 instr.setType(types.NewPointer(t.Elem())) 2073 instr.setPos(x.Pos()) 2074 v = emitLoad(fn, fn.emit(instr)) 2075 2076 default: 2077 panic("rangeIndexed x:" + t.String()) 2078 } 2079 } 2080 return 2081 } 2082 2083 // rangeIter emits to fn the header for a loop using 2084 // Range/Next/Extract to iterate over map or string value x. 2085 // tk and tv are the types of the key/value results k and v, or nil 2086 // if the respective component is not wanted. 2087 func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { 2088 // 2089 // it = range x 2090 // loop: (target of continue) 2091 // okv = next it (ok, key, value) 2092 // ok = extract okv #0 2093 // if ok goto body else done 2094 // body: 2095 // k = extract okv #1 2096 // v = extract okv #2 2097 // ...body... 
2098 // jump loop 2099 // done: (target of break) 2100 // 2101 2102 if tk == nil { 2103 tk = tInvalid 2104 } 2105 if tv == nil { 2106 tv = tInvalid 2107 } 2108 2109 rng := &Range{X: x} 2110 rng.setPos(pos) 2111 rng.setType(tRangeIter) 2112 it := fn.emit(rng) 2113 2114 loop = fn.newBasicBlock("rangeiter.loop") 2115 emitJump(fn, loop) 2116 fn.currentBlock = loop 2117 2118 okv := &Next{ 2119 Iter: it, 2120 IsString: isBasic(typeparams.CoreType(x.Type())), 2121 } 2122 okv.setType(types.NewTuple( 2123 varOk, 2124 newVar("k", tk), 2125 newVar("v", tv), 2126 )) 2127 fn.emit(okv) 2128 2129 body := fn.newBasicBlock("rangeiter.body") 2130 done = fn.newBasicBlock("rangeiter.done") 2131 emitIf(fn, emitExtract(fn, okv, 0), body, done) 2132 fn.currentBlock = body 2133 2134 if tk != tInvalid { 2135 k = emitExtract(fn, okv, 1) 2136 } 2137 if tv != tInvalid { 2138 v = emitExtract(fn, okv, 2) 2139 } 2140 return 2141 } 2142 2143 // rangeChan emits to fn the header for a loop that receives from 2144 // channel x until it fails. 2145 // tk is the channel's element type, or nil if the k result is 2146 // not wanted 2147 // pos is the position of the '=' or ':=' token. 2148 func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { 2149 // 2150 // loop: (target of continue) 2151 // ko = <-x (key, ok) 2152 // ok = extract ko #1 2153 // if ok goto body else done 2154 // body: 2155 // k = extract ko #0 2156 // ...body... 2157 // goto loop 2158 // done: (target of break) 2159 2160 loop = fn.newBasicBlock("rangechan.loop") 2161 emitJump(fn, loop) 2162 fn.currentBlock = loop 2163 recv := &UnOp{ 2164 Op: token.ARROW, 2165 X: x, 2166 CommaOk: true, 2167 } 2168 recv.setPos(pos) 2169 recv.setType(types.NewTuple( 2170 newVar("k", typeparams.CoreType(x.Type()).(*types.Chan).Elem()), 2171 varOk, 2172 )) 2173 ko := fn.emit(recv) 2174 body := fn.newBasicBlock("rangechan.body") 2175 done = fn.newBasicBlock("rangechan.done") 2176 emitIf(fn, emitExtract(fn, ko, 1), body, done) 2177 fn.currentBlock = body 2178 if tk != nil { 2179 k = emitExtract(fn, ko, 0) 2180 } 2181 return 2182 } 2183 2184 // rangeInt emits to fn the header for a range loop with an integer operand. 2185 // tk is the key value's type, or nil if the k result is not wanted. 2186 // pos is the position of the "for" token. 2187 func (b *builder) rangeInt(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { 2188 // 2189 // iter = 0 2190 // if 0 < x goto body else done 2191 // loop: (target of continue) 2192 // iter++ 2193 // if iter < x goto body else done 2194 // body: 2195 // k = x 2196 // ...body... 2197 // jump loop 2198 // done: (target of break) 2199 2200 if isUntyped(x.Type()) { 2201 x = emitConv(fn, x, tInt) 2202 } 2203 2204 T := x.Type() 2205 iter := emitLocal(fn, T, token.NoPos, "rangeint.iter") 2206 // x may be unsigned. Avoid initializing x to -1. 2207 2208 body := fn.newBasicBlock("rangeint.body") 2209 done = fn.newBasicBlock("rangeint.done") 2210 emitIf(fn, emitCompare(fn, token.LSS, zeroConst(T), x, token.NoPos), body, done) 2211 2212 loop = fn.newBasicBlock("rangeint.loop") 2213 fn.currentBlock = loop 2214 2215 incr := &BinOp{ 2216 Op: token.ADD, 2217 X: emitLoad(fn, iter), 2218 Y: emitConv(fn, vOne, T), 2219 } 2220 incr.setType(T) 2221 emitStore(fn, iter, fn.emit(incr), pos) 2222 emitIf(fn, emitCompare(fn, token.LSS, incr, x, token.NoPos), body, done) 2223 fn.currentBlock = body 2224 2225 if tk != nil { 2226 // Integer types (int, uint8, etc.) 
are named and 2227 // we know that k is assignable to x when tk != nil. 2228 // This implies tk and T are identical so no conversion is needed. 2229 k = emitLoad(fn, iter) 2230 } 2231 2232 return 2233 } 2234 2235 // rangeStmt emits to fn code for the range statement s, optionally 2236 // labelled by label. 2237 func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { 2238 var tk, tv types.Type 2239 if s.Key != nil && !isBlankIdent(s.Key) { 2240 tk = fn.typeOf(s.Key) 2241 } 2242 if s.Value != nil && !isBlankIdent(s.Value) { 2243 tv = fn.typeOf(s.Value) 2244 } 2245 2246 // create locals for s.Key and s.Value. 2247 createVars := func() { 2248 // Unlike a short variable declaration, a RangeStmt 2249 // using := never redeclares an existing variable; it 2250 // always creates a new one. 2251 if tk != nil { 2252 emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident))) 2253 } 2254 if tv != nil { 2255 emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident))) 2256 } 2257 } 2258 2259 afterGo122 := versions.AtLeast(fn.goversion, versions.Go1_22) 2260 if s.Tok == token.DEFINE && !afterGo122 { 2261 // pre-go1.22: If iteration variables are defined (:=), this 2262 // occurs once outside the loop. 2263 createVars() 2264 } 2265 2266 x := b.expr(fn, s.X) 2267 2268 var k, v Value 2269 var loop, done *BasicBlock 2270 switch rt := typeparams.CoreType(x.Type()).(type) { 2271 case *types.Slice, *types.Array, *types.Pointer: // *array 2272 k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) 2273 2274 case *types.Chan: 2275 k, loop, done = b.rangeChan(fn, x, tk, s.For) 2276 2277 case *types.Map: 2278 k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) 2279 2280 case *types.Basic: 2281 switch { 2282 case rt.Info()&types.IsString != 0: 2283 k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) 2284 2285 case rt.Info()&types.IsInteger != 0: 2286 k, loop, done = b.rangeInt(fn, x, tk, s.For) 2287 2288 default: 2289 panic("Cannot range over basic type: " + rt.String()) 2290 } 2291 2292 case *types.Signature: 2293 // Special case rewrite (fn.goversion >= go1.23): 2294 // for x := range f { ... } 2295 // into 2296 // f(func(x T) bool { ... }) 2297 b.rangeFunc(fn, x, tk, tv, s, label) 2298 return 2299 2300 default: 2301 panic("Cannot range over: " + rt.String()) 2302 } 2303 2304 if s.Tok == token.DEFINE && afterGo122 { 2305 // go1.22: If iteration variables are defined (:=), this occurs inside the loop. 2306 createVars() 2307 } 2308 2309 // Evaluate both LHS expressions before we update either. 2310 var kl, vl lvalue 2311 if tk != nil { 2312 kl = b.addr(fn, s.Key, false) // non-escaping 2313 } 2314 if tv != nil { 2315 vl = b.addr(fn, s.Value, false) // non-escaping 2316 } 2317 if tk != nil { 2318 kl.store(fn, k) 2319 } 2320 if tv != nil { 2321 vl.store(fn, v) 2322 } 2323 2324 if label != nil { 2325 label._break = done 2326 label._continue = loop 2327 } 2328 2329 fn.targets = &targets{ 2330 tail: fn.targets, 2331 _break: done, 2332 _continue: loop, 2333 } 2334 b.stmt(fn, s.Body) 2335 fn.targets = fn.targets.tail 2336 emitJump(fn, loop) // back-edge 2337 fn.currentBlock = done 2338 } 2339 2340 // rangeFunc emits to fn code for the range-over-func rng.Body of the iterator 2341 // function x, optionally labelled by label. It creates a new anonymous function 2342 // yield for rng and builds the function. 2343 func (b *builder) rangeFunc(fn *Function, x Value, tk, tv types.Type, rng *ast.RangeStmt, label *lblock) { 2344 // Consider the SSA code for the outermost range-over-func in fn: 2345 // 2346 // func fn(...) 
(ret R) { 2347 // ... 2348 // for k, v = range x { 2349 // ... 2350 // } 2351 // ... 2352 // } 2353 // 2354 // The code emitted into fn will look something like this. 2355 // 2356 // loop: 2357 // jump := READY 2358 // y := make closure yield [ret, deferstack, jump, k, v] 2359 // x(y) 2360 // switch jump { 2361 // [see resuming execution] 2362 // } 2363 // goto done 2364 // done: 2365 // ... 2366 // 2367 // where yield is a new synthetic yield function: 2368 // 2369 // func yield(_k tk, _v tv) bool 2370 // free variables: [ret, stack, jump, k, v] 2371 // { 2372 // entry: 2373 // if jump != READY then goto invalid else valid 2374 // invalid: 2375 // panic("iterator called when it is not in a ready state") 2376 // valid: 2377 // jump = BUSY 2378 // k = _k 2379 // v = _v 2380 // ... 2381 // cont: 2382 // jump = READY 2383 // return true 2384 // } 2385 // 2386 // Yield state: 2387 // 2388 // Each range loop has an associated jump variable that records 2389 // the state of the iterator. A yield function is initially 2390 // in a READY (0) and callable state. If the yield function is called 2391 // and is not in READY state, it panics. When it is called in a callable 2392 // state, it becomes BUSY. When execution reaches the end of the body 2393 // of the loop (or a continue statement targeting the loop is executed), 2394 // the yield function returns true and resumes being in a READY state. 2395 // After the iterator function x(y) returns, then if the yield function 2396 // is in a READY state, the yield enters the DONE state. 2397 // 2398 // Each lowered control statement (break X, continue X, goto Z, or return) 2399 // that exits the loop sets the variable to a unique positive EXIT value, 2400 // before returning false from the yield function. 2401 // 2402 // If the yield function returns abruptly due to a panic or GoExit, 2403 // it remains in a BUSY state. The generated code asserts that, after 2404 // the iterator call x(y) returns normally, the jump variable state 2405 // is DONE. 2406 // 2407 // Resuming execution: 2408 // 2409 // The code generated for the range statement checks the jump 2410 // variable to determine how to resume execution. 2411 // 2412 // switch jump { 2413 // case BUSY: panic("...") 2414 // case DONE: goto done 2415 // case READY: state = DONE; goto done 2416 // case 123: ... // action for exit 123. 2417 // case 456: ... // action for exit 456. 2418 // ... 2419 // } 2420 // 2421 // Forward goto statements within a yield are jumps to labels that 2422 // have not yet been traversed in fn. They may be in the Body of the 2423 // function. What we emit for these is: 2424 // 2425 // goto target 2426 // target: 2427 // ... 2428 // 2429 // We leave an unresolved exit in yield.exits to check at the end 2430 // of building yield if it encountered target in the body. If it 2431 // encountered target, no additional work is required. Otherwise, 2432 // the yield emits a new early exit in the basic block for target. 2433 // We expect that blockopt will fuse the early exit into the case 2434 // block later. The unresolved exit is then added to yield.parent.exits. 2435 2436 loop := fn.newBasicBlock("rangefunc.loop") 2437 done := fn.newBasicBlock("rangefunc.done") 2438 2439 // These are targets within y. 2440 fn.targets = &targets{ 2441 tail: fn.targets, 2442 _break: done, 2443 // _continue is within y. 
2444 } 2445 if label != nil { 2446 label._break = done 2447 // _continue is within y 2448 } 2449 2450 emitJump(fn, loop) 2451 fn.currentBlock = loop 2452 2453 // loop: 2454 // jump := READY 2455 2456 anonIdx := len(fn.AnonFuncs) 2457 2458 jump := newVar(fmt.Sprintf("jump$%d", anonIdx+1), tInt) 2459 emitLocalVar(fn, jump) // zero value is READY 2460 2461 xsig := typeparams.CoreType(x.Type()).(*types.Signature) 2462 ysig := typeparams.CoreType(xsig.Params().At(0).Type()).(*types.Signature) 2463 2464 /* synthetic yield function for body of range-over-func loop */ 2465 y := &Function{ 2466 name: fmt.Sprintf("%s$%d", fn.Name(), anonIdx+1), 2467 Signature: ysig, 2468 Synthetic: "range-over-func yield", 2469 pos: rangePosition(rng), 2470 parent: fn, 2471 anonIdx: int32(len(fn.AnonFuncs)), 2472 Pkg: fn.Pkg, 2473 Prog: fn.Prog, 2474 syntax: rng, 2475 info: fn.info, 2476 goversion: fn.goversion, 2477 build: (*builder).buildYieldFunc, 2478 topLevelOrigin: nil, 2479 typeparams: fn.typeparams, 2480 typeargs: fn.typeargs, 2481 subst: fn.subst, 2482 jump: jump, 2483 deferstack: fn.deferstack, 2484 returnVars: fn.returnVars, // use the parent's return variables 2485 uniq: fn.uniq, // start from parent's unique values 2486 } 2487 2488 // If the RangeStmt has a label, this is how it is passed to buildYieldFunc. 2489 if label != nil { 2490 y.lblocks = map[*types.Label]*lblock{label.label: nil} 2491 } 2492 fn.AnonFuncs = append(fn.AnonFuncs, y) 2493 2494 // Build y immediately. It may: 2495 // * cause fn's locals to escape, and 2496 // * create new exit nodes in exits. 2497 // (y is not marked 'built' until the end of the enclosing FuncDecl.) 2498 unresolved := len(fn.exits) 2499 y.build(b, y) 2500 fn.uniq = y.uniq // resume after y's unique values 2501 2502 // Emit the call of y. 2503 // c := MakeClosure y 2504 // x(c) 2505 c := &MakeClosure{Fn: y} 2506 c.setType(ysig) 2507 for _, fv := range y.FreeVars { 2508 c.Bindings = append(c.Bindings, fv.outer) 2509 fv.outer = nil 2510 } 2511 fn.emit(c) 2512 call := Call{ 2513 Call: CallCommon{ 2514 Value: x, 2515 Args: []Value{c}, 2516 pos: token.NoPos, 2517 }, 2518 } 2519 call.setType(xsig.Results()) 2520 fn.emit(&call) 2521 2522 exits := fn.exits[unresolved:] 2523 b.buildYieldResume(fn, jump, exits, done) 2524 2525 emitJump(fn, done) 2526 fn.currentBlock = done 2527 } 2528 2529 // buildYieldResume emits to fn code for how to resume execution once a call to 2530 // the iterator function over the yield function returns x(y). It does this by building 2531 // a switch over the value of jump for when it is READY, BUSY, or EXIT(id). 2532 func (b *builder) buildYieldResume(fn *Function, jump *types.Var, exits []*exit, done *BasicBlock) { 2533 // v := *jump 2534 // switch v { 2535 // case BUSY: panic("...") 2536 // case READY: jump = DONE; goto done 2537 // case EXIT(a): ... 2538 // case EXIT(b): ... 2539 // ... 
2540 // } 2541 v := emitLoad(fn, fn.lookup(jump, false)) 2542 2543 // case BUSY: panic("...") 2544 isbusy := fn.newBasicBlock("rangefunc.resume.busy") 2545 ifready := fn.newBasicBlock("rangefunc.resume.ready.check") 2546 emitIf(fn, emitCompare(fn, token.EQL, v, jBusy, token.NoPos), isbusy, ifready) 2547 fn.currentBlock = isbusy 2548 fn.emit(&Panic{ 2549 X: emitConv(fn, stringConst("iterator call did not preserve panic"), tEface), 2550 }) 2551 fn.currentBlock = ifready 2552 2553 // case READY: jump = DONE; goto done 2554 isready := fn.newBasicBlock("rangefunc.resume.ready") 2555 ifexit := fn.newBasicBlock("rangefunc.resume.exits") 2556 emitIf(fn, emitCompare(fn, token.EQL, v, jReady, token.NoPos), isready, ifexit) 2557 fn.currentBlock = isready 2558 storeVar(fn, jump, jDone, token.NoPos) 2559 emitJump(fn, done) 2560 fn.currentBlock = ifexit 2561 2562 for _, e := range exits { 2563 id := intConst(e.id) 2564 2565 // case EXIT(id): { /* do e */ } 2566 cond := emitCompare(fn, token.EQL, v, id, e.pos) 2567 matchb := fn.newBasicBlock("rangefunc.resume.match") 2568 cndb := fn.newBasicBlock("rangefunc.resume.cnd") 2569 emitIf(fn, cond, matchb, cndb) 2570 fn.currentBlock = matchb 2571 2572 // Cases to fill in the { /* do e */ } bit. 2573 switch { 2574 case e.label != nil: // forward goto? 2575 // case EXIT(id): goto lb // label 2576 lb := fn.lblockOf(e.label) 2577 // Do not mark lb as resolved. 2578 // If fn does not contain label, lb remains unresolved and 2579 // fn must itself be a range-over-func function. lb will be: 2580 // lb: 2581 // fn.jump = id 2582 // return false 2583 emitJump(fn, lb._goto) 2584 2585 case e.to != fn: // e jumps to an ancestor of fn? 2586 // case EXIT(id): { fn.jump = id; return false } 2587 // fn is a range-over-func function. 2588 storeVar(fn, fn.jump, id, token.NoPos) 2589 fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) 2590 2591 case e.block == nil && e.label == nil: // return from fn? 2592 // case EXIT(id): { return ... } 2593 fn.emit(new(RunDefers)) 2594 results := make([]Value, len(fn.results)) 2595 for i, r := range fn.results { 2596 results[i] = emitLoad(fn, r) 2597 } 2598 fn.emit(&Return{Results: results, pos: e.pos}) 2599 2600 case e.block != nil: 2601 // case EXIT(id): goto block 2602 emitJump(fn, e.block) 2603 2604 default: 2605 panic("unreachable") 2606 } 2607 fn.currentBlock = cndb 2608 } 2609 } 2610 2611 // stmt lowers statement s to SSA form, emitting code to fn. 2612 func (b *builder) stmt(fn *Function, _s ast.Stmt) { 2613 // The label of the current statement. If non-nil, its _goto 2614 // target is always set; its _break and _continue are set only 2615 // within the body of switch/typeswitch/select/for/range. 2616 // It is effectively an additional default-nil parameter of stmt(). 2617 var label *lblock 2618 start: 2619 switch s := _s.(type) { 2620 case *ast.EmptyStmt: 2621 // ignore. (Usually removed by gofmt.) 2622 2623 case *ast.DeclStmt: // Con, Var or Typ 2624 d := s.Decl.(*ast.GenDecl) 2625 if d.Tok == token.VAR { 2626 for _, spec := range d.Specs { 2627 if vs, ok := spec.(*ast.ValueSpec); ok { 2628 b.localValueSpec(fn, vs) 2629 } 2630 } 2631 } 2632 2633 case *ast.LabeledStmt: 2634 if s.Label.Name == "_" { 2635 // Blank labels can't be the target of a goto, break, 2636 // or continue statement, so we don't need a new block. 
2637 _s = s.Stmt 2638 goto start 2639 } 2640 label = fn.lblockOf(fn.label(s.Label)) 2641 label.resolved = true 2642 emitJump(fn, label._goto) 2643 fn.currentBlock = label._goto 2644 _s = s.Stmt 2645 goto start // effectively: tailcall stmt(fn, s.Stmt, label) 2646 2647 case *ast.ExprStmt: 2648 b.expr(fn, s.X) 2649 2650 case *ast.SendStmt: 2651 chtyp := typeparams.CoreType(fn.typeOf(s.Chan)).(*types.Chan) 2652 fn.emit(&Send{ 2653 Chan: b.expr(fn, s.Chan), 2654 X: emitConv(fn, b.expr(fn, s.Value), chtyp.Elem()), 2655 pos: s.Arrow, 2656 }) 2657 2658 case *ast.IncDecStmt: 2659 op := token.ADD 2660 if s.Tok == token.DEC { 2661 op = token.SUB 2662 } 2663 loc := b.addr(fn, s.X, false) 2664 b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) 2665 2666 case *ast.AssignStmt: 2667 switch s.Tok { 2668 case token.ASSIGN, token.DEFINE: 2669 b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) 2670 2671 default: // +=, etc. 2672 op := s.Tok + token.ADD - token.ADD_ASSIGN 2673 b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) 2674 } 2675 2676 case *ast.GoStmt: 2677 // The "intrinsics" new/make/len/cap are forbidden here. 2678 // panic is treated like an ordinary function call. 2679 v := Go{pos: s.Go} 2680 b.setCall(fn, s.Call, &v.Call) 2681 fn.emit(&v) 2682 2683 case *ast.DeferStmt: 2684 // The "intrinsics" new/make/len/cap are forbidden here. 2685 // panic is treated like an ordinary function call. 2686 deferstack := emitLoad(fn, fn.lookup(fn.deferstack, false)) 2687 v := Defer{pos: s.Defer, _DeferStack: deferstack} 2688 b.setCall(fn, s.Call, &v.Call) 2689 fn.emit(&v) 2690 2691 // A deferred call can cause recovery from panic, 2692 // and control resumes at the Recover block. 2693 createRecoverBlock(fn.source) 2694 2695 case *ast.ReturnStmt: 2696 b.returnStmt(fn, s) 2697 2698 case *ast.BranchStmt: 2699 b.branchStmt(fn, s) 2700 2701 case *ast.BlockStmt: 2702 b.stmtList(fn, s.List) 2703 2704 case *ast.IfStmt: 2705 if s.Init != nil { 2706 b.stmt(fn, s.Init) 2707 } 2708 then := fn.newBasicBlock("if.then") 2709 done := fn.newBasicBlock("if.done") 2710 els := done 2711 if s.Else != nil { 2712 els = fn.newBasicBlock("if.else") 2713 } 2714 b.cond(fn, s.Cond, then, els) 2715 fn.currentBlock = then 2716 b.stmt(fn, s.Body) 2717 emitJump(fn, done) 2718 2719 if s.Else != nil { 2720 fn.currentBlock = els 2721 b.stmt(fn, s.Else) 2722 emitJump(fn, done) 2723 } 2724 2725 fn.currentBlock = done 2726 2727 case *ast.SwitchStmt: 2728 b.switchStmt(fn, s, label) 2729 2730 case *ast.TypeSwitchStmt: 2731 b.typeSwitchStmt(fn, s, label) 2732 2733 case *ast.SelectStmt: 2734 b.selectStmt(fn, s, label) 2735 2736 case *ast.ForStmt: 2737 b.forStmt(fn, s, label) 2738 2739 case *ast.RangeStmt: 2740 b.rangeStmt(fn, s, label) 2741 2742 default: 2743 panic(fmt.Sprintf("unexpected statement kind: %T", s)) 2744 } 2745 } 2746 2747 func (b *builder) branchStmt(fn *Function, s *ast.BranchStmt) { 2748 var block *BasicBlock 2749 if s.Label == nil { 2750 block = targetedBlock(fn, s.Tok) 2751 } else { 2752 target := fn.label(s.Label) 2753 block = labelledBlock(fn, target, s.Tok) 2754 if block == nil { // forward goto 2755 lb := fn.lblockOf(target) 2756 block = lb._goto // jump to lb._goto 2757 if fn.jump != nil { 2758 // fn is a range-over-func and the goto may exit fn. 2759 // Create an exit and resolve it at the end of 2760 // builder.buildYieldFunc. 
2761 labelExit(fn, target, s.Pos()) 2762 } 2763 } 2764 } 2765 to := block.parent 2766 2767 if to == fn { 2768 emitJump(fn, block) 2769 } else { // break outside of fn. 2770 // fn must be a range-over-func 2771 e := blockExit(fn, block, s.Pos()) 2772 storeVar(fn, fn.jump, intConst(e.id), e.pos) 2773 fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) 2774 } 2775 fn.currentBlock = fn.newBasicBlock("unreachable") 2776 } 2777 2778 func (b *builder) returnStmt(fn *Function, s *ast.ReturnStmt) { 2779 var results []Value 2780 2781 sig := fn.source.Signature // signature of the enclosing source function 2782 2783 // Convert return operands to result type. 2784 if len(s.Results) == 1 && sig.Results().Len() > 1 { 2785 // Return of one expression in a multi-valued function. 2786 tuple := b.exprN(fn, s.Results[0]) 2787 ttuple := tuple.Type().(*types.Tuple) 2788 for i, n := 0, ttuple.Len(); i < n; i++ { 2789 results = append(results, 2790 emitConv(fn, emitExtract(fn, tuple, i), 2791 sig.Results().At(i).Type())) 2792 } 2793 } else { 2794 // 1:1 return, or no-arg return in non-void function. 2795 for i, r := range s.Results { 2796 v := emitConv(fn, b.expr(fn, r), sig.Results().At(i).Type()) 2797 results = append(results, v) 2798 } 2799 } 2800 2801 // Store the results. 2802 for i, r := range results { 2803 var result Value // fn.source.result[i] conceptually 2804 if fn == fn.source { 2805 result = fn.results[i] 2806 } else { // lookup needed? 2807 result = fn.lookup(fn.returnVars[i], false) 2808 } 2809 emitStore(fn, result, r, s.Return) 2810 } 2811 2812 if fn.jump != nil { 2813 // Return from body of a range-over-func. 2814 // The return statement is syntactically within the loop, 2815 // but the generated code is in the 'switch jump {...}' after it. 2816 e := returnExit(fn, s.Pos()) 2817 storeVar(fn, fn.jump, intConst(e.id), e.pos) 2818 fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) 2819 fn.currentBlock = fn.newBasicBlock("unreachable") 2820 return 2821 } 2822 2823 // Run function calls deferred in this 2824 // function when explicitly returning from it. 2825 fn.emit(new(RunDefers)) 2826 // Reload (potentially) named result variables to form the result tuple. 2827 results = results[:0] 2828 for _, nr := range fn.results { 2829 results = append(results, emitLoad(fn, nr)) 2830 } 2831 fn.emit(&Return{Results: results, pos: s.Return}) 2832 fn.currentBlock = fn.newBasicBlock("unreachable") 2833 } 2834 2835 // A buildFunc is a strategy for building the SSA body for a function. 2836 type buildFunc = func(*builder, *Function) 2837 2838 // iterate causes all created but unbuilt functions to be built. As 2839 // this may create new methods, the process is iterated until it 2840 // converges. 2841 func (b *builder) iterate() { 2842 for ; b.finished < b.created.Len(); b.finished++ { 2843 fn := b.created.At(b.finished) 2844 b.buildFunction(fn) 2845 } 2846 } 2847 2848 // buildFunction builds SSA code for the body of function fn. Idempotent. 2849 func (b *builder) buildFunction(fn *Function) { 2850 if fn.build != nil { 2851 assert(fn.parent == nil, "anonymous functions should not be built by buildFunction()") 2852 2853 if fn.Prog.mode&LogSource != 0 { 2854 defer logStack("build %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() 2855 } 2856 fn.build(b, fn) 2857 fn.done() 2858 } 2859 } 2860 2861 // buildParamsOnly builds fn.Params from fn.Signature, but does not build fn.Body. 
2862 func (b *builder) buildParamsOnly(fn *Function) {
2863 // For external (C, asm) functions or functions loaded from
2864 // export data, we must set fn.Params even though there is no
2865 // body code to reference them.
2866 if recv := fn.Signature.Recv(); recv != nil {
2867 fn.addParamVar(recv)
2868 }
2869 params := fn.Signature.Params()
2870 for i, n := 0, params.Len(); i < n; i++ {
2871 fn.addParamVar(params.At(i))
2872 }
2873 }
2874 
2875 // buildFromSyntax builds fn.Body from fn.syntax, which must be non-nil.
2876 func (b *builder) buildFromSyntax(fn *Function) {
2877 var (
2878 recvField *ast.FieldList
2879 body *ast.BlockStmt
2880 functype *ast.FuncType
2881 )
2882 switch syntax := fn.syntax.(type) {
2883 case *ast.FuncDecl:
2884 functype = syntax.Type
2885 recvField = syntax.Recv
2886 body = syntax.Body
2887 if body == nil {
2888 b.buildParamsOnly(fn) // no body (non-Go function)
2889 return
2890 }
2891 case *ast.FuncLit:
2892 functype = syntax.Type
2893 body = syntax.Body
2894 case nil:
2895 panic("no syntax")
2896 default:
2897 panic(syntax) // unexpected syntax
2898 }
2899 fn.source = fn
2900 fn.startBody()
2901 fn.createSyntacticParams(recvField, functype)
2902 fn.createDeferStack()
2903 b.stmt(fn, body)
2904 if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
2905 // Control fell off the end of the function's body block.
2906 //
2907 // Block optimizations eliminate the current block, if
2908 // unreachable. It is a builder invariant that
2909 // if this no-arg return is ill-typed for
2910 // fn.Signature.Results, this block must be
2911 // unreachable. The sanity checker checks this.
2912 fn.emit(new(RunDefers))
2913 fn.emit(new(Return))
2914 }
2915 fn.finishBody()
2916 }
2917 
2918 // buildYieldFunc builds the body of the yield function created
2919 // from a range-over-func *ast.RangeStmt.
2920 func (b *builder) buildYieldFunc(fn *Function) {
2921 // See builder.rangeFunc for detailed documentation on how fn is set up.
2922 //
2923 // In pseudo-Go this roughly builds:
2924 // func yield(_k tk, _v tv) bool {
2925 // if jump != READY { panic("yield function called after range loop exit") }
2926 // jump = BUSY
2927 // k, v = _k, _v // assign the iterator variables (if needed)
2928 // ... // rng.Body
2929 // continue:
2930 // jump = READY
2931 // return true
2932 // }
2933 s := fn.syntax.(*ast.RangeStmt)
2934 fn.source = fn.parent.source
2935 fn.startBody()
2936 params := fn.Signature.Params()
2937 for i := 0; i < params.Len(); i++ {
2938 fn.addParamVar(params.At(i))
2939 }
2940 
2941 // Initial targets
2942 ycont := fn.newBasicBlock("yield-continue")
2943 // lblocks is either {} or {label: nil} where label is the label of syntax.
2944 for label := range fn.lblocks {
2945 fn.lblocks[label] = &lblock{
2946 label: label,
2947 resolved: true,
2948 _goto: ycont,
2949 _continue: ycont,
2950 // `break label` statement targets fn.parent.targets._break
2951 }
2952 }
2953 fn.targets = &targets{
2954 _continue: ycont,
2955 // `break` statement targets fn.parent.targets._break.
2956 }
2957 
2958 // continue:
2959 // jump = READY
2960 // return true
2961 saved := fn.currentBlock
2962 fn.currentBlock = ycont
2963 storeVar(fn, fn.jump, jReady, s.Body.Rbrace)
2964 // A yield function's own deferstack is always empty, so rundefers is not needed.
2965 fn.emit(&Return{Results: []Value{vTrue}, pos: token.NoPos}) 2966 2967 // Emit header: 2968 // 2969 // if jump != READY { panic("yield iterator accessed after exit") } 2970 // jump = BUSY 2971 // k, v = _k, _v 2972 fn.currentBlock = saved 2973 yloop := fn.newBasicBlock("yield-loop") 2974 invalid := fn.newBasicBlock("yield-invalid") 2975 2976 jumpVal := emitLoad(fn, fn.lookup(fn.jump, true)) 2977 emitIf(fn, emitCompare(fn, token.EQL, jumpVal, jReady, token.NoPos), yloop, invalid) 2978 fn.currentBlock = invalid 2979 fn.emit(&Panic{ 2980 X: emitConv(fn, stringConst("yield function called after range loop exit"), tEface), 2981 }) 2982 2983 fn.currentBlock = yloop 2984 storeVar(fn, fn.jump, jBusy, s.Body.Rbrace) 2985 2986 // Initialize k and v from params. 2987 var tk, tv types.Type 2988 if s.Key != nil && !isBlankIdent(s.Key) { 2989 tk = fn.typeOf(s.Key) // fn.parent.typeOf is identical 2990 } 2991 if s.Value != nil && !isBlankIdent(s.Value) { 2992 tv = fn.typeOf(s.Value) 2993 } 2994 if s.Tok == token.DEFINE { 2995 if tk != nil { 2996 emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident))) 2997 } 2998 if tv != nil { 2999 emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident))) 3000 } 3001 } 3002 var k, v Value 3003 if len(fn.Params) > 0 { 3004 k = fn.Params[0] 3005 } 3006 if len(fn.Params) > 1 { 3007 v = fn.Params[1] 3008 } 3009 var kl, vl lvalue 3010 if tk != nil { 3011 kl = b.addr(fn, s.Key, false) // non-escaping 3012 } 3013 if tv != nil { 3014 vl = b.addr(fn, s.Value, false) // non-escaping 3015 } 3016 if tk != nil { 3017 kl.store(fn, k) 3018 } 3019 if tv != nil { 3020 vl.store(fn, v) 3021 } 3022 3023 // Build the body of the range loop. 3024 b.stmt(fn, s.Body) 3025 if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { 3026 // Control fell off the end of the function's body block. 3027 // Block optimizations eliminate the current block, if 3028 // unreachable. 3029 emitJump(fn, ycont) 3030 } 3031 3032 // Clean up exits and promote any unresolved exits to fn.parent. 3033 for _, e := range fn.exits { 3034 if e.label != nil { 3035 lb := fn.lblocks[e.label] 3036 if lb.resolved { 3037 // label was resolved. Do not turn lb into an exit. 3038 // e does not need to be handled by the parent. 3039 continue 3040 } 3041 3042 // _goto becomes an exit. 3043 // _goto: 3044 // jump = id 3045 // return false 3046 fn.currentBlock = lb._goto 3047 id := intConst(e.id) 3048 storeVar(fn, fn.jump, id, e.pos) 3049 fn.emit(&Return{Results: []Value{vFalse}, pos: e.pos}) 3050 } 3051 3052 if e.to != fn { // e needs to be handled by the parent too. 3053 fn.parent.exits = append(fn.parent.exits, e) 3054 } 3055 } 3056 3057 fn.finishBody() 3058 } 3059 3060 // addRuntimeType records t as a runtime type, 3061 // along with all types derivable from it using reflection. 3062 // 3063 // Acquires prog.runtimeTypesMu. 3064 func addRuntimeType(prog *Program, t types.Type) { 3065 prog.runtimeTypesMu.Lock() 3066 defer prog.runtimeTypesMu.Unlock() 3067 forEachReachable(&prog.MethodSets, t, func(t types.Type) bool { 3068 prev, _ := prog.runtimeTypes.Set(t, true).(bool) 3069 return !prev // already seen? 3070 }) 3071 } 3072 3073 // Build calls Package.Build for each package in prog. 3074 // Building occurs in parallel unless the BuildSerially mode flag was set. 3075 // 3076 // Build is intended for whole-program analysis; a typical compiler 3077 // need only build a single package. 3078 // 3079 // Build is idempotent and thread-safe. 
3080 func (prog *Program) Build() { 3081 var wg sync.WaitGroup 3082 for _, p := range prog.packages { 3083 if prog.mode&BuildSerially != 0 { 3084 p.Build() 3085 } else { 3086 wg.Add(1) 3087 cpuLimit <- struct{}{} // acquire a token 3088 go func(p *Package) { 3089 p.Build() 3090 wg.Done() 3091 <-cpuLimit // release a token 3092 }(p) 3093 } 3094 } 3095 wg.Wait() 3096 } 3097 3098 // cpuLimit is a counting semaphore to limit CPU parallelism. 3099 var cpuLimit = make(chan struct{}, runtime.GOMAXPROCS(0)) 3100 3101 // Build builds SSA code for all functions and vars in package p. 3102 // 3103 // CreatePackage must have been called for all of p's direct imports 3104 // (and hence its direct imports must have been error-free). It is not 3105 // necessary to call CreatePackage for indirect dependencies. 3106 // Functions will be created for all necessary methods in those 3107 // packages on demand. 3108 // 3109 // Build is idempotent and thread-safe. 3110 func (p *Package) Build() { p.buildOnce.Do(p.build) } 3111 3112 func (p *Package) build() { 3113 if p.info == nil { 3114 return // synthetic package, e.g. "testmain" 3115 } 3116 if p.Prog.mode&LogSource != 0 { 3117 defer logStack("build %s", p)() 3118 } 3119 3120 b := builder{created: &p.created} 3121 b.iterate() 3122 3123 // We no longer need transient information: ASTs or go/types deductions. 3124 p.info = nil 3125 p.created = nil 3126 p.files = nil 3127 p.initVersion = nil 3128 3129 if p.Prog.mode&SanityCheckFunctions != 0 { 3130 sanityCheckPackage(p) 3131 } 3132 } 3133 3134 // buildPackageInit builds fn.Body for the synthetic package initializer. 3135 func (b *builder) buildPackageInit(fn *Function) { 3136 p := fn.Pkg 3137 fn.startBody() 3138 3139 var done *BasicBlock 3140 3141 if p.Prog.mode&BareInits == 0 { 3142 // Make init() skip if package is already initialized. 3143 initguard := p.Var("init$guard") 3144 doinit := fn.newBasicBlock("init.start") 3145 done = fn.newBasicBlock("init.done") 3146 emitIf(fn, emitLoad(fn, initguard), done, doinit) 3147 fn.currentBlock = doinit 3148 emitStore(fn, initguard, vTrue, token.NoPos) 3149 3150 // Call the init() function of each package we import. 3151 for _, pkg := range p.Pkg.Imports() { 3152 prereq := p.Prog.packages[pkg] 3153 if prereq == nil { 3154 panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) 3155 } 3156 var v Call 3157 v.Call.Value = prereq.init 3158 v.Call.pos = fn.pos 3159 v.setType(types.NewTuple()) 3160 fn.emit(&v) 3161 } 3162 } 3163 3164 // Initialize package-level vars in correct order. 3165 if len(p.info.InitOrder) > 0 && len(p.files) == 0 { 3166 panic("no source files provided for package. cannot initialize globals") 3167 } 3168 3169 for _, varinit := range p.info.InitOrder { 3170 if fn.Prog.mode&LogSource != 0 { 3171 fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", 3172 varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) 3173 } 3174 // Initializers for global vars are evaluated in dependency 3175 // order, but may come from arbitrary files of the package 3176 // with different versions, so we transiently update 3177 // fn.goversion for each one. (Since init is a synthetic 3178 // function it has no syntax of its own that needs a version.) 
3179 fn.goversion = p.initVersion[varinit.Rhs] 3180 if len(varinit.Lhs) == 1 { 3181 // 1:1 initialization: var x, y = a(), b() 3182 var lval lvalue 3183 if v := varinit.Lhs[0]; v.Name() != "_" { 3184 lval = &address{addr: p.objects[v].(*Global), pos: v.Pos()} 3185 } else { 3186 lval = blank{} 3187 } 3188 b.assign(fn, lval, varinit.Rhs, true, nil) 3189 } else { 3190 // n:1 initialization: var x, y := f() 3191 tuple := b.exprN(fn, varinit.Rhs) 3192 for i, v := range varinit.Lhs { 3193 if v.Name() == "_" { 3194 continue 3195 } 3196 emitStore(fn, p.objects[v].(*Global), emitExtract(fn, tuple, i), v.Pos()) 3197 } 3198 } 3199 } 3200 3201 // The rest of the init function is synthetic: 3202 // no syntax, info, goversion. 3203 fn.info = nil 3204 fn.goversion = "" 3205 3206 // Call all of the declared init() functions in source order. 3207 for _, file := range p.files { 3208 for _, decl := range file.Decls { 3209 if decl, ok := decl.(*ast.FuncDecl); ok { 3210 id := decl.Name 3211 if !isBlankIdent(id) && id.Name == "init" && decl.Recv == nil { 3212 declaredInit := p.objects[p.info.Defs[id]].(*Function) 3213 var v Call 3214 v.Call.Value = declaredInit 3215 v.setType(types.NewTuple()) 3216 p.init.emit(&v) 3217 } 3218 } 3219 } 3220 } 3221 3222 // Finish up init(). 3223 if p.Prog.mode&BareInits == 0 { 3224 emitJump(fn, done) 3225 fn.currentBlock = done 3226 } 3227 fn.emit(new(Return)) 3228 fn.finishBody() 3229 }