golang.org/x/tools@v0.21.0/go/ssa/builder.go (about) 1 // Copyright 2013 The Go Authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style 3 // license that can be found in the LICENSE file. 4 5 package ssa 6 7 // This file defines the builder, which builds SSA-form IR for function bodies. 8 // 9 // SSA construction has two phases, "create" and "build". First, one 10 // or more packages are created in any order by a sequence of calls to 11 // CreatePackage, either from syntax or from mere type information. 12 // Each created package has a complete set of Members (const, var, 13 // type, func) that can be accessed through methods like 14 // Program.FuncValue. 15 // 16 // It is not necessary to call CreatePackage for all dependencies of 17 // each syntax package, only for its direct imports. (In future 18 // perhaps even this restriction may be lifted.) 19 // 20 // Second, packages created from syntax are built, by one or more 21 // calls to Package.Build, which may be concurrent; or by a call to 22 // Program.Build, which builds all packages in parallel. Building 23 // traverses the type-annotated syntax tree of each function body and 24 // creates SSA-form IR, a control-flow graph of instructions, 25 // populating fields such as Function.Body, .Params, and others. 26 // 27 // Building may create additional methods, including: 28 // - wrapper methods (e.g. for embedding, or implicit &recv) 29 // - bound method closures (e.g. for use(recv.f)) 30 // - thunks (e.g. for use(I.f) or use(T.f)) 31 // - generic instances (e.g. to produce f[int] from f[any]). 32 // As these methods are created, they are added to the build queue, 33 // and then processed in turn, until a fixed point is reached. 34 // Since these methods might belong to packages that were not 35 // created (by a call to CreatePackage), their Pkg field is unset. 
36 // 37 // Instances of generic functions may be either instantiated (f[int] 38 // is a copy of f[T] with substitutions) or wrapped (f[int] delegates 39 // to f[T]), depending on the availability of generic syntax and the 40 // InstantiateGenerics mode flag. 41 // 42 // Each package has an initializer function named "init" that calls 43 // the initializer functions of each direct import, computes and 44 // assigns the initial value of each global variable, and calls each 45 // source-level function named "init". (These generate SSA functions 46 // named "init#1", "init#2", etc.) 47 // 48 // Runtime types 49 // 50 // Each MakeInterface operation is a conversion from a non-interface 51 // type to an interface type. The semantics of this operation requires 52 // a runtime type descriptor, which is the type portion of an 53 // interface, and the value abstracted by reflect.Type. 54 // 55 // The program accumulates all non-parameterized types that are 56 // encountered as MakeInterface operands, along with all types that 57 // may be derived from them using reflection. This set is available as 58 // Program.RuntimeTypes, and the methods of these types may be 59 // reachable via interface calls or reflection even if they are never 60 // referenced from the SSA IR. (In practice, algorithms such as RTA 61 // that compute reachability from package main perform their own 62 // tracking of runtime types at a finer grain, so this feature is not 63 // very useful.) 64 // 65 // Function literals 66 // 67 // Anonymous functions must be built as soon as they are encountered, 68 // as it may affect locals of the enclosing function, but they are not 69 // marked 'built' until the end of the outermost enclosing function. 70 // (Among other things, this causes them to be logged in top-down order.) 71 // 72 // The Function.build field determines the algorithm for building the 73 // function body. It is cleared to mark that building is complete. 
import (
	"fmt"
	"go/ast"
	"go/constant"
	"go/token"
	"go/types"
	"os"
	"runtime"
	"sync"

	"golang.org/x/tools/internal/aliases"
	"golang.org/x/tools/internal/typeparams"
	"golang.org/x/tools/internal/versions"
)

// opaqueType is a synthetic type with no structure: its Underlying is
// itself and its only visible property is its name. It is used for
// special SSA-internal types such as the "range" iterator type below.
type opaqueType struct{ name string }

func (t *opaqueType) String() string         { return t.name }
func (t *opaqueType) Underlying() types.Type { return t }

var (
	varOk    = newVar("ok", tBool)
	varIndex = newVar("index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tByte       = types.Typ[types.Byte]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tString     = types.Typ[types.String]
	tUntypedNil = types.Typ[types.UntypedNil]
	tRangeIter  = &opaqueType{"iter"} // the type of all "range" iterators
	tEface      = types.NewInterfaceType(nil, nil).Complete()

	// SSA Value constants.
	vZero = intConst(0)
	vOne  = intConst(1)
	vTrue = NewConst(constant.MakeBool(true), tBool)
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-SSA conversion.
type builder struct {
	// Invariant: 0 <= finished <= created.Len()
	created  *creator // functions created during building
	finished int      // Invariant: created[i].built holds for i in [0,finished)
}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
	switch e := e.(type) {
	case *ast.ParenExpr:
		b.cond(fn, e.X, t, f)
		return

	case *ast.BinaryExpr:
		// Short-circuit operators are lowered by recursing on each
		// operand with an intermediate block threaded between them.
		switch e.Op {
		case token.LAND:
			ltrue := fn.newBasicBlock("cond.true")
			b.cond(fn, e.X, ltrue, f)
			fn.currentBlock = ltrue
			b.cond(fn, e.Y, t, f)
			return

		case token.LOR:
			lfalse := fn.newBasicBlock("cond.false")
			b.cond(fn, e.X, t, lfalse)
			fn.currentBlock = lfalse
			b.cond(fn, e.Y, t, f)
			return
		}

	case *ast.UnaryExpr:
		if e.Op == token.NOT {
			// !e: swap the true/false targets.
			b.cond(fn, e.X, f, t)
			return
		}
	}

	// A traditional compiler would simplify "if false" (etc) here
	// but we do not, for better fidelity to the source code.
	//
	// The value of a constant condition may be platform-specific,
	// and may cause blocks that are reachable in some configuration
	// to be hidden from subsequent analyses such as bug-finding tools.
	emitIf(fn, b.expr(fn, e), t, f)
}

// logicalBinop emits code to fn to evaluate e, a &&- or
// ||-expression whose reified boolean value is wanted.
// The value is returned.
func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
	rhs := fn.newBasicBlock("binop.rhs")
	done := fn.newBasicBlock("binop.done")

	// T(e) = T(e.X) = T(e.Y) after untyped constants have been
	// eliminated.
	// TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
	t := fn.typeOf(e)

	var short Value // value of the short-circuit path
	switch e.Op {
	case token.LAND:
		b.cond(fn, e.X, rhs, done)
		short = NewConst(constant.MakeBool(false), t)

	case token.LOR:
		b.cond(fn, e.X, done, rhs)
		short = NewConst(constant.MakeBool(true), t)
	}

	// Is rhs unreachable?
	if rhs.Preds == nil {
		// Simplify false&&y to false, true||y to true.
		fn.currentBlock = done
		return short
	}

	// Is done unreachable?
	if done.Preds == nil {
		// Simplify true&&y (or false||y) to y.
		fn.currentBlock = rhs
		return b.expr(fn, e.Y)
	}

	// All edges from e.X to done carry the short-circuit value.
	var edges []Value
	for range done.Preds {
		edges = append(edges, short)
	}

	// The edge from e.Y to done carries the value of e.Y.
	fn.currentBlock = rhs
	edges = append(edges, b.expr(fn, e.Y))
	emitJump(fn, done)
	fn.currentBlock = done

	// Merge the short-circuit and computed values with a phi node.
	phi := &Phi{Edges: edges, Comment: e.Op.String()}
	phi.pos = e.OpPos
	phi.typ = t
	return done.emit(phi)
}

// exprN lowers a multi-result expression e to SSA form, emitting code
// to fn and returning a single Value whose type is a *types.Tuple.
// The caller must access the components via Extract.
//
// Multi-result expressions include CallExprs in a multi-value
// assignment or return statement, and "value,ok" uses of
// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
// is token.ARROW).
func (b *builder) exprN(fn *Function, e ast.Expr) Value {
	typ := fn.typeOf(e).(*types.Tuple)
	switch e := e.(type) {
	case *ast.ParenExpr:
		return b.exprN(fn, e.X)

	case *ast.CallExpr:
		// Currently, no built-in function nor type conversion
		// has multiple results, so we can avoid some of the
		// cases for single-valued CallExpr.
		var c Call
		b.setCall(fn, e, &c.Call)
		c.typ = typ
		return fn.emit(&c)

	case *ast.IndexExpr:
		mapt := typeparams.CoreType(fn.typeOf(e.X)).(*types.Map) // ,ok must be a map.
		lookup := &Lookup{
			X:       b.expr(fn, e.X),
			Index:   emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
			CommaOk: true,
		}
		lookup.setType(typ)
		lookup.setPos(e.Lbrack)
		return fn.emit(lookup)

	case *ast.TypeAssertExpr:
		return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen)

	case *ast.UnaryExpr: // must be receive <-
		unop := &UnOp{
			Op:      token.ARROW,
			X:       b.expr(fn, e.X),
			CommaOk: true,
		}
		unop.setType(typ)
		unop.setPos(e.OpPos)
		return fn.emit(unop)
	}
	panic(fmt.Sprintf("exprN(%T) in %s", e, fn))
}

// builtin emits to fn SSA instructions to implement a call to the
// built-in function obj with the specified arguments
// and return type. It returns the value defined by the result.
//
// The result is nil if no special handling was required; in this case
// the caller should treat this like an ordinary library function
// call.
func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
	typ = fn.typ(typ)
	switch obj.Name() {
	case "make":
		switch ct := typeparams.CoreType(typ).(type) {
		case *types.Slice:
			n := b.expr(fn, args[1])
			m := n
			if len(args) == 3 {
				m = b.expr(fn, args[2])
			}
			if m, ok := m.(*Const); ok {
				// treat make([]T, n, m) as new([m]T)[:n]
				cap := m.Int64()
				at := types.NewArray(ct.Elem(), cap)
				v := &Slice{
					X:    emitNew(fn, at, pos, "makeslice"),
					High: n,
				}
				v.setPos(pos)
				v.setType(typ)
				return fn.emit(v)
			}
			v := &MakeSlice{
				Len: n,
				Cap: m,
			}
			v.setPos(pos)
			v.setType(typ)
			return fn.emit(v)

		case *types.Map:
			var res Value
			if len(args) == 2 {
				res = b.expr(fn, args[1])
			}
			v := &MakeMap{Reserve: res}
			v.setPos(pos)
			v.setType(typ)
			return fn.emit(v)

		case *types.Chan:
			var sz Value = vZero
			if len(args) == 2 {
				sz = b.expr(fn, args[1])
			}
			v := &MakeChan{Size: sz}
			v.setPos(pos)
			v.setType(typ)
			return fn.emit(v)
		}

	case "new":
		return emitNew(fn, typeparams.MustDeref(typ), pos, "new")

	case "len", "cap":
		// Special case: len or cap of an array or *array is
		// based on the type, not the value which may be nil.
		// We must still evaluate the value, though. (If it
		// was side-effect free, the whole call would have
		// been constant-folded.)
		t := typeparams.Deref(fn.typeOf(args[0]))
		if at, ok := typeparams.CoreType(t).(*types.Array); ok {
			b.expr(fn, args[0]) // for effects only
			return intConst(at.Len())
		}
		// Otherwise treat as normal.

	case "panic":
		fn.emit(&Panic{
			X:   emitConv(fn, b.expr(fn, args[0]), tEface),
			pos: pos,
		})
		// Code following a panic is unreachable; give it its own block.
		fn.currentBlock = fn.newBasicBlock("unreachable")
		return vTrue // any non-nil Value will do
	}
	return nil // treat all others as a regular function call
}

// addr lowers a single-result addressable expression e to SSA form,
// emitting code to fn and returning the location (an lvalue) defined
// by the expression.
//
// If escaping is true, addr marks the base variable of the
// addressable expression e as being a potentially escaping pointer
// value. For example, in this code:
//
//	a := A{
//	  b: [1]B{B{c: 1}}
//	}
//	return &a.b[0].c
//
// the application of & causes a.b[0].c to have its address taken,
// which means that ultimately the local variable a must be
// heap-allocated. This is a simple but very conservative escape
// analysis.
//
// Operations forming potentially escaping pointers include:
//   - &x, including when implicit in method call or composite literals.
//   - a[:] iff a is an array (not *array)
//   - references to variables in lexically enclosing functions.
func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
	switch e := e.(type) {
	case *ast.Ident:
		if isBlankIdent(e) {
			return blank{}
		}
		obj := fn.objectOf(e).(*types.Var)
		var v Value
		if g := fn.Prog.packageLevelMember(obj); g != nil {
			v = g.(*Global) // var (address)
		} else {
			v = fn.lookup(obj, escaping)
		}
		return &address{addr: v, pos: e.Pos(), expr: e}

	case *ast.CompositeLit:
		typ := typeparams.Deref(fn.typeOf(e))
		var v *Alloc
		if escaping {
			v = emitNew(fn, typ, e.Lbrace, "complit")
		} else {
			v = emitLocal(fn, typ, e.Lbrace, "complit")
		}
		var sb storebuf
		b.compLit(fn, v, e, true, &sb)
		sb.emit(fn)
		return &address{addr: v, pos: e.Lbrace, expr: e}

	case *ast.ParenExpr:
		return b.addr(fn, e.X, escaping)

	case *ast.SelectorExpr:
		sel := fn.selection(e)
		if sel == nil {
			// qualified identifier
			return b.addr(fn, e.Sel, escaping)
		}
		if sel.kind != types.FieldVal {
			panic(sel)
		}
		wantAddr := true
		v := b.receiver(fn, e.X, wantAddr, escaping, sel)
		index := sel.index[len(sel.index)-1]
		fld := fieldOf(typeparams.MustDeref(v.Type()), index) // v is an addr.

		// Due to the two phases of resolving AssignStmt, a panic from x.f = p()
		// when x is nil is required to come after the side-effects of
		// evaluating x and p().
		emit := func(fn *Function) Value {
			return emitFieldSelection(fn, v, index, true, e.Sel)
		}
		return &lazyAddress{addr: emit, t: fld.Type(), pos: e.Sel.Pos(), expr: e.Sel}

	case *ast.IndexExpr:
		xt := fn.typeOf(e.X)
		elem, mode := indexType(xt)
		var x Value
		var et types.Type
		switch mode {
		case ixArrVar: // array, array|slice, array|*array, or array|*array|slice.
			x = b.addr(fn, e.X, escaping).address(fn)
			et = types.NewPointer(elem)
		case ixVar: // *array, slice, *array|slice
			x = b.expr(fn, e.X)
			et = types.NewPointer(elem)
		case ixMap:
			mt := typeparams.CoreType(xt).(*types.Map)
			return &element{
				m:   b.expr(fn, e.X),
				k:   emitConv(fn, b.expr(fn, e.Index), mt.Key()),
				t:   mt.Elem(),
				pos: e.Lbrack,
			}
		default:
			panic("unexpected container type in IndexExpr: " + xt.String())
		}
		index := b.expr(fn, e.Index)
		if isUntyped(index.Type()) {
			index = emitConv(fn, index, tInt)
		}
		// Due to the two phases of resolving AssignStmt, a panic from x[i] = p()
		// when x is nil or i is out-of-bounds is required to come after the
		// side-effects of evaluating x, i and p().
		emit := func(fn *Function) Value {
			v := &IndexAddr{
				X:     x,
				Index: index,
			}
			v.setPos(e.Lbrack)
			v.setType(et)
			return fn.emit(v)
		}
		return &lazyAddress{addr: emit, t: typeparams.MustDeref(et), pos: e.Lbrack, expr: e}

	case *ast.StarExpr:
		return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
	}

	panic(fmt.Sprintf("unexpected address expression: %T", e))
}

// store records a single deferred assignment of rhs to the location lhs.
type store struct {
	lhs lvalue
	rhs Value
}

// storebuf buffers a sequence of stores so that all right-hand sides
// can be evaluated before any left-hand side is updated (see assign).
type storebuf struct{ stores []store }

// store appends the deferred assignment lhs = rhs to the buffer.
func (sb *storebuf) store(lhs lvalue, rhs Value) {
	sb.stores = append(sb.stores, store{lhs, rhs})
}

// emit emits to fn the buffered stores, in the order they were recorded.
func (sb *storebuf) emit(fn *Function) {
	for _, s := range sb.stores {
		s.lhs.store(fn, s.rhs)
	}
}

// assign emits to fn code to initialize the lvalue loc with the value
// of expression e. If isZero is true, assign assumes that loc holds
// the zero value for its type.
//
// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate
// better code in some cases, e.g., for composite literals in an
// addressable location.
//
// If sb is not nil, assign generates code to evaluate expression e, but
// not to update loc. Instead, the necessary stores are appended to the
// storebuf sb so that they can be executed later. This allows correct
// in-place update of existing variables when the RHS is a composite
// literal that may reference parts of the LHS.
func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
	// Can we initialize it in place?
	if e, ok := unparen(e).(*ast.CompositeLit); ok {
		// A CompositeLit never evaluates to a pointer,
		// so if the type of the location is a pointer,
		// an &-operation is implied.
		if !is[blank](loc) && isPointerCore(loc.typ()) { // avoid calling blank.typ()
			ptr := b.addr(fn, e, true).address(fn)
			// copy address
			if sb != nil {
				sb.store(loc, ptr)
			} else {
				loc.store(fn, ptr)
			}
			return
		}

		if _, ok := loc.(*address); ok {
			if isNonTypeParamInterface(loc.typ()) {
				// e.g. var x interface{} = T{...}
				// Can't in-place initialize an interface value.
				// Fall back to copying.
			} else {
				// x = T{...} or x := T{...}
				addr := loc.address(fn)
				if sb != nil {
					b.compLit(fn, addr, e, isZero, sb)
				} else {
					var sb storebuf
					b.compLit(fn, addr, e, isZero, &sb)
					sb.emit(fn)
				}

				// Subtle: emit debug ref for aggregate types only;
				// slice and map are handled by store ops in compLit.
				switch typeparams.CoreType(loc.typ()).(type) {
				case *types.Struct, *types.Array:
					emitDebugRef(fn, e, addr, true)
				}

				return
			}
		}
	}

	// simple case: just copy
	rhs := b.expr(fn, e)
	if sb != nil {
		sb.store(loc, rhs)
	} else {
		loc.store(fn, rhs)
	}
}

// expr lowers a single-result expression e to SSA form, emitting code
// to fn and returning the Value defined by the expression.
566 func (b *builder) expr(fn *Function, e ast.Expr) Value { 567 e = unparen(e) 568 569 tv := fn.info.Types[e] 570 571 // Is expression a constant? 572 if tv.Value != nil { 573 return NewConst(tv.Value, fn.typ(tv.Type)) 574 } 575 576 var v Value 577 if tv.Addressable() { 578 // Prefer pointer arithmetic ({Index,Field}Addr) followed 579 // by Load over subelement extraction (e.g. Index, Field), 580 // to avoid large copies. 581 v = b.addr(fn, e, false).load(fn) 582 } else { 583 v = b.expr0(fn, e, tv) 584 } 585 if fn.debugInfo() { 586 emitDebugRef(fn, e, v, false) 587 } 588 return v 589 } 590 591 func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { 592 switch e := e.(type) { 593 case *ast.BasicLit: 594 panic("non-constant BasicLit") // unreachable 595 596 case *ast.FuncLit: 597 /* function literal */ 598 anon := &Function{ 599 name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), 600 Signature: fn.typeOf(e.Type).(*types.Signature), 601 pos: e.Type.Func, 602 parent: fn, 603 anonIdx: int32(len(fn.AnonFuncs)), 604 Pkg: fn.Pkg, 605 Prog: fn.Prog, 606 syntax: e, 607 info: fn.info, 608 goversion: fn.goversion, 609 build: (*builder).buildFromSyntax, 610 topLevelOrigin: nil, // use anonIdx to lookup an anon instance's origin. 611 typeparams: fn.typeparams, // share the parent's type parameters. 612 typeargs: fn.typeargs, // share the parent's type arguments. 613 subst: fn.subst, // share the parent's type substitutions. 614 } 615 fn.AnonFuncs = append(fn.AnonFuncs, anon) 616 // Build anon immediately, as it may cause fn's locals to escape. 617 // (It is not marked 'built' until the end of the enclosing FuncDecl.) 
618 anon.build(b, anon) 619 if anon.FreeVars == nil { 620 return anon 621 } 622 v := &MakeClosure{Fn: anon} 623 v.setType(fn.typ(tv.Type)) 624 for _, fv := range anon.FreeVars { 625 v.Bindings = append(v.Bindings, fv.outer) 626 fv.outer = nil 627 } 628 return fn.emit(v) 629 630 case *ast.TypeAssertExpr: // single-result form only 631 return emitTypeAssert(fn, b.expr(fn, e.X), fn.typ(tv.Type), e.Lparen) 632 633 case *ast.CallExpr: 634 if fn.info.Types[e.Fun].IsType() { 635 // Explicit type conversion, e.g. string(x) or big.Int(x) 636 x := b.expr(fn, e.Args[0]) 637 y := emitConv(fn, x, fn.typ(tv.Type)) 638 if y != x { 639 switch y := y.(type) { 640 case *Convert: 641 y.pos = e.Lparen 642 case *ChangeType: 643 y.pos = e.Lparen 644 case *MakeInterface: 645 y.pos = e.Lparen 646 case *SliceToArrayPointer: 647 y.pos = e.Lparen 648 case *UnOp: // conversion from slice to array. 649 y.pos = e.Lparen 650 } 651 } 652 return y 653 } 654 // Call to "intrinsic" built-ins, e.g. new, make, panic. 655 if id, ok := unparen(e.Fun).(*ast.Ident); ok { 656 if obj, ok := fn.info.Uses[id].(*types.Builtin); ok { 657 if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil { 658 return v 659 } 660 } 661 } 662 // Regular function call. 663 var v Call 664 b.setCall(fn, e, &v.Call) 665 v.setType(fn.typ(tv.Type)) 666 return fn.emit(&v) 667 668 case *ast.UnaryExpr: 669 switch e.Op { 670 case token.AND: // &X --- potentially escaping. 671 addr := b.addr(fn, e.X, true) 672 if _, ok := unparen(e.X).(*ast.StarExpr); ok { 673 // &*p must panic if p is nil (http://golang.org/s/go12nil). 674 // For simplicity, we'll just (suboptimally) rely 675 // on the side effects of a load. 676 // TODO(adonovan): emit dedicated nilcheck. 677 addr.load(fn) 678 } 679 return addr.address(fn) 680 case token.ADD: 681 return b.expr(fn, e.X) 682 case token.NOT, token.ARROW, token.SUB, token.XOR: // ! 
<- - ^ 683 v := &UnOp{ 684 Op: e.Op, 685 X: b.expr(fn, e.X), 686 } 687 v.setPos(e.OpPos) 688 v.setType(fn.typ(tv.Type)) 689 return fn.emit(v) 690 default: 691 panic(e.Op) 692 } 693 694 case *ast.BinaryExpr: 695 switch e.Op { 696 case token.LAND, token.LOR: 697 return b.logicalBinop(fn, e) 698 case token.SHL, token.SHR: 699 fallthrough 700 case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: 701 return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.typ(tv.Type), e.OpPos) 702 703 case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: 704 cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) 705 // The type of x==y may be UntypedBool. 706 return emitConv(fn, cmp, types.Default(fn.typ(tv.Type))) 707 default: 708 panic("illegal op in BinaryExpr: " + e.Op.String()) 709 } 710 711 case *ast.SliceExpr: 712 var low, high, max Value 713 var x Value 714 xtyp := fn.typeOf(e.X) 715 switch typeparams.CoreType(xtyp).(type) { 716 case *types.Array: 717 // Potentially escaping. 718 x = b.addr(fn, e.X, true).address(fn) 719 case *types.Basic, *types.Slice, *types.Pointer: // *array 720 x = b.expr(fn, e.X) 721 default: 722 // core type exception? 723 if isBytestring(xtyp) { 724 x = b.expr(fn, e.X) // bytestring is handled as string and []byte. 725 } else { 726 panic("unexpected sequence type in SliceExpr") 727 } 728 } 729 if e.Low != nil { 730 low = b.expr(fn, e.Low) 731 } 732 if e.High != nil { 733 high = b.expr(fn, e.High) 734 } 735 if e.Slice3 { 736 max = b.expr(fn, e.Max) 737 } 738 v := &Slice{ 739 X: x, 740 Low: low, 741 High: high, 742 Max: max, 743 } 744 v.setPos(e.Lbrack) 745 v.setType(fn.typ(tv.Type)) 746 return fn.emit(v) 747 748 case *ast.Ident: 749 obj := fn.info.Uses[e] 750 // Universal built-in or nil? 
751 switch obj := obj.(type) { 752 case *types.Builtin: 753 return &Builtin{name: obj.Name(), sig: fn.instanceType(e).(*types.Signature)} 754 case *types.Nil: 755 return zeroConst(fn.instanceType(e)) 756 } 757 758 // Package-level func or var? 759 // (obj must belong to same package or a direct import.) 760 if v := fn.Prog.packageLevelMember(obj); v != nil { 761 if g, ok := v.(*Global); ok { 762 return emitLoad(fn, g) // var (address) 763 } 764 callee := v.(*Function) // (func) 765 if callee.typeparams.Len() > 0 { 766 targs := fn.subst.types(instanceArgs(fn.info, e)) 767 callee = callee.instance(targs, b.created) 768 } 769 return callee 770 } 771 // Local var. 772 return emitLoad(fn, fn.lookup(obj.(*types.Var), false)) // var (address) 773 774 case *ast.SelectorExpr: 775 sel := fn.selection(e) 776 if sel == nil { 777 // builtin unsafe.{Add,Slice} 778 if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok { 779 return &Builtin{name: obj.Name(), sig: fn.typ(tv.Type).(*types.Signature)} 780 } 781 // qualified identifier 782 return b.expr(fn, e.Sel) 783 } 784 switch sel.kind { 785 case types.MethodExpr: 786 // (*T).f or T.f, the method f from the method-set of type T. 787 // The result is a "thunk". 788 thunk := createThunk(fn.Prog, sel, b.created) 789 return emitConv(fn, thunk, fn.typ(tv.Type)) 790 791 case types.MethodVal: 792 // e.f where e is an expression and f is a method. 793 // The result is a "bound". 794 obj := sel.obj.(*types.Func) 795 rt := fn.typ(recvType(obj)) 796 wantAddr := isPointer(rt) 797 escaping := true 798 v := b.receiver(fn, e.X, wantAddr, escaping, sel) 799 800 if types.IsInterface(rt) { 801 // If v may be an interface type I (after instantiating), 802 // we must emit a check that v is non-nil. 803 if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok { 804 // Emit a nil check if any possible instantiation of the 805 // type parameter is an interface type. 806 if typeSetOf(recv).Len() > 0 { 807 // recv has a concrete term its typeset. 
808 // So it cannot be instantiated as an interface. 809 // 810 // Example: 811 // func _[T interface{~int; Foo()}] () { 812 // var v T 813 // _ = v.Foo // <-- MethodVal 814 // } 815 } else { 816 // rt may be instantiated as an interface. 817 // Emit nil check: typeassert (any(v)).(any). 818 emitTypeAssert(fn, emitConv(fn, v, tEface), tEface, token.NoPos) 819 } 820 } else { 821 // non-type param interface 822 // Emit nil check: typeassert v.(I). 823 emitTypeAssert(fn, v, rt, e.Sel.Pos()) 824 } 825 } 826 if targs := receiverTypeArgs(obj); len(targs) > 0 { 827 // obj is generic. 828 obj = fn.Prog.canon.instantiateMethod(obj, fn.subst.types(targs), fn.Prog.ctxt) 829 } 830 c := &MakeClosure{ 831 Fn: createBound(fn.Prog, obj, b.created), 832 Bindings: []Value{v}, 833 } 834 c.setPos(e.Sel.Pos()) 835 c.setType(fn.typ(tv.Type)) 836 return fn.emit(c) 837 838 case types.FieldVal: 839 indices := sel.index 840 last := len(indices) - 1 841 v := b.expr(fn, e.X) 842 v = emitImplicitSelections(fn, v, indices[:last], e.Pos()) 843 v = emitFieldSelection(fn, v, indices[last], false, e.Sel) 844 return v 845 } 846 847 panic("unexpected expression-relative selector") 848 849 case *ast.IndexListExpr: 850 // f[X, Y] must be a generic function 851 if !instance(fn.info, e.X) { 852 panic("unexpected expression-could not match index list to instantiation") 853 } 854 return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. 855 856 case *ast.IndexExpr: 857 if instance(fn.info, e.X) { 858 return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases. 859 } 860 // not a generic instantiation. 861 xt := fn.typeOf(e.X) 862 switch et, mode := indexType(xt); mode { 863 case ixVar: 864 // Addressable slice/array; use IndexAddr and Load. 865 return b.addr(fn, e, false).load(fn) 866 867 case ixArrVar, ixValue: 868 // An array in a register, a string or a combined type that contains 869 // either an [_]array (ixArrVar) or string (ixValue). 
870 871 // Note: for ixArrVar and CoreType(xt)==nil can be IndexAddr and Load. 872 index := b.expr(fn, e.Index) 873 if isUntyped(index.Type()) { 874 index = emitConv(fn, index, tInt) 875 } 876 v := &Index{ 877 X: b.expr(fn, e.X), 878 Index: index, 879 } 880 v.setPos(e.Lbrack) 881 v.setType(et) 882 return fn.emit(v) 883 884 case ixMap: 885 ct := typeparams.CoreType(xt).(*types.Map) 886 v := &Lookup{ 887 X: b.expr(fn, e.X), 888 Index: emitConv(fn, b.expr(fn, e.Index), ct.Key()), 889 } 890 v.setPos(e.Lbrack) 891 v.setType(ct.Elem()) 892 return fn.emit(v) 893 default: 894 panic("unexpected container type in IndexExpr: " + xt.String()) 895 } 896 897 case *ast.CompositeLit, *ast.StarExpr: 898 // Addressable types (lvalues) 899 return b.addr(fn, e, false).load(fn) 900 } 901 902 panic(fmt.Sprintf("unexpected expr: %T", e)) 903 } 904 905 // stmtList emits to fn code for all statements in list. 906 func (b *builder) stmtList(fn *Function, list []ast.Stmt) { 907 for _, s := range list { 908 b.stmt(fn, s) 909 } 910 } 911 912 // receiver emits to fn code for expression e in the "receiver" 913 // position of selection e.f (where f may be a field or a method) and 914 // returns the effective receiver after applying the implicit field 915 // selections of sel. 916 // 917 // wantAddr requests that the result is an address. If 918 // !sel.indirect, this may require that e be built in addr() mode; it 919 // must thus be addressable. 920 // 921 // escaping is defined as per builder.addr(). 922 func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value { 923 var v Value 924 if wantAddr && !sel.indirect && !isPointerCore(fn.typeOf(e)) { 925 v = b.addr(fn, e, escaping).address(fn) 926 } else { 927 v = b.expr(fn, e) 928 } 929 930 last := len(sel.index) - 1 931 // The position of implicit selection is the position of the inducing receiver expression. 
932 v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos()) 933 if types.IsInterface(v.Type()) { 934 // When v is an interface, sel.Kind()==MethodValue and v.f is invoked. 935 // So v is not loaded, even if v has a pointer core type. 936 } else if !wantAddr && isPointerCore(v.Type()) { 937 v = emitLoad(fn, v) 938 } 939 return v 940 } 941 942 // setCallFunc populates the function parts of a CallCommon structure 943 // (Func, Method, Recv, Args[0]) based on the kind of invocation 944 // occurring in e. 945 func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { 946 c.pos = e.Lparen 947 948 // Is this a method call? 949 if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { 950 sel := fn.selection(selector) 951 if sel != nil && sel.kind == types.MethodVal { 952 obj := sel.obj.(*types.Func) 953 recv := recvType(obj) 954 955 wantAddr := isPointer(recv) 956 escaping := true 957 v := b.receiver(fn, selector.X, wantAddr, escaping, sel) 958 if types.IsInterface(recv) { 959 // Invoke-mode call. 960 c.Value = v // possibly type param 961 c.Method = obj 962 } else { 963 // "Call"-mode call. 964 c.Value = fn.Prog.objectMethod(obj, b.created) 965 c.Args = append(c.Args, v) 966 } 967 return 968 } 969 970 // sel.kind==MethodExpr indicates T.f() or (*T).f(): 971 // a statically dispatched call to the method f in the 972 // method-set of T or *T. T may be an interface. 973 // 974 // e.Fun would evaluate to a concrete method, interface 975 // wrapper function, or promotion wrapper. 976 // 977 // For now, we evaluate it in the usual way. 978 // 979 // TODO(adonovan): opt: inline expr() here, to make the 980 // call static and to avoid generation of wrappers. 981 // It's somewhat tricky as it may consume the first 982 // actual parameter if the call is "invoke" mode. 
983 // 984 // Examples: 985 // type T struct{}; func (T) f() {} // "call" mode 986 // type T interface { f() } // "invoke" mode 987 // 988 // type S struct{ T } 989 // 990 // var s S 991 // S.f(s) 992 // (*S).f(&s) 993 // 994 // Suggested approach: 995 // - consume the first actual parameter expression 996 // and build it with b.expr(). 997 // - apply implicit field selections. 998 // - use MethodVal logic to populate fields of c. 999 } 1000 1001 // Evaluate the function operand in the usual way. 1002 c.Value = b.expr(fn, e.Fun) 1003 } 1004 1005 // emitCallArgs emits to f code for the actual parameters of call e to 1006 // a (possibly built-in) function of effective type sig. 1007 // The argument values are appended to args, which is then returned. 1008 func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { 1009 // f(x, y, z...): pass slice z straight through. 1010 if e.Ellipsis != 0 { 1011 for i, arg := range e.Args { 1012 v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) 1013 args = append(args, v) 1014 } 1015 return args 1016 } 1017 1018 offset := len(args) // 1 if call has receiver, 0 otherwise 1019 1020 // Evaluate actual parameter expressions. 1021 // 1022 // If this is a chained call of the form f(g()) where g has 1023 // multiple return values (MRV), they are flattened out into 1024 // args; a suffix of them may end up in a varargs slice. 1025 for _, arg := range e.Args { 1026 v := b.expr(fn, arg) 1027 if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain 1028 for i, n := 0, ttuple.Len(); i < n; i++ { 1029 args = append(args, emitExtract(fn, v, i)) 1030 } 1031 } else { 1032 args = append(args, v) 1033 } 1034 } 1035 1036 // Actual->formal assignability conversions for normal parameters. 
	np := sig.Params().Len() // number of normal parameters
	if sig.Variadic() {
		np--
	}
	for i := 0; i < np; i++ {
		args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
	}

	// Actual->formal assignability conversions for variadic parameter,
	// and construction of slice.
	if sig.Variadic() {
		varargs := args[offset+np:]
		st := sig.Params().At(np).Type().(*types.Slice)
		vt := st.Elem()
		if len(varargs) == 0 {
			args = append(args, zeroConst(st))
		} else {
			// Replace a suffix of args with a slice containing it.
			at := types.NewArray(vt, int64(len(varargs)))
			a := emitNew(fn, at, token.NoPos, "varargs")
			a.setPos(e.Rparen)
			for i, arg := range varargs {
				iaddr := &IndexAddr{
					X:     a,
					Index: intConst(int64(i)),
				}
				iaddr.setType(types.NewPointer(vt))
				fn.emit(iaddr)
				emitStore(fn, iaddr, arg, arg.Pos())
			}
			s := &Slice{X: a}
			s.setType(st)
			args[offset+np] = fn.emit(s)
			// Truncate args: the varargs suffix has been replaced
			// by the single slice value at args[offset+np].
			args = args[:offset+np+1]
		}
	}
	return args
}

// setCall emits to fn code to evaluate all the parameters of a function
// call e, and populates *c with those values.
func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
	// First deal with the f(...) part and optional receiver.
	b.setCallFunc(fn, e, c)

	// Then append the other actual parameters.
	sig, _ := typeparams.CoreType(fn.typeOf(e.Fun)).(*types.Signature)
	if sig == nil {
		panic(fmt.Sprintf("no signature for call of %s", e.Fun))
	}
	c.Args = b.emitCallArgs(fn, sig, e, c.Args)
}

// assignOp emits to fn code to perform loc <op>= val.
func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) {
	// Load the current value of loc, combine it with val using op,
	// then store the result back into loc.
	loc.store(fn, emitArith(fn, op, loc.load(fn), val, loc.typ(), pos))
}

// localValueSpec emits to fn code to define all of the vars in the
// function-local ValueSpec, spec.
func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
	switch {
	case len(spec.Values) == len(spec.Names):
		// e.g. var x, y = 0, 1
		// 1:1 assignment
		for i, id := range spec.Names {
			if !isBlankIdent(id) {
				emitLocalVar(fn, identVar(fn, id))
			}
			lval := b.addr(fn, id, false) // non-escaping
			b.assign(fn, lval, spec.Values[i], true, nil)
		}

	case len(spec.Values) == 0:
		// e.g. var x, y int
		// Locals are implicitly zero-initialized.
		for _, id := range spec.Names {
			if !isBlankIdent(id) {
				lhs := emitLocalVar(fn, identVar(fn, id))
				if fn.debugInfo() {
					emitDebugRef(fn, id, lhs, true)
				}
			}
		}

	default:
		// e.g. var x, y = pos()
		// A single multi-valued expression initializes all names.
		tuple := b.exprN(fn, spec.Values[0])
		for i, id := range spec.Names {
			if !isBlankIdent(id) {
				emitLocalVar(fn, identVar(fn, id))
				lhs := b.addr(fn, id, false) // non-escaping
				lhs.store(fn, emitExtract(fn, tuple, i))
			}
		}
	}
}

// assignStmt emits code to fn for a parallel assignment of rhss to lhss.
// isDef is true if this is a short variable declaration (:=).
//
// Note the similarity with localValueSpec.
func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
	// Side effects of all LHSs and RHSs must occur in left-to-right order.
	lvals := make([]lvalue, len(lhss))
	isZero := make([]bool, len(lhss))
	for i, lhs := range lhss {
		var lval lvalue = blank{}
		if !isBlankIdent(lhs) {
			if isDef {
				// A := that actually defines a new variable gets a
				// fresh, zero-initialized local cell.
				if obj, ok := fn.info.Defs[lhs.(*ast.Ident)].(*types.Var); ok {
					emitLocalVar(fn, obj)
					isZero[i] = true
				}
			}
			lval = b.addr(fn, lhs, false) // non-escaping
		}
		lvals[i] = lval
	}
	if len(lhss) == len(rhss) {
		// Simple assignment:   x     = f()        (!isDef)
		// Parallel assignment: x, y  = f(), g()   (!isDef)
		// or short var decl:   x, y := f(), g()   (isDef)
		//
		// In all cases, the RHSs may refer to the LHSs,
		// so we need a storebuf.
		var sb storebuf
		for i := range rhss {
			b.assign(fn, lvals[i], rhss[i], isZero[i], &sb)
		}
		sb.emit(fn)
	} else {
		// e.g. x, y = pos()
		tuple := b.exprN(fn, rhss[0])
		emitDebugRef(fn, rhss[0], tuple, false)
		for i, lval := range lvals {
			lval.store(fn, emitExtract(fn, tuple, i))
		}
	}
}

// arrayLen returns the length of the array whose composite literal elements are elts.
func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
	var max int64 = -1
	var i int64 = -1
	for _, e := range elts {
		if kv, ok := e.(*ast.KeyValueExpr); ok {
			// Keys of array/slice literals are constant expressions,
			// so the assertion to *Const is expected to hold.
			i = b.expr(fn, kv.Key).(*Const).Int64()
		} else {
			i++
		}
		if i > max {
			max = i
		}
	}
	return max + 1
}

// compLit emits to fn code to initialize a composite literal e at
// address addr with type typ.
//
// Nested composite literals are recursively initialized in place
// where possible. If isZero is true, compLit assumes that addr
// holds the zero value for typ.
//
// Because the elements of a composite literal may refer to the
// variables being updated, as in the second line below,
//
//	x := T{a: 1}
//	x = T{a: x.a}
//
// all the reads must occur before all the writes. Thus all stores to
// loc are emitted to the storebuf sb for later execution.
//
// A CompositeLit may have pointer type only in the recursive (nested)
// case when the type name is implicit. e.g. in []*T{{}}, the inner
// literal has type *T and behaves like &T{}.
// In that case, addr must hold a T, not a *T.
func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
	typ := typeparams.Deref(fn.typeOf(e)) // retain the named/alias/param type, if any
	switch t := typeparams.CoreType(typ).(type) {
	case *types.Struct:
		if !isZero && len(e.Elts) != t.NumFields() {
			// memclear: not all fields are set, so zero the
			// whole struct first, then assume zeroed below.
			zt := typeparams.MustDeref(addr.Type())
			sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt))
			isZero = true
		}
		for i, e := range e.Elts {
			fieldIndex := i
			pos := e.Pos()
			if kv, ok := e.(*ast.KeyValueExpr); ok {
				// Keyed element: find the field by name.
				fname := kv.Key.(*ast.Ident).Name
				for i, n := 0, t.NumFields(); i < n; i++ {
					sf := t.Field(i)
					if sf.Name() == fname {
						fieldIndex = i
						pos = kv.Colon
						e = kv.Value
						break
					}
				}
			}
			sf := t.Field(fieldIndex)
			faddr := &FieldAddr{
				X:     addr,
				Field: fieldIndex,
			}
			faddr.setPos(pos)
			faddr.setType(types.NewPointer(sf.Type()))
			fn.emit(faddr)
			b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
		}

	case *types.Array, *types.Slice:
		var at *types.Array
		var array Value
		switch t := t.(type) {
		case *types.Slice:
			// A slice literal is built in a fresh backing array.
			at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
			array = emitNew(fn, at, e.Lbrace, "slicelit")
		case *types.Array:
			at = t
			array = addr

			if !isZero && int64(len(e.Elts)) != at.Len() {
				// memclear
				zt := typeparams.MustDeref(array.Type())
				sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt))
			}
		}

		var idx *Const
		for _, e := range e.Elts {
			pos := e.Pos()
			if kv, ok := e.(*ast.KeyValueExpr); ok {
				idx = b.expr(fn, kv.Key).(*Const)
				pos = kv.Colon
				e = kv.Value
			} else {
				// Unkeyed element: index is one past the previous.
				var idxval int64
				if idx != nil {
					idxval = idx.Int64() + 1
				}
				idx = intConst(idxval)
			}
			iaddr := &IndexAddr{
				X:     array,
				Index: idx,
			}
			iaddr.setType(types.NewPointer(at.Elem()))
			fn.emit(iaddr)
			if t != at { // slice
				// backing array is unaliased => storebuf not needed.
				b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil)
			} else {
				b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb)
			}
		}

		if t != at { // slice
			s := &Slice{X: array}
			s.setPos(e.Lbrace)
			s.setType(typ)
			sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s))
		}

	case *types.Map:
		m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
		m.setPos(e.Lbrace)
		m.setType(typ)
		fn.emit(m)
		for _, e := range e.Elts {
			e := e.(*ast.KeyValueExpr)

			// If a key expression in a map literal is itself a
			// composite literal, the type may be omitted.
			// For example:
			//	map[*struct{}]bool{{}: true}
			// An &-operation may be implied:
			//	map[*struct{}]bool{&struct{}{}: true}
			wantAddr := false
			if _, ok := unparen(e.Key).(*ast.CompositeLit); ok {
				wantAddr = isPointerCore(t.Key())
			}

			var key Value
			if wantAddr {
				// A CompositeLit never evaluates to a pointer,
				// so if the type of the location is a pointer,
				// an &-operation is implied.
				key = b.addr(fn, e.Key, true).address(fn)
			} else {
				key = b.expr(fn, e.Key)
			}

			loc := element{
				m:   m,
				k:   emitConv(fn, key, t.Key()),
				t:   t.Elem(),
				pos: e.Colon,
			}

			// We call assign() only because it takes care
			// of any &-operation required in the recursive
			// case, e.g.,
			// map[int]*struct{}{0: {}} implies &struct{}{}.
			// In-place update is of course impossible,
			// and no storebuf is needed.
			b.assign(fn, &loc, e.Value, true, nil)
		}
		sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m)

	default:
		panic("unexpected CompositeLit type: " + typ.String())
	}
}

// switchStmt emits to fn code for the switch statement s, optionally
// labelled by label.
func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
	// We treat SwitchStmt like a sequential if-else chain.
	// Multiway dispatch can be recovered later by ssautil.Switches()
	// to those cases that are free of side effects.
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}
	var tag Value = vTrue
	if s.Tag != nil {
		tag = b.expr(fn, s.Tag)
	}
	done := fn.newBasicBlock("switch.done")
	if label != nil {
		label._break = done
	}
	// We pull the default case (if present) down to the end.
	// But each fallthrough label must point to the next
	// body block in source order, so we preallocate a
	// body block (fallthru) for the next case.
	// Unfortunately this makes for a confusing block order.
	var dfltBody *[]ast.Stmt
	var dfltFallthrough *BasicBlock
	var fallthru, dfltBlock *BasicBlock
	ncases := len(s.Body.List)
	for i, clause := range s.Body.List {
		body := fallthru
		if body == nil {
			body = fn.newBasicBlock("switch.body") // first case only
		}

		// Preallocate body block for the next case.
		fallthru = done
		if i+1 < ncases {
			fallthru = fn.newBasicBlock("switch.body")
		}

		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			// Default case.
			dfltBody = &cc.Body
			dfltFallthrough = fallthru
			dfltBlock = body
			continue
		}

		var nextCond *BasicBlock
		for _, cond := range cc.List {
			nextCond = fn.newBasicBlock("switch.next")
			// TODO(adonovan): opt: when tag==vTrue, we'd
			// get better code if we use b.cond(cond)
			// instead of BinOp(EQL, tag, b.expr(cond))
			// followed by If. Don't forget conversions
			// though.
			cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), cond.Pos())
			emitIf(fn, cond, body, nextCond)
			fn.currentBlock = nextCond
		}
		fn.currentBlock = body
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: fallthru,
		}
		b.stmtList(fn, cc.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done)
		fn.currentBlock = nextCond
	}
	if dfltBlock != nil {
		emitJump(fn, dfltBlock)
		fn.currentBlock = dfltBlock
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: dfltFallthrough,
		}
		b.stmtList(fn, *dfltBody)
		fn.targets = fn.targets.tail
	}
	emitJump(fn, done)
	fn.currentBlock = done
}

// typeSwitchStmt emits to fn code for the type switch statement s, optionally
// labelled by label.
func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
	// We treat TypeSwitchStmt like a sequential if-else chain.
	// Multiway dispatch can be recovered later by ssautil.Switches().

	// Typeswitch lowering:
	//
	// var x X
	// switch y := x.(type) {
	// case T1, T2: S1            // >1      (y := x)
	// case nil:    SN            // nil     (y := x)
	// default:     SD            // 0 types (y := x)
	// case T3:     S3            // 1 type  (y := x.(T3))
	// }
	//
	//      ...s.Init...
	// 	x := eval x
	// .caseT1:
	// 	t1, ok1 := typeswitch,ok x <T1>
	// 	if ok1 then goto S1 else goto .caseT2
	// .caseT2:
	// 	t2, ok2 := typeswitch,ok x <T2>
	// 	if ok2 then goto S1 else goto .caseNil
	// .S1:
	// 	y := x
	// 	...S1...
	// 	goto done
	// .caseNil:
	// 	if x == nil then goto SN else goto .caseT3
	// .SN:
	// 	y := x
	// 	...SN...
	// 	goto done
	// .caseT3:
	// 	t3, ok3 := typeswitch,ok x <T3>
	// 	if ok3 then goto S3 else goto default
	// .S3:
	// 	y := t3
	// 	...S3...
	// 	goto done
	// .default:
	// 	y := x
	// 	...SD...
	// 	goto done
	// .done:
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}

	var x Value
	switch ass := s.Assign.(type) {
	case *ast.ExprStmt: // x.(type)
		x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
	case *ast.AssignStmt: // y := x.(type)
		x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
	}

	done := fn.newBasicBlock("typeswitch.done")
	if label != nil {
		label._break = done
	}
	var default_ *ast.CaseClause
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			// Default clause is handled last, after all typed cases.
			default_ = cc
			continue
		}
		body := fn.newBasicBlock("typeswitch.body")
		var next *BasicBlock
		var casetype types.Type
		var ti Value // ti, ok := typeassert,ok x <Ti>
		for _, cond := range cc.List {
			next = fn.newBasicBlock("typeswitch.next")
			casetype = fn.typeOf(cond)
			var condv Value
			if casetype == tUntypedNil {
				// 'case nil' compares x against the zero (nil) value.
				condv = emitCompare(fn, token.EQL, x, zeroConst(x.Type()), cond.Pos())
				ti = x
			} else {
				yok := emitTypeTest(fn, x, casetype, cc.Case)
				ti = emitExtract(fn, yok, 0)
				condv = emitExtract(fn, yok, 1)
			}
			emitIf(fn, condv, body, next)
			fn.currentBlock = next
		}
		if len(cc.List) != 1 {
			// Multi-type case: y has the interface operand's type,
			// so use x itself, not an extracted value.
			ti = x
		}
		fn.currentBlock = body
		b.typeCaseBody(fn, cc, ti, done)
		fn.currentBlock = next
	}
	if default_ != nil {
		b.typeCaseBody(fn, default_, x, done)
	} else {
		emitJump(fn, done)
	}
	fn.currentBlock = done
}

// typeCaseBody emits to fn code for the body of case clause cc of a
// type switch, binding the implicit case variable (if any) to x,
// then jumping to done.
func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
	if obj, ok := fn.info.Implicits[cc].(*types.Var); ok {
		// In a switch y := x.(type), each case clause
		// implicitly declares a distinct object y.
		// In a single-type case, y has that type.
		// In multi-type cases, 'case nil' and default,
		// y has the same type as the interface operand.
		emitStore(fn, emitLocalVar(fn, obj), x, obj.Pos())
	}
	fn.targets = &targets{
		tail:   fn.targets,
		_break: done,
	}
	b.stmtList(fn, cc.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, done)
}

// selectStmt emits to fn code for the select statement s, optionally
// labelled by label.
func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
	// A blocking select of a single case degenerates to a
	// simple send or receive.
	// TODO(adonovan): opt: is this optimization worth its weight?
	if len(s.Body.List) == 1 {
		clause := s.Body.List[0].(*ast.CommClause)
		if clause.Comm != nil {
			b.stmt(fn, clause.Comm)
			done := fn.newBasicBlock("select.done")
			if label != nil {
				label._break = done
			}
			fn.targets = &targets{
				tail:   fn.targets,
				_break: done,
			}
			b.stmtList(fn, clause.Body)
			fn.targets = fn.targets.tail
			emitJump(fn, done)
			fn.currentBlock = done
			return
		}
	}

	// First evaluate all channels in all cases, and find
	// the directions of each state.
	var states []*SelectState
	blocking := true
	debugInfo := fn.debugInfo()
	for _, clause := range s.Body.List {
		var st *SelectState
		switch comm := clause.(*ast.CommClause).Comm.(type) {
		case nil: // default case
			blocking = false
			continue

		case *ast.SendStmt: // ch<- i
			ch := b.expr(fn, comm.Chan)
			chtyp := typeparams.CoreType(fn.typ(ch.Type())).(*types.Chan)
			st = &SelectState{
				Dir:  types.SendOnly,
				Chan: ch,
				Send: emitConv(fn, b.expr(fn, comm.Value), chtyp.Elem()),
				Pos:  comm.Arrow,
			}
			if debugInfo {
				st.DebugNode = comm
			}

		case *ast.AssignStmt: // x := <-ch
			recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  types.RecvOnly,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}

		case *ast.ExprStmt: // <-ch
			recv := unparen(comm.X).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  types.RecvOnly,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}
		}
		states = append(states, st)
	}

	// We dispatch on the (fair) result of Select using a
	// sequential if-else chain, in effect:
	//
	// idx, recvOk, r0...r_n-1 := select(...)
	// if idx == 0 {  // receive on channel 0  (first receive => r0)
	//     x, ok := r0, recvOk
	//     ...state0...
	// } else if idx == 1 { // send on channel 1
	//     ...state1...
	// } else {
	//     ...default...
	// }
	sel := &Select{
		States:   states,
		Blocking: blocking,
	}
	sel.setPos(s.Select)
	var vars []*types.Var
	vars = append(vars, varIndex, varOk)
	for _, st := range states {
		if st.Dir == types.RecvOnly {
			// Each receive case contributes one value component
			// to the Select instruction's result tuple.
			chtyp := typeparams.CoreType(fn.typ(st.Chan.Type())).(*types.Chan)
			vars = append(vars, anonVar(chtyp.Elem()))
		}
	}
	sel.setType(types.NewTuple(vars...))

	fn.emit(sel)
	idx := emitExtract(fn, sel, 0)

	done := fn.newBasicBlock("select.done")
	if label != nil {
		label._break = done
	}

	var defaultBody *[]ast.Stmt
	state := 0
	r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
	for _, cc := range s.Body.List {
		clause := cc.(*ast.CommClause)
		if clause.Comm == nil {
			defaultBody = &clause.Body
			continue
		}
		body := fn.newBasicBlock("select.body")
		next := fn.newBasicBlock("select.next")
		emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
		fn.currentBlock = body
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		switch comm := clause.Comm.(type) {
		case *ast.ExprStmt: // <-ch
			if debugInfo {
				v := emitExtract(fn, sel, r)
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			r++

		case *ast.AssignStmt: // x := <-states[state].Chan
			if comm.Tok == token.DEFINE {
				emitLocalVar(fn, identVar(fn, comm.Lhs[0].(*ast.Ident)))
			}
			x := b.addr(fn, comm.Lhs[0], false) // non-escaping
			v := emitExtract(fn, sel, r)
			if debugInfo {
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			x.store(fn, v)

			if len(comm.Lhs) == 2 { // x, ok := ...
				if comm.Tok == token.DEFINE {
					emitLocalVar(fn, identVar(fn, comm.Lhs[1].(*ast.Ident)))
				}
				ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
				ok.store(fn, emitExtract(fn, sel, 1))
			}
			r++
		}
		b.stmtList(fn, clause.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done)
		fn.currentBlock = next
		state++
	}
	if defaultBody != nil {
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		b.stmtList(fn, *defaultBody)
		fn.targets = fn.targets.tail
	} else {
		// A blocking select must match some case.
		// (This should really be a runtime.errorString, not a string.)
		fn.emit(&Panic{
			X: emitConv(fn, stringConst("blocking select matched no case"), tEface),
		})
		fn.currentBlock = fn.newBasicBlock("unreachable")
	}
	emitJump(fn, done)
	fn.currentBlock = done
}

// forStmt emits to fn code for the for statement s, optionally
// labelled by label.
func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
	// Use forStmtGo122 instead if it applies.
	if s.Init != nil {
		if assign, ok := s.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE {
			if versions.AtLeast(fn.goversion, versions.Go1_22) {
				b.forStmtGo122(fn, s, label)
				return
			}
		}
	}

	//	...init...
	//	jump loop
	// loop:
	//	if cond goto body else done
	// body:
	//	...body...
	//	jump post
	// post:				 (target of continue)
	//	...post...
	//	jump loop
	// done:                                 (target of break)
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}

	body := fn.newBasicBlock("for.body")
	done := fn.newBasicBlock("for.done") // target of 'break'
	loop := body                         // target of back-edge
	if s.Cond != nil {
		loop = fn.newBasicBlock("for.loop")
	}
	cont := loop // target of 'continue'
	if s.Post != nil {
		cont = fn.newBasicBlock("for.post")
	}
	if label != nil {
		label._break = done
		label._continue = cont
	}
	emitJump(fn, loop)
	fn.currentBlock = loop
	if loop != body {
		b.cond(fn, s.Cond, body, done)
		fn.currentBlock = body
	}
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: cont,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, cont)

	if s.Post != nil {
		fn.currentBlock = cont
		b.stmt(fn, s.Post)
		emitJump(fn, loop) // back-edge
	}
	fn.currentBlock = done
}

// forStmtGo122 emits to fn code for the for statement s, optionally
// labelled by label. s must define its variables.
//
// This allocates once per loop iteration. This is only correct in
// GoVersions >= go1.22.
func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) {
	//     i_outer = alloc[T]
	//     *i_outer = ...init...        // under objects[i] = i_outer
	//     jump loop
	// loop:
	//     i = phi [head: i_outer, loop: i_next]
	//     ...cond...                   // under objects[i] = i
	//     if cond goto body else done
	// body:
	//     ...body...                   // under objects[i] = i (same as loop)
	//     jump post
	// post:
	//     tmp = *i
	//     i_next = alloc[T]
	//     *i_next = tmp
	//     ...post...                   // under objects[i] = i_next
	//     goto loop
	// done:

	init := s.Init.(*ast.AssignStmt)
	startingBlocks := len(fn.Blocks)

	pre := fn.currentBlock               // current block before starting
	loop := fn.newBasicBlock("for.loop") // target of back-edge
	body := fn.newBasicBlock("for.body")
	post := fn.newBasicBlock("for.post") // target of 'continue'
	done := fn.newBasicBlock("for.done") // target of 'break'

	// For each of the n loop variables, we create five SSA values,
	// outer, phi, next, load, and store in pre, loop, and post.
	// There is no limit on n.
	type loopVar struct {
		obj   *types.Var
		outer *Alloc
		phi   *Phi
		load  *UnOp
		next  *Alloc
		store *Store
	}
	vars := make([]loopVar, len(init.Lhs))
	for i, lhs := range init.Lhs {
		v := identVar(fn, lhs.(*ast.Ident))
		typ := fn.typ(v.Type())

		fn.currentBlock = pre
		outer := emitLocal(fn, typ, v.Pos(), v.Name())

		fn.currentBlock = loop
		phi := &Phi{Comment: v.Name()}
		phi.pos = v.Pos()
		phi.typ = outer.Type()
		fn.emit(phi)

		fn.currentBlock = post
		// If next is local, it reuses the address and zeroes the old value so
		// load before allocating next.
		load := emitLoad(fn, phi)
		next := emitLocal(fn, typ, v.Pos(), v.Name())
		store := emitStore(fn, next, load, token.NoPos)

		phi.Edges = []Value{outer, next} // pre edge is emitted before post edge.

		vars[i] = loopVar{v, outer, phi, load, next, store}
	}

	// ...init... under fn.objects[v] = i_outer
	fn.currentBlock = pre
	for _, v := range vars {
		fn.vars[v.obj] = v.outer
	}
	const isDef = false // assign to already-allocated outers
	b.assignStmt(fn, init.Lhs, init.Rhs, isDef)
	if label != nil {
		label._break = done
		label._continue = post
	}
	emitJump(fn, loop)

	// ...cond... under fn.objects[v] = i
	fn.currentBlock = loop
	for _, v := range vars {
		fn.vars[v.obj] = v.phi
	}
	if s.Cond != nil {
		b.cond(fn, s.Cond, body, done)
	} else {
		emitJump(fn, body)
	}

	// ...body... under fn.objects[v] = i
	fn.currentBlock = body
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: post,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, post)

	// ...post... under fn.objects[v] = i_next
	for _, v := range vars {
		fn.vars[v.obj] = v.next
	}
	fn.currentBlock = post
	if s.Post != nil {
		b.stmt(fn, s.Post)
	}
	emitJump(fn, loop) // back-edge
	fn.currentBlock = done

	// For each loop variable that does not escape,
	// (the common case), fuse its next cells into its
	// (local) outer cell as they have disjoint live ranges.
	//
	// It is sufficient to test whether i_next escapes,
	// because its Heap flag will be marked true if either
	// the cond or post expression causes i to escape
	// (because escape distributes over phi).
	var nlocals int
	for _, v := range vars {
		if !v.next.Heap {
			nlocals++
		}
	}
	if nlocals > 0 {
		replace := make(map[Value]Value, 2*nlocals)
		dead := make(map[Instruction]bool, 4*nlocals)
		for _, v := range vars {
			if !v.next.Heap {
				replace[v.next] = v.outer
				replace[v.phi] = v.outer
				dead[v.phi], dead[v.next], dead[v.load], dead[v.store] = true, true, true, true
			}
		}

		// Replace all uses of i_next and phi with i_outer.
		// Referrers have not been built for fn yet so only update Instruction operands.
		// We need only look within the blocks added by the loop.
		var operands []*Value // recycle storage
		for _, b := range fn.Blocks[startingBlocks:] {
			for _, instr := range b.Instrs {
				operands = instr.Operands(operands[:0])
				for _, ptr := range operands {
					k := *ptr
					if v := replace[k]; v != nil {
						*ptr = v
					}
				}
			}
		}

		// Remove instructions for phi, load, and store.
		// lift() will remove the unused i_next *Alloc.
		isDead := func(i Instruction) bool { return dead[i] }
		loop.Instrs = removeInstrsIf(loop.Instrs, isDead)
		post.Instrs = removeInstrsIf(post.Instrs, isDead)
	}
}

// rangeIndexed emits to fn the header for an integer-indexed loop
// over array, *array or slice value x.
// The v result is defined only if tv is non-nil.
// pos is the position of the "for" token.
func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
	//
	//     length = len(x)
	//     index = -1
	// loop:                                  (target of continue)
	//     index++
	//     if index < length goto body else done
	// body:
	//     k = index
	//     v = x[index]
	//     ...body...
	//     jump loop
	// done:                                  (target of break)

	// Determine number of iterations.
	var length Value
	dt := typeparams.Deref(x.Type())
	if arr, ok := typeparams.CoreType(dt).(*types.Array); ok {
		// For array or *array, the number of iterations is
		// known statically thanks to the type. We avoid a
		// data dependence upon x, permitting later dead-code
		// elimination if x is pure, static unrolling, etc.
		// Ranging over a nil *array may have >0 iterations.
		// We still generate code for x, in case it has effects.
		length = intConst(arr.Len())
	} else {
		// length = len(x).
2004 var c Call 2005 c.Call.Value = makeLen(x.Type()) 2006 c.Call.Args = []Value{x} 2007 c.setType(tInt) 2008 length = fn.emit(&c) 2009 } 2010 2011 index := emitLocal(fn, tInt, token.NoPos, "rangeindex") 2012 emitStore(fn, index, intConst(-1), pos) 2013 2014 loop = fn.newBasicBlock("rangeindex.loop") 2015 emitJump(fn, loop) 2016 fn.currentBlock = loop 2017 2018 incr := &BinOp{ 2019 Op: token.ADD, 2020 X: emitLoad(fn, index), 2021 Y: vOne, 2022 } 2023 incr.setType(tInt) 2024 emitStore(fn, index, fn.emit(incr), pos) 2025 2026 body := fn.newBasicBlock("rangeindex.body") 2027 done = fn.newBasicBlock("rangeindex.done") 2028 emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) 2029 fn.currentBlock = body 2030 2031 k = emitLoad(fn, index) 2032 if tv != nil { 2033 switch t := typeparams.CoreType(x.Type()).(type) { 2034 case *types.Array: 2035 instr := &Index{ 2036 X: x, 2037 Index: k, 2038 } 2039 instr.setType(t.Elem()) 2040 instr.setPos(x.Pos()) 2041 v = fn.emit(instr) 2042 2043 case *types.Pointer: // *array 2044 instr := &IndexAddr{ 2045 X: x, 2046 Index: k, 2047 } 2048 instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) 2049 instr.setPos(x.Pos()) 2050 v = emitLoad(fn, fn.emit(instr)) 2051 2052 case *types.Slice: 2053 instr := &IndexAddr{ 2054 X: x, 2055 Index: k, 2056 } 2057 instr.setType(types.NewPointer(t.Elem())) 2058 instr.setPos(x.Pos()) 2059 v = emitLoad(fn, fn.emit(instr)) 2060 2061 default: 2062 panic("rangeIndexed x:" + t.String()) 2063 } 2064 } 2065 return 2066 } 2067 2068 // rangeIter emits to fn the header for a loop using 2069 // Range/Next/Extract to iterate over map or string value x. 2070 // tk and tv are the types of the key/value results k and v, or nil 2071 // if the respective component is not wanted. 
// rangeIter emits to fn the header for a loop using
// Range/Next/Extract to iterate over map or string value x.
// tk and tv are the types of the key/value results k and v, or nil
// if the respective component is not wanted.
func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
	//
	//	it = range x
	// loop:                                   (target of continue)
	//	okv = next it                      (ok, key, value)
	//	ok = extract okv #0
	//	if ok goto body else done
	// body:
	//	k = extract okv #1
	//	v = extract okv #2
	//	...body...
	//	jump loop
	// done:                                   (target of break)
	//

	// tInvalid is a placeholder type for an unwanted key/value
	// component; it is checked below before emitting each Extract,
	// but still gives Next a complete 3-tuple result type.
	if tk == nil {
		tk = tInvalid
	}
	if tv == nil {
		tv = tInvalid
	}

	rng := &Range{X: x}
	rng.setPos(pos)
	rng.setType(tRangeIter)
	it := fn.emit(rng)

	loop = fn.newBasicBlock("rangeiter.loop")
	emitJump(fn, loop)
	fn.currentBlock = loop

	okv := &Next{
		Iter:     it,
		IsString: isBasic(typeparams.CoreType(x.Type())),
	}
	okv.setType(types.NewTuple(
		varOk,
		newVar("k", tk),
		newVar("v", tv),
	))
	fn.emit(okv)

	body := fn.newBasicBlock("rangeiter.body")
	done = fn.newBasicBlock("rangeiter.done")
	emitIf(fn, emitExtract(fn, okv, 0), body, done)
	fn.currentBlock = body

	if tk != tInvalid {
		k = emitExtract(fn, okv, 1)
	}
	if tv != tInvalid {
		v = emitExtract(fn, okv, 2)
	}
	return
}

// rangeChan emits to fn the header for a loop that receives from
// channel x until it fails.
// tk is the channel's element type, or nil if the k result is
// not wanted.
// pos is the position of the '=' or ':=' token.
func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
	//
	// loop:                                   (target of continue)
	//	ko = <-x                           (key, ok)
	//	ok = extract ko #1
	//	if ok goto body else done
	// body:
	//	k = extract ko #0
	//	...body...
	//	goto loop
	// done:                                   (target of break)

	loop = fn.newBasicBlock("rangechan.loop")
	emitJump(fn, loop)
	fn.currentBlock = loop
	// Comma-ok receive: ko is a 2-tuple (k, ok); ok is false once
	// the channel is closed and drained, which terminates the loop.
	recv := &UnOp{
		Op:      token.ARROW,
		X:       x,
		CommaOk: true,
	}
	recv.setPos(pos)
	recv.setType(types.NewTuple(
		newVar("k", typeparams.CoreType(x.Type()).(*types.Chan).Elem()),
		varOk,
	))
	ko := fn.emit(recv)
	body := fn.newBasicBlock("rangechan.body")
	done = fn.newBasicBlock("rangechan.done")
	emitIf(fn, emitExtract(fn, ko, 1), body, done)
	fn.currentBlock = body
	if tk != nil {
		k = emitExtract(fn, ko, 0)
	}
	return
}

// rangeInt emits to fn the header for a range loop with an integer operand.
// tk is the key value's type, or nil if the k result is not wanted.
// pos is the position of the "for" token.
func (b *builder) rangeInt(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
	//
	//     iter = 0
	//     if 0 < x goto body else done
	// loop:                                   (target of continue)
	//     iter++
	//     if iter < x goto body else done
	// body:
	//     k = x
	//     ...body...
	//     jump loop
	// done:                                   (target of break)

	if isUntyped(x.Type()) {
		x = emitConv(fn, x, tInt)
	}

	T := x.Type()
	iter := emitLocal(fn, T, token.NoPos, "rangeint.iter")
	// x may be unsigned. Avoid initializing x to -1.

	// Pre-loop test 0 < x: if x <= 0 the body never runs and we
	// jump straight to done.
	body := fn.newBasicBlock("rangeint.body")
	done = fn.newBasicBlock("rangeint.done")
	emitIf(fn, emitCompare(fn, token.LSS, zeroConst(T), x, token.NoPos), body, done)

	loop = fn.newBasicBlock("rangeint.loop")
	fn.currentBlock = loop

	incr := &BinOp{
		Op: token.ADD,
		X:  emitLoad(fn, iter),
		Y:  emitConv(fn, vOne, T),
	}
	incr.setType(T)
	emitStore(fn, iter, fn.emit(incr), pos)
	emitIf(fn, emitCompare(fn, token.LSS, incr, x, token.NoPos), body, done)
	fn.currentBlock = body

	if tk != nil {
		// Integer types (int, uint8, etc.) are named and
		// we know that k is assignable to x when tk != nil.
		// This implies tk and T are identical so no conversion is needed.
		k = emitLoad(fn, iter)
	}

	return
}

// rangeStmt emits to fn code for the range statement s, optionally
// labelled by label.
func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
	// tk/tv are nil when the corresponding iteration variable is
	// absent or blank; the range helpers below use nil to suppress
	// computing that component.
	var tk, tv types.Type
	if s.Key != nil && !isBlankIdent(s.Key) {
		tk = fn.typeOf(s.Key)
	}
	if s.Value != nil && !isBlankIdent(s.Value) {
		tv = fn.typeOf(s.Value)
	}

	// create locals for s.Key and s.Value.
	createVars := func() {
		// Unlike a short variable declaration, a RangeStmt
		// using := never redeclares an existing variable; it
		// always creates a new one.
		if tk != nil {
			emitLocalVar(fn, identVar(fn, s.Key.(*ast.Ident)))
		}
		if tv != nil {
			emitLocalVar(fn, identVar(fn, s.Value.(*ast.Ident)))
		}
	}

	afterGo122 := versions.AtLeast(fn.goversion, versions.Go1_22)
	if s.Tok == token.DEFINE && !afterGo122 {
		// pre-go1.22: If iteration variables are defined (:=), this
		// occurs once outside the loop.
		createVars()
	}

	x := b.expr(fn, s.X)

	// Dispatch on the core type of the range operand to the
	// appropriate loop-header emitter.
	var k, v Value
	var loop, done *BasicBlock
	switch rt := typeparams.CoreType(x.Type()).(type) {
	case *types.Slice, *types.Array, *types.Pointer: // *array
		k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)

	case *types.Chan:
		k, loop, done = b.rangeChan(fn, x, tk, s.For)

	case *types.Map:
		k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)

	case *types.Basic:
		switch {
		case rt.Info()&types.IsString != 0:
			k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)

		case rt.Info()&types.IsInteger != 0:
			k, loop, done = b.rangeInt(fn, x, tk, s.For)

		default:
			panic("Cannot range over basic type: " + rt.String())
		}

	default:
		panic("Cannot range over: " + rt.String())
	}

	if s.Tok == token.DEFINE && afterGo122 {
		// go1.22: If iteration variables are defined (:=), this occurs inside the loop.
		createVars()
	}

	// Evaluate both LHS expressions before we update either.
	var kl, vl lvalue
	if tk != nil {
		kl = b.addr(fn, s.Key, false) // non-escaping
	}
	if tv != nil {
		vl = b.addr(fn, s.Value, false) // non-escaping
	}
	if tk != nil {
		kl.store(fn, k)
	}
	if tv != nil {
		vl.store(fn, v)
	}

	if label != nil {
		label._break = done
		label._continue = loop
	}

	// Push break/continue targets for the loop body, then pop them
	// after building it.
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: loop,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, loop) // back-edge
	fn.currentBlock = done
}

// stmt lowers statement s to SSA form, emitting code to fn.
func (b *builder) stmt(fn *Function, _s ast.Stmt) {
	// The label of the current statement.  If non-nil, its _goto
	// target is always set; its _break and _continue are set only
	// within the body of switch/typeswitch/select/for/range.
	// It is effectively an additional default-nil parameter of stmt().
	var label *lblock
start:
	switch s := _s.(type) {
	case *ast.EmptyStmt:
		// ignore.  (Usually removed by gofmt.)

	case *ast.DeclStmt: // Con, Var or Typ
		// Only VAR specs emit code; constants and types need none.
		d := s.Decl.(*ast.GenDecl)
		if d.Tok == token.VAR {
			for _, spec := range d.Specs {
				if vs, ok := spec.(*ast.ValueSpec); ok {
					b.localValueSpec(fn, vs)
				}
			}
		}

	case *ast.LabeledStmt:
		if s.Label.Name == "_" {
			// Blank labels can't be the target of a goto, break,
			// or continue statement, so we don't need a new block.
			_s = s.Stmt
			goto start
		}
		label = fn.labelledBlock(s.Label)
		emitJump(fn, label._goto)
		fn.currentBlock = label._goto
		_s = s.Stmt
		goto start // effectively: tailcall stmt(fn, s.Stmt, label)

	case *ast.ExprStmt:
		b.expr(fn, s.X)

	case *ast.SendStmt:
		chtyp := typeparams.CoreType(fn.typeOf(s.Chan)).(*types.Chan)
		fn.emit(&Send{
			Chan: b.expr(fn, s.Chan),
			X:    emitConv(fn, b.expr(fn, s.Value), chtyp.Elem()),
			pos:  s.Arrow,
		})

	case *ast.IncDecStmt:
		// x++ / x-- lowers to x += 1 / x -= 1.
		op := token.ADD
		if s.Tok == token.DEC {
			op = token.SUB
		}
		loc := b.addr(fn, s.X, false)
		b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos())

	case *ast.AssignStmt:
		switch s.Tok {
		case token.ASSIGN, token.DEFINE:
			b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)

		default: // +=, etc.
			// Map the compound-assignment token (e.g. ADD_ASSIGN)
			// to its plain binary operator (e.g. ADD); the token
			// constants are arranged so the offset is uniform.
			op := s.Tok + token.ADD - token.ADD_ASSIGN
			b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos())
		}

	case *ast.GoStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Go{pos: s.Go}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v)

	case *ast.DeferStmt:
		// The "intrinsics" new/make/len/cap are forbidden here.
		// panic is treated like an ordinary function call.
		v := Defer{pos: s.Defer}
		b.setCall(fn, s.Call, &v.Call)
		fn.emit(&v)

		// A deferred call can cause recovery from panic,
		// and control resumes at the Recover block.
		createRecoverBlock(fn)

	case *ast.ReturnStmt:
		var results []Value
		if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
			// Return of one expression in a multi-valued function.
			tuple := b.exprN(fn, s.Results[0])
			ttuple := tuple.Type().(*types.Tuple)
			for i, n := 0, ttuple.Len(); i < n; i++ {
				results = append(results,
					emitConv(fn, emitExtract(fn, tuple, i),
						fn.Signature.Results().At(i).Type()))
			}
		} else {
			// 1:1 return, or no-arg return in non-void function.
			for i, r := range s.Results {
				v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
				results = append(results, v)
			}
		}
		if fn.namedResults != nil {
			// Function has named result parameters (NRPs).
			// Perform parallel assignment of return operands to NRPs.
			for i, r := range results {
				emitStore(fn, fn.namedResults[i], r, s.Return)
			}
		}
		// Run function calls deferred in this
		// function when explicitly returning from it.
		fn.emit(new(RunDefers))
		if fn.namedResults != nil {
			// Reload NRPs to form the result tuple.
			// (A deferred call may have mutated them.)
			results = results[:0]
			for _, r := range fn.namedResults {
				results = append(results, emitLoad(fn, r))
			}
		}
		fn.emit(&Return{Results: results, pos: s.Return})
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BranchStmt:
		var block *BasicBlock
		switch s.Tok {
		case token.BREAK:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._break
			} else {
				// Unlabeled break: innermost enclosing target
				// with a _break block.
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._break
				}
			}

		case token.CONTINUE:
			if s.Label != nil {
				block = fn.labelledBlock(s.Label)._continue
			} else {
				// Unlabeled continue: innermost enclosing target
				// with a _continue block.
				for t := fn.targets; t != nil && block == nil; t = t.tail {
					block = t._continue
				}
			}

		case token.FALLTHROUGH:
			for t := fn.targets; t != nil && block == nil; t = t.tail {
				block = t._fallthrough
			}

		case token.GOTO:
			block = fn.labelledBlock(s.Label)._goto
		}
		emitJump(fn, block)
		fn.currentBlock = fn.newBasicBlock("unreachable")

	case *ast.BlockStmt:
		b.stmtList(fn, s.List)

	case *ast.IfStmt:
		if s.Init != nil {
			b.stmt(fn, s.Init)
		}
		then := fn.newBasicBlock("if.then")
		done := fn.newBasicBlock("if.done")
		els := done // without an else, false branch falls through to done
		if s.Else != nil {
			els = fn.newBasicBlock("if.else")
		}
		b.cond(fn, s.Cond, then, els)
		fn.currentBlock = then
		b.stmt(fn, s.Body)
		emitJump(fn, done)

		if s.Else != nil {
			fn.currentBlock = els
			b.stmt(fn, s.Else)
			emitJump(fn, done)
		}

		fn.currentBlock = done

	case *ast.SwitchStmt:
		b.switchStmt(fn, s, label)

	case *ast.TypeSwitchStmt:
		b.typeSwitchStmt(fn, s, label)

	case *ast.SelectStmt:
		b.selectStmt(fn, s, label)

	case *ast.ForStmt:
		b.forStmt(fn, s, label)

	case *ast.RangeStmt:
		b.rangeStmt(fn, s, label)

	default:
		panic(fmt.Sprintf("unexpected statement kind: %T", s))
	}
}

// A buildFunc is a strategy for building the SSA body for a function.
type buildFunc = func(*builder, *Function)

// iterate causes all created but unbuilt functions to be built. As
// this may create new methods, the process is iterated until it
// converges.
func (b *builder) iterate() {
	// b.created may grow while building (e.g. wrappers, instances),
	// so re-check the length on every iteration.
	for ; b.finished < b.created.Len(); b.finished++ {
		fn := b.created.At(b.finished)
		b.buildFunction(fn)
	}
}

// buildFunction builds SSA code for the body of function fn. Idempotent.
func (b *builder) buildFunction(fn *Function) {
	// fn.build is cleared once building completes, making this a no-op
	// on already-built functions.
	if fn.build != nil {
		assert(fn.parent == nil, "anonymous functions should not be built by buildFunction()")

		if fn.Prog.mode&LogSource != 0 {
			defer logStack("build %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
		}
		fn.build(b, fn)
		fn.done()
	}
}

// buildParamsOnly builds fn.Params from fn.Signature, but does not build fn.Body.
func (b *builder) buildParamsOnly(fn *Function) {
	// For external (C, asm) functions or functions loaded from
	// export data, we must set fn.Params even though there is no
	// body code to reference them.
	if recv := fn.Signature.Recv(); recv != nil {
		fn.addParamVar(recv)
	}
	params := fn.Signature.Params()
	for i, n := 0, params.Len(); i < n; i++ {
		fn.addParamVar(params.At(i))
	}
}

// buildFromSyntax builds fn.Body from fn.syntax, which must be non-nil.
func (b *builder) buildFromSyntax(fn *Function) {
	var (
		recvField *ast.FieldList
		body      *ast.BlockStmt
		functype  *ast.FuncType
	)
	// Extract receiver, signature syntax, and body from the
	// declaration or function literal.
	switch syntax := fn.syntax.(type) {
	case *ast.FuncDecl:
		functype = syntax.Type
		recvField = syntax.Recv
		body = syntax.Body
		if body == nil {
			b.buildParamsOnly(fn) // no body (non-Go function)
			return
		}
	case *ast.FuncLit:
		functype = syntax.Type
		body = syntax.Body
	case nil:
		panic("no syntax")
	default:
		panic(syntax) // unexpected syntax
	}

	fn.startBody()
	fn.createSyntacticParams(recvField, functype)
	b.stmt(fn, body)
	if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
		// Control fell off the end of the function's body block.
		//
		// Block optimizations eliminate the current block, if
		// unreachable.  It is a builder invariant that
		// if this no-arg return is ill-typed for
		// fn.Signature.Results, this block must be
		// unreachable.  The sanity checker checks this.
		fn.emit(new(RunDefers))
		fn.emit(new(Return))
	}
	fn.finishBody()
}

// addRuntimeType records t as a runtime type,
// along with all types derivable from it using reflection.
//
// Acquires prog.runtimeTypesMu.
func addRuntimeType(prog *Program, t types.Type) {
	prog.runtimeTypesMu.Lock()
	defer prog.runtimeTypesMu.Unlock()
	forEachReachable(&prog.MethodSets, t, func(t types.Type) bool {
		// Set returns the previous value; a false/absent previous
		// value means t is new, so keep traversing from it.
		prev, _ := prog.runtimeTypes.Set(t, true).(bool)
		return !prev // already seen?
	})
}

// Build calls Package.Build for each package in prog.
// Building occurs in parallel unless the BuildSerially mode flag was set.
//
// Build is intended for whole-program analysis; a typical compiler
// need only build a single package.
//
// Build is idempotent and thread-safe.
func (prog *Program) Build() {
	var wg sync.WaitGroup
	for _, p := range prog.packages {
		if prog.mode&BuildSerially != 0 {
			p.Build()
		} else {
			wg.Add(1)
			// The token is acquired before spawning, so at most
			// cap(cpuLimit) builds run concurrently.
			cpuLimit <- struct{}{} // acquire a token
			go func(p *Package) {
				p.Build()
				wg.Done()
				<-cpuLimit // release a token
			}(p)
		}
	}
	wg.Wait()
}

// cpuLimit is a counting semaphore to limit CPU parallelism.
var cpuLimit = make(chan struct{}, runtime.GOMAXPROCS(0))

// Build builds SSA code for all functions and vars in package p.
//
// CreatePackage must have been called for all of p's direct imports
// (and hence its direct imports must have been error-free). It is not
// necessary to call CreatePackage for indirect dependencies.
// Functions will be created for all necessary methods in those
// packages on demand.
//
// Build is idempotent and thread-safe.
func (p *Package) Build() { p.buildOnce.Do(p.build) }

func (p *Package) build() {
	if p.info == nil {
		return // synthetic package, e.g. "testmain"
	}
	if p.Prog.mode&LogSource != 0 {
		defer logStack("build %s", p)()
	}

	b := builder{created: &p.created}
	b.iterate()

	// We no longer need transient information: ASTs or go/types deductions.
	p.info = nil
	p.created = nil
	p.files = nil
	p.initVersion = nil

	if p.Prog.mode&SanityCheckFunctions != 0 {
		sanityCheckPackage(p)
	}
}

// buildPackageInit builds fn.Body for the synthetic package initializer.
func (b *builder) buildPackageInit(fn *Function) {
	p := fn.Pkg
	fn.startBody()

	var done *BasicBlock

	if p.Prog.mode&BareInits == 0 {
		// Make init() skip if package is already initialized.
		initguard := p.Var("init$guard")
		doinit := fn.newBasicBlock("init.start")
		done = fn.newBasicBlock("init.done")
		emitIf(fn, emitLoad(fn, initguard), done, doinit)
		fn.currentBlock = doinit
		emitStore(fn, initguard, vTrue, token.NoPos)

		// Call the init() function of each package we import.
		for _, pkg := range p.Pkg.Imports() {
			prereq := p.Prog.packages[pkg]
			if prereq == nil {
				panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path()))
			}
			var v Call
			v.Call.Value = prereq.init
			v.Call.pos = fn.pos
			v.setType(types.NewTuple())
			fn.emit(&v)
		}
	}

	// Initialize package-level vars in correct order.
	if len(p.info.InitOrder) > 0 && len(p.files) == 0 {
		panic("no source files provided for package. cannot initialize globals")
	}

	for _, varinit := range p.info.InitOrder {
		if fn.Prog.mode&LogSource != 0 {
			fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
				varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
		}
		// Initializers for global vars are evaluated in dependency
		// order, but may come from arbitrary files of the package
		// with different versions, so we transiently update
		// fn.goversion for each one. (Since init is a synthetic
		// function it has no syntax of its own that needs a version.)
		fn.goversion = p.initVersion[varinit.Rhs]
		if len(varinit.Lhs) == 1 {
			// 1:1 initialization: var x, y = a(), b()
			var lval lvalue
			if v := varinit.Lhs[0]; v.Name() != "_" {
				lval = &address{addr: p.objects[v].(*Global), pos: v.Pos()}
			} else {
				lval = blank{}
			}
			b.assign(fn, lval, varinit.Rhs, true, nil)
		} else {
			// n:1 initialization: var x, y := f()
			// Evaluate the RHS once, then store each non-blank
			// component into its global.
			tuple := b.exprN(fn, varinit.Rhs)
			for i, v := range varinit.Lhs {
				if v.Name() == "_" {
					continue
				}
				emitStore(fn, p.objects[v].(*Global), emitExtract(fn, tuple, i), v.Pos())
			}
		}
	}

	// The rest of the init function is synthetic:
	// no syntax, info, goversion.
	fn.info = nil
	fn.goversion = ""

	// Call all of the declared init() functions in source order.
	for _, file := range p.files {
		for _, decl := range file.Decls {
			if decl, ok := decl.(*ast.FuncDecl); ok {
				id := decl.Name
				if !isBlankIdent(id) && id.Name == "init" && decl.Recv == nil {
					declaredInit := p.objects[p.info.Defs[id]].(*Function)
					var v Call
					v.Call.Value = declaredInit
					v.setType(types.NewTuple())
					p.init.emit(&v)
				}
			}
		}
	}

	// Finish up init().
	if p.Prog.mode&BareInits == 0 {
		emitJump(fn, done)
		fn.currentBlock = done
	}
	fn.emit(new(Return))
	fn.finishBody()
}