github.com/axw/llgo@v0.0.0-20160805011314-95b5fe4dca20/irgen/ssa.go

//===- ssa.go - IR generation from go/ssa ---------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the top-level LLVM IR generation from go/ssa form.
//
//===----------------------------------------------------------------------===//

package irgen

import (
	"fmt"
	"go/ast"
	"go/token"
	"os"
	"sort"

	"llvm.org/llgo/ssaopt"
	"llvm.org/llgo/third_party/gotools/go/ssa"
	"llvm.org/llgo/third_party/gotools/go/ssa/ssautil"
	"llvm.org/llgo/third_party/gotools/go/types"
	"llvm.org/llvm/bindings/go/llvm"
)

// A globalInit is used to temporarily store a global's initializer until
// we are ready to build it.
type globalInit struct {
	val   llvm.Value
	elems []globalInit
}

func (gi *globalInit) update(typ llvm.Type, indices []uint32, val llvm.Value) {
	if len(indices) == 0 {
		gi.val = val
		return
	}

	if gi.val.C != nil {
		gi.val = llvm.ConstInsertValue(gi.val, val, indices)
	}

	tk := typ.TypeKind()

	if len(gi.elems) == 0 {
		switch tk {
		case llvm.StructTypeKind:
			gi.elems = make([]globalInit, typ.StructElementTypesCount())
		case llvm.ArrayTypeKind:
			gi.elems = make([]globalInit, typ.ArrayLength())
		default:
			panic("unexpected type")
		}
	}

	var eltyp llvm.Type
	switch tk {
	case llvm.StructTypeKind:
		eltyp = typ.StructElementTypes()[indices[0]]
	case llvm.ArrayTypeKind:
		eltyp = typ.ElementType()
	default:
		panic("unexpected type")
	}

	gi.elems[indices[0]].update(eltyp, indices[1:], val)
}

func (gi *globalInit) build(typ llvm.Type) llvm.Value {
	if gi.val.C != nil {
		return gi.val
	}
	if len(gi.elems) == 0 {
		return llvm.ConstNull(typ)
	}

	switch typ.TypeKind() {
	case llvm.StructTypeKind:
		eltypes := typ.StructElementTypes()
		elems := make([]llvm.Value, len(eltypes))
		for i, eltyp := range eltypes {
			elems[i] = gi.elems[i].build(eltyp)
		}
		return llvm.ConstStruct(elems, false)
	case llvm.ArrayTypeKind:
		eltyp := typ.ElementType()
		elems := make([]llvm.Value, len(gi.elems))
		for i := range gi.elems {
			elems[i] = gi.elems[i].build(eltyp)
		}
		return llvm.ConstArray(eltyp, elems)
	default:
		panic("unexpected type")
	}
}
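
// Illustrative sketch (not part of the original source): how a globalInit
// accumulates constant stores into individual elements of a struct-typed
// global and is then flattened into a single constant initializer by build.
// The element type and values here are arbitrary examples.
func exampleGlobalInitUsage() llvm.Value {
	i64 := llvm.Int64Type()
	styp := llvm.StructType([]llvm.Type{i64, i64}, false)

	gi := new(globalInit)
	// Record stores to fields 0 and 1; untouched elements stay zero.
	gi.update(styp, []uint32{0}, llvm.ConstInt(i64, 1, false))
	gi.update(styp, []uint32{1}, llvm.ConstInt(i64, 2, false))

	// build produces the constant {i64 1, i64 2} that would be set as the
	// global's initializer at the end of translatePackage.
	return gi.build(styp)
}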

type unit struct {
	*compiler
	pkg         *ssa.Package
	globals     map[ssa.Value]llvm.Value
	globalInits map[llvm.Value]*globalInit

	// funcDescriptors maps *ssa.Functions to function descriptors,
	// the first-class representation of functions.
	funcDescriptors map[*ssa.Function]llvm.Value

	// undefinedFuncs contains functions that have been resolved
	// (declared) but not defined.
	undefinedFuncs map[*ssa.Function]bool

	gcRoots []llvm.Value
}

func newUnit(c *compiler, pkg *ssa.Package) *unit {
	u := &unit{
		compiler:        c,
		pkg:             pkg,
		globals:         make(map[ssa.Value]llvm.Value),
		globalInits:     make(map[llvm.Value]*globalInit),
		funcDescriptors: make(map[*ssa.Function]llvm.Value),
		undefinedFuncs:  make(map[*ssa.Function]bool),
	}
	return u
}

type byMemberName []ssa.Member

func (ms byMemberName) Len() int { return len(ms) }
func (ms byMemberName) Swap(i, j int) {
	ms[i], ms[j] = ms[j], ms[i]
}
func (ms byMemberName) Less(i, j int) bool {
	return ms[i].Name() < ms[j].Name()
}

type byFunctionString []*ssa.Function

func (fns byFunctionString) Len() int { return len(fns) }
func (fns byFunctionString) Swap(i, j int) {
	fns[i], fns[j] = fns[j], fns[i]
}
func (fns byFunctionString) Less(i, j int) bool {
	return fns[i].String() < fns[j].String()
}

// Emit functions in order of their fully qualified names. This is so that a
// bootstrap build can be verified by comparing the stage2 and stage3 binaries.
func (u *unit) defineFunctionsInOrder(functions map[*ssa.Function]bool) {
	fns := []*ssa.Function{}
	for f, _ := range functions {
		fns = append(fns, f)
	}
	sort.Sort(byFunctionString(fns))
	for _, f := range fns {
		u.defineFunction(f)
	}
}

// translatePackage translates an *ssa.Package into an LLVM module, recording
// the translation unit information in the unit as it goes.
func (u *unit) translatePackage(pkg *ssa.Package) {
	ms := make([]ssa.Member, len(pkg.Members))
	i := 0
	for _, m := range pkg.Members {
		ms[i] = m
		i++
	}

	sort.Sort(byMemberName(ms))

	// Initialize global storage and type descriptors for this package.
	// We must create globals regardless of whether they're referenced,
	// hence the duplication in frame.value.
	for _, m := range ms {
		switch v := m.(type) {
		case *ssa.Global:
			elemtyp := deref(v.Type())
			llelemtyp := u.llvmtypes.ToLLVM(elemtyp)
			vname := u.types.mc.mangleGlobalName(v)
			global := llvm.AddGlobal(u.module.Module, llelemtyp, vname)
			if !v.Object().Exported() {
				global.SetLinkage(llvm.InternalLinkage)
			}
			u.addGlobal(global, elemtyp)
			global = llvm.ConstBitCast(global, u.llvmtypes.ToLLVM(v.Type()))
			u.globals[v] = global
		case *ssa.Type:
			u.types.getTypeDescriptorPointer(v.Type())
		}
	}

	// Define functions.
	u.defineFunctionsInOrder(ssautil.AllFunctions(pkg.Prog))

	// Emit initializers for type descriptors, which may trigger
	// the resolution of additional functions.
	u.types.emitTypeDescInitializers()

	// Define remaining functions that were resolved during
	// runtime type mapping, but not defined.
	u.defineFunctionsInOrder(u.undefinedFuncs)

	// Set initializers for globals.
	for global, init := range u.globalInits {
		initval := init.build(global.Type().ElementType())
		global.SetInitializer(initval)
	}
}

func (u *unit) addGlobal(global llvm.Value, ty types.Type) {
	u.globalInits[global] = new(globalInit)

	if hasPointers(ty) {
		global = llvm.ConstBitCast(global, llvm.PointerType(llvm.Int8Type(), 0))
		size := llvm.ConstInt(u.types.inttype, uint64(u.types.Sizeof(ty)), false)
		root := llvm.ConstStruct([]llvm.Value{global, size}, false)
		u.gcRoots = append(u.gcRoots, root)
	}
}

// ResolveMethod implements MethodResolver.ResolveMethod.
func (u *unit) ResolveMethod(s *types.Selection) *govalue {
	m := u.pkg.Prog.Method(s)
	llfn := u.resolveFunctionGlobal(m)
	llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
	return newValue(llfn, m.Signature)
}

// resolveFunctionDescriptorGlobal returns a reference to the LLVM global
// storing the function's descriptor.
func (u *unit) resolveFunctionDescriptorGlobal(f *ssa.Function) llvm.Value {
	llfd, ok := u.funcDescriptors[f]
	if !ok {
		name := u.types.mc.mangleFunctionName(f) + "$descriptor"
		llfd = llvm.AddGlobal(u.module.Module, llvm.PointerType(llvm.Int8Type(), 0), name)
		llfd.SetGlobalConstant(true)
		u.funcDescriptors[f] = llfd
	}
	return llfd
}

// resolveFunctionDescriptor returns a function's
// first-class value representation.
func (u *unit) resolveFunctionDescriptor(f *ssa.Function) *govalue {
	llfd := u.resolveFunctionDescriptorGlobal(f)
	llfd = llvm.ConstBitCast(llfd, llvm.PointerType(llvm.Int8Type(), 0))
	return newValue(llfd, f.Signature)
}

// resolveFunctionGlobal returns an llvm.Value for a function global.
func (u *unit) resolveFunctionGlobal(f *ssa.Function) llvm.Value {
	if v, ok := u.globals[f]; ok {
		return v
	}
	name := u.types.mc.mangleFunctionName(f)
	// It's possible that the function already exists in the module;
	// for example, if it's a runtime intrinsic that the compiler
	// has already referenced.
	llvmFunction := u.module.Module.NamedFunction(name)
	if llvmFunction.IsNil() {
		fti := u.llvmtypes.getSignatureInfo(f.Signature)
		llvmFunction = fti.declare(u.module.Module, name)
		u.undefinedFuncs[f] = true
	}
	u.globals[f] = llvmFunction
	return llvmFunction
}

func (u *unit) getFunctionLinkage(f *ssa.Function) llvm.Linkage {
	switch {
	case f.Pkg == nil:
		// Synthetic functions outside packages may appear in multiple packages.
		return llvm.LinkOnceODRLinkage

	case f.Parent() != nil:
		// Anonymous.
		return llvm.InternalLinkage

	case f.Signature.Recv() == nil && !ast.IsExported(f.Name()) &&
		!(f.Name() == "main" && f.Pkg.Object.Path() == "main") &&
		f.Name() != "init":
		// Unexported methods may be referenced as part of an interface method
		// table in another package. TODO(pcc): detect when this cannot happen.
		return llvm.InternalLinkage

	default:
		return llvm.ExternalLinkage
	}
}

func (u *unit) defineFunction(f *ssa.Function) {
	// Only define functions from this package, or synthetic
	// wrappers (which do not have a package).
	if f.Pkg != nil && f.Pkg != u.pkg {
		return
	}

	llfn := u.resolveFunctionGlobal(f)
	linkage := u.getFunctionLinkage(f)

	isMethod := f.Signature.Recv() != nil

	// Methods cannot be referred to via a descriptor.
	if !isMethod {
		llfd := u.resolveFunctionDescriptorGlobal(f)
		llfd.SetInitializer(llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0)))
		llfd.SetLinkage(linkage)
	}

	// We only need to emit a descriptor for functions without bodies.
	if len(f.Blocks) == 0 {
		return
	}

	ssaopt.LowerAllocsToStack(f)

	if u.DumpSSA {
		f.WriteTo(os.Stderr)
	}

	fr := newFrame(u, llfn)
	defer fr.dispose()
	fr.addCommonFunctionAttrs(fr.function)
	fr.function.SetLinkage(linkage)

	fr.logf("Define function: %s", f.String())
	fti := u.llvmtypes.getSignatureInfo(f.Signature)
	delete(u.undefinedFuncs, f)
	fr.retInf = fti.retInf

	// Push the compile unit and function onto the debug context.
	if u.GenerateDebug {
		u.debug.PushFunction(fr.function, f.Signature, f.Pos())
		defer u.debug.PopFunction()
		u.debug.SetLocation(fr.builder, f.Pos())
	}

	// If a function calls recover, we create a separate function to
	// hold the real function, and this function calls __go_can_recover
	// and bridges to it.
	if callsRecover(f) {
		fr = fr.bridgeRecoverFunc(fr.function, fti)
	}

	fr.blocks = make([]llvm.BasicBlock, len(f.Blocks))
	fr.lastBlocks = make([]llvm.BasicBlock, len(f.Blocks))
	for i, block := range f.Blocks {
		fr.blocks[i] = llvm.AddBasicBlock(fr.function, fmt.Sprintf(".%d.%s", i, block.Comment))
	}
	fr.builder.SetInsertPointAtEnd(fr.blocks[0])
	fr.transformSwitches(f)

	prologueBlock := llvm.InsertBasicBlock(fr.blocks[0], "prologue")
	fr.builder.SetInsertPointAtEnd(prologueBlock)

	for i, param := range f.Params {
		llparam := fti.argInfos[i].decode(llvm.GlobalContext(), fr.builder, fr.builder)
		if isMethod && i == 0 {
			if _, ok := param.Type().Underlying().(*types.Pointer); !ok {
				llparam = fr.builder.CreateBitCast(llparam, llvm.PointerType(fr.types.ToLLVM(param.Type()), 0), "")
				llparam = fr.builder.CreateLoad(llparam, "")
			}
		}
		fr.env[param] = newValue(llparam, param.Type())
	}

	// Load closure, extract free vars.
	if len(f.FreeVars) > 0 {
		for _, fv := range f.FreeVars {
			fr.env[fv] = newValue(llvm.ConstNull(u.llvmtypes.ToLLVM(fv.Type())), fv.Type())
		}
		elemTypes := make([]llvm.Type, len(f.FreeVars)+1)
		elemTypes[0] = llvm.PointerType(llvm.Int8Type(), 0) // function pointer
		for i, fv := range f.FreeVars {
			elemTypes[i+1] = u.llvmtypes.ToLLVM(fv.Type())
		}
		structType := llvm.StructType(elemTypes, false)
		closure := fr.function.Param(fti.chainIndex)
		closure = fr.builder.CreateBitCast(closure, llvm.PointerType(structType, 0), "")
		for i, fv := range f.FreeVars {
			ptr := fr.builder.CreateStructGEP(closure, i+1, "")
			ptr = fr.builder.CreateLoad(ptr, "")
			fr.env[fv] = newValue(ptr, fv.Type())
		}
	}

	// Allocate stack space for locals in the prologue block.
	for _, local := range f.Locals {
		typ := fr.llvmtypes.ToLLVM(deref(local.Type()))
		alloca := fr.builder.CreateAlloca(typ, local.Comment)
		fr.memsetZero(alloca, llvm.SizeOf(typ))
		bcalloca := fr.builder.CreateBitCast(alloca, llvm.PointerType(llvm.Int8Type(), 0), "")
		value := newValue(bcalloca, local.Type())
		fr.env[local] = value
	}

	// If the function contains any defers, we must first create
	// an unwind block. We can short-circuit the check for defers with
	// f.Recover != nil.
	if f.Recover != nil || hasDefer(f) {
		fr.unwindBlock = llvm.AddBasicBlock(fr.function, "unwind")
		fr.frameptr = fr.builder.CreateAlloca(llvm.Int8Type(), "")
	}

	// Keep track of the block into which we need to insert the call
	// to __go_register_gc_roots. This needs to be inserted after the
	// init guard check under the llgo ABI.
	var registerGcBlock llvm.BasicBlock

	// If this is the "init" function, emit the init guard check and
	// enable init-specific optimizations.
	if !isMethod && f.Name() == "init" {
		registerGcBlock = fr.emitInitPrologue()
		fr.isInit = true
	}

	fr.builder.CreateBr(fr.blocks[0])
	fr.allocaBuilder.SetInsertPointBefore(prologueBlock.FirstInstruction())

	for _, block := range f.DomPreorder() {
		llblock := fr.blocks[block.Index]
		if llblock.IsNil() {
			continue
		}
		fr.translateBlock(block, llblock)
	}

	fr.fixupPhis()

	if !fr.unwindBlock.IsNil() {
		fr.setupUnwindBlock(f.Recover)
	}

	// The init function needs to register the GC roots first. We do this
	// after generating code for it because allocations may have caused
	// additional GC roots to be created.
	if fr.isInit {
		fr.builder.SetInsertPointBefore(registerGcBlock.FirstInstruction())
		fr.registerGcRoots()
	}
}

type pendingPhi struct {
	ssa  *ssa.Phi
	llvm llvm.Value
}

type frame struct {
	*unit
	function               llvm.Value
	builder, allocaBuilder llvm.Builder
	retInf                 retInfo
	blocks                 []llvm.BasicBlock
	lastBlocks             []llvm.BasicBlock
	runtimeErrorBlocks     [gccgoRuntimeErrorCount]llvm.BasicBlock
	unwindBlock            llvm.BasicBlock
	frameptr               llvm.Value
	env                    map[ssa.Value]*govalue
	ptr                    map[ssa.Value]llvm.Value
	tuples                 map[ssa.Value][]*govalue
	phis                   []pendingPhi
	canRecover             llvm.Value
	isInit                 bool
}

func newFrame(u *unit, fn llvm.Value) *frame {
	return &frame{
		unit:          u,
		function:      fn,
		builder:       llvm.GlobalContext().NewBuilder(),
		allocaBuilder: llvm.GlobalContext().NewBuilder(),
		env:           make(map[ssa.Value]*govalue),
		ptr:           make(map[ssa.Value]llvm.Value),
		tuples:        make(map[ssa.Value][]*govalue),
	}
}

func (fr *frame) dispose() {
	fr.builder.Dispose()
	fr.allocaBuilder.Dispose()
}

// emitInitPrologue emits the init-specific function prologue (guard check and
// initialization of dependent packages under the llgo native ABI), and returns
// the basic block into which the GC registration call should be emitted.
func (fr *frame) emitInitPrologue() llvm.BasicBlock {
	if fr.GccgoABI {
		return fr.builder.GetInsertBlock()
	}

	initGuard := llvm.AddGlobal(fr.module.Module, llvm.Int1Type(), "init$guard")
	initGuard.SetLinkage(llvm.InternalLinkage)
	initGuard.SetInitializer(llvm.ConstNull(llvm.Int1Type()))

	returnBlock := llvm.AddBasicBlock(fr.function, "")
	initBlock := llvm.AddBasicBlock(fr.function, "")

	initGuardVal := fr.builder.CreateLoad(initGuard, "")
	fr.builder.CreateCondBr(initGuardVal, returnBlock, initBlock)

	fr.builder.SetInsertPointAtEnd(returnBlock)
	fr.builder.CreateRetVoid()

	fr.builder.SetInsertPointAtEnd(initBlock)
	fr.builder.CreateStore(llvm.ConstInt(llvm.Int1Type(), 1, false), initGuard)
	int8ptr := llvm.PointerType(fr.types.ctx.Int8Type(), 0)
	ftyp := llvm.FunctionType(llvm.VoidType(), []llvm.Type{int8ptr}, false)
	for _, pkg := range fr.pkg.Object.Imports() {
		initname := ManglePackagePath(pkg.Path()) + "..import"
		initfn := fr.module.Module.NamedFunction(initname)
		if initfn.IsNil() {
			initfn = llvm.AddFunction(fr.module.Module, initname, ftyp)
		}
		args := []llvm.Value{llvm.Undef(int8ptr)}
		fr.builder.CreateCall(initfn, args, "")
	}

	return initBlock
}
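
// Illustrative sketch (not part of the original source) of the prologue that
// emitInitPrologue generates under the native (non-gccgo) ABI, written as Go
// pseudocode; q stands for any imported package:
//
//	func init() {
//		if init$guard {          // internal i1 global, initially false
//			return
//		}
//		init$guard = true
//		q..import(undef)         // one call per package in Imports()
//		...                      // the original init body follows
//	}
//
// The returned init block is where defineFunction later inserts the
// __go_register_gc_roots call, so registration happens only on first entry.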

// bridgeRecoverFunc creates a function that may call recover(), and creates
// a call to it from the current frame. The created function will be called
// with a boolean parameter that indicates whether it may call recover().
//
// The created function will have the same name as the current frame's function
// with "$recover" appended, having the same return types and parameters with
// an additional boolean parameter appended.
//
// A new frame will be returned for the newly created function.
func (fr *frame) bridgeRecoverFunc(llfn llvm.Value, fti functionTypeInfo) *frame {
	// The bridging function must not be inlined, or the return address
	// may not correspond to the source function.
	llfn.AddFunctionAttr(llvm.NoInlineAttribute)

	// Call __go_can_recover, passing in the function's return address.
	entry := llvm.AddBasicBlock(llfn, "entry")
	fr.builder.SetInsertPointAtEnd(entry)
	canRecover := fr.runtime.canRecover.call(fr, fr.returnAddress(0))[0]
	returnType := fti.functionType.ReturnType()
	argTypes := fti.functionType.ParamTypes()
	argTypes = append(argTypes, canRecover.Type())

	// Create and call the $recover function.
	ftiRecover := fti
	ftiRecover.functionType = llvm.FunctionType(returnType, argTypes, false)
	llfnRecover := ftiRecover.declare(fr.module.Module, llfn.Name()+"$recover")
	fr.addCommonFunctionAttrs(llfnRecover)
	llfnRecover.SetLinkage(llvm.InternalLinkage)
	args := make([]llvm.Value, len(argTypes)-1, len(argTypes))
	for i := range args {
		args[i] = llfn.Param(i)
	}
	args = append(args, canRecover)
	result := fr.builder.CreateCall(llfnRecover, args, "")
	if returnType.TypeKind() == llvm.VoidTypeKind {
		fr.builder.CreateRetVoid()
	} else {
		fr.builder.CreateRet(result)
	}

	// The $recover function must condition calls to __go_recover on
	// the result of __go_can_recover passed in as an argument.
	fr = newFrame(fr.unit, llfnRecover)
	fr.retInf = ftiRecover.retInf
	fr.canRecover = fr.function.Param(len(argTypes) - 1)
	return fr
}
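
// Illustrative sketch (not part of the original source) of the bridge built by
// bridgeRecoverFunc, written as Go pseudocode for a function F that calls
// recover:
//
//	func F(args...) results {                     // noinline, so the return
//		can := __go_can_recover(retaddr(0))   // address stays meaningful
//		return F$recover(args..., can)
//	}
//
//	func F$recover(args..., can) results {        // internal linkage; holds the
//		...                                   // original body, with recover()
//	}                                             // conditioned on can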

func (fr *frame) registerGcRoots() {
	if len(fr.gcRoots) != 0 {
		rootty := fr.gcRoots[0].Type()
		roots := append(fr.gcRoots, llvm.ConstNull(rootty))
		rootsarr := llvm.ConstArray(rootty, roots)
		rootsstruct := llvm.ConstStruct([]llvm.Value{llvm.ConstNull(llvm.PointerType(llvm.Int8Type(), 0)), rootsarr}, false)

		rootsglobal := llvm.AddGlobal(fr.module.Module, rootsstruct.Type(), "")
		rootsglobal.SetInitializer(rootsstruct)
		rootsglobal.SetLinkage(llvm.InternalLinkage)
		fr.runtime.registerGcRoots.callOnly(fr, llvm.ConstBitCast(rootsglobal, llvm.PointerType(llvm.Int8Type(), 0)))
	}
}

func (fr *frame) fixupPhis() {
	for _, phi := range fr.phis {
		values := make([]llvm.Value, len(phi.ssa.Edges))
		blocks := make([]llvm.BasicBlock, len(phi.ssa.Edges))
		block := phi.ssa.Block()
		for i, edge := range phi.ssa.Edges {
			values[i] = fr.llvmvalue(edge)
			blocks[i] = fr.lastBlock(block.Preds[i])
		}
		phi.llvm.AddIncoming(values, blocks)
	}
}

func (fr *frame) createLandingPad(cleanup bool) llvm.Value {
	fr.function.SetPersonality(fr.runtime.gccgoPersonality)
	lp := fr.builder.CreateLandingPad(fr.runtime.gccgoExceptionType, 0, "")
	if cleanup {
		lp.SetCleanup(true)
	} else {
		lp.AddClause(llvm.ConstNull(llvm.PointerType(llvm.Int8Type(), 0)))
	}
	return lp
}

// Runs defers. If a defer panics, check for recovers in later defers.
func (fr *frame) runDefers() {
	loopbb := llvm.AddBasicBlock(fr.function, "")
	fr.builder.CreateBr(loopbb)

	retrylpad := llvm.AddBasicBlock(fr.function, "")
	fr.builder.SetInsertPointAtEnd(retrylpad)
	fr.createLandingPad(false)
	fr.runtime.checkDefer.callOnly(fr, fr.frameptr)
	fr.builder.CreateBr(loopbb)

	fr.builder.SetInsertPointAtEnd(loopbb)
	fr.runtime.undefer.invoke(fr, retrylpad, fr.frameptr)
}

func (fr *frame) setupUnwindBlock(rec *ssa.BasicBlock) {
	var recoverbb llvm.BasicBlock
	if rec != nil {
		recoverbb = fr.blocks[rec.Index]
	} else {
		recoverbb = llvm.AddBasicBlock(fr.function, "recover")
		fr.builder.SetInsertPointAtEnd(recoverbb)
		fr.builder.CreateUnreachable()
	}

	checkunwindbb := llvm.AddBasicBlock(fr.function, "")
	fr.builder.SetInsertPointAtEnd(checkunwindbb)
	exc := fr.createLandingPad(true)
	fr.runDefers()

	frame := fr.builder.CreateLoad(fr.frameptr, "")
	shouldresume := fr.builder.CreateIsNull(frame, "")

	resumebb := llvm.AddBasicBlock(fr.function, "")
	fr.builder.CreateCondBr(shouldresume, resumebb, recoverbb)

	fr.builder.SetInsertPointAtEnd(resumebb)
	fr.builder.CreateResume(exc)

	fr.builder.SetInsertPointAtEnd(fr.unwindBlock)
	fr.createLandingPad(false)
	fr.runtime.checkDefer.invoke(fr, checkunwindbb, fr.frameptr)
	fr.runDefers()
	fr.builder.CreateBr(recoverbb)
}

func (fr *frame) translateBlock(b *ssa.BasicBlock, llb llvm.BasicBlock) {
	fr.builder.SetInsertPointAtEnd(llb)
	for _, instr := range b.Instrs {
		fr.instruction(instr)
	}
	fr.lastBlocks[b.Index] = fr.builder.GetInsertBlock()
}

func (fr *frame) block(b *ssa.BasicBlock) llvm.BasicBlock {
	return fr.blocks[b.Index]
}

func (fr *frame) lastBlock(b *ssa.BasicBlock) llvm.BasicBlock {
	return fr.lastBlocks[b.Index]
}

func (fr *frame) value(v ssa.Value) (result *govalue) {
	switch v := v.(type) {
	case nil:
		return nil
	case *ssa.Function:
		return fr.resolveFunctionDescriptor(v)
	case *ssa.Const:
		return fr.newValueFromConst(v.Value, v.Type())
	case *ssa.Global:
		if g, ok := fr.globals[v]; ok {
			return newValue(g, v.Type())
		}
		// Create an external global. Globals for this package are defined
		// on entry to translatePackage, and have initialisers.
		llelemtyp := fr.llvmtypes.ToLLVM(deref(v.Type()))
		vname := fr.types.mc.mangleGlobalName(v)
		llglobal := llvm.AddGlobal(fr.module.Module, llelemtyp, vname)
		llglobal = llvm.ConstBitCast(llglobal, fr.llvmtypes.ToLLVM(v.Type()))
		fr.globals[v] = llglobal
		return newValue(llglobal, v.Type())
	}
	if value, ok := fr.env[v]; ok {
		return value
	}

	panic(fmt.Errorf("Instruction %q not visited yet", v.Name()))
}

func (fr *frame) llvmvalue(v ssa.Value) llvm.Value {
	if gv := fr.value(v); gv != nil {
		return gv.value
	} else {
		return llvm.Value{nil}
	}
}

func (fr *frame) isNonNull(v ssa.Value) bool {
	switch v.(type) {
	case
		// Globals have a fixed (non-nil) address.
		*ssa.Global,
		// The language does not specify what happens if an allocation fails.
		*ssa.Alloc,
		// These have already been nil checked.
		*ssa.FieldAddr, *ssa.IndexAddr:
		return true
	default:
		return false
	}
}

func (fr *frame) nilCheck(v ssa.Value, llptr llvm.Value) {
	if !fr.isNonNull(v) {
		ptrnull := fr.builder.CreateIsNull(llptr, "")
		fr.condBrRuntimeError(ptrnull, gccgoRuntimeErrorNIL_DEREFERENCE)
	}
}

func (fr *frame) canAvoidElementLoad(ptr ssa.Value) bool {
	for _, ref := range *ptr.Referrers() {
		switch ref := ref.(type) {
		case *ssa.Field:
		case *ssa.Index:
			if ref.X != ptr {
				return false
			}
			// ok
		default:
			return false
		}
	}

	return true
}

// If this value is sufficiently large, look through referrers to see if we can
// avoid a load.
func (fr *frame) canAvoidLoad(instr *ssa.UnOp, op llvm.Value) bool {
	if fr.types.Sizeof(instr.Type()) < 2*fr.types.Sizeof(types.Typ[types.Int]) {
		// Don't bother with small values.
		return false
	}

	// Keep track of whether our pointer may escape. We conservatively assume
	// that MakeInterfaces will escape.
	esc := false

	// We only know how to avoid loads if they are used to create an interface
	// or read an element of the structure. If we see any other referrer, abort.
	for _, ref := range *instr.Referrers() {
		switch ref := ref.(type) {
		case *ssa.MakeInterface:
			esc = true
		case *ssa.Field:
		case *ssa.Index:
			if ref.X != instr {
				// This should never happen, as indices are always of type int
				// and we don't bother with values smaller than 2*sizeof(int).
				panic("impossible")
			}
			// ok
		default:
			return false
		}
	}

	var opcopy llvm.Value
	if esc {
		opcopy = fr.createTypeMalloc(instr.Type())
	} else {
		opcopy = fr.allocaBuilder.CreateAlloca(fr.types.ToLLVM(instr.Type()), "")
	}
	fr.memcpy(opcopy, op, llvm.ConstInt(fr.types.inttype, uint64(fr.types.Sizeof(instr.Type())), false))

	fr.ptr[instr] = opcopy
	return true
}
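
// Illustrative example (not part of the original source): given a large struct
// pointed to by p, an SSA sequence such as
//
//	t0 = *p
//	t1 = t0.f
//
// is handled by canAvoidLoad without materializing t0 as a loaded value. The
// pointee is copied once (to an alloca, or to heap memory if t0 is also used
// by MakeInterface and therefore escapes), the copy is recorded in fr.ptr, and
// the *ssa.Field and *ssa.Index cases of instruction below read elements
// through GEPs on that copy.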

// Return true iff we think it might be beneficial to turn this alloc instruction
// into a statically allocated global.
// Precondition: we are compiling the init function.
func (fr *frame) shouldStaticallyAllocate(alloc *ssa.Alloc) bool {
	// First, see if the allocated type is an array or struct, and if so determine
	// the number of elements in the type. If the type is anything else, we
	// statically allocate unconditionally.
	var numElems int64
	switch ty := deref(alloc.Type()).Underlying().(type) {
	case *types.Array:
		numElems = ty.Len()
	case *types.Struct:
		numElems = int64(ty.NumFields())
	default:
		return true
	}

	// We treat the number of referrers to the alloc instruction as a rough
	// proxy for the number of elements initialized. If the data structure
	// is densely initialized (> 1/4 elements initialized), enable the
	// optimization.
	return int64(len(*alloc.Referrers()))*4 > numElems
}

// If val is a constant and addr refers to a global variable which is defined in
// this module or an element thereof, simulate the effect of storing val at addr
// in the global variable's initializer and return true, otherwise return false.
// Precondition: we are compiling the init function.
func (fr *frame) maybeStoreInInitializer(val, addr llvm.Value) bool {
	if val.IsAConstant().IsNil() {
		return false
	}

	if !addr.IsAConstantExpr().IsNil() && addr.OperandsCount() >= 2 &&
		// TODO(pcc): Explicitly check that this is a constant GEP.
		// I don't think there are any other kinds of constantexpr which
		// satisfy the conditions we test for here, so this is probably safe.
		!addr.Operand(0).IsAGlobalVariable().IsNil() &&
		addr.Operand(1).IsNull() {
		gv := addr.Operand(0)
		globalInit, ok := fr.globalInits[gv]
		if !ok {
			return false
		}
		indices := make([]uint32, addr.OperandsCount()-2)
		for i := range indices {
			op := addr.Operand(i + 2)
			if op.IsAConstantInt().IsNil() {
				return false
			}
			indices[i] = uint32(op.ZExtValue())
		}
		globalInit.update(gv.Type().ElementType(), indices, val)
		return true
	} else if !addr.IsAGlobalVariable().IsNil() {
		if globalInit, ok := fr.globalInits[addr]; ok {
			globalInit.update(addr.Type().ElementType(), nil, val)
			return true
		}
		return false
	} else {
		return false
	}
}
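
// Illustrative example (not part of the original source): maybeStoreInInitializer
// recognizes two shapes of constant store address while compiling init. For a
// package-level variable lowered to the module-defined global @v:
//
//	store <const> -> @v                    // the global itself:
//	                                       //   update(..., nil, val)
//	store <const> -> gep(@v, 0, 2, 1)      // a constant GEP whose first index is
//	                                       //   zero: the remaining operands become
//	                                       //   the update indices {2, 1}
//
// Any other address shape returns false, and the *ssa.Store case below emits
// an ordinary store instead.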

func (fr *frame) instruction(instr ssa.Instruction) {
	fr.logf("[%T] %v @ %s\n", instr, instr, fr.pkg.Prog.Fset.Position(instr.Pos()))
	if fr.GenerateDebug {
		fr.debug.SetLocation(fr.builder, instr.Pos())
	}

	switch instr := instr.(type) {
	case *ssa.Alloc:
		typ := deref(instr.Type())
		llvmtyp := fr.llvmtypes.ToLLVM(typ)
		var value llvm.Value
		if !instr.Heap {
			value = fr.env[instr].value
			fr.memsetZero(value, llvm.SizeOf(llvmtyp))
		} else if fr.isInit && fr.shouldStaticallyAllocate(instr) {
			// If this is the init function and we think it may be beneficial,
			// allocate memory statically in the object file rather than on the
			// heap. This allows us to optimize constant stores into such
			// variables as static initializations.
			global := llvm.AddGlobal(fr.module.Module, llvmtyp, "")
			global.SetLinkage(llvm.InternalLinkage)
			fr.addGlobal(global, typ)
			ptr := llvm.ConstBitCast(global, llvm.PointerType(llvm.Int8Type(), 0))
			fr.env[instr] = newValue(ptr, instr.Type())
		} else {
			value = fr.createTypeMalloc(typ)
			value.SetName(instr.Comment)
			value = fr.builder.CreateBitCast(value, llvm.PointerType(llvm.Int8Type(), 0), "")
			fr.env[instr] = newValue(value, instr.Type())
		}

	case *ssa.BinOp:
		lhs, rhs := fr.value(instr.X), fr.value(instr.Y)
		fr.env[instr] = fr.binaryOp(lhs, instr.Op, rhs)

	case *ssa.Call:
		tuple := fr.callInstruction(instr)
		if len(tuple) == 1 {
			fr.env[instr] = tuple[0]
		} else {
			fr.tuples[instr] = tuple
		}

	case *ssa.ChangeInterface:
		x := fr.value(instr.X)
		// The source type must be a non-empty interface,
		// as ChangeInterface cannot fail (E2I may fail).
		if instr.Type().Underlying().(*types.Interface).NumMethods() > 0 {
			x = fr.changeInterface(x, instr.Type(), false)
		} else {
			x = fr.convertI2E(x)
		}
		fr.env[instr] = x

	case *ssa.ChangeType:
		value := fr.llvmvalue(instr.X)
		if _, ok := instr.Type().Underlying().(*types.Pointer); ok {
			value = fr.builder.CreateBitCast(value, fr.llvmtypes.ToLLVM(instr.Type()), "")
		}
		fr.env[instr] = newValue(value, instr.Type())

	case *ssa.Convert:
		v := fr.value(instr.X)
		fr.env[instr] = fr.convert(v, instr.Type())

	case *ssa.Defer:
		fn, arg := fr.createThunk(instr)
		fr.runtime.Defer.call(fr, fr.frameptr, fn, arg)

	case *ssa.Extract:
		var elem llvm.Value
		if t, ok := fr.tuples[instr.Tuple]; ok {
			elem = t[instr.Index].value
		} else {
			tuple := fr.llvmvalue(instr.Tuple)
			elem = fr.builder.CreateExtractValue(tuple, instr.Index, instr.Name())
		}
		elemtyp := instr.Type()
		fr.env[instr] = newValue(elem, elemtyp)

	case *ssa.Field:
		fieldtyp := instr.Type()
		if p, ok := fr.ptr[instr.X]; ok {
			field := fr.builder.CreateStructGEP(p, instr.Field, instr.Name())
			if fr.canAvoidElementLoad(instr) {
				fr.ptr[instr] = field
			} else {
				fr.env[instr] = newValue(fr.builder.CreateLoad(field, ""), fieldtyp)
			}
		} else {
			value := fr.llvmvalue(instr.X)
			field := fr.builder.CreateExtractValue(value, instr.Field, instr.Name())
			fr.env[instr] = newValue(field, fieldtyp)
		}

	case *ssa.FieldAddr:
		ptr := fr.llvmvalue(instr.X)
		fr.nilCheck(instr.X, ptr)
		xtyp := instr.X.Type().Underlying().(*types.Pointer).Elem()
		ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(xtyp), 0)
		ptr = fr.builder.CreateBitCast(ptr, ptrtyp, "")
		fieldptr := fr.builder.CreateStructGEP(ptr, instr.Field, instr.Name())
		fieldptr = fr.builder.CreateBitCast(fieldptr, llvm.PointerType(llvm.Int8Type(), 0), "")
		fieldptrtyp := instr.Type()
		fr.env[instr] = newValue(fieldptr, fieldptrtyp)

	case *ssa.Go:
		fn, arg := fr.createThunk(instr)
		fr.runtime.Go.call(fr, fn, arg)

	case *ssa.If:
		cond := fr.llvmvalue(instr.Cond)
		block := instr.Block()
		trueBlock := fr.block(block.Succs[0])
		falseBlock := fr.block(block.Succs[1])
		cond = fr.builder.CreateTrunc(cond, llvm.Int1Type(), "")
		fr.builder.CreateCondBr(cond, trueBlock, falseBlock)

	case *ssa.Index:
		var arrayptr llvm.Value

		if ptr, ok := fr.ptr[instr.X]; ok {
			arrayptr = ptr
		} else {
			array := fr.llvmvalue(instr.X)
			arrayptr = fr.allocaBuilder.CreateAlloca(array.Type(), "")

			fr.builder.CreateStore(array, arrayptr)
		}
		index := fr.llvmvalue(instr.Index)

		arraytyp := instr.X.Type().Underlying().(*types.Array)
		arraylen := llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)

		// The index may not have been promoted to int (for example, if it
		// came from a composite literal).
		index = fr.createZExtOrTrunc(index, fr.types.inttype, "")

		// Bounds checking: 0 <= index < len
		zero := llvm.ConstNull(fr.types.inttype)
		i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
		li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")

		cond := fr.builder.CreateOr(i0, li, "")

		fr.condBrRuntimeError(cond, gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS)

		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{zero, index}, "")
		if fr.canAvoidElementLoad(instr) {
			fr.ptr[instr] = addr
		} else {
			fr.env[instr] = newValue(fr.builder.CreateLoad(addr, ""), instr.Type())
		}

	case *ssa.IndexAddr:
		x := fr.llvmvalue(instr.X)
		index := fr.llvmvalue(instr.Index)
		var arrayptr, arraylen llvm.Value
		var elemtyp types.Type
		var errcode uint64
		switch typ := instr.X.Type().Underlying().(type) {
		case *types.Slice:
			elemtyp = typ.Elem()
			arrayptr = fr.builder.CreateExtractValue(x, 0, "")
			arraylen = fr.builder.CreateExtractValue(x, 1, "")
			errcode = gccgoRuntimeErrorSLICE_INDEX_OUT_OF_BOUNDS
		case *types.Pointer: // *array
			arraytyp := typ.Elem().Underlying().(*types.Array)
			elemtyp = arraytyp.Elem()
			fr.nilCheck(instr.X, x)
			arrayptr = x
			arraylen = llvm.ConstInt(fr.llvmtypes.inttype, uint64(arraytyp.Len()), false)
			errcode = gccgoRuntimeErrorARRAY_INDEX_OUT_OF_BOUNDS
		}

		// The index may not have been promoted to int (for example, if it
		// came from a composite literal).
		index = fr.createZExtOrTrunc(index, fr.types.inttype, "")

		// Bounds checking: 0 <= index < len
		zero := llvm.ConstNull(fr.types.inttype)
		i0 := fr.builder.CreateICmp(llvm.IntSLT, index, zero, "")
		li := fr.builder.CreateICmp(llvm.IntSLE, arraylen, index, "")

		cond := fr.builder.CreateOr(i0, li, "")

		fr.condBrRuntimeError(cond, errcode)

		ptrtyp := llvm.PointerType(fr.llvmtypes.ToLLVM(elemtyp), 0)
		arrayptr = fr.builder.CreateBitCast(arrayptr, ptrtyp, "")
		addr := fr.builder.CreateGEP(arrayptr, []llvm.Value{index}, "")
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(llvm.Int8Type(), 0), "")
		fr.env[instr] = newValue(addr, types.NewPointer(elemtyp))

	case *ssa.Jump:
		succ := instr.Block().Succs[0]
		fr.builder.CreateBr(fr.block(succ))

	case *ssa.Lookup:
		x := fr.value(instr.X)
		index := fr.value(instr.Index)
		if isString(x.Type().Underlying()) {
			fr.env[instr] = fr.stringIndex(x, index)
		} else {
			v, ok := fr.mapLookup(x, index)
			if instr.CommaOk {
				fr.tuples[instr] = []*govalue{v, ok}
			} else {
				fr.env[instr] = v
			}
		}

	case *ssa.MakeChan:
		fr.env[instr] = fr.makeChan(instr.Type(), fr.value(instr.Size))

	case *ssa.MakeClosure:
		llfn := fr.resolveFunctionGlobal(instr.Fn.(*ssa.Function))
		llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
		fn := newValue(llfn, instr.Fn.(*ssa.Function).Signature)
		bindings := make([]*govalue, len(instr.Bindings))
		for i, binding := range instr.Bindings {
			bindings[i] = fr.value(binding)
		}
		fr.env[instr] = fr.makeClosure(fn, bindings)

	case *ssa.MakeInterface:
		// fr.ptr[instr.X] will be set if a pointer load was elided by canAvoidLoad
		if ptr, ok := fr.ptr[instr.X]; ok {
			fr.env[instr] = fr.makeInterfaceFromPointer(ptr, instr.X.Type(), instr.Type())
		} else {
			receiver := fr.llvmvalue(instr.X)
			fr.env[instr] = fr.makeInterface(receiver, instr.X.Type(), instr.Type())
		}

	case *ssa.MakeMap:
		fr.env[instr] = fr.makeMap(instr.Type(), fr.value(instr.Reserve))

	case *ssa.MakeSlice:
		length := fr.value(instr.Len)
		capacity := fr.value(instr.Cap)
		fr.env[instr] = fr.makeSlice(instr.Type(), length, capacity)

	case *ssa.MapUpdate:
		m := fr.value(instr.Map)
		k := fr.value(instr.Key)
		v := fr.value(instr.Value)
		fr.mapUpdate(m, k, v)

	case *ssa.Next:
		iter := fr.tuples[instr.Iter]
		if instr.IsString {
			fr.tuples[instr] = fr.stringIterNext(iter)
		} else {
			fr.tuples[instr] = fr.mapIterNext(iter)
		}

	case *ssa.Panic:
		arg := fr.value(instr.X)
		fr.callPanic(arg, true)

	case *ssa.Phi:
		typ := instr.Type()
		phi := fr.builder.CreatePHI(fr.llvmtypes.ToLLVM(typ), instr.Comment)
		fr.env[instr] = newValue(phi, typ)
		fr.phis = append(fr.phis, pendingPhi{instr, phi})

	case *ssa.Range:
		x := fr.value(instr.X)
		switch x.Type().Underlying().(type) {
		case *types.Map:
			fr.tuples[instr] = fr.mapIterInit(x)
		case *types.Basic: // string
			fr.tuples[instr] = fr.stringIterInit(x)
		default:
			panic(fmt.Sprintf("unhandled range for type %T", x.Type()))
		}

	case *ssa.Return:
		vals := make([]llvm.Value, len(instr.Results))
		for i, res := range instr.Results {
			vals[i] = fr.llvmvalue(res)
		}
		fr.retInf.encode(llvm.GlobalContext(), fr.allocaBuilder, fr.builder, vals)

	case *ssa.RunDefers:
		fr.runDefers()

	case *ssa.Select:
		index, recvOk, recvElems := fr.chanSelect(instr)
		tuple := append([]*govalue{index, recvOk}, recvElems...)
		fr.tuples[instr] = tuple

	case *ssa.Send:
		fr.chanSend(fr.value(instr.Chan), fr.value(instr.X))

	case *ssa.Slice:
		x := fr.llvmvalue(instr.X)
		low := fr.llvmvalue(instr.Low)
		high := fr.llvmvalue(instr.High)
		max := fr.llvmvalue(instr.Max)
		slice := fr.slice(x, instr.X.Type(), low, high, max)
		fr.env[instr] = newValue(slice, instr.Type())

	case *ssa.Store:
		addr := fr.llvmvalue(instr.Addr)
		value := fr.llvmvalue(instr.Val)
		addr = fr.builder.CreateBitCast(addr, llvm.PointerType(value.Type(), 0), "")
		// If this is the init function, see if we can simulate the effect
		// of the store in a global's initializer, in which case we can avoid
		// generating code for it.
		if !fr.isInit || !fr.maybeStoreInInitializer(value, addr) {
			fr.nilCheck(instr.Addr, addr)
			fr.builder.CreateStore(value, addr)
		}

	case *switchInstr:
		fr.emitSwitch(instr)

	case *ssa.TypeAssert:
		x := fr.value(instr.X)
		if instr.CommaOk {
			v, ok := fr.interfaceTypeCheck(x, instr.AssertedType)
			fr.tuples[instr] = []*govalue{v, ok}
		} else {
			fr.env[instr] = fr.interfaceTypeAssert(x, instr.AssertedType)
		}

	case *ssa.UnOp:
		operand := fr.value(instr.X)
		switch instr.Op {
		case token.ARROW:
			x, ok := fr.chanRecv(operand, instr.CommaOk)
			if instr.CommaOk {
				fr.tuples[instr] = []*govalue{x, ok}
			} else {
				fr.env[instr] = x
			}
		case token.MUL:
			fr.nilCheck(instr.X, operand.value)
			if !fr.canAvoidLoad(instr, operand.value) {
				// The bitcast is necessary to handle recursive pointer loads.
				llptr := fr.builder.CreateBitCast(operand.value, llvm.PointerType(fr.llvmtypes.ToLLVM(instr.Type()), 0), "")
				fr.env[instr] = newValue(fr.builder.CreateLoad(llptr, ""), instr.Type())
			}
		default:
			fr.env[instr] = fr.unaryOp(operand, instr.Op)
		}

	default:
		panic(fmt.Sprintf("unhandled: %v", instr))
	}
}

func (fr *frame) callBuiltin(typ types.Type, builtin *ssa.Builtin, args []ssa.Value) []*govalue {
	switch builtin.Name() {
	case "print", "println":
		llargs := make([]*govalue, len(args))
		for i, arg := range args {
			llargs[i] = fr.value(arg)
		}
		fr.printValues(builtin.Name() == "println", llargs...)
		return nil

	case "panic":
		fr.callPanic(fr.value(args[0]), false)
		return nil

	case "recover":
		return []*govalue{fr.callRecover(false)}

	case "append":
		return []*govalue{fr.callAppend(fr.value(args[0]), fr.value(args[1]))}

	case "close":
		fr.chanClose(fr.value(args[0]))
		return nil

	case "cap":
		return []*govalue{fr.callCap(fr.value(args[0]))}

	case "len":
		return []*govalue{fr.callLen(fr.value(args[0]))}

	case "copy":
		return []*govalue{fr.callCopy(fr.value(args[0]), fr.value(args[1]))}

	case "delete":
		fr.mapDelete(fr.value(args[0]), fr.value(args[1]))
		return nil

	case "real":
		return []*govalue{fr.extractRealValue(fr.value(args[0]))}

	case "imag":
		return []*govalue{fr.extractImagValue(fr.value(args[0]))}

	case "complex":
		r := fr.llvmvalue(args[0])
		i := fr.llvmvalue(args[1])
		cmplx := llvm.Undef(fr.llvmtypes.ToLLVM(typ))
		cmplx = fr.builder.CreateInsertValue(cmplx, r, 0, "")
		cmplx = fr.builder.CreateInsertValue(cmplx, i, 1, "")
		return []*govalue{newValue(cmplx, typ)}

	case "ssa:wrapnilchk":
		ptr := fr.value(args[0])
		fr.nilCheck(args[0], ptr.value)
		return []*govalue{ptr}

	default:
		panic("unimplemented: " + builtin.Name())
	}
}

// callInstruction translates function call instructions.
func (fr *frame) callInstruction(instr ssa.CallInstruction) []*govalue {
	call := instr.Common()
	if builtin, ok := call.Value.(*ssa.Builtin); ok {
		var typ types.Type
		if v := instr.Value(); v != nil {
			typ = v.Type()
		}
		return fr.callBuiltin(typ, builtin, call.Args)
	}

	args := make([]*govalue, len(call.Args))
	for i, arg := range call.Args {
		args[i] = fr.value(arg)
	}

	var fn *govalue
	var chain llvm.Value
	if call.IsInvoke() {
		var recv *govalue
		fn, recv = fr.interfaceMethod(fr.llvmvalue(call.Value), call.Value.Type(), call.Method)
		args = append([]*govalue{recv}, args...)
	} else {
		if ssafn, ok := call.Value.(*ssa.Function); ok {
			llfn := fr.resolveFunctionGlobal(ssafn)
			llfn = llvm.ConstBitCast(llfn, llvm.PointerType(llvm.Int8Type(), 0))
			fn = newValue(llfn, ssafn.Type())
		} else {
			// First-class function values are stored as *{*fnptr}, so
			// we must extract the function pointer. We must also
			// set the chain, in case the function is a closure.
			fn = fr.value(call.Value)
			chain = fn.value
			fnptr := fr.builder.CreateBitCast(fn.value, llvm.PointerType(fn.value.Type(), 0), "")
			fnptr = fr.builder.CreateLoad(fnptr, "")
			fn = newValue(fnptr, fn.Type())
		}
		if recv := call.Signature().Recv(); recv != nil {
			if _, ok := recv.Type().Underlying().(*types.Pointer); !ok {
				recvalloca := fr.allocaBuilder.CreateAlloca(args[0].value.Type(), "")
				fr.builder.CreateStore(args[0].value, recvalloca)
				args[0] = newValue(recvalloca, types.NewPointer(args[0].Type()))
			}
		}
	}
	return fr.createCall(fn, chain, args)
}

func hasDefer(f *ssa.Function) bool {
	for _, b := range f.Blocks {
		for _, instr := range b.Instrs {
			if _, ok := instr.(*ssa.Defer); ok {
				return true
			}
		}
	}
	return false
}

func callsRecover(f *ssa.Function) bool {
	for _, b := range f.Blocks {
		for _, instr := range b.Instrs {
			if instr, ok := instr.(ssa.CallInstruction); ok {
				b, ok := instr.Common().Value.(*ssa.Builtin)
				if ok && b.Name() == "recover" {
					return true
				}
			}
		}
	}
	return false
}
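
// Illustrative sketch (not part of the original source): how the entry points
// in this file fit together, assuming a *compiler and an *ssa.Package prepared
// by the surrounding driver (exampleTranslate itself is hypothetical).
func exampleTranslate(c *compiler, pkg *ssa.Package) {
	u := newUnit(c, pkg)
	// translatePackage creates the package's globals and type descriptors,
	// defines its functions via defineFunction, and finally sets the global
	// initializers accumulated in u.globalInits.
	u.translatePackage(pkg)
}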