github.com/axw/llgo@v0.0.0-20160805011314-95b5fe4dca20/irgen/cabi.go (about) 1 //===- cabi.go - C ABI abstraction layer ----------------------------------===// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file implements an abstraction layer for the platform's C ABI (currently 11 // supports only Linux/x86_64). 12 // 13 //===----------------------------------------------------------------------===// 14 15 package irgen 16 17 import ( 18 "llvm.org/llgo/third_party/gotools/go/types" 19 "llvm.org/llvm/bindings/go/llvm" 20 ) 21 22 type abiArgInfo int 23 24 const ( 25 AIK_Direct = abiArgInfo(iota) 26 AIK_Indirect 27 ) 28 29 type backendType interface { 30 ToLLVM(llvm.Context) llvm.Type 31 } 32 33 type ptrBType struct { 34 } 35 36 func (t ptrBType) ToLLVM(c llvm.Context) llvm.Type { 37 return llvm.PointerType(c.Int8Type(), 0) 38 } 39 40 type intBType struct { 41 width int 42 signed bool 43 } 44 45 func (t intBType) ToLLVM(c llvm.Context) llvm.Type { 46 return c.IntType(t.width * 8) 47 } 48 49 type floatBType struct { 50 isDouble bool 51 } 52 53 func (t floatBType) ToLLVM(c llvm.Context) llvm.Type { 54 if t.isDouble { 55 return c.DoubleType() 56 } else { 57 return c.FloatType() 58 } 59 } 60 61 type structBType struct { 62 fields []backendType 63 } 64 65 func (t structBType) ToLLVM(c llvm.Context) llvm.Type { 66 var lfields []llvm.Type 67 for _, f := range t.fields { 68 lfields = append(lfields, f.ToLLVM(c)) 69 } 70 return c.StructType(lfields, false) 71 } 72 73 type arrayBType struct { 74 length uint64 75 elem backendType 76 } 77 78 func (t arrayBType) ToLLVM(c llvm.Context) llvm.Type { 79 return llvm.ArrayType(t.elem.ToLLVM(c), int(t.length)) 80 } 81 82 // align returns the smallest y >= x such that y % a == 0. 
func align(x, a int64) int64 {
	y := x + a - 1
	return y - y%a
}

// sizeofStruct returns the size in bytes of a struct whose fields have the
// given types, placing each field at its natural alignment. Note that no
// tail padding is added after the last field.
func (tm *llvmTypeMap) sizeofStruct(fields ...types.Type) int64 {
	var o int64
	for _, f := range fields {
		a := tm.Alignof(f)
		o = align(o, a)
		o += tm.Sizeof(f)
	}
	return o
}

// This decides whether the x86_64 classification algorithm produces MEMORY for
// the given type. Given the subset of types that Go supports, this is exactly
// equivalent to testing the type's size. See in particular the first step of
// the algorithm and its footnote.
func (tm *llvmTypeMap) classify(t ...types.Type) abiArgInfo {
	if tm.sizeofStruct(t...) > 16 {
		return AIK_Indirect
	}
	return AIK_Direct
}

// sliceBackendType returns the backend representation of a slice header:
// an i8* data pointer followed by two pointer-sized unsigned integers
// (presumably len and cap — matches the three-word Go slice header).
func (tm *llvmTypeMap) sliceBackendType() backendType {
	i8ptr := &ptrBType{}
	uintptr := &intBType{tm.target.PointerSize(), false}
	return &structBType{[]backendType{i8ptr, uintptr, uintptr}}
}

// getBackendType maps a Go type to its ABI-level backend representation.
// Named types are resolved to their underlying type; aggregates are mapped
// field by field. Panics on a type it does not handle.
func (tm *llvmTypeMap) getBackendType(t types.Type) backendType {
	switch t := t.(type) {
	case *types.Named:
		return tm.getBackendType(t.Underlying())

	case *types.Basic:
		switch t.Kind() {
		case types.Bool, types.Uint8:
			return &intBType{1, false}
		case types.Int8:
			return &intBType{1, true}
		case types.Uint16:
			return &intBType{2, false}
		case types.Int16:
			return &intBType{2, true}
		case types.Uint32:
			return &intBType{4, false}
		case types.Int32:
			return &intBType{4, true}
		case types.Uint64:
			return &intBType{8, false}
		case types.Int64:
			return &intBType{8, true}
		case types.Uint, types.Uintptr:
			return &intBType{tm.target.PointerSize(), false}
		case types.Int:
			return &intBType{tm.target.PointerSize(), true}
		case types.Float32:
			return &floatBType{false}
		case types.Float64:
			return &floatBType{true}
		case types.UnsafePointer:
			return &ptrBType{}
		case types.Complex64:
			// Complex values are passed as (real, imag) pairs.
			f32 := &floatBType{false}
			return &structBType{[]backendType{f32, f32}}
		case types.Complex128:
			f64 := &floatBType{true}
			return &structBType{[]backendType{f64, f64}}
		case types.String:
			// String header: data pointer + pointer-sized length.
			return &structBType{[]backendType{&ptrBType{}, &intBType{tm.target.PointerSize(), false}}}
		}

	case *types.Struct:
		var fields []backendType
		for i := 0; i != t.NumFields(); i++ {
			f := t.Field(i)
			fields = append(fields, tm.getBackendType(f.Type()))
		}
		return &structBType{fields}

	case *types.Pointer, *types.Signature, *types.Map, *types.Chan:
		// All reference-like types are passed as a single pointer.
		return &ptrBType{}

	case *types.Interface:
		// Interface header: two pointer-sized words.
		i8ptr := &ptrBType{}
		return &structBType{[]backendType{i8ptr, i8ptr}}

	case *types.Slice:
		return tm.sliceBackendType()

	case *types.Array:
		return &arrayBType{uint64(t.Len()), tm.getBackendType(t.Elem())}
	}

	panic("unhandled type: " + t.String())
}

// offsetedType is a scalar backend type paired with its byte offset from the
// start of the enclosing aggregate.
type offsetedType struct {
	typ    backendType
	offset uint64
}

// getBackendOffsets flattens bt into its scalar leaves, each annotated with
// its byte offset from the start of bt. A scalar yields a single entry at
// offset 0.
func (tm *llvmTypeMap) getBackendOffsets(bt backendType) (offsets []offsetedType) {
	switch bt := bt.(type) {
	case *structBType:
		t := bt.ToLLVM(tm.ctx)
		for i, f := range bt.fields {
			// Field offsets come from the target data layout, so any
			// inter-field padding is accounted for.
			offset := tm.target.ElementOffset(t, i)
			fieldOffsets := tm.getBackendOffsets(f)
			for _, fo := range fieldOffsets {
				offsets = append(offsets, offsetedType{fo.typ, offset + fo.offset})
			}
		}

	case *arrayBType:
		size := tm.target.TypeAllocSize(bt.elem.ToLLVM(tm.ctx))
		fieldOffsets := tm.getBackendOffsets(bt.elem)
		for i := uint64(0); i != bt.length; i++ {
			for _, fo := range fieldOffsets {
				offsets = append(offsets, offsetedType{fo.typ, i*size + fo.offset})
			}
		}

	default:
		offsets = []offsetedType{offsetedType{bt, 0}}
	}

	return
}

// classifyEightbyte returns the LLVM type with which one eightbyte (given as
// its flattened scalar leaves) is passed, and increments *numInt or *numSSE
// according to the register class it consumes.
func (tm *llvmTypeMap) classifyEightbyte(offsets []offsetedType, numInt, numSSE *int) llvm.Type {
	if len(offsets) == 1 {
		// A single leaf is passed as itself.
		if _, ok := offsets[0].typ.(*floatBType); ok {
			*numSSE++
		} else {
			*numInt++
		}
		return offsets[0].typ.ToLLVM(tm.ctx)
	}
	// This implements classification for the basic types and step 4 of the
	// classification algorithm. At this point, the only two possible
	// classifications are SSE (floats) and INTEGER (everything else).
	sse := true
	for _, ot := range offsets {
		if _, ok := ot.typ.(*floatBType); !ok {
			sse = false
			break
		}
	}
	if sse {
		// This can only be (float, float), which uses an SSE vector.
		*numSSE++
		return llvm.VectorType(tm.ctx.FloatType(), 2)
	} else {
		*numInt++
		// Pass as one integer wide enough to span from the first leaf to
		// the end of the last leaf.
		width := offsets[len(offsets)-1].offset + tm.target.TypeAllocSize(offsets[len(offsets)-1].typ.ToLLVM(tm.ctx)) - offsets[0].offset
		return tm.ctx.IntType(int(width) * 8)
	}
}

// expandType appends to argTypes/argAttrs the LLVM parameter type(s) and
// attribute(s) used to pass bt directly, and returns the extended slices
// together with the number of INTEGER and SSE class registers consumed.
// Aggregates are split into at most two eightbytes; small integers receive
// sign/zero-extension attributes.
func (tm *llvmTypeMap) expandType(argTypes []llvm.Type, argAttrs []llvm.Attribute, bt backendType) ([]llvm.Type, []llvm.Attribute, int, int) {
	var numInt, numSSE int
	var argAttr llvm.Attribute

	switch bt := bt.(type) {
	case *structBType, *arrayBType:
		bo := tm.getBackendOffsets(bt)
		// Split the flattened leaves into the first eightbyte (offset < 8)
		// and the second (everything else). classify() has already ensured
		// the total size is at most 16 bytes.
		sp := 0
		for sp != len(bo) && bo[sp].offset < 8 {
			sp++
		}
		eb1 := bo[0:sp]
		eb2 := bo[sp:]
		if len(eb2) > 0 {
			argTypes = append(argTypes, tm.classifyEightbyte(eb1, &numInt, &numSSE), tm.classifyEightbyte(eb2, &numInt, &numSSE))
			argAttrs = append(argAttrs, 0, 0)
		} else {
			argTypes = append(argTypes, tm.classifyEightbyte(eb1, &numInt, &numSSE))
			argAttrs = append(argAttrs, 0)
		}

		return argTypes, argAttrs, numInt, numSSE

	case *intBType:
		// Integers narrower than 32 bits are extended, with signedness
		// choosing sext vs. zext.
		if bt.width < 4 {
			if bt.signed {
				argAttr = llvm.SExtAttribute
			} else {
				argAttr = llvm.ZExtAttribute
			}
		}
	}

	// Scalar case: a single leaf at offset 0.
	argTypes = append(argTypes, tm.classifyEightbyte([]offsetedType{{bt, 0}}, &numInt, &numSSE))
	argAttrs = append(argAttrs, argAttr)

	return argTypes, argAttrs, numInt, numSSE
}

// argInfo moves one Go-level argument between its Go value form and its
// ABI-encoded form in the LLVM argument list.
type argInfo interface {
	// Emit instructions to builder to ABI encode val and store result to args.
	encode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, args []llvm.Value, val llvm.Value)

	// Emit instructions to builder to ABI decode and return the resulting Value.
	decode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder) llvm.Value
}

// retInfo moves a function's result(s) between their Go value form and their
// ABI-encoded form (a direct return value or an sret pointer argument).
type retInfo interface {
	// Prepare args to receive a value. allocaBuilder refers to a builder in the entry block.
	prepare(ctx llvm.Context, allocaBuilder llvm.Builder, args []llvm.Value)

	// Emit instructions to builder to ABI decode the return value(s), if any. call is the
	// call instruction. Must be called after prepare().
	decode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, call llvm.Value) []llvm.Value

	// Emit instructions to builder to ABI encode the return value(s), if any, and return.
	encode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, vals []llvm.Value)
}

// directArgInfo describes an argument passed directly, in up to two
// register-sized slots.
type directArgInfo struct {
	argOffset int         // index of this argument's first slot in the LLVM argument list
	argTypes  []llvm.Type // the one or two register types used to pass the value
	valType   llvm.Type   // the LLVM type of the Go value itself
}

// directEncode stores val into args using the direct convention: the value
// is spilled to an entry-block alloca and reloaded through a pointer to the
// register type(s), reinterpreting its bytes.
func directEncode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, argTypes []llvm.Type, args []llvm.Value, val llvm.Value) {
	valType := val.Type()

	switch len(argTypes) {
	case 0:
		// do nothing

	case 1:
		if argTypes[0].C == valType.C {
			// Already the right representation; pass through unchanged.
			args[0] = val
			return
		}
		alloca := allocaBuilder.CreateAlloca(valType, "")
		bitcast := builder.CreateBitCast(alloca, llvm.PointerType(argTypes[0], 0), "")
		builder.CreateStore(val, alloca)
		args[0] = builder.CreateLoad(bitcast, "")

	case 2:
		// Reinterpret the value as a two-field struct of the register types
		// and load each field separately.
		encodeType := llvm.StructType(argTypes, false)
		alloca := allocaBuilder.CreateAlloca(valType, "")
		bitcast := builder.CreateBitCast(alloca, llvm.PointerType(encodeType, 0), "")
		builder.CreateStore(val, alloca)
		args[0] = builder.CreateLoad(builder.CreateStructGEP(bitcast, 0, ""), "")
		args[1] = builder.CreateLoad(builder.CreateStructGEP(bitcast, 1, ""), "")

	default:
		panic("unexpected argTypes size")
	}
}

func (ai *directArgInfo) encode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, args []llvm.Value, val llvm.Value) {
	directEncode(ctx, allocaBuilder, builder, ai.argTypes, args[ai.argOffset:ai.argOffset+len(ai.argTypes)], val)
}

// directDecode is the inverse of directEncode: it reassembles a value of
// valType from the register value(s) in args, again via a spill slot.
func directDecode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, valType llvm.Type, args []llvm.Value) llvm.Value {
	var alloca llvm.Value

	switch len(args) {
	case 0:
		// Zero-sized value; materialize an empty struct constant.
		return llvm.ConstNull(ctx.StructType(nil, false))

	case 1:
		if args[0].Type().C == valType.C {
			return args[0]
		}
		alloca = allocaBuilder.CreateAlloca(valType, "")
		bitcast := builder.CreateBitCast(alloca, llvm.PointerType(args[0].Type(), 0), "")
		builder.CreateStore(args[0], bitcast)

	case 2:
		alloca = allocaBuilder.CreateAlloca(valType, "")
		var argTypes []llvm.Type
		for _, a := range args {
			argTypes = append(argTypes, a.Type())
		}
		encodeType := ctx.StructType(argTypes, false)
		bitcast := builder.CreateBitCast(alloca, llvm.PointerType(encodeType, 0), "")
		builder.CreateStore(args[0], builder.CreateStructGEP(bitcast, 0, ""))
		builder.CreateStore(args[1], builder.CreateStructGEP(bitcast, 1, ""))

	default:
		panic("unexpected argTypes size")
	}

	return builder.CreateLoad(alloca, "")
}

func (ai *directArgInfo) decode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder) llvm.Value {
	// Collect this argument's formal parameter(s) from the function being
	// built, then reassemble the Go value from them.
	var args []llvm.Value
	fn := builder.GetInsertBlock().Parent()
	for i, _ := range ai.argTypes {
		args = append(args, fn.Param(ai.argOffset+i))
	}
	return directDecode(ctx, allocaBuilder, builder, ai.valType, args)
}

// indirectArgInfo describes an argument passed in memory: a pointer to a
// caller-allocated copy (marked byval by getFunctionTypeInfo).
type indirectArgInfo struct {
	argOffset int // index of the pointer in the LLVM argument list
}

func (ai *indirectArgInfo) encode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, args []llvm.Value, val llvm.Value) {
	// Spill the value and pass the address of the spill slot.
	alloca := allocaBuilder.CreateAlloca(val.Type(), "")
	builder.CreateStore(val, alloca)
	args[ai.argOffset] = alloca
}

func (ai *indirectArgInfo) decode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder) llvm.Value {
	fn := builder.GetInsertBlock().Parent()
	return builder.CreateLoad(fn.Param(ai.argOffset), "")
}

// directRetInfo describes results returned directly as the LLVM return value.
type directRetInfo struct {
	numResults  int         // number of Go-level results
	retTypes    []llvm.Type // zero, one or two register types making up the return value
	resultsType llvm.Type   // LLVM type of the result (or struct of results)
}

func (ri *directRetInfo) prepare(ctx llvm.Context, allocaBuilder llvm.Builder, args []llvm.Value) {
	// Direct returns need no argument slot.
}

func (ri *directRetInfo) decode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, call llvm.Value) []llvm.Value {
	var args []llvm.Value
	switch len(ri.retTypes) {
	case 0:
		return nil
	case 1:
		args = []llvm.Value{call}
	default:
		// Two register values come back wrapped in an LLVM struct.
		args = make([]llvm.Value, len(ri.retTypes))
		for i := 0; i != len(ri.retTypes); i++ {
			args[i] = builder.CreateExtractValue(call, i, "")
		}
	}

	d := directDecode(ctx, allocaBuilder, builder, ri.resultsType, args)

	if ri.numResults == 1 {
		return []llvm.Value{d}
	} else {
		// Unpack the aggregated results struct into individual values.
		results := make([]llvm.Value, ri.numResults)
		for i := 0; i != ri.numResults; i++ {
			results[i] = builder.CreateExtractValue(d, i, "")
		}
		return results
	}
}

func (ri *directRetInfo) encode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, vals []llvm.Value) {
	if len(ri.retTypes) == 0 {
		builder.CreateRetVoid()
		return
	}

	// Aggregate multiple Go results into a single struct value first.
	var val llvm.Value
	switch ri.numResults {
	case 1:
		val = vals[0]
	default:
		val = llvm.Undef(ri.resultsType)
		for i, v := range vals {
			val = builder.CreateInsertValue(val, v, i, "")
		}
	}

	args := make([]llvm.Value, len(ri.retTypes))
	directEncode(ctx, allocaBuilder, builder, ri.retTypes, args, val)

	// Re-wrap two register values in a struct for the return instruction.
	var retval llvm.Value
	switch len(ri.retTypes) {
	case 1:
		retval = args[0]
	default:
		retval = llvm.Undef(ctx.StructType(ri.retTypes, false))
		for i, a := range args {
			retval = builder.CreateInsertValue(retval, a, i, "")
		}
	}
	builder.CreateRet(retval)
}

// indirectRetInfo describes results returned in memory through an sret
// pointer passed as the first argument.
type indirectRetInfo struct {
	numResults  int
	sretSlot    llvm.Value // caller-side alloca that receives the results
	resultsType llvm.Type
}

func (ri *indirectRetInfo) prepare(ctx llvm.Context, allocaBuilder llvm.Builder, args []llvm.Value) {
	ri.sretSlot = allocaBuilder.CreateAlloca(ri.resultsType, "")
	args[0] = ri.sretSlot
}

func (ri *indirectRetInfo) decode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, call llvm.Value) []llvm.Value {
	if ri.numResults == 1 {
		return []llvm.Value{builder.CreateLoad(ri.sretSlot, "")}
	} else {
		vals := make([]llvm.Value, ri.numResults)
		for i, _ := range vals {
			vals[i] = builder.CreateLoad(builder.CreateStructGEP(ri.sretSlot, i, ""), "")
		}
		return vals
	}
}

func (ri *indirectRetInfo) encode(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, vals []llvm.Value) {
	// In the callee, the sret slot is the first formal parameter.
	fn := builder.GetInsertBlock().Parent()
	sretSlot := fn.Param(0)

	if ri.numResults == 1 {
		builder.CreateStore(vals[0], sretSlot)
	} else {
		for i, v := range vals {
			builder.CreateStore(v, builder.CreateStructGEP(sretSlot, i, ""))
		}
	}
	builder.CreateRetVoid()
}

// functionTypeInfo is the complete ABI lowering of a Go function type.
type functionTypeInfo struct {
	functionType llvm.Type
	argAttrs     []llvm.Attribute
	retAttr      llvm.Attribute
	argInfos     []argInfo
	retInf       retInfo
	chainIndex   int // index of the nest/chain parameter in the LLVM argument list
}

// declare adds a function with this lowered type to module m and applies the
// computed return and parameter attributes.
func (fi *functionTypeInfo) declare(m llvm.Module, name string) llvm.Value {
	fn := llvm.AddFunction(m, name, fi.functionType)
	fn.AddFunctionAttr(fi.retAttr)
	for i, a := range fi.argAttrs {
		if a != 0 {
			fn.Param(i).AddAttribute(a)
		}
	}
	return fn
}

// call emits an ABI-encoded call to callee with the given Go-level args and
// chain value, and returns the decoded Go-level results.
func (fi *functionTypeInfo) call(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, callee llvm.Value, chain llvm.Value, args []llvm.Value) []llvm.Value {
	callArgs := make([]llvm.Value, len(fi.argAttrs))
	if chain.C == nil {
		// The chain slot is always present; fill it with undef when the
		// caller has no chain value.
		chain = llvm.Undef(llvm.PointerType(ctx.Int8Type(), 0))
	}
	callArgs[fi.chainIndex] = chain
	for i, a := range args {
		fi.argInfos[i].encode(ctx, allocaBuilder, builder, callArgs, a)
	}
	fi.retInf.prepare(ctx, allocaBuilder, callArgs)
	typedCallee := builder.CreateBitCast(callee, llvm.PointerType(fi.functionType, 0), "")
	call := builder.CreateCall(typedCallee, callArgs, "")
	// Attribute index 0 is the return value; parameters start at 1.
	call.AddInstrAttribute(0, fi.retAttr)
	for i, a := range fi.argAttrs {
		call.AddInstrAttribute(i+1, a)
	}
	return fi.retInf.decode(ctx, allocaBuilder, builder, call)
}

// invoke is like call, but emits an invoke instruction with the given
// continuation and landing pad blocks, leaving the builder positioned at
// the end of cont.
func (fi *functionTypeInfo) invoke(ctx llvm.Context, allocaBuilder llvm.Builder, builder llvm.Builder, callee llvm.Value, chain llvm.Value, args []llvm.Value, cont, lpad llvm.BasicBlock) []llvm.Value {
	callArgs := make([]llvm.Value, len(fi.argAttrs))
	if chain.C == nil {
		chain = llvm.Undef(llvm.PointerType(ctx.Int8Type(), 0))
	}
	callArgs[fi.chainIndex] = chain
	for i, a := range args {
		fi.argInfos[i].encode(ctx, allocaBuilder, builder, callArgs, a)
	}
	fi.retInf.prepare(ctx, allocaBuilder, callArgs)
	typedCallee := builder.CreateBitCast(callee, llvm.PointerType(fi.functionType, 0), "")
	call := builder.CreateInvoke(typedCallee, callArgs, cont, lpad, "")
	call.AddInstrAttribute(0, fi.retAttr)
	for i, a := range fi.argAttrs {
		call.AddInstrAttribute(i+1, a)
	}
	// Results are decoded in the continuation block.
	builder.SetInsertPointAtEnd(cont)
	return fi.retInf.decode(ctx, allocaBuilder, builder, call)
}

// getFunctionTypeInfo computes the ABI lowering for a function with the
// given Go argument and result types: the LLVM function type, parameter
// attributes, and per-argument/result encode/decode strategies. A nest
// ("chain") pointer parameter is always included.
func (tm *llvmTypeMap) getFunctionTypeInfo(args []types.Type, results []types.Type) (fi functionTypeInfo) {
	var returnType llvm.Type
	var argTypes []llvm.Type
	if len(results) == 0 {
		returnType = llvm.VoidType()
		fi.retInf = &directRetInfo{}
	} else {
		aik := tm.classify(results...)

		// Multiple results are handled as a single struct of the results.
		var resultsType llvm.Type
		if len(results) == 1 {
			resultsType = tm.ToLLVM(results[0])
		} else {
			elements := make([]llvm.Type, len(results))
			for i := range elements {
				elements[i] = tm.ToLLVM(results[i])
			}
			resultsType = tm.ctx.StructType(elements, false)
		}

		switch aik {
		case AIK_Direct:
			var retFields []backendType
			for _, t := range results {
				retFields = append(retFields, tm.getBackendType(t))
			}
			bt := &structBType{retFields}

			retTypes, retAttrs, _, _ := tm.expandType(nil, nil, bt)
			switch len(retTypes) {
			case 0: // e.g., empty struct
				returnType = llvm.VoidType()
			case 1:
				returnType = retTypes[0]
				fi.retAttr = retAttrs[0]
			case 2:
				returnType = llvm.StructType(retTypes, false)
			default:
				panic("unexpected expandType result")
			}
			fi.retInf = &directRetInfo{numResults: len(results), retTypes: retTypes, resultsType: resultsType}

		case AIK_Indirect:
			// Return in memory: the caller passes an sret pointer as the
			// first argument and the function returns void.
			returnType = llvm.VoidType()
			argTypes = []llvm.Type{llvm.PointerType(resultsType, 0)}
			fi.argAttrs = []llvm.Attribute{llvm.StructRetAttribute}
			fi.retInf = &indirectRetInfo{numResults: len(results), resultsType: resultsType}
		}
	}

	// Allocate an argument for the call chain.
	fi.chainIndex = len(argTypes)
	argTypes = append(argTypes, llvm.PointerType(tm.ctx.Int8Type(), 0))
	fi.argAttrs = append(fi.argAttrs, llvm.NestAttribute)

	// Keep track of the number of INTEGER/SSE class registers remaining.
	// (x86_64 passes up to 6 INTEGER and 8 SSE arguments in registers.)
	remainingInt := 6
	remainingSSE := 8

	for _, arg := range args {
		aik := tm.classify(arg)

		isDirect := aik == AIK_Direct
		if isDirect {
			bt := tm.getBackendType(arg)
			directArgTypes, directArgAttrs, numInt, numSSE := tm.expandType(argTypes, fi.argAttrs, bt)

			// Check if the argument can fit into the remaining registers, or if
			// it would just occupy one register (which pushes the whole argument
			// onto the stack anyway).
			if numInt <= remainingInt && numSSE <= remainingSSE || numInt+numSSE == 1 {
				remainingInt -= numInt
				remainingSSE -= numSSE
				argInfo := &directArgInfo{argOffset: len(argTypes), valType: bt.ToLLVM(tm.ctx)}
				fi.argInfos = append(fi.argInfos, argInfo)
				argTypes = directArgTypes
				fi.argAttrs = directArgAttrs
				argInfo.argTypes = argTypes[argInfo.argOffset:len(argTypes)]
			} else {
				// No remaining registers; pass on the stack.
				isDirect = false
			}
		}

		if !isDirect {
			fi.argInfos = append(fi.argInfos, &indirectArgInfo{len(argTypes)})
			argTypes = append(argTypes, llvm.PointerType(tm.ToLLVM(arg), 0))
			fi.argAttrs = append(fi.argAttrs, llvm.ByValAttribute)
		}
	}

	fi.functionType = llvm.FunctionType(returnType, argTypes, false)
	return
}

// getSignatureInfo computes the ABI lowering for a Go signature. A method
// receiver, if present, is prepended to the argument list; non-pointer
// receivers (other than unsafe.Pointer) are passed by pointer.
func (tm *llvmTypeMap) getSignatureInfo(sig *types.Signature) functionTypeInfo {
	var args, results []types.Type
	if sig.Recv() != nil {
		recvtype := sig.Recv().Type()
		if _, ok := recvtype.Underlying().(*types.Pointer); !ok && recvtype != types.Typ[types.UnsafePointer] {
			recvtype = types.NewPointer(recvtype)
		}
		args = []types.Type{recvtype}
	}

	for i := 0; i != sig.Params().Len(); i++ {
		args = append(args, sig.Params().At(i).Type())
	}
	for i := 0; i != sig.Results().Len(); i++ {
		results = append(results, sig.Results().At(i).Type())
	}
	return tm.getFunctionTypeInfo(args, results)
}