github.com/graybobo/golang.org-package-offline-cache@v0.0.0-20200626051047-6608995c132f/x/tools/go/pointer/gen14.go

// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build !go1.5

package pointer

// This file defines the constraint generation phase.

// TODO(adonovan): move the constraint definitions and the store() etc
// functions which add them (and are also used by the solver) into a
// new file, constraints.go.

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/types"
)

var (
	tEface     = types.NewInterface(nil, nil).Complete()
	tInvalid   = types.Typ[types.Invalid]
	tUnsafePtr = types.Typ[types.UnsafePointer]
)

// ---------- Node creation ----------

// nextNode returns the index of the next unused node.
func (a *analysis) nextNode() nodeid {
	return nodeid(len(a.nodes))
}

// addNodes creates nodes for all scalar elements in type typ, and
// returns the id of the first one, or zero if the type was
// analytically uninteresting.
//
// comment explains the origin of the nodes, as a debugging aid.
//
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
	id := a.nextNode()
	for _, fi := range a.flatten(typ) {
		a.addOneNode(fi.typ, comment, fi)
	}
	if id == a.nextNode() {
		return 0 // type contained no pointers
	}
	return id
}

// addOneNode creates a single node with type typ, and returns its id.
//
// typ should generally be scalar (except for tagged.T nodes
// and struct/array identity nodes). Use addNodes for non-scalar types.
//
// comment explains the origin of the node, as a debugging aid.
// subelement indicates the subelement, e.g. ".a.b[*].c".
//
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
	id := a.nextNode()
	a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
	if a.log != nil {
		fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
			id, typ, comment, subelement.path())
	}
	return id
}

// setValueNode associates node id with the value v.
// cgn identifies the context iff v is a local variable.
//
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
	if cgn != nil {
		a.localval[v] = id
	} else {
		a.globalval[v] = id
	}
	if a.log != nil {
		fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
	}

	// Due to context-sensitivity, we may encounter the same Value
	// in many contexts. We merge them to a canonical node, since
	// that's what all clients want.

	// Record the (v, id) relation if the client has queried pts(v).
	if _, ok := a.config.Queries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.Queries[v]
		if !ok {
			// First time? Create the canonical query node.
			ptr = Pointer{a, a.addNodes(t, "query")}
			a.result.Queries[v] = ptr
		}
		a.copy(ptr.n, id, a.sizeof(t))
	}

	// Record the (*v, id) relation if the client has queried pts(*v).
	if _, ok := a.config.IndirectQueries[v]; ok {
		t := v.Type()
		ptr, ok := a.result.IndirectQueries[v]
		if !ok {
			// First time? Create the canonical indirect query node.
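			// (genLoad below merges pts(*v) from every context
			// into this single canonical node.)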
			ptr = Pointer{a, a.addNodes(v.Type(), "query.indirect")}
			a.result.IndirectQueries[v] = ptr
		}
		a.genLoad(cgn, ptr.n, v, 0, a.sizeof(t))
	}
}

// endObject marks the end of a sequence of calls to addNodes denoting
// a single object allocation.
//
// obj is the start node of the object, from a prior call to nextNode.
// Its size, flags and optional data will be updated.
//
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
	// Ensure object is non-empty by padding;
	// the pad will be the object node.
	size := uint32(a.nextNode() - obj)
	if size == 0 {
		a.addOneNode(tInvalid, "padding", nil)
	}
	objNode := a.nodes[obj]
	o := &object{
		size: size, // excludes padding
		cgn:  cgn,
		data: data,
	}
	objNode.obj = o

	return o
}

// makeFunctionObject creates a new function object (contour) for fn
// and returns the id of its first node. It also enqueues fn for
// subsequent constraint generation.
//
// For a context-sensitive contour, callersite identifies the sole
// callsite; for shared contours, callersite is nil.
//
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
	if a.log != nil {
		fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
	}

	// obj is the function object (identity, params, results).
	obj := a.nextNode()
	cgn := a.makeCGNode(fn, obj, callersite)
	sig := fn.Signature
	a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
	if recv := sig.Recv(); recv != nil {
		a.addNodes(recv.Type(), "func.recv")
	}
	a.addNodes(sig.Params(), "func.params")
	a.addNodes(sig.Results(), "func.results")
	a.endObject(obj, cgn, fn).flags |= otFunction

	if a.log != nil {
		fmt.Fprintf(a.log, "\t----\n")
	}

	// Queue it up for constraint processing.
	a.genq = append(a.genq, cgn)

	return obj
}

// makeTagged creates a tagged object of type typ.
func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
	obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
	a.addNodes(typ, "tagged.v")
	a.endObject(obj, cgn, data).flags |= otTagged
	return obj
}

// makeRtype returns the canonical tagged object of type *rtype whose
// payload points to the sole rtype object for T.
//
// TODO(adonovan): move to reflect.go; it's part of the solver really.
//
func (a *analysis) makeRtype(T types.Type) nodeid {
	if v := a.rtypes.At(T); v != nil {
		return v.(nodeid)
	}

	// Create the object for the reflect.rtype itself, which is
	// ordinarily a large struct but here a single node will do.
	obj := a.nextNode()
	a.addOneNode(T, "reflect.rtype", nil)
	a.endObject(obj, nil, T)

	id := a.makeTagged(a.reflectRtypePtr, nil, T)
	a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
	a.addressOf(a.reflectRtypePtr, id+1, obj)

	a.rtypes.Set(T, id)
	return id
}

// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
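// It panics if obj is not tagged *reflect.rtype.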
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
	tDyn, t, _ := a.taggedValue(obj)
	if tDyn != a.reflectRtypePtr {
		panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
	}
	return a.nodes[t].typ
}

// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
//
func (a *analysis) valueNode(v ssa.Value) nodeid {
	// Value nodes for locals are created en masse by genFunc.
	if id, ok := a.localval[v]; ok {
		return id
	}

	// Value nodes for globals are created on demand.
	id, ok := a.globalval[v]
	if !ok {
		var comment string
		if a.log != nil {
			comment = v.String()
		}
		id = a.addNodes(v.Type(), comment)
		if obj := a.objectNode(nil, v); obj != 0 {
			a.addressOf(v.Type(), id, obj)
		}
		a.setValueNode(v, id, nil)
	}
	return id
}

// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
//
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
	id := a.valueNode(v)
	if id == 0 {
		panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
	}
	return id + nodeid(a.offsetOf(v.Type(), index))
}

// isTaggedObject reports whether object obj is a tagged object.
func (a *analysis) isTaggedObject(obj nodeid) bool {
	return a.nodes[obj].obj.flags&otTagged != 0
}

// taggedValue returns the dynamic type tag, the (first node of the)
// payload, and the indirect flag of the tagged object starting at obj.
// Panic ensues if !isTaggedObject(obj).
//
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
	n := a.nodes[obj]
	flags := n.obj.flags
	if flags&otTagged == 0 {
		panic(fmt.Sprintf("not a tagged object: n%d", obj))
	}
	return n.typ, obj + 1, flags&otIndirect != 0
}

// funcParams returns the first node of the params (P) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcParams(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
	}
	return id + 1
}

// funcResults returns the first node of the results (R) block of the
// function whose object node (obj.flags&otFunction) is id.
//
func (a *analysis) funcResults(id nodeid) nodeid {
	n := a.nodes[id]
	if n.obj == nil || n.obj.flags&otFunction == 0 {
		panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
	}
	sig := n.typ.(*types.Signature)
	id += 1 + nodeid(a.sizeof(sig.Params()))
	if sig.Recv() != nil {
		id += nodeid(a.sizeof(sig.Recv().Type()))
	}
	return id
}

// ---------- Constraint creation ----------

// copy creates a constraint of the form dst = src.
// sizeof is the width (in logical fields) of the copied type.
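// For example, copy(dst, src, 2) emits the constraints dst = src
// and dst+1 = src+1.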
//
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
	if src == dst || sizeof == 0 {
		return // trivial
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&copyConstraint{dst, src})
		src++
		dst++
	}
}

// addressOf creates a constraint of the form id = &obj.
// T is the type of the address.
func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
	if id == 0 {
		panic("addressOf: zero id")
	}
	if obj == 0 {
		panic("addressOf: zero obj")
	}
	if a.shouldTrack(T) {
		a.addConstraint(&addrConstraint{id, obj})
	}
}

// load creates a load constraint of the form dst = src[offset].
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the loaded type.
//
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
	if dst == 0 {
		return // load of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&loadConstraint{offset, dst, src})
		offset++
		dst++
	}
}

// store creates a store constraint of the form dst[offset] = src.
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the stored type.
//
func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
	if src == 0 {
		return // store of non-pointerlike value
	}
	if src == 0 && dst == 0 {
		return // non-pointerlike operation
	}
	if src == 0 || dst == 0 {
		panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
	}
	for i := uint32(0); i < sizeof; i++ {
		a.addConstraint(&storeConstraint{offset, dst, src})
		offset++
		src++
	}
}

// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
// offset is the field offset in logical fields.
// T is the type of the address.
//
func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
	if !a.shouldTrack(T) {
		return
	}
	if offset == 0 {
		// Simplify dst = &src->f0
		//       to dst = src
		// (NB: this optimisation is defeated by the identity
		// field prepended to struct and array objects.)
		a.copy(dst, src, 1)
	} else {
		a.addConstraint(&offsetAddrConstraint{offset, dst, src})
	}
}

// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
// typeFilter for an interface, untag for a concrete type.
// The exact flag is specified as for untagConstraint.
//
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
	if isInterface(T) {
		a.addConstraint(&typeFilterConstraint{T, dst, src})
	} else {
		a.addConstraint(&untagConstraint{T, dst, src, exact})
	}
}

// addConstraint adds c to the constraint set.
func (a *analysis) addConstraint(c constraint) {
	a.constraints = append(a.constraints, c)
	if a.log != nil {
		fmt.Fprintf(a.log, "\t%s\n", c)
	}
}

// copyElems generates load/store constraints for *dst = *src,
// where src and dst are slices or *arrays.
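// The load/store offset of 1 skips the identity node at the start
// of the underlying array object.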
//
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
	tmp := a.addNodes(typ, "copy")
	sz := a.sizeof(typ)
	a.genLoad(cgn, tmp, src, 1, sz)
	a.genStore(cgn, dst, tmp, 1, sz)
}

// ---------- Constraint generation ----------

// genConv generates constraints for the conversion operation conv.
func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
	res := a.valueNode(conv)
	if res == 0 {
		return // result is non-pointerlike
	}

	tSrc := conv.X.Type()
	tDst := conv.Type()

	switch utSrc := tSrc.Underlying().(type) {
	case *types.Slice:
		// []byte/[]rune -> string?
		return

	case *types.Pointer:
		// *T -> unsafe.Pointer?
		if tDst.Underlying() == tUnsafePtr {
			return // we don't model unsafe aliasing (unsound)
		}

	case *types.Basic:
		switch tDst.Underlying().(type) {
		case *types.Pointer:
			// Treat unsafe.Pointer->*T conversions like
			// new(T) and create an unaliased object.
			if utSrc == tUnsafePtr {
				obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Slice:
			// string -> []byte/[]rune (or named aliases)?
			if utSrc.Info()&types.IsString != 0 {
				obj := a.addNodes(sliceToArray(tDst), "convert")
				a.endObject(obj, cgn, conv)
				a.addressOf(tDst, res, obj)
				return
			}

		case *types.Basic:
			// All basic-to-basic type conversions are no-ops.
			// This includes uintptr<->unsafe.Pointer conversions,
			// which we (unsoundly) ignore.
			return
		}
	}

	panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
}

// genAppend generates constraints for a call to append.
func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
	// Consider z = append(x, y). y is optional.
	// This may allocate a new [1]T array; call its object w.
	// We get the following constraints:
	// 	z = x
	// 	z = &w
	// 	*z = *y

	x := instr.Call.Args[0]

	z := instr
	a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x

	if len(instr.Call.Args) == 1 {
		return // no allocation for z = append(x) or _ = append(x).
	}

	// TODO(adonovan): test append([]byte, ...string) []byte.

	y := instr.Call.Args[1]
	tArray := sliceToArray(instr.Call.Args[0].Type())

	w := a.nextNode()
	a.addNodes(tArray, "append")
	a.endObject(w, cgn, instr)

	a.copyElems(cgn, tArray.Elem(), z, y)        // *z = *y
	a.addressOf(instr.Type(), a.valueNode(z), w) // z = &w
}

// genBuiltinCall generates constraints for a call to a built-in.
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
	call := instr.Common()
	switch call.Value.(*ssa.Builtin).Name() {
	case "append":
		// Safe cast: append cannot appear in a go or defer statement.
		a.genAppend(instr.(*ssa.Call), cgn)

	case "copy":
		tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
		a.copyElems(cgn, tElem, call.Args[0], call.Args[1])

	case "panic":
		a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)

	case "recover":
		if v := instr.Value(); v != nil {
			a.copy(a.valueNode(v), a.panicNode, 1)
		}

	case "print":
		// In the tests, the probe might be the sole reference
		// to its arg, so make sure we create nodes for it.
		if len(call.Args) > 0 {
			a.valueNode(call.Args[0])
		}

	case "ssa:wrapnilchk":
		a.copy(a.valueNode(instr.Value()), a.valueNode(call.Args[0]), 1)

	default:
		// No-ops: close len cap real imag complex print println delete.
	}
}

// shouldUseContext defines the context-sensitivity policy. It
// returns true if we should analyse all static calls to fn anew.
//
// Obviously this interface rather limits how much freedom we have to
// choose a policy. The current policy, rather arbitrarily, is true
// for intrinsics and accessor methods (actually: short, single-block,
// call-free functions). This is just a starting point.
//
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
	if a.findIntrinsic(fn) != nil {
		return true // treat intrinsics context-sensitively
	}
	if len(fn.Blocks) != 1 {
		return false // too expensive
	}
	blk := fn.Blocks[0]
	if len(blk.Instrs) > 10 {
		return false // too expensive
	}
	if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
		return true // treat synthetic wrappers context-sensitively
	}
	for _, instr := range blk.Instrs {
		switch instr := instr.(type) {
		case ssa.CallInstruction:
			// Disallow function calls (except to built-ins)
			// because of the danger of unbounded recursion.
			if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
				return false
			}
		}
	}
	return true
}

// genStaticCall generates constraints for a statically dispatched function call.
func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	fn := call.StaticCallee()

	// Special cases for inlined intrinsics.
	switch fn {
	case a.runtimeSetFinalizer:
		// Inline SetFinalizer so the call appears direct.
		site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
		a.addConstraint(&runtimeSetFinalizerConstraint{
			targets: site.targets,
			x:       a.valueNode(call.Args[0]),
			f:       a.valueNode(call.Args[1]),
		})
		return

	case a.reflectValueCall:
		// Inline (reflect.Value).Call so the call appears direct.
		dotdotdot := false
		ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
		if result != 0 {
			a.addressOf(fn.Signature.Results().At(0).Type(), result, ret)
		}
		return
	}

	// Ascertain the context (contour/cgnode) for a particular call.
	var obj nodeid
	if a.shouldUseContext(fn) {
		obj = a.makeFunctionObject(fn, site) // new contour
	} else {
		obj = a.objectNode(nil, fn) // shared contour
	}
	a.callEdge(caller, site, obj)

	sig := call.Signature()

	// Copy receiver, if any.
	params := a.funcParams(obj)
	args := call.Args
	if sig.Recv() != nil {
		sz := a.sizeof(sig.Recv().Type())
		a.copy(params, a.valueNode(args[0]), sz)
		params += nodeid(sz)
		args = args[1:]
	}

	// Copy actual parameters into formal params block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal results block to actual result.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genDynamicCall generates constraints for a dynamic function call.
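//
// In each function object, node 0 is the identity node; the params
// (P) block starts at offset 1 and the results (R) block follows it
// (see funcParams and funcResults). The stores and loads below use
// those offsets for every function the solver discovers in
// pts(targets).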
func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// pts(targets) will be the set of possible call targets.
	site.targets = a.valueNode(call.Value)

	// We add dynamic closure rules that store the arguments into
	// the P-block and load the results from the R-block of each
	// function discovered in pts(targets).

	sig := call.Signature()
	var offset uint32 = 1 // P/R block starts at offset 1
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
		offset += sz
	}
	if result != 0 {
		a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
	}
}

// genInvoke generates constraints for a dynamic method invocation.
func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	if call.Value.Type() == a.reflectType {
		a.genInvokeReflectType(caller, site, call, result)
		return
	}

	sig := call.Signature()

	// Allocate a contiguous targets/params/results block for this call.
	block := a.nextNode()
	// pts(targets) will be the set of possible call targets.
	site.targets = a.addOneNode(sig, "invoke.targets", nil)
	p := a.addNodes(sig.Params(), "invoke.params")
	r := a.addNodes(sig.Results(), "invoke.results")

	// Copy the actual parameters into the call's params block.
	for i, n := 0, sig.Params().Len(); i < n; i++ {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(p, a.valueNode(call.Args[i]), sz)
		p += nodeid(sz)
	}
	// Copy the call's results block to the actual results.
	if result != 0 {
		a.copy(result, r, a.sizeof(sig.Results()))
	}

	// We add a dynamic invoke constraint that will connect the
	// caller's and the callee's P/R blocks for each discovered
	// call target.
	a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
}

// genInvokeReflectType is a specialization of genInvoke where the
// receiver type is a reflect.Type, under the assumption that there
// can be at most one implementation of this interface, *reflect.rtype.
//
// (Though this may appear to be an instance of a pattern---method
// calls on interfaces known to have exactly one implementation---in
// practice it occurs rarely, so we special case for reflect.Type.)
//
// In effect we treat this:
//	var rt reflect.Type = ...
//	rt.F()
// as this:
//	rt.(*reflect.rtype).F()
//
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Unpack receiver into rtype.
	rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
	recv := a.valueNode(call.Value)
	a.typeAssert(a.reflectRtypePtr, rtype, recv, true)

	// Look up the concrete method.
	fn := a.prog.LookupMethod(a.reflectRtypePtr, call.Method.Pkg(), call.Method.Name())

	obj := a.makeFunctionObject(fn, site) // new contour for this call
	a.callEdge(caller, site, obj)

	// From now on, it's essentially a static call, but little is
	// gained by factoring together the code for both cases.

	sig := fn.Signature // concrete method
	targets := a.addOneNode(sig, "call.targets", nil)
	a.addressOf(sig, targets, obj) // (a singleton)

	// Copy receiver.
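	// (The formal receiver is the single rtype node unpacked above.)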
	params := a.funcParams(obj)
	a.copy(params, rtype, 1)
	params++

	// Copy actual parameters into formal P-block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal R-block to actual R-block.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}

// genCall generates constraints for call instruction instr.
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
	call := instr.Common()

	// Intrinsic implementations of built-in functions.
	if _, ok := call.Value.(*ssa.Builtin); ok {
		a.genBuiltinCall(instr, caller)
		return
	}

	var result nodeid
	if v := instr.Value(); v != nil {
		result = a.valueNode(v)
	}

	site := &callsite{instr: instr}
	if call.StaticCallee() != nil {
		a.genStaticCall(caller, site, call, result)
	} else if call.IsInvoke() {
		a.genInvoke(caller, site, call, result)
	} else {
		a.genDynamicCall(caller, site, call, result)
	}

	caller.sites = append(caller.sites, site)

	if a.log != nil {
		// TODO(adonovan): debug: improve log message.
		fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
	}
}

// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic. For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singleton points-to sets:
//	Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
//	FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g. IndexAddr(FieldAddr(Alloc)).
//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	switch v.(type) {
	case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
		// Global object.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				// not addressable

			case *ssa.FreeVar:
				// not addressable
			}

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object.
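	// Local objects are allocation sites in the current context cgn.
	// Their ids are cached in a.localobj, which genFunc discards once
	// the function's constraints have been generated.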
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			elem := a.addNodes(tmap.Elem(), "makemap.value")

			// To update the value field, MapUpdate
			// generates store-with-offset constraints which
			// the presolver can't model, so we must mark
			// those nodes indirect.
			for id, end := elem, elem+nodeid(a.sizeof(tmap.Elem())); id < end; id++ {
				a.mapValues = append(a.mapValues, id)
			}
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)

			// Copy the value into it, if nontrivial.
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		a.localobj[v] = obj
	}
	return obj
}

// genLoad generates constraints for result = *(ptr + offset).
func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply loadConstraint.solve().
		a.copy(result, obj+nodeid(offset), sizeof)
	} else {
		a.load(result, a.valueNode(ptr), offset, sizeof)
	}
}

// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
// or 'v=ptr[*]' (IndexAddr) instruction v.
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
	dst := a.valueNode(v)
	if obj := a.objectNode(cgn, v); obj != 0 {
		// Pre-apply offsetAddrConstraint.solve().
		a.addressOf(v.Type(), dst, obj)
	} else {
		a.offsetAddr(v.Type(), dst, ptr, offset)
	}
}

// genStore generates constraints for *(ptr + offset) = val.
func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
	if obj := a.objectNode(cgn, ptr); obj != 0 {
		// Pre-apply storeConstraint.solve().
		a.copy(obj+nodeid(offset), val, sizeof)
	} else {
		a.store(a.valueNode(ptr), val, offset, sizeof)
	}
}

// genInstr generates constraints for instruction instr in context cgn.
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
	if a.log != nil {
		var prefix string
		if val, ok := instr.(ssa.Value); ok {
			prefix = val.Name() + " = "
		}
		fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
	}

	switch instr := instr.(type) {
	case *ssa.DebugRef:
		// no-op.
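		// (DebugRef merely associates source expressions with SSA
		// values for debugging; no pointers flow through it.)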

	case *ssa.UnOp:
		switch instr.Op {
		case token.ARROW: // <-x
			// We can ignore instr.CommaOk because the node we're
			// altering is always at zero offset relative to instr.
			tElem := instr.X.Type().Underlying().(*types.Chan).Elem()
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(tElem))

		case token.MUL: // *x
			a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))

		default:
			// NOT, SUB, XOR: no-op.
		}

	case *ssa.BinOp:
		// All no-ops.

	case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
		a.genCall(cgn, instr)

	case *ssa.ChangeType:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.Convert:
		a.genConv(instr, cgn)

	case *ssa.Extract:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.Tuple, instr.Index),
			a.sizeof(instr.Type()))

	case *ssa.FieldAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
			a.offsetOf(mustDeref(instr.X.Type()), instr.Field))

	case *ssa.IndexAddr:
		a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)

	case *ssa.Field:
		a.copy(a.valueNode(instr),
			a.valueOffsetNode(instr.X, instr.Field),
			a.sizeof(instr.Type()))

	case *ssa.Index:
		a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))

	case *ssa.Select:
		recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
		for _, st := range instr.States {
			elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
			switch st.Dir {
			case types.RecvOnly:
				a.genLoad(cgn, recv, st.Chan, 0, elemSize)
				recv += nodeid(elemSize)

			case types.SendOnly:
				a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
			}
		}

	case *ssa.Return:
		results := a.funcResults(cgn.obj)
		for _, r := range instr.Results {
			sz := a.sizeof(r.Type())
			a.copy(results, a.valueNode(r), sz)
			results += nodeid(sz)
		}

	case *ssa.Send:
		a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))

	case *ssa.Store:
		a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))

	case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
		v := instr.(ssa.Value)
		a.addressOf(v.Type(), a.valueNode(v), a.objectNode(cgn, v))

	case *ssa.ChangeInterface:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.TypeAssert:
		a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)

	case *ssa.Slice:
		a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)

	case *ssa.If, *ssa.Jump:
		// no-op.

	case *ssa.Phi:
		sz := a.sizeof(instr.Type())
		for _, e := range instr.Edges {
			a.copy(a.valueNode(instr), a.valueNode(e), sz)
		}

	case *ssa.MakeClosure:
		fn := instr.Fn.(*ssa.Function)
		a.copy(a.valueNode(instr), a.valueNode(fn), 1)
		// Free variables are treated like global variables.
		for i, b := range instr.Bindings {
			a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
		}

	case *ssa.RunDefers:
		// The analysis is flow insensitive, so we just "call"
		// defers as we encounter them.

	case *ssa.Range:
		// Do nothing. Next{Iter: *ssa.Range} handles this case.
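		// (Range has no flat node representation; only the Next
		// instruction reads from the map.)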

	case *ssa.Next:
		if !instr.IsString { // map
			// Assumes that Next is always directly applied to a Range result.
			theMap := instr.Iter.(*ssa.Range).X
			tMap := theMap.Type().Underlying().(*types.Map)

			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())

			// The k/v components of the Next tuple may each be invalid.
			tTuple := instr.Type().(*types.Tuple)

			// Load from the map's (k,v) into the tuple's (ok, k, v).
			osrc := uint32(0) // offset within map object
			odst := uint32(1) // offset within tuple (initially just after 'ok bool')
			sz := uint32(0)   // amount to copy

			// Is key valid?
			if tTuple.At(1).Type() != tInvalid {
				sz += ksize
			} else {
				odst += ksize
				osrc += ksize
			}

			// Is value valid?
			if tTuple.At(2).Type() != tInvalid {
				sz += vsize
			}

			a.genLoad(cgn, a.valueNode(instr)+nodeid(odst), theMap, osrc, sz)
		}

	case *ssa.Lookup:
		if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
			// CommaOk can be ignored: field 0 is a no-op.
			ksize := a.sizeof(tMap.Key())
			vsize := a.sizeof(tMap.Elem())
			a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
		}

	case *ssa.MapUpdate:
		tmap := instr.Map.Type().Underlying().(*types.Map)
		ksize := a.sizeof(tmap.Key())
		vsize := a.sizeof(tmap.Elem())
		a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
		a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)

	case *ssa.Panic:
		a.copy(a.panicNode, a.valueNode(instr.X), 1)

	default:
		panic(fmt.Sprintf("unimplemented: %T", instr))
	}
}

func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
	cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
	a.cgnodes = append(a.cgnodes, cgn)
	return cgn
}

// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
//
func (a *analysis) genRootCalls() *cgnode {
	r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
	root := a.makeCGNode(r, 0, nil)

	// TODO(adonovan): make an ssa utility to construct an actual
	// root function so we don't need to special-case site-less
	// call edges.

	// For each main package, call main.init(), main.main().
	for _, mainPkg := range a.config.Mains {
		main := mainPkg.Func("main")
		if main == nil {
			panic(fmt.Sprintf("%s has no main function", mainPkg))
		}

		targets := a.addOneNode(main.Signature, "root.targets", nil)
		site := &callsite{targets: targets}
		root.sites = append(root.sites, site)
		for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
			if a.log != nil {
				fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
			}
			a.copy(targets, a.valueNode(fn), 1)
		}
	}

	return root
}

// genFunc generates constraints for function fn.
func (a *analysis) genFunc(cgn *cgnode) {
	fn := cgn.fn

	impl := a.findIntrinsic(fn)

	if a.log != nil {
		fmt.Fprintf(a.log, "\n\n==== Generating constraints for %s, %s\n", cgn, cgn.contour())

		// Hack: don't display body if intrinsic.
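		// (Print just the signature, by clearing Locals and Blocks
		// on a copy of the function.)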
		if impl != nil {
			fn2 := *cgn.fn // copy
			fn2.Locals = nil
			fn2.Blocks = nil
			fn2.WriteTo(a.log)
		} else {
			cgn.fn.WriteTo(a.log)
		}
	}

	if impl != nil {
		impl(a, cgn)
		return
	}

	if fn.Blocks == nil {
		// External function with no intrinsic treatment.
		// We'll warn about calls to such functions at the end.
		return
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "; Creating nodes for local values")
	}

	a.localval = make(map[ssa.Value]nodeid)
	a.localobj = make(map[ssa.Value]nodeid)

	// The value nodes for the params are in the func object block.
	params := a.funcParams(cgn.obj)
	for _, p := range fn.Params {
		a.setValueNode(p, params, cgn)
		params += nodeid(a.sizeof(p.Type()))
	}

	// Free variables have global cardinality:
	// the outer function sets them with MakeClosure;
	// the inner function accesses them with FreeVar.
	//
	// TODO(adonovan): treat free vars context-sensitively.

	// Create value nodes for all value instructions
	// since SSA may contain forward references.
	var space [10]*ssa.Value
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *ssa.Range:
				// do nothing: it has a funky type,
				// and *ssa.Next does all the work.

			case ssa.Value:
				var comment string
				if a.log != nil {
					comment = instr.Name()
				}
				id := a.addNodes(instr.Type(), comment)
				a.setValueNode(instr, id, cgn)
			}

			// Record all address-taken functions (for presolver).
			rands := instr.Operands(space[:0])
			if call, ok := instr.(ssa.CallInstruction); ok && !call.Common().IsInvoke() {
				// Skip CallCommon.Value in "call" mode.
				// TODO(adonovan): fix: relies on unspecified ordering. Specify it.
				rands = rands[1:]
			}
			for _, rand := range rands {
				if atf, ok := (*rand).(*ssa.Function); ok {
					a.atFuncs[atf] = true
				}
			}
		}
	}

	// Generate constraints for instructions.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			a.genInstr(cgn, instr)
		}
	}

	a.localval = nil
	a.localobj = nil
}

// genMethodsOf generates nodes and constraints for all methods of type T.
func (a *analysis) genMethodsOf(T types.Type) {
	itf := isInterface(T)

	// TODO(adonovan): can we skip this entirely if itf is true?
	// I think so, but the answer may depend on reflection.
	mset := a.prog.MethodSets.MethodSet(T)
	for i, n := 0, mset.Len(); i < n; i++ {
		m := a.prog.MethodValue(mset.At(i))
		a.valueNode(m)

		if !itf {
			// Methods of concrete types are address-taken functions.
			a.atFuncs[m] = true
		}
	}
}

// generate generates offline constraints for the entire program.
func (a *analysis) generate() {
	start("Constraint generation")
	if a.log != nil {
		fmt.Fprintln(a.log, "==== Generating constraints")
	}

	// Create a dummy node since we use the nodeid 0 for
	// non-pointerlike variables.
	a.addNodes(tInvalid, "(zero)")

	// Create the global node for panic values.
	a.panicNode = a.addNodes(tEface, "panic")

	// Create nodes and constraints for all methods of reflect.rtype.
	// (Shared contours are used by dynamic calls to reflect.Type
	// methods---typically just String().)
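	// a.reflectRtypePtr is non-nil only if the program imports reflect.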
	if rtype := a.reflectRtypePtr; rtype != nil {
		a.genMethodsOf(rtype)
	}

	root := a.genRootCalls()

	if a.config.BuildCallGraph {
		a.result.CallGraph = callgraph.New(root.fn)
	}

	// Create nodes and constraints for all methods of all types
	// that are dynamically accessible via reflection or interfaces.
	for _, T := range a.prog.RuntimeTypes() {
		a.genMethodsOf(T)
	}

	// Generate constraints for entire program.
	for len(a.genq) > 0 {
		cgn := a.genq[0]
		a.genq = a.genq[1:]
		a.genFunc(cgn)
	}

	// The runtime magically allocates os.Args; so should we.
	if os := a.prog.ImportedPackage("os"); os != nil {
		// In effect: os.Args = new([1]string)[:]
		T := types.NewSlice(types.Typ[types.String])
		obj := a.addNodes(sliceToArray(T), "<command-line args>")
		a.endObject(obj, nil, "<command-line args>")
		a.addressOf(T, a.objectNode(nil, os.Var("Args")), obj)
	}

	// Discard generation state, to avoid confusion after node renumbering.
	a.panicNode = 0
	a.globalval = nil
	a.localval = nil
	a.localobj = nil

	stop("Constraint generation")
}