gitlab.com/Raven-IO/raven-delve@v1.22.4/pkg/proc/eval.go (about) 1 package proc 2 3 import ( 4 "bytes" 5 "debug/dwarf" 6 "errors" 7 "fmt" 8 "go/ast" 9 "go/constant" 10 "go/parser" 11 "go/printer" 12 "go/token" 13 "reflect" 14 "runtime/debug" 15 "sort" 16 "strings" 17 18 "gitlab.com/Raven-IO/raven-delve/pkg/dwarf/godwarf" 19 "gitlab.com/Raven-IO/raven-delve/pkg/dwarf/op" 20 "gitlab.com/Raven-IO/raven-delve/pkg/dwarf/reader" 21 "gitlab.com/Raven-IO/raven-delve/pkg/goversion" 22 "gitlab.com/Raven-IO/raven-delve/pkg/logflags" 23 "gitlab.com/Raven-IO/raven-delve/pkg/proc/evalop" 24 ) 25 26 var errOperationOnSpecialFloat = errors.New("operations on non-finite floats not implemented") 27 28 const goDictionaryName = ".dict" 29 30 // EvalScope is the scope for variable evaluation. Contains the thread, 31 // current location (PC), and canonical frame address. 32 type EvalScope struct { 33 Location 34 Regs op.DwarfRegisters 35 Mem MemoryReadWriter // Target's memory 36 g *G 37 threadID int 38 BinInfo *BinaryInfo 39 target *Target 40 loadCfg *LoadConfig 41 42 frameOffset int64 43 44 // When the following pointer is not nil this EvalScope was created 45 // by EvalExpressionWithCalls and function call injection are allowed. 46 // See the top comment in fncall.go for a description of how the call 47 // injection protocol is handled. 48 callCtx *callContext 49 50 dictAddr uint64 // dictionary address for instantiated generic functions 51 } 52 53 type localsFlags uint8 54 55 const ( 56 // If localsTrustArgOrder is set function arguments that don't have an 57 // address will have one assigned by looking at their position in the argument 58 // list. 59 localsTrustArgOrder localsFlags = 1 << iota 60 61 // If localsNoDeclLineCheck the declaration line isn't checked at 62 // all to determine if the variable is in scope. 63 localsNoDeclLineCheck 64 ) 65 66 // ConvertEvalScope returns a new EvalScope in the context of the 67 // specified goroutine ID and stack frame. 68 // If deferCall is > 0 the eval scope will be relative to the specified deferred call. 69 func ConvertEvalScope(dbp *Target, gid int64, frame, deferCall int) (*EvalScope, error) { 70 if _, err := dbp.Valid(); err != nil { 71 return nil, err 72 } 73 ct := dbp.CurrentThread() 74 threadID := ct.ThreadID() 75 g, err := FindGoroutine(dbp, gid) 76 if err != nil { 77 return nil, err 78 } 79 80 var opts StacktraceOptions 81 if deferCall > 0 { 82 opts = StacktraceReadDefers 83 } 84 85 var locs []Stackframe 86 if g != nil { 87 if g.Thread != nil { 88 threadID = g.Thread.ThreadID() 89 } 90 locs, err = GoroutineStacktrace(dbp, g, frame+1, opts) 91 } else { 92 locs, err = ThreadStacktrace(dbp, ct, frame+1) 93 } 94 if err != nil { 95 return nil, err 96 } 97 98 if frame >= len(locs) { 99 return nil, fmt.Errorf("Frame %d does not exist in goroutine %d", frame, gid) 100 } 101 102 if deferCall > 0 { 103 if deferCall-1 >= len(locs[frame].Defers) { 104 return nil, fmt.Errorf("Frame %d only has %d deferred calls", frame, len(locs[frame].Defers)) 105 } 106 107 d := locs[frame].Defers[deferCall-1] 108 if d.Unreadable != nil { 109 return nil, d.Unreadable 110 } 111 112 return d.EvalScope(dbp, ct) 113 } 114 115 return FrameToScope(dbp, dbp.Memory(), g, threadID, locs[frame:]...), nil 116 } 117 118 // FrameToScope returns a new EvalScope for frames[0]. 119 // If frames has at least two elements all memory between 120 // frames[0].Regs.SP() and frames[1].Regs.CFA will be cached. 
121 // Otherwise all memory between frames[0].Regs.SP() and frames[0].Regs.CFA 122 // will be cached. 123 func FrameToScope(t *Target, thread MemoryReadWriter, g *G, threadID int, frames ...Stackframe) *EvalScope { 124 // Creates a cacheMem that will preload the entire stack frame the first 125 // time any local variable is read. 126 // Remember that the stack grows downward in memory. 127 minaddr := frames[0].Regs.SP() 128 var maxaddr uint64 129 if len(frames) > 1 && frames[0].SystemStack == frames[1].SystemStack { 130 maxaddr = uint64(frames[1].Regs.CFA) 131 } else { 132 maxaddr = uint64(frames[0].Regs.CFA) 133 } 134 if maxaddr > minaddr && maxaddr-minaddr < maxFramePrefetchSize { 135 thread = cacheMemory(thread, minaddr, int(maxaddr-minaddr)) 136 } 137 138 s := &EvalScope{Location: frames[0].Call, Regs: frames[0].Regs, Mem: thread, g: g, BinInfo: t.BinInfo(), target: t, frameOffset: frames[0].FrameOffset(), threadID: threadID} 139 s.PC = frames[0].lastpc 140 return s 141 } 142 143 // ThreadScope returns an EvalScope for the given thread. 144 func ThreadScope(t *Target, thread Thread) (*EvalScope, error) { 145 locations, err := ThreadStacktrace(t, thread, 1) 146 if err != nil { 147 return nil, err 148 } 149 if len(locations) < 1 { 150 return nil, errors.New("could not decode first frame") 151 } 152 return FrameToScope(t, thread.ProcessMemory(), nil, thread.ThreadID(), locations...), nil 153 } 154 155 // GoroutineScope returns an EvalScope for the goroutine running on the given thread. 156 func GoroutineScope(t *Target, thread Thread) (*EvalScope, error) { 157 locations, err := ThreadStacktrace(t, thread, 1) 158 if err != nil { 159 return nil, err 160 } 161 if len(locations) < 1 { 162 return nil, errors.New("could not decode first frame") 163 } 164 g, err := GetG(thread) 165 if err != nil { 166 return nil, err 167 } 168 threadID := 0 169 if g.Thread != nil { 170 threadID = g.Thread.ThreadID() 171 } 172 return FrameToScope(t, thread.ProcessMemory(), g, threadID, locations...), nil 173 } 174 175 // EvalExpression returns the value of the given expression. 
176 func (scope *EvalScope) EvalExpression(expr string, cfg LoadConfig) (*Variable, error) { 177 ops, err := evalop.Compile(scopeToEvalLookup{scope}, expr, false) 178 if err != nil { 179 return nil, err 180 } 181 182 stack := &evalStack{} 183 184 scope.loadCfg = &cfg 185 stack.eval(scope, ops) 186 ev, err := stack.result(&cfg) 187 if err != nil { 188 return nil, err 189 } 190 191 ev.loadValue(cfg) 192 if ev.Name == "" { 193 ev.Name = expr 194 } 195 return ev, nil 196 } 197 198 type scopeToEvalLookup struct { 199 *EvalScope 200 } 201 202 func (s scopeToEvalLookup) FindTypeExpr(expr ast.Expr) (godwarf.Type, error) { 203 return s.BinInfo.findTypeExpr(expr) 204 } 205 206 func (scope scopeToEvalLookup) HasLocal(name string) bool { 207 if scope.Fn == nil { 208 return false 209 } 210 211 flags := reader.VariablesOnlyVisible 212 if scope.BinInfo.Producer() != "" && goversion.ProducerAfterOrEqual(scope.BinInfo.Producer(), 1, 15) { 213 flags |= reader.VariablesTrustDeclLine 214 } 215 216 dwarfTree, err := scope.image().getDwarfTree(scope.Fn.offset) 217 if err != nil { 218 return false 219 } 220 221 varEntries := reader.Variables(dwarfTree, scope.PC, scope.Line, flags) 222 for _, entry := range varEntries { 223 curname, _ := entry.Val(dwarf.AttrName).(string) 224 if curname == name { 225 return true 226 } 227 if len(curname) > 0 && curname[0] == '&' { 228 if curname[1:] == name { 229 return true 230 } 231 } 232 } 233 return false 234 } 235 236 func (scope scopeToEvalLookup) HasGlobal(pkgName, varName string) bool { 237 hasGlobalInternal := func(name string) bool { 238 for _, pkgvar := range scope.BinInfo.packageVars { 239 if pkgvar.name == name || strings.HasSuffix(pkgvar.name, "/"+name) { 240 return true 241 } 242 } 243 for _, fn := range scope.BinInfo.Functions { 244 if fn.Name == name || strings.HasSuffix(fn.Name, "/"+name) { 245 return true 246 } 247 } 248 for _, ctyp := range scope.BinInfo.consts { 249 for _, cval := range ctyp.values { 250 if cval.fullName == name || strings.HasSuffix(cval.fullName, "/"+name) { 251 return true 252 } 253 } 254 } 255 return false 256 } 257 258 if pkgName == "" { 259 if scope.Fn == nil { 260 return false 261 } 262 return hasGlobalInternal(scope.Fn.PackageName() + "." + varName) 263 } 264 265 for _, pkgPath := range scope.BinInfo.PackageMap[pkgName] { 266 if hasGlobalInternal(pkgPath + "." + varName) { 267 return true 268 } 269 } 270 return hasGlobalInternal(pkgName + "." + varName) 271 } 272 273 func (scope scopeToEvalLookup) LookupRegisterName(name string) (int, bool) { 274 s := validRegisterName(name) 275 if s == "" { 276 return 0, false 277 } 278 return scope.BinInfo.Arch.RegisterNameToDwarf(s) 279 } 280 281 func (scope scopeToEvalLookup) HasBuiltin(name string) bool { 282 return supportedBuiltins[name] != nil 283 } 284 285 // ChanGoroutines returns the list of goroutines waiting to receive from or 286 // send to the channel. 
287 func (scope *EvalScope) ChanGoroutines(expr string, start, count int) ([]int64, error) { 288 t, err := parser.ParseExpr(expr) 289 if err != nil { 290 return nil, err 291 } 292 v, err := scope.evalAST(t) 293 if err != nil { 294 return nil, err 295 } 296 if v.Kind != reflect.Chan { 297 return nil, nil 298 } 299 300 structMemberMulti := func(v *Variable, names ...string) *Variable { 301 for _, name := range names { 302 var err error 303 v, err = v.structMember(name) 304 if err != nil { 305 return nil 306 } 307 } 308 return v 309 } 310 311 waitqFirst := func(qname string) *Variable { 312 qvar := structMemberMulti(v, qname, "first") 313 if qvar == nil { 314 return nil 315 } 316 return qvar.maybeDereference() 317 } 318 319 var goids []int64 320 321 waitqToGoIDSlice := func(qvar *Variable) error { 322 if qvar == nil { 323 return nil 324 } 325 for { 326 if qvar.Addr == 0 { 327 return nil 328 } 329 if len(goids) > count { 330 return nil 331 } 332 goidVar := structMemberMulti(qvar, "g", "goid") 333 if goidVar == nil { 334 return nil 335 } 336 goidVar.loadValue(loadSingleValue) 337 if goidVar.Unreadable != nil { 338 return goidVar.Unreadable 339 } 340 goid, _ := constant.Int64Val(goidVar.Value) 341 if start > 0 { 342 start-- 343 } else { 344 goids = append(goids, goid) 345 } 346 347 nextVar, err := qvar.structMember("next") 348 if err != nil { 349 return err 350 } 351 qvar = nextVar.maybeDereference() 352 } 353 } 354 355 recvqVar := waitqFirst("recvq") 356 err = waitqToGoIDSlice(recvqVar) 357 if err != nil { 358 return nil, err 359 } 360 sendqVar := waitqFirst("sendq") 361 err = waitqToGoIDSlice(sendqVar) 362 if err != nil { 363 return nil, err 364 } 365 return goids, nil 366 } 367 368 // Locals returns all variables in 'scope'. 369 func (scope *EvalScope) Locals(flags localsFlags) ([]*Variable, error) { 370 if scope.Fn == nil { 371 return nil, errors.New("unable to find function context") 372 } 373 374 if scope.image().Stripped() { 375 return nil, errors.New("unable to find locals: no debug information present in binary") 376 } 377 378 trustArgOrder := (flags&localsTrustArgOrder != 0) && scope.BinInfo.Producer() != "" && goversion.ProducerAfterOrEqual(scope.BinInfo.Producer(), 1, 12) && scope.Fn != nil && (scope.PC == scope.Fn.Entry) 379 380 dwarfTree, err := scope.image().getDwarfTree(scope.Fn.offset) 381 if err != nil { 382 return nil, err 383 } 384 385 variablesFlags := reader.VariablesOnlyVisible 386 if flags&localsNoDeclLineCheck != 0 { 387 variablesFlags = reader.VariablesNoDeclLineCheck 388 } 389 if scope.BinInfo.Producer() != "" && goversion.ProducerAfterOrEqual(scope.BinInfo.Producer(), 1, 15) { 390 variablesFlags |= reader.VariablesTrustDeclLine 391 } 392 393 varEntries := reader.Variables(dwarfTree, scope.PC, scope.Line, variablesFlags) 394 395 // look for dictionary entry 396 if scope.dictAddr == 0 { 397 for _, entry := range varEntries { 398 name, _ := entry.Val(dwarf.AttrName).(string) 399 if name == goDictionaryName { 400 dictVar, err := extractVarInfoFromEntry(scope.target, scope.BinInfo, scope.image(), scope.Regs, scope.Mem, entry.Tree, 0) 401 if err != nil { 402 logflags.DebuggerLogger().Errorf("could not load %s variable: %v", name, err) 403 } else if dictVar.Unreadable != nil { 404 logflags.DebuggerLogger().Errorf("could not load %s variable: %v", name, dictVar.Unreadable) 405 } else { 406 scope.dictAddr, err = readUintRaw(dictVar.mem, dictVar.Addr, int64(scope.BinInfo.Arch.PtrSize())) 407 if err != nil { 408 logflags.DebuggerLogger().Errorf("could not load %s variable: %v", 
name, err) 409 } 410 } 411 break 412 } 413 } 414 } 415 416 vars := make([]*Variable, 0, len(varEntries)) 417 depths := make([]int, 0, len(varEntries)) 418 for _, entry := range varEntries { 419 if name, _ := entry.Val(dwarf.AttrName).(string); name == goDictionaryName { 420 continue 421 } 422 val, err := extractVarInfoFromEntry(scope.target, scope.BinInfo, scope.image(), scope.Regs, scope.Mem, entry.Tree, scope.dictAddr) 423 if err != nil { 424 // skip variables that we can't parse yet 425 continue 426 } 427 if trustArgOrder && ((val.Unreadable != nil && val.Addr == 0) || val.Flags&VariableFakeAddress != 0) && entry.Tag == dwarf.TagFormalParameter { 428 addr := afterLastArgAddr(vars) 429 if addr == 0 { 430 addr = uint64(scope.Regs.CFA) 431 } 432 addr = uint64(alignAddr(int64(addr), val.DwarfType.Align())) 433 val = newVariable(val.Name, addr, val.DwarfType, scope.BinInfo, scope.Mem) 434 } 435 vars = append(vars, val) 436 depth := entry.Depth 437 if entry.Tag == dwarf.TagFormalParameter { 438 if depth <= 1 { 439 depth = 0 440 } 441 isret, _ := entry.Val(dwarf.AttrVarParam).(bool) 442 if isret { 443 val.Flags |= VariableReturnArgument 444 } else { 445 val.Flags |= VariableArgument 446 } 447 } 448 depths = append(depths, depth) 449 } 450 451 if len(vars) == 0 { 452 return vars, nil 453 } 454 455 sort.Stable(&variablesByDepthAndDeclLine{vars, depths}) 456 457 lvn := map[string]*Variable{} // lvn[n] is the last variable we saw named n 458 459 for i, v := range vars { 460 if name := v.Name; len(name) > 1 && name[0] == '&' { 461 locationExpr := v.LocationExpr 462 declLine := v.DeclLine 463 v = v.maybeDereference() 464 if v.Addr == 0 && v.Unreadable == nil { 465 v.Unreadable = fmt.Errorf("no address for escaped variable") 466 } 467 v.Name = name[1:] 468 v.Flags |= VariableEscaped 469 // See https://gitlab.com/Raven-IO/raven-delve/issues/2049 for details 470 if locationExpr != nil { 471 locationExpr.isEscaped = true 472 v.LocationExpr = locationExpr 473 } 474 v.DeclLine = declLine 475 vars[i] = v 476 } 477 if otherv := lvn[v.Name]; otherv != nil { 478 otherv.Flags |= VariableShadowed 479 } 480 lvn[v.Name] = v 481 } 482 483 return vars, nil 484 } 485 486 func afterLastArgAddr(vars []*Variable) uint64 { 487 for i := len(vars) - 1; i >= 0; i-- { 488 v := vars[i] 489 if (v.Flags&VariableArgument != 0) || (v.Flags&VariableReturnArgument != 0) { 490 return v.Addr + uint64(v.DwarfType.Size()) 491 } 492 } 493 return 0 494 } 495 496 // setValue writes the value of srcv to dstv. 497 // - If srcv is a numerical literal constant and srcv is of a compatible type 498 // the necessary type conversion is performed. 499 // - If srcv is nil and dstv is of a nil'able type then dstv is nilled. 500 // - If srcv is the empty string and dstv is a string then dstv is set to the 501 // empty string. 502 // - If dstv is an "interface {}" and srcv is either an interface (possibly 503 // non-empty) or a pointer shaped type (map, channel, pointer or struct 504 // containing a single pointer field) the type conversion to "interface {}" 505 // is performed. 506 // - If srcv and dstv have the same type and are both addressable then the 507 // contents of srcv are copied byte-by-byte into dstv 508 func (scope *EvalScope) setValue(dstv, srcv *Variable, srcExpr string) error { 509 srcv.loadValue(loadSingleValue) 510 511 typerr := srcv.isType(dstv.RealType, dstv.Kind) 512 if _, isTypeConvErr := typerr.(*typeConvErr); isTypeConvErr { 513 // attempt iface -> eface and ptr-shaped -> eface conversions. 
514 return convertToEface(srcv, dstv) 515 } 516 if typerr != nil { 517 return typerr 518 } 519 520 if srcv.Unreadable != nil { 521 //lint:ignore ST1005 backwards compatibility 522 return fmt.Errorf("Expression %q is unreadable: %v", srcExpr, srcv.Unreadable) 523 } 524 525 // Numerical types 526 switch dstv.Kind { 527 case reflect.Float32, reflect.Float64: 528 f, _ := constant.Float64Val(srcv.Value) 529 return dstv.writeFloatRaw(f, dstv.RealType.Size()) 530 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 531 n, _ := constant.Int64Val(srcv.Value) 532 return dstv.writeUint(uint64(n), dstv.RealType.Size()) 533 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 534 n, _ := constant.Uint64Val(srcv.Value) 535 return dstv.writeUint(n, dstv.RealType.Size()) 536 case reflect.Bool: 537 return dstv.writeBool(constant.BoolVal(srcv.Value)) 538 case reflect.Complex64, reflect.Complex128: 539 real, _ := constant.Float64Val(constant.Real(srcv.Value)) 540 imag, _ := constant.Float64Val(constant.Imag(srcv.Value)) 541 return dstv.writeComplex(real, imag, dstv.RealType.Size()) 542 case reflect.Func: 543 if dstv.RealType.Size() == 0 { 544 if dstv.Name != "" { 545 return fmt.Errorf("can not assign to %s", dstv.Name) 546 } 547 return errors.New("can not assign to function expression") 548 } 549 } 550 551 // nilling nillable variables 552 if srcv == nilVariable { 553 return dstv.writeZero() 554 } 555 556 if srcv.Kind == reflect.String { 557 if srcv.Base == 0 && srcv.Len > 0 && srcv.Flags&VariableConstant != 0 { 558 return errFuncCallNotAllowedStrAlloc 559 } 560 return dstv.writeString(uint64(srcv.Len), srcv.Base) 561 } 562 563 // slice assignment (this is not handled by the writeCopy below so that 564 // results of a reslice operation can be used here). 565 if srcv.Kind == reflect.Slice { 566 return dstv.writeSlice(srcv.Len, srcv.Cap, srcv.Base) 567 } 568 569 // allow any integer to be converted to any pointer 570 if t, isptr := dstv.RealType.(*godwarf.PtrType); isptr { 571 return dstv.writeUint(srcv.Children[0].Addr, t.ByteSize) 572 } 573 574 // byte-by-byte copying for everything else, but the source must be addressable 575 if srcv.Addr != 0 { 576 return dstv.writeCopy(srcv) 577 } 578 579 return fmt.Errorf("can not set variables of type %s (not implemented)", dstv.Kind.String()) 580 } 581 582 // SetVariable sets the value of the named variable 583 func (scope *EvalScope) SetVariable(name, value string) error { 584 ops, err := evalop.CompileSet(scopeToEvalLookup{scope}, name, value) 585 if err != nil { 586 return err 587 } 588 589 stack := &evalStack{} 590 stack.eval(scope, ops) 591 _, err = stack.result(nil) 592 return err 593 } 594 595 // LocalVariables returns all local variables from the current function scope. 596 func (scope *EvalScope) LocalVariables(cfg LoadConfig) ([]*Variable, error) { 597 vars, err := scope.Locals(0) 598 if err != nil { 599 return nil, err 600 } 601 vars = filterVariables(vars, func(v *Variable) bool { 602 return (v.Flags & (VariableArgument | VariableReturnArgument)) == 0 603 }) 604 cfg.MaxMapBuckets = maxMapBucketsFactor * cfg.MaxArrayValues 605 loadValues(vars, cfg) 606 return vars, nil 607 } 608 609 // FunctionArguments returns the name, value, and type of all current function arguments. 
610 func (scope *EvalScope) FunctionArguments(cfg LoadConfig) ([]*Variable, error) { 611 vars, err := scope.Locals(0) 612 if err != nil { 613 return nil, err 614 } 615 vars = filterVariables(vars, func(v *Variable) bool { 616 return (v.Flags & (VariableArgument | VariableReturnArgument)) != 0 617 }) 618 cfg.MaxMapBuckets = maxMapBucketsFactor * cfg.MaxArrayValues 619 loadValues(vars, cfg) 620 return vars, nil 621 } 622 623 func filterVariables(vars []*Variable, pred func(v *Variable) bool) []*Variable { 624 r := make([]*Variable, 0, len(vars)) 625 for i := range vars { 626 if pred(vars[i]) { 627 r = append(r, vars[i]) 628 } 629 } 630 return r 631 } 632 633 func regsReplaceStaticBase(regs op.DwarfRegisters, image *Image) op.DwarfRegisters { 634 regs.StaticBase = image.StaticBase 635 return regs 636 } 637 638 // PackageVariables returns the name, value, and type of all package variables in the application. 639 func (scope *EvalScope) PackageVariables(cfg LoadConfig) ([]*Variable, error) { 640 pkgvars := make([]packageVar, len(scope.BinInfo.packageVars)) 641 copy(pkgvars, scope.BinInfo.packageVars) 642 sort.Slice(pkgvars, func(i, j int) bool { 643 if pkgvars[i].cu.image.addr == pkgvars[j].cu.image.addr { 644 return pkgvars[i].offset < pkgvars[j].offset 645 } 646 return pkgvars[i].cu.image.addr < pkgvars[j].cu.image.addr 647 }) 648 vars := make([]*Variable, 0, len(scope.BinInfo.packageVars)) 649 for _, pkgvar := range pkgvars { 650 reader := pkgvar.cu.image.dwarfReader 651 reader.Seek(pkgvar.offset) 652 entry, err := reader.Next() 653 if err != nil { 654 return nil, err 655 } 656 657 // Ignore errors trying to extract values 658 val, err := extractVarInfoFromEntry(scope.target, scope.BinInfo, pkgvar.cu.image, regsReplaceStaticBase(scope.Regs, pkgvar.cu.image), scope.Mem, godwarf.EntryToTree(entry), 0) 659 if val != nil && val.Kind == reflect.Invalid { 660 continue 661 } 662 if err != nil { 663 continue 664 } 665 val.loadValue(cfg) 666 vars = append(vars, val) 667 } 668 669 return vars, nil 670 } 671 672 func (scope *EvalScope) findGlobal(pkgName, varName string) (*Variable, error) { 673 for _, pkgPath := range scope.BinInfo.PackageMap[pkgName] { 674 v, err := scope.findGlobalInternal(pkgPath + "." + varName) 675 if err != nil || v != nil { 676 return v, err 677 } 678 } 679 v, err := scope.findGlobalInternal(pkgName + "." + varName) 680 if err != nil || v != nil { 681 return v, err 682 } 683 return nil, fmt.Errorf("could not find symbol value for %s.%s", pkgName, varName) 684 } 685 686 func (scope *EvalScope) findGlobalInternal(name string) (*Variable, error) { 687 for _, pkgvar := range scope.BinInfo.packageVars { 688 if pkgvar.name == name || strings.HasSuffix(pkgvar.name, "/"+name) { 689 reader := pkgvar.cu.image.dwarfReader 690 reader.Seek(pkgvar.offset) 691 entry, err := reader.Next() 692 if err != nil { 693 return nil, err 694 } 695 return extractVarInfoFromEntry(scope.target, scope.BinInfo, pkgvar.cu.image, regsReplaceStaticBase(scope.Regs, pkgvar.cu.image), scope.Mem, godwarf.EntryToTree(entry), 0) 696 } 697 } 698 for _, fn := range scope.BinInfo.Functions { 699 if fn.Name == name || strings.HasSuffix(fn.Name, "/"+name) { 700 //TODO(aarzilli): convert function entry into a function type? 
701 r := newVariable(fn.Name, fn.Entry, &godwarf.FuncType{}, scope.BinInfo, scope.Mem) 702 r.Value = constant.MakeString(fn.Name) 703 r.Base = fn.Entry 704 r.loaded = true 705 if fn.Entry == 0 { 706 r.Unreadable = fmt.Errorf("function %s is inlined", fn.Name) 707 } 708 return r, nil 709 } 710 } 711 for dwref, ctyp := range scope.BinInfo.consts { 712 for _, cval := range ctyp.values { 713 if cval.fullName == name || strings.HasSuffix(cval.fullName, "/"+name) { 714 t, err := scope.BinInfo.Images[dwref.imageIndex].Type(dwref.offset) 715 if err != nil { 716 return nil, err 717 } 718 v := newVariable(name, 0x0, t, scope.BinInfo, scope.Mem) 719 switch v.Kind { 720 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 721 v.Value = constant.MakeInt64(cval.value) 722 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: 723 v.Value = constant.MakeUint64(uint64(cval.value)) 724 default: 725 return nil, fmt.Errorf("unsupported constant kind %v", v.Kind) 726 } 727 v.Flags |= VariableConstant 728 v.loaded = true 729 return v, nil 730 } 731 } 732 } 733 return nil, nil 734 } 735 736 // image returns the image containing the current function. 737 func (scope *EvalScope) image() *Image { 738 return scope.BinInfo.funcToImage(scope.Fn) 739 } 740 741 // evalStack stores the stack machine used to evaluate a program made of 742 // evalop.Ops. 743 // When an opcode sets callInjectionContinue execution of the program will be suspended 744 // and the call injection protocol will be executed instead. 745 type evalStack struct { 746 stack []*Variable // current stack of Variable values 747 fncalls []*functionCallState // stack of call injections currently being executed 748 ops []evalop.Op // program being executed 749 opidx int // program counter for the stack program 750 callInjectionContinue bool // when set program execution suspends and the call injection protocol is executed instead 751 err error 752 753 spoff, bpoff, fboff int64 754 scope *EvalScope 755 curthread Thread 756 lastRetiredFncall *functionCallState 757 } 758 759 func (s *evalStack) push(v *Variable) { 760 s.stack = append(s.stack, v) 761 } 762 763 func (s *evalStack) pop() *Variable { 764 v := s.stack[len(s.stack)-1] 765 s.stack = s.stack[:len(s.stack)-1] 766 return v 767 } 768 769 func (s *evalStack) peek() *Variable { 770 return s.stack[len(s.stack)-1] 771 } 772 773 func (s *evalStack) fncallPush(fncall *functionCallState) { 774 s.fncalls = append(s.fncalls, fncall) 775 } 776 777 func (s *evalStack) fncallPop() *functionCallState { 778 fncall := s.fncalls[len(s.fncalls)-1] 779 s.fncalls = s.fncalls[:len(s.fncalls)-1] 780 return fncall 781 } 782 783 func (s *evalStack) fncallPeek() *functionCallState { 784 return s.fncalls[len(s.fncalls)-1] 785 } 786 787 func (s *evalStack) pushErr(v *Variable, err error) { 788 s.err = err 789 s.stack = append(s.stack, v) 790 } 791 792 // eval evaluates ops. When it returns if callInjectionContinue is set the 793 // target program should be resumed to execute the call injection protocol. 794 // Otherwise the result of the evaluation can be retrieved using 795 // stack.result. 
796 func (stack *evalStack) eval(scope *EvalScope, ops []evalop.Op) { 797 if logflags.FnCall() { 798 fncallLog("eval program:\n%s", evalop.Listing(nil, ops)) 799 } 800 801 stack.ops = ops 802 stack.scope = scope 803 804 if scope.g != nil { 805 stack.spoff = int64(scope.Regs.Uint64Val(scope.Regs.SPRegNum)) - int64(scope.g.stack.hi) 806 stack.bpoff = int64(scope.Regs.Uint64Val(scope.Regs.BPRegNum)) - int64(scope.g.stack.hi) 807 stack.fboff = scope.Regs.FrameBase - int64(scope.g.stack.hi) 808 } 809 810 if scope.g != nil && scope.g.Thread != nil { 811 stack.curthread = scope.g.Thread 812 } 813 814 stack.run() 815 } 816 817 // resume resumes evaluation of stack.ops. When it returns if 818 // callInjectionContinue is set the target program should be resumed to 819 // execute the call injection protocol. Otherwise the result of the 820 // evaluation can be retrieved using stack.result. 821 func (stack *evalStack) resume(g *G) { 822 stack.callInjectionContinue = false 823 scope := stack.scope 824 // Go 1.15 will move call injection execution to a different goroutine, 825 // but we want to keep evaluation on the original goroutine. 826 if g.ID == scope.g.ID { 827 scope.g = g 828 } else { 829 // We are in Go 1.15 and we switched to a new goroutine, the original 830 // goroutine is now parked and therefore does not have a thread 831 // associated. 832 scope.g.Thread = nil 833 scope.g.Status = Gwaiting 834 scope.callCtx.injectionThread = g.Thread 835 } 836 837 // adjust the value of registers inside scope 838 pcreg, bpreg, spreg := scope.Regs.Reg(scope.Regs.PCRegNum), scope.Regs.Reg(scope.Regs.BPRegNum), scope.Regs.Reg(scope.Regs.SPRegNum) 839 scope.Regs.ClearRegisters() 840 scope.Regs.AddReg(scope.Regs.PCRegNum, pcreg) 841 scope.Regs.AddReg(scope.Regs.BPRegNum, bpreg) 842 scope.Regs.AddReg(scope.Regs.SPRegNum, spreg) 843 scope.Regs.Reg(scope.Regs.SPRegNum).Uint64Val = uint64(stack.spoff + int64(scope.g.stack.hi)) 844 scope.Regs.Reg(scope.Regs.BPRegNum).Uint64Val = uint64(stack.bpoff + int64(scope.g.stack.hi)) 845 scope.Regs.FrameBase = stack.fboff + int64(scope.g.stack.hi) 846 scope.Regs.CFA = scope.frameOffset + int64(scope.g.stack.hi) 847 stack.curthread = g.Thread 848 849 finished := funcCallStep(scope, stack, g.Thread) 850 if finished { 851 funcCallFinish(scope, stack) 852 } 853 854 if stack.callInjectionContinue { 855 // not done with call injection, stay in this mode 856 stack.scope.callCtx.injectionThread = nil 857 return 858 } 859 860 // call injection protocol suspended or concluded, resume normal opcode execution 861 stack.run() 862 } 863 864 func (stack *evalStack) run() { 865 scope, curthread := stack.scope, stack.curthread 866 for stack.opidx < len(stack.ops) && stack.err == nil { 867 stack.callInjectionContinue = false 868 stack.executeOp() 869 // If the instruction we just executed requests the call injection 870 // protocol by setting callInjectionContinue we switch to it. 871 if stack.callInjectionContinue { 872 scope.callCtx.injectionThread = nil 873 return 874 } 875 } 876 877 if stack.err == nil && len(stack.fncalls) > 0 { 878 stack.err = fmt.Errorf("internal debugger error: eval program finished without error but %d call injections still active", len(stack.fncalls)) 879 return 880 } 881 882 // If there is an error we must undo all currently executing call 883 // injections before returning. 
884 885 if len(stack.fncalls) > 0 { 886 fncall := stack.fncallPeek() 887 if fncall == stack.lastRetiredFncall { 888 stack.err = fmt.Errorf("internal debugger error: could not undo injected call during error recovery, original error: %v", stack.err) 889 return 890 } 891 if fncall.undoInjection != nil { 892 // setTargetExecuted is set if evalop.CallInjectionSetTarget has been 893 // executed but evalop.CallInjectionComplete hasn't, we must undo the callOP 894 // call in evalop.CallInjectionSetTarget before continuing. 895 switch scope.BinInfo.Arch.Name { 896 case "amd64": 897 regs, _ := curthread.Registers() 898 setSP(curthread, regs.SP()+uint64(scope.BinInfo.Arch.PtrSize())) 899 setPC(curthread, fncall.undoInjection.oldpc) 900 case "arm64", "ppc64le": 901 setLR(curthread, fncall.undoInjection.oldlr) 902 setPC(curthread, fncall.undoInjection.oldpc) 903 default: 904 panic("not implemented") 905 } 906 } 907 stack.lastRetiredFncall = fncall 908 // Resume target to undo one call 909 stack.callInjectionContinue = true 910 scope.callCtx.injectionThread = nil 911 return 912 } 913 } 914 915 func (stack *evalStack) result(cfg *LoadConfig) (*Variable, error) { 916 var r *Variable 917 switch len(stack.stack) { 918 case 0: 919 // ok 920 case 1: 921 r = stack.peek() 922 default: 923 if stack.err == nil { 924 stack.err = fmt.Errorf("internal debugger error: wrong stack size at end %d", len(stack.stack)) 925 } 926 } 927 if r != nil && cfg != nil && stack.err == nil { 928 r.loadValue(*cfg) 929 } 930 return r, stack.err 931 } 932 933 // executeOp executes the opcode at stack.ops[stack.opidx] and increments stack.opidx. 934 func (stack *evalStack) executeOp() { 935 scope, ops, curthread := stack.scope, stack.ops, stack.curthread 936 defer func() { 937 err := recover() 938 if err != nil { 939 stack.err = fmt.Errorf("internal debugger error: %v (recovered)\n%s", err, string(debug.Stack())) 940 } 941 }() 942 switch op := ops[stack.opidx].(type) { 943 case *evalop.PushCurg: 944 if scope.g != nil { 945 stack.push(scope.g.variable.clone()) 946 } else { 947 typ, err := scope.BinInfo.findType("runtime.g") 948 if err != nil { 949 stack.err = fmt.Errorf("could not find runtime.g: %v", err) 950 return 951 } 952 gvar := newVariable("curg", fakeAddressUnresolv, typ, scope.BinInfo, scope.Mem) 953 gvar.loaded = true 954 gvar.Flags = VariableFakeAddress 955 gvar.Children = append(gvar.Children, *newConstant(constant.MakeInt64(0), scope.Mem)) 956 gvar.Children[0].Name = "goid" 957 stack.push(gvar) 958 } 959 960 case *evalop.PushFrameoff: 961 stack.push(newConstant(constant.MakeInt64(scope.frameOffset), scope.Mem)) 962 963 case *evalop.PushThreadID: 964 stack.push(newConstant(constant.MakeInt64(int64(scope.threadID)), scope.Mem)) 965 966 case *evalop.PushConst: 967 stack.push(newConstant(op.Value, scope.Mem)) 968 969 case *evalop.PushLocal: 970 var vars []*Variable 971 var err error 972 if op.Frame != 0 { 973 frameScope, err2 := ConvertEvalScope(scope.target, -1, int(op.Frame), 0) 974 if err2 != nil { 975 stack.err = err2 976 return 977 } 978 vars, err = frameScope.Locals(0) 979 } else { 980 vars, err = scope.Locals(0) 981 } 982 if err != nil { 983 stack.err = err 984 return 985 } 986 found := false 987 for i := range vars { 988 if vars[i].Name == op.Name && vars[i].Flags&VariableShadowed == 0 { 989 stack.push(vars[i]) 990 found = true 991 break 992 } 993 } 994 if !found { 995 stack.err = fmt.Errorf("could not find symbol value for %s", op.Name) 996 } 997 998 case *evalop.PushNil: 999 stack.push(nilVariable) 1000 1001 case 
*evalop.PushRegister: 1002 reg := scope.Regs.Reg(uint64(op.Regnum)) 1003 if reg == nil { 1004 stack.err = fmt.Errorf("could not find symbol value for %s", op.Regname) 1005 return 1006 } 1007 reg.FillBytes() 1008 1009 var typ godwarf.Type 1010 if len(reg.Bytes) <= 8 { 1011 typ = godwarf.FakeBasicType("uint", 64) 1012 } else { 1013 var err error 1014 typ, err = scope.BinInfo.findType("string") 1015 if err != nil { 1016 stack.err = err 1017 return 1018 } 1019 } 1020 1021 v := newVariable(op.Regname, 0, typ, scope.BinInfo, scope.Mem) 1022 if v.Kind == reflect.String { 1023 v.Len = int64(len(reg.Bytes) * 2) 1024 v.Base = fakeAddressUnresolv 1025 } 1026 v.Addr = fakeAddressUnresolv 1027 v.Flags = VariableCPURegister 1028 v.reg = reg 1029 stack.push(v) 1030 1031 case *evalop.PushPackageVar: 1032 pkgName := op.PkgName 1033 replaceName := false 1034 if pkgName == "" { 1035 replaceName = true 1036 pkgName = scope.Fn.PackageName() 1037 } 1038 v, err := scope.findGlobal(pkgName, op.Name) 1039 if err != nil { 1040 stack.err = err 1041 return 1042 } 1043 if replaceName { 1044 v.Name = op.Name 1045 } 1046 stack.push(v) 1047 1048 case *evalop.PushLen: 1049 v := stack.peek() 1050 stack.push(newConstant(constant.MakeInt64(v.Len), scope.Mem)) 1051 1052 case *evalop.Select: 1053 scope.evalStructSelector(op, stack) 1054 1055 case *evalop.TypeAssert: 1056 scope.evalTypeAssert(op, stack) 1057 1058 case *evalop.PointerDeref: 1059 scope.evalPointerDeref(op, stack) 1060 1061 case *evalop.Unary: 1062 scope.evalUnary(op, stack) 1063 1064 case *evalop.AddrOf: 1065 scope.evalAddrOf(op, stack) 1066 1067 case *evalop.TypeCast: 1068 scope.evalTypeCast(op, stack) 1069 1070 case *evalop.Reslice: 1071 scope.evalReslice(op, stack) 1072 1073 case *evalop.Index: 1074 scope.evalIndex(op, stack) 1075 1076 case *evalop.Jump: 1077 scope.evalJump(op, stack) 1078 1079 case *evalop.Binary: 1080 scope.evalBinary(op, stack) 1081 1082 case *evalop.BoolToConst: 1083 x := stack.pop() 1084 if x.Kind != reflect.Bool { 1085 stack.err = errors.New("internal debugger error: expected boolean") 1086 return 1087 } 1088 x.loadValue(loadFullValue) 1089 stack.push(newConstant(x.Value, scope.Mem)) 1090 1091 case *evalop.Pop: 1092 stack.pop() 1093 1094 case *evalop.BuiltinCall: 1095 vars := make([]*Variable, len(op.Args)) 1096 for i := len(op.Args) - 1; i >= 0; i-- { 1097 vars[i] = stack.pop() 1098 } 1099 stack.pushErr(supportedBuiltins[op.Name](vars, op.Args)) 1100 1101 case *evalop.CallInjectionStart: 1102 scope.evalCallInjectionStart(op, stack) 1103 1104 case *evalop.CallInjectionSetTarget: 1105 scope.evalCallInjectionSetTarget(op, stack, curthread) 1106 1107 case *evalop.CallInjectionCopyArg: 1108 fncall := stack.fncallPeek() 1109 actualArg := stack.pop() 1110 if actualArg.Name == "" { 1111 actualArg.Name = exprToString(op.ArgExpr) 1112 } 1113 stack.err = funcCallCopyOneArg(scope, fncall, actualArg, &fncall.formalArgs[op.ArgNum], curthread) 1114 1115 case *evalop.CallInjectionComplete: 1116 stack.fncallPeek().undoInjection = nil 1117 stack.callInjectionContinue = true 1118 1119 case *evalop.CallInjectionStartSpecial: 1120 stack.callInjectionContinue = scope.callInjectionStartSpecial(stack, op, curthread) 1121 1122 case *evalop.ConvertAllocToString: 1123 scope.convertAllocToString(stack) 1124 1125 case *evalop.SetValue: 1126 lhv := stack.pop() 1127 rhv := stack.pop() 1128 stack.err = scope.setValue(lhv, rhv, exprToString(op.Rhe)) 1129 1130 default: 1131 stack.err = fmt.Errorf("internal debugger error: unknown eval opcode: %#v", op) 1132 } 1133 1134 
stack.opidx++ 1135 } 1136 1137 func (scope *EvalScope) evalAST(t ast.Expr) (*Variable, error) { 1138 ops, err := evalop.CompileAST(scopeToEvalLookup{scope}, t) 1139 if err != nil { 1140 return nil, err 1141 } 1142 stack := &evalStack{} 1143 stack.eval(scope, ops) 1144 return stack.result(nil) 1145 } 1146 1147 func exprToString(t ast.Expr) string { 1148 var buf bytes.Buffer 1149 printer.Fprint(&buf, token.NewFileSet(), t) 1150 return buf.String() 1151 } 1152 1153 func (scope *EvalScope) evalJump(op *evalop.Jump, stack *evalStack) { 1154 x := stack.peek() 1155 if op.Pop { 1156 stack.pop() 1157 } 1158 1159 var v bool 1160 switch op.When { 1161 case evalop.JumpIfTrue: 1162 v = true 1163 case evalop.JumpIfFalse: 1164 v = false 1165 case evalop.JumpIfAllocStringChecksFail: 1166 stringChecksFailed := x.Kind != reflect.String || x.Addr != 0 || x.Flags&VariableConstant == 0 || x.Len <= 0 1167 nilCallCtx := scope.callCtx == nil // do not complain here, setValue will if no other errors happen 1168 if stringChecksFailed || nilCallCtx { 1169 stack.opidx = op.Target - 1 1170 return 1171 } 1172 return 1173 } 1174 1175 if x.Kind != reflect.Bool { 1176 if op.Node != nil { 1177 stack.err = fmt.Errorf("expression %q should be boolean not %s", exprToString(op.Node), x.Kind) 1178 } else { 1179 stack.err = errors.New("internal debugger error: expected boolean") 1180 } 1181 return 1182 } 1183 x.loadValue(loadFullValue) 1184 if x.Unreadable != nil { 1185 stack.err = x.Unreadable 1186 return 1187 } 1188 if constant.BoolVal(x.Value) == v { 1189 stack.opidx = op.Target - 1 1190 } 1191 } 1192 1193 // Eval type cast expressions 1194 func (scope *EvalScope) evalTypeCast(op *evalop.TypeCast, stack *evalStack) { 1195 argv := stack.pop() 1196 1197 typ := resolveTypedef(op.DwarfType) 1198 1199 converr := fmt.Errorf("can not convert %q to %s", exprToString(op.Node.Args[0]), typ.String()) 1200 1201 // compatible underlying types 1202 if typeCastCompatibleTypes(argv.RealType, typ) { 1203 if ptyp, isptr := typ.(*godwarf.PtrType); argv.Kind == reflect.Ptr && argv.loaded && len(argv.Children) > 0 && isptr { 1204 cv := argv.Children[0] 1205 argv.Children[0] = *newVariable(cv.Name, cv.Addr, ptyp.Type, cv.bi, cv.mem) 1206 argv.Children[0].OnlyAddr = true 1207 } 1208 argv.RealType = typ 1209 argv.DwarfType = op.DwarfType 1210 stack.push(argv) 1211 return 1212 } 1213 1214 v := newVariable("", 0, op.DwarfType, scope.BinInfo, scope.Mem) 1215 v.loaded = true 1216 1217 switch ttyp := typ.(type) { 1218 case *godwarf.PtrType: 1219 switch argv.Kind { 1220 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 1221 // ok 1222 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 1223 // ok 1224 default: 1225 stack.err = converr 1226 return 1227 } 1228 1229 argv.loadValue(loadSingleValue) 1230 if argv.Unreadable != nil { 1231 stack.err = argv.Unreadable 1232 return 1233 } 1234 1235 n, _ := constant.Int64Val(argv.Value) 1236 1237 mem := scope.Mem 1238 if scope.target != nil { 1239 if mem2 := scope.target.findFakeMemory(uint64(n)); mem2 != nil { 1240 mem = mem2 1241 } 1242 } 1243 1244 v.Children = []Variable{*(newVariable("", uint64(n), ttyp.Type, scope.BinInfo, mem))} 1245 v.Children[0].OnlyAddr = true 1246 stack.push(v) 1247 return 1248 1249 case *godwarf.UintType: 1250 argv.loadValue(loadSingleValue) 1251 if argv.Unreadable != nil { 1252 stack.err = argv.Unreadable 1253 return 1254 } 1255 switch argv.Kind { 1256 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 1257 
n, _ := constant.Int64Val(argv.Value) 1258 v.Value = constant.MakeUint64(convertInt(uint64(n), false, ttyp.Size())) 1259 stack.push(v) 1260 return 1261 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 1262 n, _ := constant.Uint64Val(argv.Value) 1263 v.Value = constant.MakeUint64(convertInt(n, false, ttyp.Size())) 1264 stack.push(v) 1265 return 1266 case reflect.Float32, reflect.Float64: 1267 x, _ := constant.Float64Val(argv.Value) 1268 v.Value = constant.MakeUint64(uint64(x)) 1269 stack.push(v) 1270 return 1271 case reflect.Ptr: 1272 v.Value = constant.MakeUint64(argv.Children[0].Addr) 1273 stack.push(v) 1274 return 1275 } 1276 case *godwarf.IntType: 1277 argv.loadValue(loadSingleValue) 1278 if argv.Unreadable != nil { 1279 stack.err = argv.Unreadable 1280 return 1281 } 1282 switch argv.Kind { 1283 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 1284 n, _ := constant.Int64Val(argv.Value) 1285 v.Value = constant.MakeInt64(int64(convertInt(uint64(n), true, ttyp.Size()))) 1286 stack.push(v) 1287 return 1288 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 1289 n, _ := constant.Uint64Val(argv.Value) 1290 v.Value = constant.MakeInt64(int64(convertInt(n, true, ttyp.Size()))) 1291 stack.push(v) 1292 return 1293 case reflect.Float32, reflect.Float64: 1294 x, _ := constant.Float64Val(argv.Value) 1295 v.Value = constant.MakeInt64(int64(x)) 1296 stack.push(v) 1297 return 1298 } 1299 case *godwarf.FloatType: 1300 argv.loadValue(loadSingleValue) 1301 if argv.Unreadable != nil { 1302 stack.err = argv.Unreadable 1303 return 1304 } 1305 switch argv.Kind { 1306 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 1307 fallthrough 1308 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 1309 fallthrough 1310 case reflect.Float32, reflect.Float64: 1311 v.Value = argv.Value 1312 stack.push(v) 1313 return 1314 } 1315 case *godwarf.ComplexType: 1316 argv.loadValue(loadSingleValue) 1317 if argv.Unreadable != nil { 1318 stack.err = argv.Unreadable 1319 return 1320 } 1321 switch argv.Kind { 1322 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 1323 fallthrough 1324 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 1325 fallthrough 1326 case reflect.Float32, reflect.Float64: 1327 v.Value = argv.Value 1328 stack.push(v) 1329 return 1330 } 1331 } 1332 1333 cfg := loadFullValue 1334 if scope.loadCfg != nil { 1335 cfg = *scope.loadCfg 1336 } 1337 1338 switch ttyp := typ.(type) { 1339 case *godwarf.SliceType: 1340 switch ttyp.ElemType.Common().ReflectKind { 1341 case reflect.Uint8: 1342 // string -> []uint8 1343 if argv.Kind != reflect.String { 1344 stack.err = converr 1345 return 1346 } 1347 cfg.MaxStringLen = cfg.MaxArrayValues 1348 argv.loadValue(cfg) 1349 if argv.Unreadable != nil { 1350 stack.err = argv.Unreadable 1351 return 1352 } 1353 for i, ch := range []byte(constant.StringVal(argv.Value)) { 1354 e := newVariable("", argv.Addr+uint64(i), typ.(*godwarf.SliceType).ElemType, scope.BinInfo, argv.mem) 1355 e.loaded = true 1356 e.Value = constant.MakeInt64(int64(ch)) 1357 v.Children = append(v.Children, *e) 1358 } 1359 v.Len = argv.Len 1360 v.Cap = v.Len 1361 stack.push(v) 1362 return 1363 1364 case reflect.Int32: 1365 // string -> []rune 1366 if argv.Kind != reflect.String { 1367 stack.err = converr 1368 return 1369 } 1370 argv.loadValue(cfg) 1371 if argv.Unreadable != nil { 1372 stack.err = argv.Unreadable 
1373 return 1374 } 1375 for i, ch := range constant.StringVal(argv.Value) { 1376 e := newVariable("", argv.Addr+uint64(i), typ.(*godwarf.SliceType).ElemType, scope.BinInfo, argv.mem) 1377 e.loaded = true 1378 e.Value = constant.MakeInt64(int64(ch)) 1379 v.Children = append(v.Children, *e) 1380 } 1381 v.Len = int64(len(v.Children)) 1382 v.Cap = v.Len 1383 stack.push(v) 1384 return 1385 } 1386 1387 case *godwarf.StringType: 1388 switch argv.Kind { 1389 case reflect.String: 1390 // string -> string 1391 argv.DwarfType = v.DwarfType 1392 argv.RealType = v.RealType 1393 stack.push(argv) 1394 return 1395 case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint, reflect.Uintptr: 1396 // integer -> string 1397 argv.loadValue(cfg) 1398 if argv.Unreadable != nil { 1399 stack.err = argv.Unreadable 1400 return 1401 } 1402 b, _ := constant.Int64Val(argv.Value) 1403 s := string(rune(b)) 1404 v.Value = constant.MakeString(s) 1405 v.Len = int64(len(s)) 1406 stack.push(v) 1407 return 1408 case reflect.Slice, reflect.Array: 1409 var elem godwarf.Type 1410 if argv.Kind == reflect.Slice { 1411 elem = argv.RealType.(*godwarf.SliceType).ElemType 1412 } else { 1413 elem = argv.RealType.(*godwarf.ArrayType).Type 1414 } 1415 switch elemType := elem.(type) { 1416 case *godwarf.UintType: 1417 // []uint8 -> string 1418 if elemType.Name != "uint8" && elemType.Name != "byte" { 1419 stack.err = converr 1420 return 1421 } 1422 cfg.MaxArrayValues = cfg.MaxStringLen 1423 argv.loadValue(cfg) 1424 if argv.Unreadable != nil { 1425 stack.err = argv.Unreadable 1426 return 1427 } 1428 bytes := make([]byte, len(argv.Children)) 1429 for i := range argv.Children { 1430 n, _ := constant.Int64Val(argv.Children[i].Value) 1431 bytes[i] = byte(n) 1432 } 1433 v.Value = constant.MakeString(string(bytes)) 1434 v.Len = argv.Len 1435 1436 case *godwarf.IntType: 1437 // []rune -> string 1438 if elemType.Name != "int32" && elemType.Name != "rune" { 1439 stack.err = converr 1440 return 1441 } 1442 cfg.MaxArrayValues = cfg.MaxStringLen 1443 argv.loadValue(cfg) 1444 if argv.Unreadable != nil { 1445 stack.err = argv.Unreadable 1446 return 1447 } 1448 runes := make([]rune, len(argv.Children)) 1449 for i := range argv.Children { 1450 n, _ := constant.Int64Val(argv.Children[i].Value) 1451 runes[i] = rune(n) 1452 } 1453 v.Value = constant.MakeString(string(runes)) 1454 // The following line is wrong but the only way to get the correct value 1455 // would be to decode the entire slice. 1456 v.Len = int64(len(constant.StringVal(v.Value))) 1457 1458 default: 1459 stack.err = converr 1460 return 1461 } 1462 stack.push(v) 1463 return 1464 } 1465 } 1466 1467 stack.err = converr 1468 } 1469 1470 // typeCastCompatibleTypes returns true if typ1 and typ2 are compatible for 1471 // a type cast where only the type of the variable is changed. 
1472 func typeCastCompatibleTypes(typ1, typ2 godwarf.Type) bool { 1473 if typ1 == nil || typ2 == nil || typ1.Common().Size() != typ2.Common().Size() || typ1.Common().Align() != typ2.Common().Align() { 1474 return false 1475 } 1476 1477 if typ1.String() == typ2.String() { 1478 return true 1479 } 1480 1481 switch ttyp1 := typ1.(type) { 1482 case *godwarf.PtrType: 1483 if ttyp2, ok := typ2.(*godwarf.PtrType); ok { 1484 _, isvoid1 := ttyp1.Type.(*godwarf.VoidType) 1485 _, isvoid2 := ttyp2.Type.(*godwarf.VoidType) 1486 if isvoid1 || isvoid2 { 1487 return true 1488 } 1489 // pointer types are compatible if their element types are compatible 1490 return typeCastCompatibleTypes(resolveTypedef(ttyp1.Type), resolveTypedef(ttyp2.Type)) 1491 } 1492 case *godwarf.StringType: 1493 if _, ok := typ2.(*godwarf.StringType); ok { 1494 return true 1495 } 1496 case *godwarf.StructType: 1497 if ttyp2, ok := typ2.(*godwarf.StructType); ok { 1498 // struct types are compatible if they have the same fields 1499 if len(ttyp1.Field) != len(ttyp2.Field) { 1500 return false 1501 } 1502 for i := range ttyp1.Field { 1503 if *ttyp1.Field[i] != *ttyp2.Field[i] { 1504 return false 1505 } 1506 } 1507 return true 1508 } 1509 case *godwarf.ComplexType: 1510 if _, ok := typ2.(*godwarf.ComplexType); ok { 1511 // size and alignment already checked above 1512 return true 1513 } 1514 case *godwarf.FloatType: 1515 if _, ok := typ2.(*godwarf.FloatType); ok { 1516 // size and alignment already checked above 1517 return true 1518 } 1519 case *godwarf.IntType: 1520 if _, ok := typ2.(*godwarf.IntType); ok { 1521 // size and alignment already checked above 1522 return true 1523 } 1524 case *godwarf.UintType: 1525 if _, ok := typ2.(*godwarf.UintType); ok { 1526 // size and alignment already checked above 1527 return true 1528 } 1529 case *godwarf.BoolType: 1530 if _, ok := typ2.(*godwarf.BoolType); ok { 1531 // size and alignment already checked above 1532 return true 1533 } 1534 } 1535 1536 return false 1537 } 1538 1539 func convertInt(n uint64, signed bool, size int64) uint64 { 1540 bits := uint64(size) * 8 1541 mask := uint64((1 << bits) - 1) 1542 r := n & mask 1543 if signed && (r>>(bits-1)) != 0 { 1544 // sign extension 1545 r |= ^uint64(0) &^ mask 1546 } 1547 return r 1548 } 1549 1550 var supportedBuiltins = map[string]func([]*Variable, []ast.Expr) (*Variable, error){ 1551 "cap": capBuiltin, 1552 "len": lenBuiltin, 1553 "complex": complexBuiltin, 1554 "imag": imagBuiltin, 1555 "real": realBuiltin, 1556 "min": minBuiltin, 1557 "max": maxBuiltin, 1558 } 1559 1560 func capBuiltin(args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1561 if len(args) != 1 { 1562 return nil, fmt.Errorf("wrong number of arguments to cap: %d", len(args)) 1563 } 1564 1565 arg := args[0] 1566 invalidArgErr := fmt.Errorf("invalid argument %s (type %s) for cap", exprToString(nodeargs[0]), arg.TypeString()) 1567 1568 switch arg.Kind { 1569 case reflect.Ptr: 1570 arg = arg.maybeDereference() 1571 if arg.Kind != reflect.Array { 1572 return nil, invalidArgErr 1573 } 1574 fallthrough 1575 case reflect.Array: 1576 return newConstant(constant.MakeInt64(arg.Len), arg.mem), nil 1577 case reflect.Slice: 1578 return newConstant(constant.MakeInt64(arg.Cap), arg.mem), nil 1579 case reflect.Chan: 1580 arg.loadValue(loadFullValue) 1581 if arg.Unreadable != nil { 1582 return nil, arg.Unreadable 1583 } 1584 if arg.Base == 0 { 1585 return newConstant(constant.MakeInt64(0), arg.mem), nil 1586 } 1587 return newConstant(arg.Children[1].Value, arg.mem), nil 1588 default: 
1589 return nil, invalidArgErr 1590 } 1591 } 1592 1593 func lenBuiltin(args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1594 if len(args) != 1 { 1595 return nil, fmt.Errorf("wrong number of arguments to len: %d", len(args)) 1596 } 1597 arg := args[0] 1598 invalidArgErr := fmt.Errorf("invalid argument %s (type %s) for len", exprToString(nodeargs[0]), arg.TypeString()) 1599 1600 switch arg.Kind { 1601 case reflect.Ptr: 1602 arg = arg.maybeDereference() 1603 if arg.Kind != reflect.Array { 1604 return nil, invalidArgErr 1605 } 1606 fallthrough 1607 case reflect.Array, reflect.Slice, reflect.String: 1608 if arg.Unreadable != nil { 1609 return nil, arg.Unreadable 1610 } 1611 return newConstant(constant.MakeInt64(arg.Len), arg.mem), nil 1612 case reflect.Chan: 1613 arg.loadValue(loadFullValue) 1614 if arg.Unreadable != nil { 1615 return nil, arg.Unreadable 1616 } 1617 if arg.Base == 0 { 1618 return newConstant(constant.MakeInt64(0), arg.mem), nil 1619 } 1620 return newConstant(arg.Children[0].Value, arg.mem), nil 1621 case reflect.Map: 1622 it := arg.mapIterator() 1623 if arg.Unreadable != nil { 1624 return nil, arg.Unreadable 1625 } 1626 if it == nil { 1627 return newConstant(constant.MakeInt64(0), arg.mem), nil 1628 } 1629 return newConstant(constant.MakeInt64(arg.Len), arg.mem), nil 1630 default: 1631 return nil, invalidArgErr 1632 } 1633 } 1634 1635 func complexBuiltin(args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1636 if len(args) != 2 { 1637 return nil, fmt.Errorf("wrong number of arguments to complex: %d", len(args)) 1638 } 1639 1640 realev := args[0] 1641 imagev := args[1] 1642 1643 realev.loadValue(loadSingleValue) 1644 imagev.loadValue(loadSingleValue) 1645 1646 if realev.Unreadable != nil { 1647 return nil, realev.Unreadable 1648 } 1649 1650 if imagev.Unreadable != nil { 1651 return nil, imagev.Unreadable 1652 } 1653 1654 if realev.Value == nil || ((realev.Value.Kind() != constant.Int) && (realev.Value.Kind() != constant.Float)) { 1655 return nil, fmt.Errorf("invalid argument 1 %s (type %s) to complex", exprToString(nodeargs[0]), realev.TypeString()) 1656 } 1657 1658 if imagev.Value == nil || ((imagev.Value.Kind() != constant.Int) && (imagev.Value.Kind() != constant.Float)) { 1659 return nil, fmt.Errorf("invalid argument 2 %s (type %s) to complex", exprToString(nodeargs[1]), imagev.TypeString()) 1660 } 1661 1662 sz := int64(0) 1663 if realev.RealType != nil { 1664 sz = realev.RealType.(*godwarf.FloatType).Size() 1665 } 1666 if imagev.RealType != nil { 1667 isz := imagev.RealType.(*godwarf.FloatType).Size() 1668 if isz > sz { 1669 sz = isz 1670 } 1671 } 1672 1673 if sz == 0 { 1674 sz = 128 1675 } 1676 1677 typ := godwarf.FakeBasicType("complex", int(sz)) 1678 1679 r := realev.newVariable("", 0, typ, nil) 1680 r.Value = constant.BinaryOp(realev.Value, token.ADD, constant.MakeImag(imagev.Value)) 1681 return r, nil 1682 } 1683 1684 func imagBuiltin(args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1685 if len(args) != 1 { 1686 return nil, fmt.Errorf("wrong number of arguments to imag: %d", len(args)) 1687 } 1688 1689 arg := args[0] 1690 arg.loadValue(loadSingleValue) 1691 1692 if arg.Unreadable != nil { 1693 return nil, arg.Unreadable 1694 } 1695 1696 if arg.Kind != reflect.Complex64 && arg.Kind != reflect.Complex128 { 1697 return nil, fmt.Errorf("invalid argument %s (type %s) to imag", exprToString(nodeargs[0]), arg.TypeString()) 1698 } 1699 1700 return newConstant(constant.Imag(arg.Value), arg.mem), nil 1701 } 1702 1703 func realBuiltin(args 
[]*Variable, nodeargs []ast.Expr) (*Variable, error) { 1704 if len(args) != 1 { 1705 return nil, fmt.Errorf("wrong number of arguments to real: %d", len(args)) 1706 } 1707 1708 arg := args[0] 1709 arg.loadValue(loadSingleValue) 1710 1711 if arg.Unreadable != nil { 1712 return nil, arg.Unreadable 1713 } 1714 1715 if arg.Value == nil || ((arg.Value.Kind() != constant.Int) && (arg.Value.Kind() != constant.Float) && (arg.Value.Kind() != constant.Complex)) { 1716 return nil, fmt.Errorf("invalid argument %s (type %s) to real", exprToString(nodeargs[0]), arg.TypeString()) 1717 } 1718 1719 return newConstant(constant.Real(arg.Value), arg.mem), nil 1720 } 1721 1722 func minBuiltin(args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1723 return minmaxBuiltin("min", token.LSS, args, nodeargs) 1724 } 1725 1726 func maxBuiltin(args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1727 return minmaxBuiltin("max", token.GTR, args, nodeargs) 1728 } 1729 1730 func minmaxBuiltin(name string, op token.Token, args []*Variable, nodeargs []ast.Expr) (*Variable, error) { 1731 var best *Variable 1732 1733 for i := range args { 1734 if args[i].Kind == reflect.String { 1735 args[i].loadValue(loadFullValueLongerStrings) 1736 } else { 1737 args[i].loadValue(loadFullValue) 1738 } 1739 1740 if args[i].Unreadable != nil { 1741 return nil, fmt.Errorf("could not load %q: %v", exprToString(nodeargs[i]), args[i].Unreadable) 1742 } 1743 if args[i].FloatSpecial != 0 { 1744 return nil, errOperationOnSpecialFloat 1745 } 1746 1747 if best == nil { 1748 best = args[i] 1749 continue 1750 } 1751 1752 _, err := negotiateType(op, args[i], best) 1753 if err != nil { 1754 return nil, err 1755 } 1756 1757 v, err := compareOp(op, args[i], best) 1758 if err != nil { 1759 return nil, err 1760 } 1761 1762 if v { 1763 best = args[i] 1764 } 1765 } 1766 1767 if best == nil { 1768 return nil, fmt.Errorf("not enough arguments to %s", name) 1769 } 1770 return best, nil 1771 } 1772 1773 // Evaluates expressions <subexpr>.<field name> where subexpr is not a package name 1774 func (scope *EvalScope) evalStructSelector(op *evalop.Select, stack *evalStack) { 1775 xv := stack.pop() 1776 // Prevent abuse, attempting to call "nil.member" directly. 1777 if xv.Addr == 0 && xv.Name == "nil" { 1778 stack.err = fmt.Errorf("%s (type %s) is not a struct", xv.Name, xv.TypeString()) 1779 return 1780 } 1781 // Prevent abuse, attempting to call "\"fake\".member" directly. 
1782 if xv.Addr == 0 && xv.Name == "" && xv.DwarfType == nil && xv.RealType == nil { 1783 stack.err = fmt.Errorf("%s (type %s) is not a struct", xv.Value, xv.TypeString()) 1784 return 1785 } 1786 // Special type conversions for CPU register variables (REGNAME.int8, etc) 1787 if xv.Flags&VariableCPURegister != 0 && !xv.loaded { 1788 stack.pushErr(xv.registerVariableTypeConv(op.Name)) 1789 return 1790 } 1791 1792 rv, err := xv.findMethod(op.Name) 1793 if err != nil { 1794 stack.err = err 1795 return 1796 } 1797 if rv != nil { 1798 stack.push(rv) 1799 return 1800 } 1801 stack.pushErr(xv.structMember(op.Name)) 1802 } 1803 1804 // Evaluates expressions <subexpr>.(<type>) 1805 func (scope *EvalScope) evalTypeAssert(op *evalop.TypeAssert, stack *evalStack) { 1806 xv := stack.pop() 1807 if xv.Kind != reflect.Interface { 1808 stack.err = fmt.Errorf("expression %q not an interface", exprToString(op.Node.X)) 1809 return 1810 } 1811 xv.loadInterface(0, false, loadFullValue) 1812 if xv.Unreadable != nil { 1813 stack.err = xv.Unreadable 1814 return 1815 } 1816 if xv.Children[0].Unreadable != nil { 1817 stack.err = xv.Children[0].Unreadable 1818 return 1819 } 1820 if xv.Children[0].Addr == 0 { 1821 stack.err = fmt.Errorf("interface conversion: %s is nil, not %s", xv.DwarfType.String(), exprToString(op.Node.Type)) 1822 return 1823 } 1824 typ := op.DwarfType 1825 if typ != nil && xv.Children[0].DwarfType.Common().Name != typ.Common().Name { 1826 stack.err = fmt.Errorf("interface conversion: %s is %s, not %s", xv.DwarfType.Common().Name, xv.Children[0].TypeString(), typ.Common().Name) 1827 return 1828 } 1829 // loadInterface will set OnlyAddr for the data member since here we are 1830 // passing false to loadData, however returning the variable with OnlyAddr 1831 // set here would be wrong since, once the expression evaluation 1832 // terminates, the value of this variable will be loaded. 
1833 xv.Children[0].OnlyAddr = false 1834 stack.push(&xv.Children[0]) 1835 } 1836 1837 // Evaluates expressions <subexpr>[<subexpr>] (subscript access to arrays, slices and maps) 1838 func (scope *EvalScope) evalIndex(op *evalop.Index, stack *evalStack) { 1839 idxev := stack.pop() 1840 xev := stack.pop() 1841 if xev.Unreadable != nil { 1842 stack.err = xev.Unreadable 1843 return 1844 } 1845 1846 if xev.Flags&VariableCPtr == 0 { 1847 xev = xev.maybeDereference() 1848 } 1849 1850 cantindex := fmt.Errorf("expression %q (%s) does not support indexing", exprToString(op.Node.X), xev.TypeString()) 1851 1852 switch xev.Kind { 1853 case reflect.Ptr: 1854 if xev == nilVariable { 1855 stack.err = cantindex 1856 return 1857 } 1858 if xev.Flags&VariableCPtr == 0 { 1859 _, isarrptr := xev.RealType.(*godwarf.PtrType).Type.(*godwarf.ArrayType) 1860 if !isarrptr { 1861 stack.err = cantindex 1862 return 1863 } 1864 xev = xev.maybeDereference() 1865 } 1866 fallthrough 1867 1868 case reflect.Slice, reflect.Array, reflect.String: 1869 if xev.Base == 0 { 1870 stack.err = fmt.Errorf("can not index %q", exprToString(op.Node.X)) 1871 return 1872 } 1873 n, err := idxev.asInt() 1874 if err != nil { 1875 stack.err = err 1876 return 1877 } 1878 stack.pushErr(xev.sliceAccess(int(n))) 1879 return 1880 1881 case reflect.Map: 1882 idxev.loadValue(loadFullValue) 1883 if idxev.Unreadable != nil { 1884 stack.err = idxev.Unreadable 1885 return 1886 } 1887 stack.pushErr(xev.mapAccess(idxev)) 1888 return 1889 default: 1890 stack.err = cantindex 1891 return 1892 } 1893 } 1894 1895 // Evaluates expressions <subexpr>[<subexpr>:<subexpr>] 1896 // HACK: slicing a map expression with [0:0] will return the whole map 1897 func (scope *EvalScope) evalReslice(op *evalop.Reslice, stack *evalStack) { 1898 low, err := stack.pop().asInt() 1899 if err != nil { 1900 stack.err = err 1901 return 1902 } 1903 var high int64 1904 if op.HasHigh { 1905 high, err = stack.pop().asInt() 1906 if err != nil { 1907 stack.err = err 1908 return 1909 } 1910 } 1911 xev := stack.pop() 1912 if xev.Unreadable != nil { 1913 stack.err = xev.Unreadable 1914 return 1915 } 1916 if !op.HasHigh { 1917 high = xev.Len 1918 } 1919 1920 switch xev.Kind { 1921 case reflect.Slice, reflect.Array, reflect.String: 1922 if xev.Base == 0 { 1923 stack.err = fmt.Errorf("can not slice %q", exprToString(op.Node.X)) 1924 return 1925 } 1926 stack.pushErr(xev.reslice(low, high, op.TrustLen)) 1927 return 1928 case reflect.Map: 1929 if op.Node.High != nil { 1930 stack.err = fmt.Errorf("second slice argument must be empty for maps") 1931 return 1932 } 1933 xev.mapSkip += int(low) 1934 xev.mapIterator() // reads map length 1935 if int64(xev.mapSkip) >= xev.Len { 1936 stack.err = fmt.Errorf("map index out of bounds") 1937 return 1938 } 1939 stack.push(xev) 1940 return 1941 case reflect.Ptr: 1942 if xev.Flags&VariableCPtr != 0 { 1943 stack.pushErr(xev.reslice(low, high, op.TrustLen)) 1944 return 1945 } 1946 fallthrough 1947 default: 1948 stack.err = fmt.Errorf("can not slice %q (type %s)", exprToString(op.Node.X), xev.TypeString()) 1949 return 1950 } 1951 } 1952 1953 // Evaluates a pointer dereference expression: *<subexpr> 1954 func (scope *EvalScope) evalPointerDeref(op *evalop.PointerDeref, stack *evalStack) { 1955 xev := stack.pop() 1956 1957 if xev.Kind != reflect.Ptr { 1958 stack.err = fmt.Errorf("expression %q (%s) can not be dereferenced", exprToString(op.Node.X), xev.TypeString()) 1959 return 1960 } 1961 1962 if xev == nilVariable { 1963 stack.err = fmt.Errorf("nil can not be 
dereferenced") 1964 return 1965 } 1966 1967 if len(xev.Children) == 1 { 1968 // this branch is here to support pointers constructed with typecasts from ints 1969 xev.Children[0].OnlyAddr = false 1970 stack.push(&(xev.Children[0])) 1971 return 1972 } 1973 xev.loadPtr() 1974 if xev.Unreadable != nil { 1975 val, ok := constant.Uint64Val(xev.Value) 1976 if ok && val == 0 { 1977 stack.err = fmt.Errorf("couldn't read pointer: %w", xev.Unreadable) 1978 return 1979 } 1980 } 1981 rv := &xev.Children[0] 1982 if rv.Addr == 0 { 1983 stack.err = fmt.Errorf("nil pointer dereference") 1984 return 1985 } 1986 stack.push(rv) 1987 } 1988 1989 // Evaluates expressions &<subexpr> 1990 func (scope *EvalScope) evalAddrOf(op *evalop.AddrOf, stack *evalStack) { 1991 xev := stack.pop() 1992 if xev.Addr == 0 || xev.DwarfType == nil { 1993 stack.err = fmt.Errorf("can not take address of %q", exprToString(op.Node.X)) 1994 return 1995 } 1996 1997 stack.push(xev.pointerToVariable()) 1998 } 1999 2000 func (v *Variable) pointerToVariable() *Variable { 2001 v.OnlyAddr = true 2002 2003 typename := "*" + v.DwarfType.Common().Name 2004 rv := v.newVariable("", 0, &godwarf.PtrType{CommonType: godwarf.CommonType{ByteSize: int64(v.bi.Arch.PtrSize()), Name: typename}, Type: v.DwarfType}, v.mem) 2005 rv.Children = []Variable{*v} 2006 rv.loaded = true 2007 2008 return rv 2009 } 2010 2011 func constantUnaryOp(op token.Token, y constant.Value) (r constant.Value, err error) { 2012 defer func() { 2013 if ierr := recover(); ierr != nil { 2014 err = fmt.Errorf("%v", ierr) 2015 } 2016 }() 2017 r = constant.UnaryOp(op, y, 0) 2018 return 2019 } 2020 2021 func constantBinaryOp(op token.Token, x, y constant.Value) (r constant.Value, err error) { 2022 defer func() { 2023 if ierr := recover(); ierr != nil { 2024 err = fmt.Errorf("%v", ierr) 2025 } 2026 }() 2027 switch op { 2028 case token.SHL, token.SHR: 2029 n, _ := constant.Uint64Val(y) 2030 r = constant.Shift(x, op, uint(n)) 2031 default: 2032 r = constant.BinaryOp(x, op, y) 2033 } 2034 return 2035 } 2036 2037 func constantCompare(op token.Token, x, y constant.Value) (r bool, err error) { 2038 defer func() { 2039 if ierr := recover(); ierr != nil { 2040 err = fmt.Errorf("%v", ierr) 2041 } 2042 }() 2043 r = constant.Compare(x, op, y) 2044 return 2045 } 2046 2047 // Evaluates expressions: -<subexpr> and +<subexpr> 2048 func (scope *EvalScope) evalUnary(op *evalop.Unary, stack *evalStack) { 2049 xv := stack.pop() 2050 2051 xv.loadValue(loadSingleValue) 2052 if xv.Unreadable != nil { 2053 stack.err = xv.Unreadable 2054 return 2055 } 2056 if xv.FloatSpecial != 0 { 2057 stack.err = errOperationOnSpecialFloat 2058 return 2059 } 2060 if xv.Value == nil { 2061 stack.err = fmt.Errorf("operator %s can not be applied to %q", op.Node.Op.String(), exprToString(op.Node.X)) 2062 return 2063 } 2064 rc, err := constantUnaryOp(op.Node.Op, xv.Value) 2065 if err != nil { 2066 stack.err = err 2067 return 2068 } 2069 if xv.DwarfType != nil { 2070 r := xv.newVariable("", 0, xv.DwarfType, scope.Mem) 2071 r.Value = rc 2072 stack.push(r) 2073 return 2074 } 2075 stack.push(newConstant(rc, xv.mem)) 2076 } 2077 2078 func negotiateType(op token.Token, xv, yv *Variable) (godwarf.Type, error) { 2079 if xv == nilVariable { 2080 return nil, negotiateTypeNil(op, yv) 2081 } 2082 2083 if yv == nilVariable { 2084 return nil, negotiateTypeNil(op, xv) 2085 } 2086 2087 if op == token.SHR || op == token.SHL { 2088 if xv.Value == nil || xv.Value.Kind() != constant.Int { 2089 return nil, fmt.Errorf("shift of type %s", xv.Kind) 2090 } 
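// The shift count must be an unsigned integer, or a signed integer whose constant value is not negative; any other kind is rejected by the switch below.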
2091 2092 switch yv.Kind { 2093 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: 2094 // ok 2095 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 2096 if constant.Sign(yv.Value) < 0 { 2097 return nil, fmt.Errorf("shift count must not be negative") 2098 } 2099 default: 2100 return nil, fmt.Errorf("shift count type %s, must be unsigned integer", yv.Kind.String()) 2101 } 2102 2103 return xv.DwarfType, nil 2104 } 2105 2106 if xv.DwarfType == nil && yv.DwarfType == nil { 2107 return nil, nil 2108 } 2109 2110 if xv.DwarfType != nil && yv.DwarfType != nil { 2111 if xv.DwarfType.String() != yv.DwarfType.String() { 2112 return nil, fmt.Errorf("mismatched types %q and %q", xv.DwarfType.String(), yv.DwarfType.String()) 2113 } 2114 return xv.DwarfType, nil 2115 } else if xv.DwarfType != nil && yv.DwarfType == nil { 2116 if err := yv.isType(xv.DwarfType, xv.Kind); err != nil { 2117 return nil, err 2118 } 2119 return xv.DwarfType, nil 2120 } else if xv.DwarfType == nil && yv.DwarfType != nil { 2121 if err := xv.isType(yv.DwarfType, yv.Kind); err != nil { 2122 return nil, err 2123 } 2124 return yv.DwarfType, nil 2125 } 2126 2127 panic("unreachable") 2128 } 2129 2130 func negotiateTypeNil(op token.Token, v *Variable) error { 2131 if op != token.EQL && op != token.NEQ { 2132 return fmt.Errorf("operator %s can not be applied to \"nil\"", op.String()) 2133 } 2134 switch v.Kind { 2135 case reflect.Ptr, reflect.UnsafePointer, reflect.Chan, reflect.Map, reflect.Interface, reflect.Slice, reflect.Func: 2136 return nil 2137 default: 2138 return fmt.Errorf("can not compare %s to nil", v.Kind.String()) 2139 } 2140 } 2141 2142 func (scope *EvalScope) evalBinary(binop *evalop.Binary, stack *evalStack) { 2143 node := binop.Node 2144 2145 yv := stack.pop() 2146 xv := stack.pop() 2147 2148 if xv.Kind != reflect.String { // delay loading strings until we use them 2149 xv.loadValue(loadFullValue) 2150 } 2151 if xv.Unreadable != nil { 2152 stack.err = xv.Unreadable 2153 return 2154 } 2155 if yv.Kind != reflect.String { // delay loading strings until we use them 2156 yv.loadValue(loadFullValue) 2157 } 2158 if yv.Unreadable != nil { 2159 stack.err = yv.Unreadable 2160 return 2161 } 2162 2163 if xv.FloatSpecial != 0 || yv.FloatSpecial != 0 { 2164 stack.err = errOperationOnSpecialFloat 2165 return 2166 } 2167 2168 typ, err := negotiateType(node.Op, xv, yv) 2169 if err != nil { 2170 stack.err = err 2171 return 2172 } 2173 2174 op := node.Op 2175 if typ != nil && (op == token.QUO) { 2176 _, isint := typ.(*godwarf.IntType) 2177 _, isuint := typ.(*godwarf.UintType) 2178 if isint || isuint { 2179 // forces integer division if the result type is integer 2180 op = token.QUO_ASSIGN 2181 } 2182 } 2183 2184 switch op { 2185 case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: 2186 v, err := compareOp(op, xv, yv) 2187 if err != nil { 2188 stack.err = err 2189 return 2190 } 2191 stack.push(newConstant(constant.MakeBool(v), xv.mem)) 2192 2193 default: 2194 if xv.Kind == reflect.String { 2195 xv.loadValue(loadFullValueLongerStrings) 2196 } 2197 if yv.Kind == reflect.String { 2198 yv.loadValue(loadFullValueLongerStrings) 2199 } 2200 if xv.Value == nil { 2201 stack.err = fmt.Errorf("operator %s can not be applied to %q", node.Op.String(), exprToString(node.X)) 2202 return 2203 } 2204 2205 if yv.Value == nil { 2206 stack.err = fmt.Errorf("operator %s can not be applied to %q", node.Op.String(), exprToString(node.Y)) 2207 return 2208 } 2209 2210 rc, 
err := constantBinaryOp(op, xv.Value, yv.Value) 2211 if err != nil { 2212 stack.err = err 2213 return 2214 } 2215 2216 if typ == nil { 2217 stack.push(newConstant(rc, xv.mem)) 2218 return 2219 } 2220 2221 r := xv.newVariable("", 0, typ, scope.Mem) 2222 r.Value = rc 2223 switch r.Kind { 2224 case reflect.String: 2225 r.Len = xv.Len + yv.Len 2226 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 2227 n, _ := constant.Int64Val(r.Value) 2228 r.Value = constant.MakeInt64(int64(convertInt(uint64(n), true, typ.Size()))) 2229 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: 2230 n, _ := constant.Uint64Val(r.Value) 2231 r.Value = constant.MakeUint64(convertInt(n, false, typ.Size())) 2232 } 2233 stack.push(r) 2234 } 2235 } 2236 2237 // Compares xv to yv using operator op 2238 // Both xv and yv must be loaded and have a compatible type (as determined by negotiateType) 2239 func compareOp(op token.Token, xv *Variable, yv *Variable) (bool, error) { 2240 switch xv.Kind { 2241 case reflect.Bool: 2242 fallthrough 2243 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: 2244 fallthrough 2245 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: 2246 fallthrough 2247 case reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: 2248 return constantCompare(op, xv.Value, yv.Value) 2249 case reflect.String: 2250 if xv.Len != yv.Len { 2251 switch op { 2252 case token.EQL: 2253 return false, nil 2254 case token.NEQ: 2255 return true, nil 2256 } 2257 } 2258 if xv.Kind == reflect.String { 2259 xv.loadValue(loadFullValueLongerStrings) 2260 } 2261 if yv.Kind == reflect.String { 2262 yv.loadValue(loadFullValueLongerStrings) 2263 } 2264 if int64(len(constant.StringVal(xv.Value))) != xv.Len || int64(len(constant.StringVal(yv.Value))) != yv.Len { 2265 return false, fmt.Errorf("string too long for comparison") 2266 } 2267 return constantCompare(op, xv.Value, yv.Value) 2268 } 2269 2270 if op != token.EQL && op != token.NEQ { 2271 return false, fmt.Errorf("operator %s not defined on %s", op.String(), xv.Kind.String()) 2272 } 2273 2274 var eql bool 2275 var err error 2276 2277 if xv == nilVariable { 2278 switch op { 2279 case token.EQL: 2280 return yv.isNil(), nil 2281 case token.NEQ: 2282 return !yv.isNil(), nil 2283 } 2284 } 2285 2286 if yv == nilVariable { 2287 switch op { 2288 case token.EQL: 2289 return xv.isNil(), nil 2290 case token.NEQ: 2291 return !xv.isNil(), nil 2292 } 2293 } 2294 2295 switch xv.Kind { 2296 case reflect.Ptr: 2297 eql = xv.Children[0].Addr == yv.Children[0].Addr 2298 case reflect.Array: 2299 if int64(len(xv.Children)) != xv.Len || int64(len(yv.Children)) != yv.Len { 2300 return false, fmt.Errorf("array too long for comparison") 2301 } 2302 eql, err = equalChildren(xv, yv, true) 2303 case reflect.Struct: 2304 if len(xv.Children) != len(yv.Children) { 2305 return false, nil 2306 } 2307 if int64(len(xv.Children)) != xv.Len || int64(len(yv.Children)) != yv.Len { 2308 return false, fmt.Errorf("structure too deep for comparison") 2309 } 2310 eql, err = equalChildren(xv, yv, false) 2311 case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan: 2312 return false, fmt.Errorf("can not compare %s variables", xv.Kind.String()) 2313 case reflect.Interface: 2314 if xv.Children[0].RealType.String() != yv.Children[0].RealType.String() { 2315 eql = false 2316 } else { 2317 eql, err = compareOp(token.EQL, &xv.Children[0], &yv.Children[0]) 2318 } 2319 default: 2320 
return false, fmt.Errorf("unimplemented comparison of %s variables", xv.Kind.String()) 2321 } 2322 2323 if op == token.NEQ { 2324 return !eql, err 2325 } 2326 return eql, err 2327 } 2328 2329 func (v *Variable) isNil() bool { 2330 switch v.Kind { 2331 case reflect.Ptr: 2332 return v.Children[0].Addr == 0 2333 case reflect.Interface: 2334 return v.Children[0].Addr == 0 && v.Children[0].Kind == reflect.Invalid 2335 case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan: 2336 return v.Base == 0 2337 } 2338 return false 2339 } 2340 2341 func equalChildren(xv, yv *Variable, shortcircuit bool) (bool, error) { 2342 r := true 2343 for i := range xv.Children { 2344 eql, err := compareOp(token.EQL, &xv.Children[i], &yv.Children[i]) 2345 if err != nil { 2346 return false, err 2347 } 2348 r = r && eql 2349 if !r && shortcircuit { 2350 return false, nil 2351 } 2352 } 2353 return r, nil 2354 } 2355 2356 func (v *Variable) asInt() (int64, error) { 2357 if v.DwarfType == nil { 2358 if v.Value.Kind() != constant.Int { 2359 return 0, fmt.Errorf("can not convert constant %s to int", v.Value) 2360 } 2361 } else { 2362 v.loadValue(loadSingleValue) 2363 if v.Unreadable != nil { 2364 return 0, v.Unreadable 2365 } 2366 if _, ok := v.DwarfType.(*godwarf.IntType); !ok { 2367 return 0, fmt.Errorf("can not convert value of type %s to int", v.DwarfType.String()) 2368 } 2369 } 2370 n, _ := constant.Int64Val(v.Value) 2371 return n, nil 2372 } 2373 2374 func (v *Variable) asUint() (uint64, error) { 2375 if v.DwarfType == nil { 2376 if v.Value.Kind() != constant.Int { 2377 return 0, fmt.Errorf("can not convert constant %s to uint", v.Value) 2378 } 2379 } else { 2380 v.loadValue(loadSingleValue) 2381 if v.Unreadable != nil { 2382 return 0, v.Unreadable 2383 } 2384 if _, ok := v.DwarfType.(*godwarf.UintType); !ok { 2385 return 0, fmt.Errorf("can not convert value of type %s to uint", v.DwarfType.String()) 2386 } 2387 } 2388 n, _ := constant.Uint64Val(v.Value) 2389 return n, nil 2390 } 2391 2392 type typeConvErr struct { 2393 srcType, dstType godwarf.Type 2394 } 2395 2396 func (err *typeConvErr) Error() string { 2397 return fmt.Sprintf("can not convert value of type %s to %s", err.srcType.String(), err.dstType.String()) 2398 } 2399 2400 func (v *Variable) isType(typ godwarf.Type, kind reflect.Kind) error { 2401 if v.DwarfType != nil { 2402 if typ == nil || !sameType(typ, v.RealType) { 2403 return &typeConvErr{v.DwarfType, typ} 2404 } 2405 return nil 2406 } 2407 2408 if typ == nil { 2409 return nil 2410 } 2411 2412 if v == nilVariable { 2413 switch kind { 2414 case reflect.Slice, reflect.Map, reflect.Func, reflect.Ptr, reflect.Chan, reflect.Interface: 2415 return nil 2416 default: 2417 return fmt.Errorf("mismatched types nil and %s", typ.String()) 2418 } 2419 } 2420 2421 converr := fmt.Errorf("can not convert %s constant to %s", v.Value, typ.String()) 2422 2423 if v.Value == nil { 2424 return converr 2425 } 2426 2427 switch typ.(type) { 2428 case *godwarf.IntType: 2429 if v.Value.Kind() != constant.Int { 2430 return converr 2431 } 2432 case *godwarf.UintType: 2433 if v.Value.Kind() != constant.Int { 2434 return converr 2435 } 2436 case *godwarf.FloatType: 2437 if (v.Value.Kind() != constant.Int) && (v.Value.Kind() != constant.Float) { 2438 return converr 2439 } 2440 case *godwarf.BoolType: 2441 if v.Value.Kind() != constant.Bool { 2442 return converr 2443 } 2444 case *godwarf.StringType: 2445 if v.Value.Kind() != constant.String { 2446 return converr 2447 } 2448 case *godwarf.ComplexType: 2449 if v.Value.Kind() != 
constant.Complex && v.Value.Kind() != constant.Float && v.Value.Kind() != constant.Int { 2450 return converr 2451 } 2452 default: 2453 return converr 2454 } 2455 2456 return nil 2457 } 2458 2459 func sameType(t1, t2 godwarf.Type) bool { 2460 // Because of a bug in the go linker a type that refers to another type 2461 // (for example a pointer type) will usually use the typedef but rarely use 2462 // the non-typedef entry directly. 2463 // For types that we read directly from go this is fine because it's 2464 // consistent, however we also synthesize some types ourselves 2465 // (specifically pointers and slices) and we always use a reference through 2466 // a typedef. 2467 t1 = resolveTypedef(t1) 2468 t2 = resolveTypedef(t2) 2469 2470 if tt1, isptr1 := t1.(*godwarf.PtrType); isptr1 { 2471 tt2, isptr2 := t2.(*godwarf.PtrType) 2472 if !isptr2 { 2473 return false 2474 } 2475 return sameType(tt1.Type, tt2.Type) 2476 } 2477 if tt1, isslice1 := t1.(*godwarf.SliceType); isslice1 { 2478 tt2, isslice2 := t2.(*godwarf.SliceType) 2479 if !isslice2 { 2480 return false 2481 } 2482 return sameType(tt1.ElemType, tt2.ElemType) 2483 } 2484 return t1.String() == t2.String() 2485 } 2486 2487 func (v *Variable) sliceAccess(idx int) (*Variable, error) { 2488 wrong := false 2489 if v.Flags&VariableCPtr == 0 { 2490 wrong = idx < 0 || int64(idx) >= v.Len 2491 } else { 2492 wrong = idx < 0 2493 } 2494 if wrong { 2495 return nil, fmt.Errorf("index out of bounds") 2496 } 2497 if v.loaded { 2498 if v.Kind == reflect.String { 2499 s := constant.StringVal(v.Value) 2500 if idx >= len(s) { 2501 return nil, fmt.Errorf("index out of bounds") 2502 } 2503 r := v.newVariable("", v.Base+uint64(int64(idx)*v.stride), v.fieldType, v.mem) 2504 r.loaded = true 2505 r.Value = constant.MakeInt64(int64(s[idx])) 2506 return r, nil 2507 } else { 2508 if idx >= len(v.Children) { 2509 return nil, fmt.Errorf("index out of bounds") 2510 } 2511 return &v.Children[idx], nil 2512 } 2513 } 2514 mem := v.mem 2515 if v.Kind != reflect.Array { 2516 mem = DereferenceMemory(mem) 2517 } 2518 return v.newVariable("", v.Base+uint64(int64(idx)*v.stride), v.fieldType, mem), nil 2519 } 2520 2521 func (v *Variable) mapAccess(idx *Variable) (*Variable, error) { 2522 it := v.mapIterator() 2523 if it == nil { 2524 return nil, fmt.Errorf("can not access unreadable map: %v", v.Unreadable) 2525 } 2526 2527 lcfg := loadFullValue 2528 if idx.Kind == reflect.String && int64(len(constant.StringVal(idx.Value))) == idx.Len && idx.Len > int64(lcfg.MaxStringLen) { 2529 // If the index is a string load as much of the keys to at least match the length of the index. 2530 //TODO(aarzilli): when struct literals are implemented this needs to be 2531 //done recursively for literal struct fields. 
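// Raising MaxStringLen keeps long string keys from being truncated when loaded; a truncated key could never compare equal to the index.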
2532 lcfg.MaxStringLen = int(idx.Len) 2533 } 2534 2535 first := true 2536 for it.next() { 2537 key := it.key() 2538 key.loadValue(lcfg) 2539 if key.Unreadable != nil { 2540 return nil, fmt.Errorf("can not access unreadable map: %v", key.Unreadable) 2541 } 2542 if first { 2543 first = false 2544 if err := idx.isType(key.RealType, key.Kind); err != nil { 2545 return nil, err 2546 } 2547 } 2548 eql, err := compareOp(token.EQL, key, idx) 2549 if err != nil { 2550 return nil, err 2551 } 2552 if eql { 2553 return it.value(), nil 2554 } 2555 } 2556 if v.Unreadable != nil { 2557 return nil, v.Unreadable 2558 } 2559 // go would return zero for the map value type here, we do not have the ability to create zeroes 2560 return nil, fmt.Errorf("key not found") 2561 } 2562 2563 // LoadResliced returns a new array, slice or map that starts at index start and contains 2564 // up to cfg.MaxArrayValues children. 2565 func (v *Variable) LoadResliced(start int, cfg LoadConfig) (newV *Variable, err error) { 2566 switch v.Kind { 2567 case reflect.Array, reflect.Slice: 2568 low, high := int64(start), int64(start+cfg.MaxArrayValues) 2569 if high > v.Len { 2570 high = v.Len 2571 } 2572 newV, err = v.reslice(low, high, false) 2573 if err != nil { 2574 return nil, err 2575 } 2576 case reflect.Map: 2577 newV = v.clone() 2578 newV.Children = nil 2579 newV.loaded = false 2580 newV.mapSkip = start 2581 default: 2582 return nil, fmt.Errorf("variable to reslice is not an array, slice, or map") 2583 } 2584 newV.loadValue(cfg) 2585 return newV, nil 2586 } 2587 2588 func (v *Variable) reslice(low int64, high int64, trustLen bool) (*Variable, error) { 2589 wrong := false 2590 cptrNeedsFakeSlice := false 2591 if v.Flags&VariableCPtr == 0 { 2592 wrong = low < 0 || low > v.Len || high < 0 || high > v.Len 2593 } else { 2594 wrong = low < 0 || high < 0 2595 if high == 0 { 2596 high = low 2597 } 2598 cptrNeedsFakeSlice = v.Kind != reflect.String 2599 } 2600 if wrong { 2601 return nil, fmt.Errorf("index out of bounds") 2602 } 2603 2604 base := v.Base + uint64(low*v.stride) 2605 len := high - low 2606 2607 if high-low < 0 { 2608 return nil, fmt.Errorf("index out of bounds") 2609 } 2610 2611 typ := v.DwarfType 2612 if _, isarr := v.DwarfType.(*godwarf.ArrayType); isarr || cptrNeedsFakeSlice { 2613 typ = godwarf.FakeSliceType(v.fieldType) 2614 } 2615 2616 mem := v.mem 2617 if v.Kind != reflect.Array { 2618 mem = DereferenceMemory(mem) 2619 } 2620 2621 r := v.newVariable("", 0, typ, mem) 2622 r.Cap = len 2623 r.Len = len 2624 r.Base = base 2625 r.stride = v.stride 2626 r.fieldType = v.fieldType 2627 r.Flags = v.Flags 2628 if trustLen { 2629 r.Flags |= variableTrustLen 2630 } 2631 r.reg = v.reg 2632 2633 return r, nil 2634 } 2635 2636 // findMethod finds method mname in the type of variable v 2637 func (v *Variable) findMethod(mname string) (*Variable, error) { 2638 if _, isiface := v.RealType.(*godwarf.InterfaceType); isiface { 2639 v.loadInterface(0, false, loadFullValue) 2640 if v.Unreadable != nil { 2641 return nil, v.Unreadable 2642 } 2643 return v.Children[0].findMethod(mname) 2644 } 2645 2646 queue := []*Variable{v} 2647 seen := map[string]struct{}{} 2648 2649 for len(queue) > 0 { 2650 v := queue[0] 2651 queue = append(queue[:0], queue[1:]...) 
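// Types that were already visited are skipped so that cycles created by embedded fields do not make the search loop forever.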
2652 if _, isseen := seen[v.RealType.String()]; isseen { 2653 continue 2654 } 2655 seen[v.RealType.String()] = struct{}{} 2656 2657 typ := v.DwarfType 2658 ptyp, isptr := typ.(*godwarf.PtrType) 2659 if isptr { 2660 typ = ptyp.Type 2661 } 2662 2663 typePath := typ.Common().Name 2664 dot := strings.LastIndex(typePath, ".") 2665 if dot < 0 { 2666 // probably just a C type 2667 continue 2668 } 2669 2670 pkg := typePath[:dot] 2671 receiver := typePath[dot+1:] 2672 2673 //TODO(aarzilli): support generic functions? 2674 2675 if fns := v.bi.LookupFunc()[fmt.Sprintf("%s.%s.%s", pkg, receiver, mname)]; len(fns) == 1 { 2676 r, err := functionToVariable(fns[0], v.bi, v.mem) 2677 if err != nil { 2678 return nil, err 2679 } 2680 if isptr { 2681 r.Children = append(r.Children, *(v.maybeDereference())) 2682 } else { 2683 r.Children = append(r.Children, *v) 2684 } 2685 return r, nil 2686 } 2687 2688 if fns := v.bi.LookupFunc()[fmt.Sprintf("%s.(*%s).%s", pkg, receiver, mname)]; len(fns) == 1 { 2689 r, err := functionToVariable(fns[0], v.bi, v.mem) 2690 if err != nil { 2691 return nil, err 2692 } 2693 if isptr { 2694 r.Children = append(r.Children, *v) 2695 } else { 2696 r.Children = append(r.Children, *(v.pointerToVariable())) 2697 } 2698 return r, nil 2699 } 2700 2701 // queue embedded fields for search 2702 structVar := v.maybeDereference() 2703 structVar.Name = v.Name 2704 if structVar.Unreadable != nil { 2705 return structVar, nil 2706 } 2707 switch t := structVar.RealType.(type) { 2708 case *godwarf.StructType: 2709 for _, field := range t.Field { 2710 if field.Embedded { 2711 embeddedVar, err := structVar.toField(field) 2712 if err != nil { 2713 return nil, err 2714 } 2715 queue = append(queue, embeddedVar) 2716 } 2717 } 2718 } 2719 } 2720 2721 return nil, nil 2722 } 2723 2724 func functionToVariable(fn *Function, bi *BinaryInfo, mem MemoryReadWriter) (*Variable, error) { 2725 typ, err := fn.fakeType(bi, true) 2726 if err != nil { 2727 return nil, err 2728 } 2729 v := newVariable(fn.Name, 0, typ, bi, mem) 2730 v.Value = constant.MakeString(fn.Name) 2731 v.loaded = true 2732 v.Base = fn.Entry 2733 return v, nil 2734 } 2735 2736 func fakeArrayType(n uint64, fieldType godwarf.Type) godwarf.Type { 2737 stride := alignAddr(fieldType.Common().ByteSize, fieldType.Align()) 2738 return &godwarf.ArrayType{ 2739 CommonType: godwarf.CommonType{ 2740 ReflectKind: reflect.Array, 2741 ByteSize: int64(n) * stride, 2742 Name: fmt.Sprintf("[%d]%s", n, fieldType.String())}, 2743 Type: fieldType, 2744 StrideBitSize: stride * 8, 2745 Count: int64(n)} 2746 } 2747 2748 var errMethodEvalUnsupported = errors.New("evaluating methods not supported on this version of Go") 2749 2750 func (fn *Function) fakeType(bi *BinaryInfo, removeReceiver bool) (*godwarf.FuncType, error) { 2751 if producer := bi.Producer(); producer == "" || !goversion.ProducerAfterOrEqual(producer, 1, 10) { 2752 // versions of Go prior to 1.10 do not distinguish between parameters and 2753 // return values, therefore we can't use a subprogram DIE to derive a 2754 // function type. 2755 return nil, errMethodEvalUnsupported 2756 } 2757 _, formalArgs, err := funcCallArgs(fn, bi, true) 2758 if err != nil { 2759 return nil, err 2760 } 2761 2762 // Only try and remove the receiver if it is actually being passed in as a formal argument. 2763 // In the case of: 2764 // 2765 // func (_ X) Method() { ... } 2766 // 2767 // that would not be true, the receiver is not used and thus 2768 // not being passed in as a formal argument. 
2769 // 2770 // TODO(derekparker) This, I think, creates a new bug where 2771 // if the receiver is not passed in as a formal argument but 2772 // there are other arguments, such as: 2773 // 2774 // func (_ X) Method(i int) { ... } 2775 // 2776 // The first argument 'i int' will be removed. We must actually detect 2777 // here if the receiver is being used. While this is a bug, it's not a 2778 // functional bug, it only affects the string representation of the fake 2779 // function type we create. It's not really easy to tell here if we use 2780 // the receiver or not. Perhaps we should not perform this manipulation at all? 2781 if removeReceiver && len(formalArgs) > 0 { 2782 formalArgs = formalArgs[1:] 2783 } 2784 2785 args := make([]string, 0, len(formalArgs)) 2786 rets := make([]string, 0, len(formalArgs)) 2787 2788 for _, formalArg := range formalArgs { 2789 var s string 2790 if strings.HasPrefix(formalArg.name, "~") { 2791 s = formalArg.typ.String() 2792 } else { 2793 s = fmt.Sprintf("%s %s", formalArg.name, formalArg.typ.String()) 2794 } 2795 if formalArg.isret { 2796 rets = append(rets, s) 2797 } else { 2798 args = append(args, s) 2799 } 2800 } 2801 2802 argstr := strings.Join(args, ", ") 2803 var retstr string 2804 switch len(rets) { 2805 case 0: 2806 retstr = "" 2807 case 1: 2808 retstr = " " + rets[0] 2809 default: 2810 retstr = " (" + strings.Join(rets, ", ") + ")" 2811 } 2812 return &godwarf.FuncType{ 2813 CommonType: godwarf.CommonType{ 2814 Name: "func(" + argstr + ")" + retstr, 2815 ReflectKind: reflect.Func, 2816 }, 2817 //TODO(aarzilli): at the moment we aren't using the ParamType and 2818 // ReturnType fields of FuncType anywhere (when this is returned to the 2819 // client it's first converted to a string and the function calling code 2820 // reads the subroutine entry because it needs to know the stack offsets). 2821 // If we start using them they should be filled here. 2822 }, nil 2823 } 2824 2825 func validRegisterName(s string) string { 2826 for len(s) > 0 && s[0] == '_' { 2827 s = s[1:] 2828 } 2829 for i := range s { 2830 if (s[i] < '0' || s[i] > '9') && (s[i] < 'A' || s[i] > 'Z') { 2831 return "" 2832 } 2833 } 2834 return s 2835 }