// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Garbage collector liveness bitmap generation.

// The command line flag -live causes this code to print debug information.
// The levels are:
//
//	-live (aka -live=1): print liveness lists as code warnings at safe points
//	-live=2: print an assembly listing with liveness annotations
//
// Each level includes the earlier output as well.

package gc

import (
	"cmd/compile/internal/ssa"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"crypto/md5"
	"fmt"
	"strings"
)

// BlockEffects summarizes the liveness effects on an SSA block.
type BlockEffects struct {
	lastbitmapindex int // for livenessepilogue

	// Computed during livenessprologue using only the content of
	// individual blocks:
	//
	//	uevar: upward exposed variables (used before set in block)
	//	varkill: killed variables (set in block)
	//	avarinit: addrtaken variables set or used (proof of initialization)
	uevar    bvec
	varkill  bvec
	avarinit bvec

	// Computed during livenesssolve using control flow information:
	//
	//	livein: variables live at block entry
	//	liveout: variables live at block exit
	//	avarinitany: addrtaken variables possibly initialized at block exit
	//		(initialized in block or at exit from any predecessor block)
	//	avarinitall: addrtaken variables certainly initialized at block exit
	//		(initialized in block or at exit from all predecessor blocks)
	livein      bvec
	liveout     bvec
	avarinitany bvec
	avarinitall bvec
}

// A collection of global state used by liveness analysis.
type Liveness struct {
	fn         *Node
	f          *ssa.Func
	vars       []*Node
	stkptrsize int64

	be []BlockEffects

	// stackMapIndex maps from safe points (i.e., CALLs) to their
	// index within the stack maps.
	stackMapIndex map[*ssa.Value]int

	// An array with a bit vector for each safe point tracking
	// live variables, indexed by bb.rpo.
	livevars []bvec

	cache progeffectscache
}

type progeffectscache struct {
	textavarinit []int32
	retuevar     []int32
	tailuevar    []int32
	initialized  bool
}

// livenessShouldTrack reports whether the liveness analysis
// should track the variable n.
// We don't care about variables that have no pointers,
// nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func livenessShouldTrack(n *Node) bool {
	return n.Op == ONAME && (n.Class == PAUTO || n.Class == PPARAM || n.Class == PPARAMOUT) && types.Haspointers(n.Type)
}

// getvariables returns the list of on-stack variables that we need to track.
func getvariables(fn *Node) []*Node {
	var vars []*Node
	for _, n := range fn.Func.Dcl {
		if n.Op == ONAME {
			// The Node.opt field is available for use by optimization passes.
			// We use it to hold the index of the node in the variables array
			// (nil means the Node is not in the variables array).
			// The Node.curfn field is supposed to be set to the current function
			// already, but for some compiler-introduced names it seems not to be,
			// so fix that here.
			// Later, when we want to find the index of a node in the variables list,
			// we will check that n.Curfn == lv.fn and n.Opt() != nil. Then n.Opt().(int32)
			// is the index in the variables list.
			n.SetOpt(nil)
			n.Name.Curfn = fn
		}

		if livenessShouldTrack(n) {
			n.SetOpt(int32(len(vars)))
			vars = append(vars, n)
		}
	}

	return vars
}
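// Illustration (hypothetical declarations, not real compiler state): for
//
//	func f(p *T) { var x *int; var n int; ... }
//
// getvariables returns [p, x]: p.Opt() == int32(0) and x.Opt() == int32(1),
// while n is left untracked (its type has no pointers), so liveIndex(n)
// below reports -1 for it.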
func (lv *Liveness) initcache() {
	if lv.cache.initialized {
		Fatalf("liveness cache initialized twice")
		return
	}
	lv.cache.initialized = true

	for i, node := range lv.vars {
		switch node.Class {
		case PPARAM:
			// A return instruction with a p.to is a tail return, which brings
			// the stack pointer back up (if it ever went down) and then jumps
			// to a new function entirely. That form of instruction must read
			// all the parameters for correctness, and similarly it must not
			// read the out arguments - they won't be set until the new
			// function runs.
			lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))

			if node.Addrtaken() {
				lv.cache.textavarinit = append(lv.cache.textavarinit, int32(i))
			}

		case PPARAMOUT:
			// If the result had its address taken, it is being tracked
			// by the avarinit code, which does not use uevar.
			// If we added it to uevar too, we'd not see any kill
			// and decide that the variable was live on entry, which it is not.
			// So only use uevar in the non-addrtaken case.
			// The p.to.type == obj.TYPE_NONE limits the bvset to
			// non-tail-call return instructions; see note below for details.
			if !node.Addrtaken() {
				lv.cache.retuevar = append(lv.cache.retuevar, int32(i))
			}
		}
	}
}

// A liveEffect is a set of flags that describe an instruction's
// liveness effects on a variable.
//
// The possible flags are:
//
//	uevar - used by the instruction
//	varkill - killed by the instruction
//		for variables without address taken, means variable was set
//		for variables with address taken, means variable was marked dead
//	avarinit - initialized or referred to by the instruction,
//		only for variables with address taken but not escaping to heap
//
// The avarinit output serves as a signal that the data has been
// initialized, because any use of a variable must come after its
// initialization.
type liveEffect int

const (
	uevar liveEffect = 1 << iota
	varkill
	avarinit
)
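// For intuition (an illustrative sketch; x is a hypothetical tracked
// variable): a store to a non-addrtaken x (SymWrite) produces varkill, so x
// is dead above the store; a load of x (SymRead) produces uevar, making x
// upward-exposed back to the nearest kill. For an addrtaken x, OpVarDef
// produces varkill|avarinit: it ends the previous lifetime and marks the
// slot initialized for the avarinit dataflow.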
// valueEffects returns the index of a variable in lv.vars and the
// liveness effects v has on that variable.
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *Liveness) valueEffects(v *ssa.Value) (pos int32, effect liveEffect) {
	n, e := affectedNode(v)
	if e == 0 {
		return -1, 0
	}

	// AllocFrame has dropped unused variables from
	// lv.fn.Func.Dcl, but they might still be referenced by
	// OpVarFoo pseudo-ops. Ignore them to prevent "lost track of
	// variable" ICEs (issue 19632).
	switch v.Op {
	case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
		if !n.Used() {
			return -1, 0
		}
	}

	pos = lv.liveIndex(n)
	if pos < 0 {
		return -1, 0
	}

	if n.Addrtaken() {
		if v.Op != ssa.OpVarKill {
			effect |= avarinit
		}
		if v.Op == ssa.OpVarDef || v.Op == ssa.OpVarKill {
			effect |= varkill
		}
	} else {
		// Read is a read, obviously.
		// Addr by itself is also implicitly a read.
		//
		// Addr|Write means that the address is being taken
		// but only so that the instruction can write to the value.
		// It is not a read.

		if e&ssa.SymRead != 0 || e&(ssa.SymAddr|ssa.SymWrite) == ssa.SymAddr {
			effect |= uevar
		}
		if e&ssa.SymWrite != 0 && (!isfat(n.Type) || v.Op == ssa.OpVarDef) {
			effect |= varkill
		}
	}

	return
}

// affectedNode returns the *Node affected by v.
func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
	// Special cases.
	switch v.Op {
	case ssa.OpLoadReg:
		n, _ := AutoVar(v.Args[0])
		return n, ssa.SymRead
	case ssa.OpStoreReg:
		n, _ := AutoVar(v)
		return n, ssa.SymWrite

	case ssa.OpVarLive:
		return v.Aux.(*Node), ssa.SymRead
	case ssa.OpVarDef, ssa.OpVarKill:
		return v.Aux.(*Node), ssa.SymWrite
	case ssa.OpKeepAlive:
		n, _ := AutoVar(v.Args[0])
		return n, ssa.SymRead
	}

	e := v.Op.SymEffect()
	if e == 0 {
		return nil, 0
	}

	var n *Node
	switch a := v.Aux.(type) {
	case nil, *ssa.ExternSymbol:
		// ok, but no node
	case *ssa.ArgSymbol:
		n = a.Node.(*Node)
	case *ssa.AutoSymbol:
		n = a.Node.(*Node)
	default:
		Fatalf("weird aux: %s", v.LongString())
	}

	return n, e
}

// liveIndex returns the index of n in the set of tracked vars.
// If n is not a tracked var, liveIndex returns -1.
// If n is not a tracked var but should be tracked, liveIndex crashes.
func (lv *Liveness) liveIndex(n *Node) int32 {
	if n == nil || n.Name.Curfn != lv.fn || !livenessShouldTrack(n) {
		return -1
	}

	pos, ok := n.Opt().(int32) // index in vars
	if !ok {
		Fatalf("lost track of variable in liveness: %v (%p, %p)", n, n, n.Orig)
	}
	if pos >= int32(len(lv.vars)) || lv.vars[pos] != n {
		Fatalf("bad bookkeeping in liveness: %v (%p, %p)", n, n, n.Orig)
	}
	return pos
}

// newliveness constructs a new Liveness structure used to hold the global
// state of the liveness computation. The vars argument is the slice of
// on-stack variables to track.
func newliveness(fn *Node, f *ssa.Func, vars []*Node, stkptrsize int64) *Liveness {
	lv := &Liveness{
		fn:         fn,
		f:          f,
		vars:       vars,
		stkptrsize: stkptrsize,
		be:         make([]BlockEffects, f.NumBlocks()),
	}

	nblocks := int32(len(f.Blocks))
	nvars := int32(len(vars))
	bulk := bvbulkalloc(nvars, nblocks*7)
	for _, b := range f.Blocks {
		be := lv.blockEffects(b)

		be.uevar = bulk.next()
		be.varkill = bulk.next()
		be.livein = bulk.next()
		be.liveout = bulk.next()
		be.avarinit = bulk.next()
		be.avarinitany = bulk.next()
		be.avarinitall = bulk.next()
	}
	return lv
}

func (lv *Liveness) blockEffects(b *ssa.Block) *BlockEffects {
	return &lv.be[b.ID]
}
// NOTE: The bitmap for a specific type t should be cached in t after the first run
// and then simply copied into bv at the correct offset on future calls with
// the same type t. On https://rsc.googlecode.com/hg/testdata/slow.go, onebitwalktype1
// accounts for 40% of the 6g execution time.
func onebitwalktype1(t *types.Type, xoffset *int64, bv bvec) {
	if t.Align > 0 && *xoffset&int64(t.Align-1) != 0 {
		Fatalf("onebitwalktype1: invalid initial alignment, %v", t)
	}

	switch t.Etype {
	case TINT8,
		TUINT8,
		TINT16,
		TUINT16,
		TINT32,
		TUINT32,
		TINT64,
		TUINT64,
		TINT,
		TUINT,
		TUINTPTR,
		TBOOL,
		TFLOAT32,
		TFLOAT64,
		TCOMPLEX64,
		TCOMPLEX128:
		*xoffset += t.Width

	case TPTR32,
		TPTR64,
		TUNSAFEPTR,
		TFUNC,
		TCHAN,
		TMAP:
		if *xoffset&int64(Widthptr-1) != 0 {
			Fatalf("onebitwalktype1: invalid alignment, %v", t)
		}
		bv.Set(int32(*xoffset / int64(Widthptr))) // pointer
		*xoffset += t.Width

	case TSTRING:
		// struct { byte *str; intgo len; }
		if *xoffset&int64(Widthptr-1) != 0 {
			Fatalf("onebitwalktype1: invalid alignment, %v", t)
		}
		bv.Set(int32(*xoffset / int64(Widthptr))) // pointer in first slot
		*xoffset += t.Width

	case TINTER:
		// struct { Itab *tab; void *data; }
		// or, when isnilinter(t)==true:
		// struct { Type *type; void *data; }
		if *xoffset&int64(Widthptr-1) != 0 {
			Fatalf("onebitwalktype1: invalid alignment, %v", t)
		}
		bv.Set(int32(*xoffset / int64(Widthptr)))   // pointer in first slot
		bv.Set(int32(*xoffset/int64(Widthptr) + 1)) // pointer in second slot
		*xoffset += t.Width

	case TSLICE:
		// struct { byte *array; uintgo len; uintgo cap; }
		if *xoffset&int64(Widthptr-1) != 0 {
			Fatalf("onebitwalktype1: invalid TSLICE alignment, %v", t)
		}
		bv.Set(int32(*xoffset / int64(Widthptr))) // pointer in first slot (BitsPointer)
		*xoffset += t.Width

	case TARRAY:
		for i := int64(0); i < t.NumElem(); i++ {
			onebitwalktype1(t.Elem(), xoffset, bv)
		}

	case TSTRUCT:
		var o int64
		for _, t1 := range t.Fields().Slice() {
			fieldoffset := t1.Offset
			*xoffset += fieldoffset - o
			onebitwalktype1(t1.Type, xoffset, bv)
			o = fieldoffset + t1.Type.Width
		}

		*xoffset += t.Width - o

	default:
		Fatalf("onebitwalktype1: unexpected type, %v", t)
	}
}
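// Worked example (illustrative; assumes a 64-bit target where Widthptr == 8):
// for a variable of type struct { p *int; n int; s string } starting at
// xoffset 0, onebitwalktype1 sets bit 0 (the pointer p), skips the word for
// n, and sets bit 2 (the string's data pointer); bit 3 (the string length
// word) stays clear.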
// Returns the number of words of local variables.
func localswords(lv *Liveness) int32 {
	return int32(lv.stkptrsize / int64(Widthptr))
}

// Returns the number of words of in and out arguments.
func argswords(lv *Liveness) int32 {
	return int32(lv.fn.Type.ArgWidth() / int64(Widthptr))
}

// Generates live pointer value maps for arguments and local variables. The
// this argument and the in arguments are always assumed live. The vars
// argument is a slice of *Nodes.
func onebitlivepointermap(lv *Liveness, liveout bvec, vars []*Node, args bvec, locals bvec) {
	var xoffset int64

	for i := int32(0); ; i++ {
		i = liveout.Next(i)
		if i < 0 {
			break
		}
		node := vars[i]
		switch node.Class {
		case PAUTO:
			xoffset = node.Xoffset + lv.stkptrsize
			onebitwalktype1(node.Type, &xoffset, locals)

		case PPARAM, PPARAMOUT:
			xoffset = node.Xoffset
			onebitwalktype1(node.Type, &xoffset, args)
		}
	}
}

// Returns true for instructions that are safe points that must be annotated
// with liveness information.
func issafepoint(v *ssa.Value) bool {
	return v.Op.IsCall() || v.Op == ssa.OpARMCALLudiv
}

// Initializes the sets for solving the live variables. Visits all the
// instructions in each basic block to summarize the information at each
// basic block.
func livenessprologue(lv *Liveness) {
	lv.initcache()

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Walk the block instructions backward and update the block
		// effects with the effects of each value.
		for j := len(b.Values) - 1; j >= 0; j-- {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.varkill.Set(pos)
				be.uevar.Unset(pos)
			}
			if e&uevar != 0 {
				be.uevar.Set(pos)
			}
		}

		// Walk the block instructions forward to update avarinit bits.
		// avarinit describes the effect at the end of the block, not the beginning.
		for j := 0; j < len(b.Values); j++ {
			pos, e := lv.valueEffects(b.Values[j])
			if e&varkill != 0 {
				be.avarinit.Unset(pos)
			}
			if e&avarinit != 0 {
				be.avarinit.Set(pos)
			}
		}
	}
}
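// For example (illustrative; x and y are hypothetical tracked variables):
// in a block whose values read y, then write x, then read x, the backward
// walk leaves x in varkill but not in uevar (the read of x is satisfied by
// the write inside the block), while y ends up in uevar: its value must
// flow in from a predecessor.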
// Solve the liveness dataflow equations.
func livenesssolve(lv *Liveness) {
	// These temporary bitvectors exist to avoid successive allocations and
	// frees within the loop.
	newlivein := bvalloc(int32(len(lv.vars)))
	newliveout := bvalloc(int32(len(lv.vars)))
	any := bvalloc(int32(len(lv.vars)))
	all := bvalloc(int32(len(lv.vars)))

	// Push avarinitall, avarinitany forward.
	// avarinitall says the addressed var is initialized along all paths reaching the block exit.
	// avarinitany says the addressed var is initialized along some path reaching the block exit.
	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)
		if b == lv.f.Entry {
			be.avarinitall.Copy(be.avarinit)
		} else {
			be.avarinitall.Clear()
			be.avarinitall.Not()
		}
		be.avarinitany.Copy(be.avarinit)
	}

	// Walk blocks in the general direction of propagation (RPO
	// for avarinit{any,all}, and PO for live{in,out}). This
	// improves convergence.
	po := lv.f.Postorder()

	for change := true; change; {
		change = false
		for i := len(po) - 1; i >= 0; i-- {
			b := po[i]
			be := lv.blockEffects(b)
			lv.avarinitanyall(b, any, all)

			any.AndNot(any, be.varkill)
			all.AndNot(all, be.varkill)
			any.Or(any, be.avarinit)
			all.Or(all, be.avarinit)
			if !any.Eq(be.avarinitany) {
				change = true
				be.avarinitany.Copy(any)
			}

			if !all.Eq(be.avarinitall) {
				change = true
				be.avarinitall.Copy(all)
			}
		}
	}

	// Iterate through the blocks in reverse round-robin fashion. A work
	// queue might be slightly faster. As is, the number of iterations is
	// so low that it hardly seems to be worth the complexity.

	for change := true; change; {
		change = false
		for _, b := range po {
			be := lv.blockEffects(b)

			newliveout.Clear()
			switch b.Kind {
			case ssa.BlockRet:
				for _, pos := range lv.cache.retuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockRetJmp:
				for _, pos := range lv.cache.tailuevar {
					newliveout.Set(pos)
				}
			case ssa.BlockExit:
				// nothing to do
			default:
				// A variable is live on output from this block
				// if it is live on input to some successor.
				//
				// out[b] = \bigcup_{s \in succ[b]} in[s]
				newliveout.Copy(lv.blockEffects(b.Succs[0].Block()).livein)
				for _, succ := range b.Succs[1:] {
					newliveout.Or(newliveout, lv.blockEffects(succ.Block()).livein)
				}
			}

			if !be.liveout.Eq(newliveout) {
				change = true
				be.liveout.Copy(newliveout)
			}

			// A variable is live on input to this block
			// if it is live on output from this block and
			// not set by the code in this block.
			//
			// in[b] = uevar[b] \cup (out[b] \setminus varkill[b])
			newlivein.AndNot(be.liveout, be.varkill)
			be.livein.Or(newlivein, be.uevar)
		}
	}
}
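// A small worked instance of these equations (hypothetical blocks b1, b2, b3
// and variable x): with edges b1->b2 and b1->b3, uevar[b2] = {x},
// uevar[b3] = {}, and varkill[b1] = {x},
//
//	out[b1] = in[b2] ∪ in[b3] ⊇ {x}
//	in[b1]  = uevar[b1] ∪ (out[b1] \ {x})
//
// so the kill of x in b1 keeps x's liveness from propagating above b1.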
// Visits all instructions in a basic block and computes a bit vector of live
// variables at each safe point location.
func livenessepilogue(lv *Liveness) {
	nvars := int32(len(lv.vars))
	liveout := bvalloc(nvars)
	any := bvalloc(nvars)
	all := bvalloc(nvars)
	livedefer := bvalloc(nvars) // always-live variables

	// If there is a defer (that could recover), then all output
	// parameters are live all the time. In addition, any locals
	// that are pointers to heap-allocated output parameters are
	// also always live (post-deferreturn code needs these
	// pointers to copy values back to the stack).
	// TODO: if the output parameter is heap-allocated, then we
	// don't need to keep the stack copy live?
	if lv.fn.Func.HasDefer() {
		for i, n := range lv.vars {
			if n.Class == PPARAMOUT {
				if n.IsOutputParamHeapAddr() {
					// Just to be paranoid. Heap addresses are PAUTOs.
					Fatalf("variable %v both output param and heap output param", n)
				}
				if n.Name.Param.Heapaddr != nil {
					// If this variable moved to the heap, then
					// its stack copy is not live.
					continue
				}
				// Note: zeroing is handled by zeroResults in walk.go.
				livedefer.Set(int32(i))
			}
			if n.IsOutputParamHeapAddr() {
				n.Name.SetNeedzero(true)
				livedefer.Set(int32(i))
			}
		}
	}

	{
		// Reserve an entry for function entry.
		live := bvalloc(nvars)
		for _, pos := range lv.cache.textavarinit {
			live.Set(pos)
		}
		lv.livevars = append(lv.livevars, live)
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// Compute avarinitany and avarinitall for entry to block.
		// This duplicates information known during livenesssolve
		// but avoids storing two more vectors for each block.
		lv.avarinitanyall(b, any, all)

		// Walk forward through the basic block instructions and
		// allocate liveness maps for those instructions that need them.
		// Seed the maps with information about the addrtaken variables.
		for _, v := range b.Values {
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				any.Unset(pos)
				all.Unset(pos)
			}
			if e&avarinit != 0 {
				any.Set(pos)
				all.Set(pos)
			}

			if !issafepoint(v) {
				continue
			}

			// Annotate ambiguously live variables so that they can
			// be zeroed at function entry.
			// liveout is dead here and used as a temporary.
			liveout.AndNot(any, all)
			if !liveout.IsEmpty() {
				for pos := int32(0); pos < liveout.n; pos++ {
					if !liveout.Get(pos) {
						continue
					}
					all.Set(pos) // silence future warnings in this block
					n := lv.vars[pos]
					if !n.Name.Needzero() {
						n.Name.SetNeedzero(true)
						if debuglive >= 1 {
							Warnl(v.Pos, "%v: %L is ambiguously live", lv.fn.Func.Nname, n)
						}
					}
				}
			}

			// Live stuff first.
			live := bvalloc(nvars)
			live.Copy(any)
			lv.livevars = append(lv.livevars, live)
		}

		be.lastbitmapindex = len(lv.livevars) - 1
	}

	for _, b := range lv.f.Blocks {
		be := lv.blockEffects(b)

		// walk backward, emit pcdata and populate the maps
		index := int32(be.lastbitmapindex)
		if index < 0 {
			// the first block we encounter should have the ATEXT so
			// at no point should index ever be less than zero.
			Fatalf("livenessepilogue")
		}

		liveout.Copy(be.liveout)
		for i := len(b.Values) - 1; i >= 0; i-- {
			v := b.Values[i]

			if issafepoint(v) {
				// Found an interesting instruction, record the
				// corresponding liveness information.
				live := lv.livevars[index]
				live.Or(live, liveout)
				live.Or(live, livedefer) // only for non-entry safe points
				index--
			}

			// Update liveness information.
			pos, e := lv.valueEffects(v)
			if e&varkill != 0 {
				liveout.Unset(pos)
			}
			if e&uevar != 0 {
				liveout.Set(pos)
			}
		}

		if b == lv.f.Entry {
			if index != 0 {
				Fatalf("bad index for entry point: %v", index)
			}

			// Record live variables.
			live := lv.livevars[index]
			live.Or(live, liveout)
		}
	}

	// Useful sanity check: on entry to the function,
	// the only things that can possibly be live are the
	// input parameters.
	for j, n := range lv.vars {
		if n.Class != PPARAM && lv.livevars[0].Get(int32(j)) {
			Fatalf("internal error: %v %L recorded as live on entry", lv.fn.Func.Nname, n)
		}
	}
}

func (lv *Liveness) avarinitanyall(b *ssa.Block, any, all bvec) {
	if len(b.Preds) == 0 {
		any.Clear()
		all.Clear()
		for _, pos := range lv.cache.textavarinit {
			any.Set(pos)
			all.Set(pos)
		}
		return
	}

	be := lv.blockEffects(b.Preds[0].Block())
	any.Copy(be.avarinitany)
	all.Copy(be.avarinitall)

	for _, pred := range b.Preds[1:] {
		be := lv.blockEffects(pred.Block())
		any.Or(any, be.avarinitany)
		all.And(all, be.avarinitall)
	}
}

// FNV-1 hash function constants.
const (
	H0 = 2166136261
	Hp = 16777619
)
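// hashbitmap extends the FNV-1 hash h with the bytes of the bitmap bv,
// feeding each 32-bit word least-significant byte first
// (h = h*Hp ^ b at every step).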
func hashbitmap(h uint32, bv bvec) uint32 {
	n := int((bv.n + 31) / 32)
	for i := 0; i < n; i++ {
		w := bv.b[i]
		h = (h * Hp) ^ (w & 0xff)
		h = (h * Hp) ^ ((w >> 8) & 0xff)
		h = (h * Hp) ^ ((w >> 16) & 0xff)
		h = (h * Hp) ^ ((w >> 24) & 0xff)
	}

	return h
}

// Compact liveness information by coalescing identical per-call-site bitmaps.
// The merging only happens for a single function, not across the entire binary.
//
// There are actually two lists of bitmaps, one list for the local variables and one
// list for the function arguments. Both lists are indexed by the same PCDATA
// index, so the corresponding pairs must be considered together when
// merging duplicates. The argument bitmaps change much less often during
// function execution than the local variable bitmaps, so it is possible that
// we could introduce a separate PCDATA index for arguments vs locals and
// then compact the set of argument bitmaps separately from the set of
// local variable bitmaps. As of 2014-04-02, doing this to the godoc binary
// is actually a net loss: we save about 50k of argument bitmaps but the new
// PCDATA tables cost about 100k. So for now we keep using a single index for
// both bitmap lists.
func livenesscompact(lv *Liveness) {
	// Linear probing hash table of bitmaps seen so far.
	// The hash table has 4n entries to keep the linear
	// scan short. An entry of -1 indicates an empty slot.
	n := len(lv.livevars)

	tablesize := 4 * n
	table := make([]int, tablesize)
	for i := range table {
		table[i] = -1
	}

	// remap[i] = the new index of the old bit vector #i.
	remap := make([]int, n)
	for i := range remap {
		remap[i] = -1
	}
	uniq := 0 // unique tables found so far

	// Consider bit vectors in turn.
	// If new, assign next number using uniq,
	// record in remap, record in lv.livevars
	// under the new index, and add entry to hash table.
	// If already seen, record earlier index in remap.
Outer:
	for i, live := range lv.livevars {
		h := hashbitmap(H0, live) % uint32(tablesize)

		for {
			j := table[h]
			if j < 0 {
				break
			}
			jlive := lv.livevars[j]
			if live.Eq(jlive) {
				remap[i] = j
				continue Outer
			}

			h++
			if h == uint32(tablesize) {
				h = 0
			}
		}

		table[h] = uniq
		remap[i] = uniq
		lv.livevars[uniq] = live
		uniq++
	}

	// We've already reordered lv.livevars[0:uniq]. Clear the
	// pointers later in the array so they can be GC'd.
	tail := lv.livevars[uniq:]
	for i := range tail { // memclr loop pattern
		tail[i] = bvec{}
	}
	lv.livevars = lv.livevars[:uniq]

	// Rewrite PCDATA instructions to use new numbering.
	lv.showlive(nil, lv.livevars[0])
	pos := 1
	lv.stackMapIndex = make(map[*ssa.Value]int)
	for _, b := range lv.f.Blocks {
		for _, v := range b.Values {
			if issafepoint(v) {
				lv.showlive(v, lv.livevars[remap[pos]])
				lv.stackMapIndex[v] = int(remap[pos])
				pos++
			}
		}
	}
}
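// For example (illustrative): if lv.livevars holds the bitmaps A, B, A, B, A
// in safe-point order, compaction keeps only [A, B] and remap becomes
// [0, 1, 0, 1, 0], so every safe point whose map equals A shares PCDATA
// index 0 and every one equal to B shares index 1.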
func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
	if debuglive == 0 || lv.fn.Func.Nname.Sym.Name == "init" || strings.HasPrefix(lv.fn.Func.Nname.Sym.Name, ".") {
		return
	}
	if live.IsEmpty() {
		return
	}

	pos := lv.fn.Func.Nname.Pos
	if v != nil {
		pos = v.Pos
	}

	s := "live at "
	if v == nil {
		s += fmt.Sprintf("entry to %s:", lv.fn.Func.Nname.Sym.Name)
	} else if sym, ok := v.Aux.(*obj.LSym); ok {
		fn := sym.Name
		if pos := strings.Index(fn, "."); pos >= 0 {
			fn = fn[pos+1:]
		}
		s += fmt.Sprintf("call to %s:", fn)
	} else {
		s += "indirect call:"
	}

	for j, n := range lv.vars {
		if live.Get(int32(j)) {
			s += fmt.Sprintf(" %v", n)
		}
	}

	Warnl(pos, s)
}

func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
	started := false
	for i, n := range lv.vars {
		if !live.Get(int32(i)) {
			continue
		}
		if !started {
			if !printed {
				fmt.Printf("\t")
			} else {
				fmt.Printf(" ")
			}
			started = true
			printed = true
			fmt.Printf("%s=", name)
		} else {
			fmt.Printf(",")
		}

		fmt.Printf("%s", n.Sym.Name)
	}
	return printed
}

// printeffect is like printbvec, but for a single variable.
func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
	if !x {
		return printed
	}
	if !printed {
		fmt.Printf("\t")
	} else {
		fmt.Printf(" ")
	}
	fmt.Printf("%s=%s", name, lv.vars[pos].Sym.Name)
	return true
}

// Prints the computed liveness information and inputs, for debugging.
// This format synthesizes the information used during the multiple passes
// into a single presentation.
func livenessprintdebug(lv *Liveness) {
	fmt.Printf("liveness: %s\n", lv.fn.Func.Nname.Sym.Name)

	pcdata := 0
	for i, b := range lv.f.Blocks {
		if i > 0 {
			fmt.Printf("\n")
		}

		// bb#0 pred=1,2 succ=3,4
		fmt.Printf("bb#%d pred=", b.ID)
		for j, pred := range b.Preds {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", pred.Block().ID)
		}
		fmt.Printf(" succ=")
		for j, succ := range b.Succs {
			if j > 0 {
				fmt.Printf(",")
			}
			fmt.Printf("%d", succ.Block().ID)
		}
		fmt.Printf("\n")

		be := lv.blockEffects(b)

		// initial settings
		printed := false
		printed = lv.printbvec(printed, "uevar", be.uevar)
		printed = lv.printbvec(printed, "livein", be.livein)
		if printed {
			fmt.Printf("\n")
		}

		// program listing, with individual effects listed

		if b == lv.f.Entry {
			live := lv.livevars[pcdata]
			fmt.Printf("(%s) function entry\n", linestr(lv.fn.Func.Nname.Pos))
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		for _, v := range b.Values {
			fmt.Printf("(%s) %v\n", linestr(v.Pos), v.LongString())

			if pos, ok := lv.stackMapIndex[v]; ok {
				pcdata = pos
			}

			pos, effect := lv.valueEffects(v)
			printed = false
			printed = lv.printeffect(printed, "uevar", pos, effect&uevar != 0)
			printed = lv.printeffect(printed, "varkill", pos, effect&varkill != 0)
			printed = lv.printeffect(printed, "avarinit", pos, effect&avarinit != 0)
			if printed {
				fmt.Printf("\n")
			}

			if !issafepoint(v) {
				continue
			}

			live := lv.livevars[pcdata]
			fmt.Printf("\tlive=")
			printed = false
			for j, n := range lv.vars {
				if !live.Get(int32(j)) {
					continue
				}
				if printed {
					fmt.Printf(",")
				}
				fmt.Printf("%v", n)
				printed = true
			}
			fmt.Printf("\n")
		}

		// bb bitsets
		fmt.Printf("end\n")
		printed = false
		printed = lv.printbvec(printed, "varkill", be.varkill)
		printed = lv.printbvec(printed, "liveout", be.liveout)
		printed = lv.printbvec(printed, "avarinit", be.avarinit)
		printed = lv.printbvec(printed, "avarinitany", be.avarinitany)
		printed = lv.printbvec(printed, "avarinitall", be.avarinitall)
		if printed {
			fmt.Printf("\n")
		}
	}

	fmt.Printf("\n")
}

func finishgclocals(sym *types.Sym) {
	ls := Linksym(sym)
	ls.Name = fmt.Sprintf("gclocals·%x", md5.Sum(ls.P))
	ls.Set(obj.AttrDuplicateOK, true)
	sv := obj.SymVer{Name: ls.Name, Version: 0}
	ls2, ok := Ctxt.Hash[sv]
	if ok {
		sym.Lsym = ls2
	} else {
		Ctxt.Hash[sv] = ls
		ggloblsym(sym, int32(ls.Size), obj.RODATA)
	}
}
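// Because the symbol name embeds the MD5 of the symbol's contents
// ("gclocals·%x"), identical liveness maps produced by different functions
// collapse to a single dupok symbol: the Ctxt.Hash lookup in finishgclocals
// reuses the previously defined symbol instead of emitting a duplicate.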
// Dumps a slice of bitmaps to a symbol as a sequence of uint32 values. The
// first word dumped is the total number of bitmaps. The second word is the
// length of the bitmaps. All bitmaps are assumed to be of equal length. The
// remaining bytes are the raw bitmaps.
func livenessemit(lv *Liveness, argssym, livesym *types.Sym) {
	args := bvalloc(argswords(lv))
	aoff := duint32(argssym, 0, uint32(len(lv.livevars))) // number of bitmaps
	aoff = duint32(argssym, aoff, uint32(args.n))         // number of bits in each bitmap

	locals := bvalloc(localswords(lv))
	loff := duint32(livesym, 0, uint32(len(lv.livevars))) // number of bitmaps
	loff = duint32(livesym, loff, uint32(locals.n))       // number of bits in each bitmap

	for _, live := range lv.livevars {
		args.Clear()
		locals.Clear()

		onebitlivepointermap(lv, live, lv.vars, args, locals)

		aoff = dbvec(argssym, aoff, args)
		loff = dbvec(livesym, loff, locals)
	}

	finishgclocals(livesym)
	finishgclocals(argssym)
}

// Entry point for liveness analysis. Solves for the liveness of
// pointer variables in the function and emits a runtime data
// structure read by the garbage collector.
// Returns a map from GC safe points to their corresponding stack map index.
func liveness(e *ssafn, f *ssa.Func, argssym, livesym *types.Sym) map[*ssa.Value]int {
	// Construct the global liveness state.
	vars := getvariables(e.curfn)
	lv := newliveness(e.curfn, f, vars, e.stkptrsize)

	// Run the dataflow framework.
	livenessprologue(lv)
	livenesssolve(lv)
	livenessepilogue(lv)
	livenesscompact(lv)
	if debuglive >= 2 {
		livenessprintdebug(lv)
	}

	// Emit the live pointer map data structures.
	livenessemit(lv, argssym, livesym)
	return lv.stackMapIndex
}
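// Layout sketch (illustrative, not emitted verbatim): for a function with
// three compacted stack maps and two tracked argument words, argssym begins
// with the uint32 words 3 (number of bitmaps) and 2 (bits per bitmap),
// followed by the raw bits of each compacted bitmap as written by dbvec.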