github.com/go-asm/go@v1.21.1-0.20240213172139-40c5ead50c48/cmd/compile/liveness/arg.go

// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package liveness

import (
	"fmt"

	"github.com/go-asm/go/abi"

	"github.com/go-asm/go/cmd/compile/base"
	"github.com/go-asm/go/cmd/compile/bitvec"
	"github.com/go-asm/go/cmd/compile/ir"
	"github.com/go-asm/go/cmd/compile/objw"
	"github.com/go-asm/go/cmd/compile/ssa"
	"github.com/go-asm/go/cmd/obj"
)

// Argument liveness tracking.
//
// For arguments passed in registers, this file tracks if their spill slots
// are live for runtime traceback. An argument spill slot is live at a PC
// if we know that an actual value has been stored into it at or before this
// point.
//
// Stack args are always live and not tracked in this code. Stack args are
// laid out before register spill slots, so we emit the smallest offset that
// needs tracking. Slots before that offset are always live. That offset is
// usually the offset of the first spill slot. But if the first spill slot is
// always live (e.g. if it is address-taken), it will be the offset of a later
// one.
//
// The liveness information is emitted as a FUNCDATA and a PCDATA.
//
// FUNCDATA format:
// - start (smallest) offset that needs tracking (1 byte)
// - a list of bitmaps.
//   In a bitmap bit i is set if the i-th spill slot is live.
//
// At a PC where the liveness info changes, a PCDATA indicates the
// byte offset of the liveness map in the FUNCDATA. PCDATA -1 is a
// special case indicating all slots are live (for binary size
// saving).

const allLiveIdx = -1

// name and offset
type nameOff struct {
	n   *ir.Name
	off int64
}

func (a nameOff) FrameOffset() int64 { return a.n.FrameOffset() + a.off }
func (a nameOff) String() string     { return fmt.Sprintf("%v+%d", a.n, a.off) }

type blockArgEffects struct {
	livein  bitvec.BitVec // variables live at block entry
	liveout bitvec.BitVec // variables live at block exit
}

type argLiveness struct {
	fn   *ir.Func
	f    *ssa.Func
	args []nameOff         // name and offset of spill slots
	idx  map[nameOff]int32 // index in args

	be []blockArgEffects // indexed by block ID

	bvset bvecSet // Set of liveness bitmaps, used for uniquifying.

	// Liveness map indices at each Value (where it changes) and Block entry.
	// During the computation the indices are temporarily indices into bvset.
	// At the end they will be indices (offsets) into the output funcdata
	// (changed in (*argLiveness).emit).
	blockIdx map[ssa.ID]int
	valueIdx map[ssa.ID]int
}

// ArgLiveness computes the liveness information of register argument spill slots.
// An argument's spill slot is "live" if we know it contains a meaningful value,
// that is, we have stored the register value to it.
// Returns the liveness map indices at each Block entry and at each Value (where
// it changes).
func ArgLiveness(fn *ir.Func, f *ssa.Func, pp *objw.Progs) (blockIdx, valueIdx map[ssa.ID]int) {
	if f.OwnAux.ABIInfo().InRegistersUsed() == 0 || base.Flag.N != 0 {
		// No register args. Nothing to emit.
		// Or if -N is used we spill everything upfront so it is always live.
		return nil, nil
	}

	lv := &argLiveness{
		fn:       fn,
		f:        f,
		idx:      make(map[nameOff]int32),
		be:       make([]blockArgEffects, f.NumBlocks()),
		blockIdx: make(map[ssa.ID]int),
		valueIdx: make(map[ssa.ID]int),
	}
	// Gather all register arg spill slots.
	for _, a := range f.OwnAux.ABIInfo().InParams() {
		n := a.Name
		if n == nil || len(a.Registers) == 0 {
			continue
		}
		_, offs := a.RegisterTypesAndOffsets()
		for _, off := range offs {
			if n.FrameOffset()+off > 0xff {
				// We only print a limited number of args, with stack
				// offsets no larger than 255.
				continue
			}
			lv.args = append(lv.args, nameOff{n, off})
		}
	}
	if len(lv.args) > 10 {
		lv.args = lv.args[:10] // We print no more than 10 args.
	}

	// We spill address-taken or non-SSA-able values upfront, so they are always live.
	alwaysLive := func(n *ir.Name) bool { return n.Addrtaken() || !ssa.CanSSA(n.Type()) }

	// We'll emit the smallest offset for the slots that need liveness info.
	// No need to include a slot with a lower offset if it is always live.
	for len(lv.args) > 0 && alwaysLive(lv.args[0].n) {
		lv.args = lv.args[1:]
	}
	if len(lv.args) == 0 {
		return // everything is always live
	}

	for i, a := range lv.args {
		lv.idx[a] = int32(i)
	}

	nargs := int32(len(lv.args))
	bulk := bitvec.NewBulk(nargs, int32(len(f.Blocks)*2))
	for _, b := range f.Blocks {
		be := &lv.be[b.ID]
		be.livein = bulk.Next()
		be.liveout = bulk.Next()

		// initialize to all 1s, so we can AND them
		be.livein.Not()
		be.liveout.Not()
	}

	entrybe := &lv.be[f.Entry.ID]
	entrybe.livein.Clear()
	for i, a := range lv.args {
		if alwaysLive(a.n) {
			entrybe.livein.Set(int32(i))
		}
	}

	// Visit blocks in reverse-postorder, compute block effects.
	po := f.Postorder()
	for i := len(po) - 1; i >= 0; i-- {
		b := po[i]
		be := &lv.be[b.ID]

		// A slot is live at block entry if it is live in all predecessors.
		for _, pred := range b.Preds {
			pb := pred.Block()
			be.livein.And(be.livein, lv.be[pb.ID].liveout)
		}

		be.liveout.Copy(be.livein)
		for _, v := range b.Values {
			lv.valueEffect(v, be.liveout)
		}
	}

	// Coalesce identical live vectors. Compute liveness indices at each PC
	// where it changes.
	live := bitvec.New(nargs)
	addToSet := func(bv bitvec.BitVec) (int, bool) {
		if bv.Count() == int(nargs) { // special case for all live
			return allLiveIdx, false
		}
		return lv.bvset.add(bv)
	}
	for _, b := range lv.f.Blocks {
		be := &lv.be[b.ID]
		lv.blockIdx[b.ID], _ = addToSet(be.livein)

		live.Copy(be.livein)
		var lastv *ssa.Value
		for i, v := range b.Values {
			if lv.valueEffect(v, live) {
				// Record that the liveness changes, but don't emit a map now.
				// For a sequence of StoreRegs we only need to emit one map,
				// at the last one.
				lastv = v
			}
			if lastv != nil && (mayFault(v) || i == len(b.Values)-1) {
				// Emit the liveness map if it may fault or at the end of
				// the block. We may need a traceback if the instruction
				// may cause a panic.
				var added bool
				lv.valueIdx[lastv.ID], added = addToSet(live)
				if added {
					// live is added to bvset and we cannot modify it now.
					// Make a copy.
					t := live
					live = bitvec.New(nargs)
					live.Copy(t)
				}
				lastv = nil
			}
		}

		// Sanity check.
		if !live.Eq(be.liveout) {
			panic("wrong arg liveness map at block end")
		}
	}

	// Emit funcdata symbol, update indices to offsets in the symbol data.
	lsym := lv.emit()
	fn.LSym.Func().ArgLiveInfo = lsym

	//lv.print()

	p := pp.Prog(obj.AFUNCDATA)
	p.From.SetConst(abi.FUNCDATA_ArgLiveInfo)
	p.To.Type = obj.TYPE_MEM
	p.To.Name = obj.NAME_EXTERN
	p.To.Sym = lsym

	return lv.blockIdx, lv.valueIdx
}

// valueEffect applies the effect of v to live, and reports whether live changed.
func (lv *argLiveness) valueEffect(v *ssa.Value, live bitvec.BitVec) bool {
	if v.Op != ssa.OpStoreReg { // TODO: include other store instructions?
		return false
	}
	n, off := ssa.AutoVar(v)
	if n.Class != ir.PPARAM {
		return false
	}
	i, ok := lv.idx[nameOff{n, off}]
	if !ok || live.Get(i) {
		return false
	}
	live.Set(i)
	return true
}

func mayFault(v *ssa.Value) bool {
	switch v.Op {
	case ssa.OpLoadReg, ssa.OpStoreReg, ssa.OpCopy, ssa.OpPhi,
		ssa.OpVarDef, ssa.OpVarLive, ssa.OpKeepAlive,
		ssa.OpSelect0, ssa.OpSelect1, ssa.OpSelectN, ssa.OpMakeResult,
		ssa.OpConvert, ssa.OpInlMark, ssa.OpGetG:
		return false
	}
	if len(v.Args) == 0 {
		return false // assume constant op cannot fault
	}
	return true // conservatively assume all other ops could fault
}

func (lv *argLiveness) print() {
	fmt.Println("argument liveness:", lv.f.Name)
	live := bitvec.New(int32(len(lv.args)))
	for _, b := range lv.f.Blocks {
		be := &lv.be[b.ID]

		fmt.Printf("%v: live in: ", b)
		lv.printLivenessVec(be.livein)
		if idx, ok := lv.blockIdx[b.ID]; ok {
			fmt.Printf(" #%d", idx)
		}
		fmt.Println()

		for _, v := range b.Values {
			if lv.valueEffect(v, live) {
				fmt.Printf(" %v: ", v)
				lv.printLivenessVec(live)
				if idx, ok := lv.valueIdx[v.ID]; ok {
					fmt.Printf(" #%d", idx)
				}
				fmt.Println()
			}
		}

		fmt.Printf("%v: live out: ", b)
		lv.printLivenessVec(be.liveout)
		fmt.Println()
	}
	fmt.Println("liveness maps data:", lv.fn.LSym.Func().ArgLiveInfo.P)
}

func (lv *argLiveness) printLivenessVec(bv bitvec.BitVec) {
	for i, a := range lv.args {
		if bv.Get(int32(i)) {
			fmt.Printf("%v ", a)
		}
	}
}

func (lv *argLiveness) emit() *obj.LSym {
	livenessMaps := lv.bvset.extractUnique()

	// stack offsets of register arg spill slots
	argOffsets := make([]uint8, len(lv.args))
	for i, a := range lv.args {
		off := a.FrameOffset()
		if off > 0xff {
			panic("offset too large")
		}
		argOffsets[i] = uint8(off)
	}

	idx2off := make([]int, len(livenessMaps))

	lsym := base.Ctxt.Lookup(lv.fn.LSym.Name + ".argliveinfo")
	lsym.Set(obj.AttrContentAddressable, true)

	off := objw.Uint8(lsym, 0, argOffsets[0]) // smallest offset that needs liveness info.
	for idx, live := range livenessMaps {
		idx2off[idx] = off
		off = objw.BitVec(lsym, off, live)
	}

	// Update liveness indices to offsets.
	for i, x := range lv.blockIdx {
		if x != allLiveIdx {
			lv.blockIdx[i] = idx2off[x]
		}
	}
	for i, x := range lv.valueIdx {
		if x != allLiveIdx {
			lv.valueIdx[i] = idx2off[x]
		}
	}

	return lsym
}
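
For readers inspecting the emitted ".argliveinfo" symbol, the standalone sketch below shows one way to interpret the FUNCDATA layout described in the file header comment: one start-offset byte followed by packed per-PC bitmaps, with the PCDATA value giving the byte offset of the bitmap for a PC (or -1 meaning all slots are live). It is illustrative only and not part of this package; the names data, pcdataVal, and nslots are hypothetical inputs standing in for the raw symbol bytes, the PCDATA_ArgLiveIndex value at a PC, and the number of tracked spill slots. It also ignores the start-offset/always-live rule, which a real consumer would apply to slots below data[0].

// argliveinfo_sketch.go: a minimal, assumption-laden decoder for the
// FUNCDATA format sketched above. Not part of the compiler.
package main

import "fmt"

const allLive = -1 // matches the PCDATA special case for "all slots live"

// liveSlots reports, for each tracked spill slot, whether its spill slot is
// live at a PC whose PCDATA_ArgLiveIndex value is pcdataVal.
func liveSlots(data []byte, pcdataVal int, nslots int) []bool {
	live := make([]bool, nslots)
	if pcdataVal == allLive {
		for i := range live {
			live[i] = true
		}
		return live
	}
	// data[0] is the smallest frame offset that needs tracking; the bitmap
	// for this PC starts at byte offset pcdataVal, with bit i set if the
	// i-th tracked slot is live.
	bitmap := data[pcdataVal:]
	for i := 0; i < nslots; i++ {
		live[i] = bitmap[i/8]&(1<<(i%8)) != 0
	}
	return live
}

func main() {
	// Hypothetical symbol contents: start offset 8, then two one-byte
	// bitmaps for 3 tracked slots ({slot0} and {slot0, slot2}).
	data := []byte{8, 0b001, 0b101}
	fmt.Println(liveSlots(data, 1, 3))       // [true false false]
	fmt.Println(liveSlots(data, 2, 3))       // [true false true]
	fmt.Println(liveSlots(data, allLive, 3)) // [true true true]
}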