// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"github.com/bir3/gocompiler/src/cmd/compile/internal/ir"
	"github.com/bir3/gocompiler/src/cmd/compile/internal/types"
	"github.com/bir3/gocompiler/src/cmd/internal/src"
	"fmt"
	"math"
	"sort"
	"strings"
)

// A Value represents a value in the SSA representation of the program.
// The ID and Type fields must not be modified. The remainder may be modified
// if they preserve the value of the Value (e.g. changing a (mul 2 x) to an (add x x)).
type Value struct {
	// A unique identifier for the value. For performance we allocate these IDs
	// densely starting at 1. There is no guarantee that there won't be occasional holes, though.
	ID ID

	// The operation that computes this value. See op.go.
	Op Op

	// The type of this value. Normally this will be a Go type, but there
	// are a few other pseudo-types, see ../types/type.go.
	Type *types.Type

	// Auxiliary info for this value. The type of this information depends on the opcode and type.
	// AuxInt is used for integer values, Aux is used for other values.
	// Floats are stored in AuxInt using math.Float64bits(f).
	// Unused portions of AuxInt are filled by sign-extending the used portion,
	// even if the represented value is unsigned.
	// Users of AuxInt which interpret AuxInt as unsigned (e.g. shifts) must be careful.
	// Use Value.AuxUnsigned to get the zero-extended value of AuxInt.
	AuxInt int64
	Aux    Aux

	// Arguments of this value
	Args []*Value

	// Containing basic block
	Block *Block

	// Source position
	Pos src.XPos

	// Use count. Each appearance in Value.Args and Block.Controls counts once.
	Uses int32

	// wasm: Value stays on the WebAssembly stack. This value will not get a "register" (WebAssembly variable)
	// nor a slot on Go stack, and the generation of this value is delayed to its use time.
	OnWasmStack bool

	// Is this value in the per-function constant cache? If so, remove from cache before changing it or recycling it.
	InCache bool

	// Storage for the first three args, used to avoid a separate
	// heap allocation for the Args slice in the common case.
	argstorage [3]*Value
}

// Examples:
//
//	Opcode   aux     args
//	OpAdd    nil     2
//	OpConst  string  0     string constant
//	OpConst  int64   0     int64 constant
//	OpAddcq  int64   1     amd64 op: v = arg[0] + constant

// short form print. Just v#.
func (v *Value) String() string {
	if v == nil {
		return "nil" // should never happen, but not panicking helps with debugging
	}
	return fmt.Sprintf("v%d", v.ID)
}

// AuxInt8 returns v.AuxInt as an int8, after checking that v's opcode
// actually carries an int8 in AuxInt.
func (v *Value) AuxInt8() int8 {
	// auxNameOffsetInt8 ops also store an int8 in AuxInt (alongside a name/offset in Aux).
	if opcodeTable[v.Op].auxType != auxInt8 && opcodeTable[v.Op].auxType != auxNameOffsetInt8 {
		v.Fatalf("op %s doesn't have an int8 aux field", v.Op)
	}
	return int8(v.AuxInt)
}

// AuxInt16 returns v.AuxInt as an int16, after checking that v's opcode
// actually carries an int16 in AuxInt.
func (v *Value) AuxInt16() int16 {
	if opcodeTable[v.Op].auxType != auxInt16 {
		v.Fatalf("op %s doesn't have an int16 aux field", v.Op)
	}
	return int16(v.AuxInt)
}

// AuxInt32 returns v.AuxInt as an int32, after checking that v's opcode
// actually carries an int32 in AuxInt.
func (v *Value) AuxInt32() int32 {
	if opcodeTable[v.Op].auxType != auxInt32 {
		v.Fatalf("op %s doesn't have an int32 aux field", v.Op)
	}
	return int32(v.AuxInt)
}

// AuxUnsigned returns v.AuxInt as an unsigned value for OpConst*.
// v.AuxInt is always sign-extended to 64 bits, even if the
// represented value is unsigned. This undoes that sign extension.
104 func (v *Value) AuxUnsigned() uint64 { 105 c := v.AuxInt 106 switch v.Op { 107 case OpConst64: 108 return uint64(c) 109 case OpConst32: 110 return uint64(uint32(c)) 111 case OpConst16: 112 return uint64(uint16(c)) 113 case OpConst8: 114 return uint64(uint8(c)) 115 } 116 v.Fatalf("op %s isn't OpConst*", v.Op) 117 return 0 118 } 119 120 func (v *Value) AuxFloat() float64 { 121 if opcodeTable[v.Op].auxType != auxFloat32 && opcodeTable[v.Op].auxType != auxFloat64 { 122 v.Fatalf("op %s doesn't have a float aux field", v.Op) 123 } 124 return math.Float64frombits(uint64(v.AuxInt)) 125 } 126 func (v *Value) AuxValAndOff() ValAndOff { 127 if opcodeTable[v.Op].auxType != auxSymValAndOff { 128 v.Fatalf("op %s doesn't have a ValAndOff aux field", v.Op) 129 } 130 return ValAndOff(v.AuxInt) 131 } 132 133 func (v *Value) AuxArm64BitField() arm64BitField { 134 if opcodeTable[v.Op].auxType != auxARM64BitField { 135 v.Fatalf("op %s doesn't have a ValAndOff aux field", v.Op) 136 } 137 return arm64BitField(v.AuxInt) 138 } 139 140 // long form print. v# = opcode <type> [aux] args [: reg] (names) 141 func (v *Value) LongString() string { 142 if v == nil { 143 return "<NIL VALUE>" 144 } 145 s := fmt.Sprintf("v%d = %s", v.ID, v.Op) 146 s += " <" + v.Type.String() + ">" 147 s += v.auxString() 148 for _, a := range v.Args { 149 s += fmt.Sprintf(" %v", a) 150 } 151 if v.Block == nil { 152 return s 153 } 154 r := v.Block.Func.RegAlloc 155 if int(v.ID) < len(r) && r[v.ID] != nil { 156 s += " : " + r[v.ID].String() 157 } 158 if reg := v.Block.Func.tempRegs[v.ID]; reg != nil { 159 s += " tmp=" + reg.String() 160 } 161 var names []string 162 for name, values := range v.Block.Func.NamedValues { 163 for _, value := range values { 164 if value == v { 165 names = append(names, name.String()) 166 break // drop duplicates. 167 } 168 } 169 } 170 if len(names) != 0 { 171 sort.Strings(names) // Otherwise a source of variation in debugging output. 
172 s += " (" + strings.Join(names, ", ") + ")" 173 } 174 return s 175 } 176 177 func (v *Value) auxString() string { 178 switch opcodeTable[v.Op].auxType { 179 case auxBool: 180 if v.AuxInt == 0 { 181 return " [false]" 182 } else { 183 return " [true]" 184 } 185 case auxInt8: 186 return fmt.Sprintf(" [%d]", v.AuxInt8()) 187 case auxInt16: 188 return fmt.Sprintf(" [%d]", v.AuxInt16()) 189 case auxInt32: 190 return fmt.Sprintf(" [%d]", v.AuxInt32()) 191 case auxInt64, auxInt128: 192 return fmt.Sprintf(" [%d]", v.AuxInt) 193 case auxARM64BitField: 194 lsb := v.AuxArm64BitField().getARM64BFlsb() 195 width := v.AuxArm64BitField().getARM64BFwidth() 196 return fmt.Sprintf(" [lsb=%d,width=%d]", lsb, width) 197 case auxFloat32, auxFloat64: 198 return fmt.Sprintf(" [%g]", v.AuxFloat()) 199 case auxString: 200 return fmt.Sprintf(" {%q}", v.Aux) 201 case auxSym, auxCall, auxTyp: 202 if v.Aux != nil { 203 return fmt.Sprintf(" {%v}", v.Aux) 204 } 205 case auxSymOff, auxCallOff, auxTypSize, auxNameOffsetInt8: 206 s := "" 207 if v.Aux != nil { 208 s = fmt.Sprintf(" {%v}", v.Aux) 209 } 210 if v.AuxInt != 0 || opcodeTable[v.Op].auxType == auxNameOffsetInt8 { 211 s += fmt.Sprintf(" [%v]", v.AuxInt) 212 } 213 return s 214 case auxSymValAndOff: 215 s := "" 216 if v.Aux != nil { 217 s = fmt.Sprintf(" {%v}", v.Aux) 218 } 219 return s + fmt.Sprintf(" [%s]", v.AuxValAndOff()) 220 case auxCCop: 221 return fmt.Sprintf(" {%s}", Op(v.AuxInt)) 222 case auxS390XCCMask, auxS390XRotateParams: 223 return fmt.Sprintf(" {%v}", v.Aux) 224 case auxFlagConstant: 225 return fmt.Sprintf("[%s]", flagConstant(v.AuxInt)) 226 } 227 return "" 228 } 229 230 // If/when midstack inlining is enabled (-l=4), the compiler gets both larger and slower. 231 // Not-inlining this method is a help (*Value.reset and *Block.NewValue0 are similar). 
//
//go:noinline
func (v *Value) AddArg(w *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w)
	w.Uses++
}

// AddArg2 appends w1 and w2 to v's arguments, incrementing their use counts.
//
//go:noinline
func (v *Value) AddArg2(w1, w2 *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w1, w2)
	w1.Uses++
	w2.Uses++
}

// AddArg3 appends w1-w3 to v's arguments, incrementing their use counts.
//
//go:noinline
func (v *Value) AddArg3(w1, w2, w3 *Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, w1, w2, w3)
	w1.Uses++
	w2.Uses++
	w3.Uses++
}

// AddArg4 appends w1-w4 to v's arguments, incrementing their use counts.
// Note: no resetArgs here — argstorage holds at most 3 args, so with 4+
// arguments append on a nil slice allocates directly anyway.
//
//go:noinline
func (v *Value) AddArg4(w1, w2, w3, w4 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
}

// AddArg5 appends w1-w5 to v's arguments, incrementing their use counts.
//
//go:noinline
func (v *Value) AddArg5(w1, w2, w3, w4, w5 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4, w5)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
	w5.Uses++
}

// AddArg6 appends w1-w6 to v's arguments, incrementing their use counts.
//
//go:noinline
func (v *Value) AddArg6(w1, w2, w3, w4, w5, w6 *Value) {
	v.Args = append(v.Args, w1, w2, w3, w4, w5, w6)
	w1.Uses++
	w2.Uses++
	w3.Uses++
	w4.Uses++
	w5.Uses++
	w6.Uses++
}

// AddArgs appends all of a to v's arguments, incrementing each use count.
func (v *Value) AddArgs(a ...*Value) {
	if v.Args == nil {
		v.resetArgs() // use argstorage
	}
	v.Args = append(v.Args, a...)
	for _, x := range a {
		x.Uses++
	}
}

// SetArg replaces v's i'th argument with w, keeping use counts consistent.
func (v *Value) SetArg(i int, w *Value) {
	v.Args[i].Uses--
	v.Args[i] = w
	w.Uses++
}

// SetArgs1 replaces all of v's arguments with the single argument a.
func (v *Value) SetArgs1(a *Value) {
	v.resetArgs()
	v.AddArg(a)
}

// SetArgs2 replaces all of v's arguments with a and b.
func (v *Value) SetArgs2(a, b *Value) {
	v.resetArgs()
	v.AddArg(a)
	v.AddArg(b)
}

// SetArgs3 replaces all of v's arguments with a, b, and c.
func (v *Value) SetArgs3(a, b, c *Value) {
	v.resetArgs()
	v.AddArg(a)
	v.AddArg(b)
	v.AddArg(c)
}

// resetArgs drops all of v's arguments, decrementing their use counts,
// and points v.Args back at the inline argstorage array (also clearing
// stale pointers there so the GC can reclaim the old arguments).
func (v *Value) resetArgs() {
	for _, a := range v.Args {
		a.Uses--
	}
	v.argstorage[0] = nil
	v.argstorage[1] = nil
	v.argstorage[2] = nil
	v.Args = v.argstorage[:0]
}

// reset is called from most rewrite rules.
// Allowing it to be inlined increases the size
// of cmd/compile by almost 10%, and slows it down.
//
//go:noinline
func (v *Value) reset(op Op) {
	// Must leave the constant cache before mutating a cached value.
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = op
	v.resetArgs()
	v.AuxInt = 0
	v.Aux = nil
}

// invalidateRecursively marks a value as invalid (unused)
// and after decrementing reference counts on its Args, also
// recursively invalidates any of those whose use
// count goes to zero. It returns whether any of the
// invalidated values was marked with IsStmt.
//
// BEWARE of doing this *before* you've applied intended
// updates to SSA.
func (v *Value) invalidateRecursively() bool {
	lostStmt := v.Pos.IsStmt() == src.PosIsStmt
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = OpInvalid

	for _, a := range v.Args {
		a.Uses--
		if a.Uses == 0 {
			lost := a.invalidateRecursively()
			lostStmt = lost || lostStmt
		}
	}

	v.argstorage[0] = nil
	v.argstorage[1] = nil
	v.argstorage[2] = nil
	v.Args = v.argstorage[:0]

	v.AuxInt = 0
	v.Aux = nil
	return lostStmt
}

// copyOf is called from rewrite rules.
// It modifies v to be (Copy a).
//
//go:noinline
func (v *Value) copyOf(a *Value) {
	if v == a {
		return
	}
	// Must leave the constant cache before mutating a cached value.
	if v.InCache {
		v.Block.Func.unCache(v)
	}
	v.Op = OpCopy
	v.resetArgs()
	v.AddArg(a)
	v.AuxInt = 0
	v.Aux = nil
	v.Type = a.Type
}

// copyInto makes a new value identical to v and adds it to the end of b.
// unlike copyIntoWithXPos this does not check for v.Pos being a statement.
func (v *Value) copyInto(b *Block) *Value {
	c := b.NewValue0(v.Pos.WithNotStmt(), v.Op, v.Type) // Lose the position, this causes line number churn otherwise.
	c.Aux = v.Aux
	c.AuxInt = v.AuxInt
	c.AddArgs(v.Args...)
	// Copying a value with a memory argument would duplicate a memory state,
	// which is never valid.
	for _, a := range v.Args {
		if a.Type.IsMemory() {
			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
		}
	}
	return c
}

// copyIntoWithXPos makes a new value identical to v and adds it to the end of b.
// The supplied position is used as the position of the new value.
// Because this is used for rematerialization, check for case that (rematerialized)
// input to value with position 'pos' carried a statement mark, and that the supplied
// position (of the instruction using the rematerialized value) is not marked, and
// preserve that mark if its line matches the supplied position.
func (v *Value) copyIntoWithXPos(b *Block, pos src.XPos) *Value {
	if v.Pos.IsStmt() == src.PosIsStmt && pos.IsStmt() != src.PosIsStmt && v.Pos.SameFileAndLine(pos) {
		pos = pos.WithIsStmt()
	}
	c := b.NewValue0(pos, v.Op, v.Type)
	c.Aux = v.Aux
	c.AuxInt = v.AuxInt
	c.AddArgs(v.Args...)
	// Copying a value with a memory argument would duplicate a memory state,
	// which is never valid.
	for _, a := range v.Args {
		if a.Type.IsMemory() {
			v.Fatalf("can't move a value with a memory arg %s", v.LongString())
		}
	}
	return c
}

// Logf logs a message via v's containing function's logger.
func (v *Value) Logf(msg string, args ...interface{}) { v.Block.Logf(msg, args...) }

// Log reports whether logging is enabled for v's containing function.
func (v *Value) Log() bool { return v.Block.Log() }

// Fatalf reports a fatal compiler error at v's source position.
func (v *Value) Fatalf(msg string, args ...interface{}) {
	v.Block.Func.fe.Fatalf(v.Pos, msg, args...)
}

// isGenericIntConst reports whether v is a generic integer constant.
func (v *Value) isGenericIntConst() bool {
	return v != nil && (v.Op == OpConst64 || v.Op == OpConst32 || v.Op == OpConst16 || v.Op == OpConst8)
}

// ResultReg returns the result register assigned to v, in cmd/internal/obj/$ARCH numbering.
// It is similar to Reg and Reg0, except that it is usable interchangeably for all Value Ops.
// If you know v.Op, using Reg or Reg0 (as appropriate) will be more efficient.
func (v *Value) ResultReg() int16 {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil reg for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	// Multi-output values record a LocPair; the result register is its first slot.
	if pair, ok := reg.(LocPair); ok {
		reg = pair[0]
	}
	if reg == nil {
		v.Fatalf("nil reg0 for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg returns the register assigned to v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg() int16 {
	reg := v.Block.Func.RegAlloc[v.ID]
	if reg == nil {
		v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg0 returns the register assigned to the first output of v, in cmd/internal/obj/$ARCH numbering.
func (v *Value) Reg0() int16 {
	reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[0]
	if reg == nil {
		v.Fatalf("nil first register for value: %s\n%s\n", v.LongString(), v.Block.Func)
	}
	return reg.(*Register).objNum
}

// Reg1 returns the register assigned to the second output of v, in cmd/internal/obj/$ARCH numbering.
484 func (v *Value) Reg1() int16 { 485 reg := v.Block.Func.RegAlloc[v.ID].(LocPair)[1] 486 if reg == nil { 487 v.Fatalf("nil second register for value: %s\n%s\n", v.LongString(), v.Block.Func) 488 } 489 return reg.(*Register).objNum 490 } 491 492 // RegTmp returns the temporary register assigned to v, in cmd/internal/obj/$ARCH numbering. 493 func (v *Value) RegTmp() int16 { 494 reg := v.Block.Func.tempRegs[v.ID] 495 if reg == nil { 496 v.Fatalf("nil tmp register for value: %s\n%s\n", v.LongString(), v.Block.Func) 497 } 498 return reg.objNum 499 } 500 501 func (v *Value) RegName() string { 502 reg := v.Block.Func.RegAlloc[v.ID] 503 if reg == nil { 504 v.Fatalf("nil register for value: %s\n%s\n", v.LongString(), v.Block.Func) 505 } 506 return reg.(*Register).name 507 } 508 509 // MemoryArg returns the memory argument for the Value. 510 // The returned value, if non-nil, will be memory-typed (or a tuple with a memory-typed second part). 511 // Otherwise, nil is returned. 512 func (v *Value) MemoryArg() *Value { 513 if v.Op == OpPhi { 514 v.Fatalf("MemoryArg on Phi") 515 } 516 na := len(v.Args) 517 if na == 0 { 518 return nil 519 } 520 if m := v.Args[na-1]; m.Type.IsMemory() { 521 return m 522 } 523 return nil 524 } 525 526 // LackingPos indicates whether v is a value that is unlikely to have a correct 527 // position assigned to it. Ignoring such values leads to more user-friendly positions 528 // assigned to nearby values and the blocks containing them. 529 func (v *Value) LackingPos() bool { 530 // The exact definition of LackingPos is somewhat heuristically defined and may change 531 // in the future, for example if some of these operations are generated more carefully 532 // with respect to their source position. 
533 return v.Op == OpVarDef || v.Op == OpVarLive || v.Op == OpPhi || 534 (v.Op == OpFwdRef || v.Op == OpCopy) && v.Type == types.TypeMem 535 } 536 537 // removeable reports whether the value v can be removed from the SSA graph entirely 538 // if its use count drops to 0. 539 func (v *Value) removeable() bool { 540 if v.Type.IsVoid() { 541 // Void ops, like nil pointer checks, must stay. 542 return false 543 } 544 if v.Type.IsMemory() { 545 // We don't need to preserve all memory ops, but we do need 546 // to keep calls at least (because they might have 547 // synchronization operations we can't see). 548 return false 549 } 550 if v.Op.HasSideEffects() { 551 // These are mostly synchronization operations. 552 return false 553 } 554 return true 555 } 556 557 // TODO(mdempsky): Shouldn't be necessary; see discussion at golang.org/cl/275756 558 func (*Value) CanBeAnSSAAux() {} 559 560 // AutoVar returns a *Name and int64 representing the auto variable and offset within it 561 // where v should be spilled. 562 func AutoVar(v *Value) (*ir.Name, int64) { 563 if loc, ok := v.Block.Func.RegAlloc[v.ID].(LocalSlot); ok { 564 if v.Type.Size() > loc.Type.Size() { 565 v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type) 566 } 567 return loc.N, loc.Off 568 } 569 // Assume it is a register, return its spill slot, which needs to be live 570 nameOff := v.Aux.(*AuxNameOffset) 571 return nameOff.Name, nameOff.Offset 572 }