github.com/tetratelabs/wazero@v1.7.1/internal/engine/wazevo/backend/isa/amd64/machine_regalloc.go (about)

     1  package amd64
     2  
     3  import (
     4  	"github.com/tetratelabs/wazero/internal/engine/wazevo/backend"
     5  	"github.com/tetratelabs/wazero/internal/engine/wazevo/backend/regalloc"
     6  	"github.com/tetratelabs/wazero/internal/engine/wazevo/ssa"
     7  )
     8  
     9  // InsertMoveBefore implements backend.RegAllocFunctionMachine.
    10  func (m *machine) InsertMoveBefore(dst, src regalloc.VReg, instr *instruction) {
    11  	typ := src.RegType()
    12  	if typ != dst.RegType() {
    13  		panic("BUG: src and dst must have the same type")
    14  	}
    15  
    16  	mov := m.allocateInstr()
    17  	if typ == regalloc.RegTypeInt {
    18  		mov.asMovRR(src, dst, true)
    19  	} else {
    20  		mov.asXmmUnaryRmR(sseOpcodeMovdqu, newOperandReg(src), dst)
    21  	}
    22  
    23  	cur := instr.prev
    24  	prevNext := cur.next
    25  	cur = linkInstr(cur, mov)
    26  	linkInstr(cur, prevNext)
    27  }
    28  
    29  // InsertStoreRegisterAt implements backend.RegAllocFunctionMachine.
    30  func (m *machine) InsertStoreRegisterAt(v regalloc.VReg, instr *instruction, after bool) *instruction {
    31  	if !v.IsRealReg() {
    32  		panic("BUG: VReg must be backed by real reg to be stored")
    33  	}
    34  
    35  	typ := m.c.TypeOf(v)
    36  
    37  	var prevNext, cur *instruction
    38  	if after {
    39  		cur, prevNext = instr, instr.next
    40  	} else {
    41  		cur, prevNext = instr.prev, instr
    42  	}
    43  
    44  	offsetFromSP := m.getVRegSpillSlotOffsetFromSP(v.ID(), typ.Size())
    45  	store := m.allocateInstr()
    46  	mem := newOperandMem(m.newAmodeImmReg(uint32(offsetFromSP), rspVReg))
    47  	switch typ {
    48  	case ssa.TypeI32:
    49  		store.asMovRM(v, mem, 4)
    50  	case ssa.TypeI64:
    51  		store.asMovRM(v, mem, 8)
    52  	case ssa.TypeF32:
    53  		store.asXmmMovRM(sseOpcodeMovss, v, mem)
    54  	case ssa.TypeF64:
    55  		store.asXmmMovRM(sseOpcodeMovsd, v, mem)
    56  	case ssa.TypeV128:
    57  		store.asXmmMovRM(sseOpcodeMovdqu, v, mem)
    58  	}
    59  
    60  	cur = linkInstr(cur, store)
    61  	return linkInstr(cur, prevNext)
    62  }
    63  
    64  // InsertReloadRegisterAt implements backend.RegAllocFunctionMachine.
    65  func (m *machine) InsertReloadRegisterAt(v regalloc.VReg, instr *instruction, after bool) *instruction {
    66  	if !v.IsRealReg() {
    67  		panic("BUG: VReg must be backed by real reg to be stored")
    68  	}
    69  
    70  	typ := m.c.TypeOf(v)
    71  	var prevNext, cur *instruction
    72  	if after {
    73  		cur, prevNext = instr, instr.next
    74  	} else {
    75  		cur, prevNext = instr.prev, instr
    76  	}
    77  
    78  	// Load the value to the temporary.
    79  	load := m.allocateInstr()
    80  	offsetFromSP := m.getVRegSpillSlotOffsetFromSP(v.ID(), typ.Size())
    81  	a := newOperandMem(m.newAmodeImmReg(uint32(offsetFromSP), rspVReg))
    82  	switch typ {
    83  	case ssa.TypeI32:
    84  		load.asMovzxRmR(extModeLQ, a, v)
    85  	case ssa.TypeI64:
    86  		load.asMov64MR(a, v)
    87  	case ssa.TypeF32:
    88  		load.asXmmUnaryRmR(sseOpcodeMovss, a, v)
    89  	case ssa.TypeF64:
    90  		load.asXmmUnaryRmR(sseOpcodeMovsd, a, v)
    91  	case ssa.TypeV128:
    92  		load.asXmmUnaryRmR(sseOpcodeMovdqu, a, v)
    93  	default:
    94  		panic("BUG")
    95  	}
    96  
    97  	cur = linkInstr(cur, load)
    98  	return linkInstr(cur, prevNext)
    99  }
   100  
   101  // ClobberedRegisters implements backend.RegAllocFunctionMachine.
   102  func (m *machine) ClobberedRegisters(regs []regalloc.VReg) {
   103  	m.clobberedRegs = append(m.clobberedRegs[:0], regs...)
   104  }
   105  
// Swap implements backend.RegAllocFunctionMachine.
//
// It inserts instructions right after `cur` that exchange the values of x1
// and x2. Integer registers use a single XCHG; float/vector registers use
// three moves through `tmp` when a temporary is available, and otherwise
// spill through x1's stack slot.
func (m *machine) Swap(cur *instruction, x1, x2, tmp regalloc.VReg) {
	if x1.RegType() == regalloc.RegTypeInt {
		// Integer case: a single 64-bit XCHG swaps both registers in place.
		prevNext := cur.next
		xc := m.allocateInstr().asXCHG(x1, newOperandReg(x2), 8)
		cur = linkInstr(cur, xc)
		linkInstr(cur, prevNext)
	} else {
		if tmp.Valid() {
			// Three-move swap via the scratch register:
			// tmp <- x1; x1 <- x2; x2 <- tmp.
			// Each move is inserted before prevNext, so they execute in this order.
			prevNext := cur.next
			m.InsertMoveBefore(tmp, x1, prevNext)
			m.InsertMoveBefore(x1, x2, prevNext)
			m.InsertMoveBefore(x2, tmp, prevNext)
		} else {
			// No temporary register: go through x1's spill slot instead.
			prevNext := cur.next
			r2 := x2.RealReg()
			// Temporarily spill x1 to stack.
			// InsertStoreRegisterAt returns the instruction after the store,
			// so .prev re-points cur at the store itself.
			cur = m.InsertStoreRegisterAt(x1, cur, true).prev
			// Then move x2 to x1.
			cur = linkInstr(cur, m.allocateInstr().asXmmUnaryRmR(sseOpcodeMovdqa, newOperandReg(x2), x1))
			linkInstr(cur, prevNext)
			// Then reload the original value on x1 from stack to r2.
			// SetRealReg(r2) keeps x1's spill-slot identity but targets x2's
			// physical register for the reload.
			m.InsertReloadRegisterAt(x1.SetRealReg(r2), cur, true)
		}
	}
}
   132  
   133  // LastInstrForInsertion implements backend.RegAllocFunctionMachine.
   134  func (m *machine) LastInstrForInsertion(begin, end *instruction) *instruction {
   135  	cur := end
   136  	for cur.kind == nop0 {
   137  		cur = cur.prev
   138  		if cur == begin {
   139  			return end
   140  		}
   141  	}
   142  	switch cur.kind {
   143  	case jmp:
   144  		return cur
   145  	default:
   146  		return end
   147  	}
   148  }
   149  
   150  // SSABlockLabel implements backend.RegAllocFunctionMachine.
   151  func (m *machine) SSABlockLabel(id ssa.BasicBlockID) backend.Label {
   152  	return m.ectx.SsaBlockIDToLabels[id]
   153  }