github.com/dylandreimerink/gobpfld@v0.6.1-0.20220205171531-e79c330ad608/ebpf/atomic.go

package ebpf

import "fmt"

var _ Instruction = (*AtomicAdd)(nil)

// AtomicAdd atomically adds the value of the Src register to the memory at Dest+Offset.
// If Fetch is set, the value stored at that address before the addition is loaded into Src.
type AtomicAdd struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
	Fetch  bool
}

func (aa *AtomicAdd) Raw() ([]RawInstruction, error) {
	imm := int32(BPF_ADD)
	if aa.Fetch {
		imm = int32(BPF_ADD | BPF_FETCH)
	}

	// TODO: only 32-bit and 64-bit sizes are supported

	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(aa.Size) | BPF_ATOMIC,
			Reg: NewReg(aa.Src, aa.Dest),
			Off: aa.Offset,
			Imm: imm,
		},
	}, nil
}

func (aa *AtomicAdd) String() string {
	sign := "+"
	offset := aa.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if aa.Size == BPF_W {
		reg = "w"
	}

	return fmt.Sprintf("lock *(%s *)(r%s %s %d) += %s%s", aa.Size, aa.Dest, sign, offset, reg, aa.Src)
}

var _ Instruction = (*AtomicSub)(nil)

// AtomicSub atomically subtracts the value of the Src register from the memory at Dest+Offset.
// If Fetch is set, the value stored at that address before the subtraction is loaded into Src.
type AtomicSub struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
	Fetch  bool
}

func (as *AtomicSub) Raw() ([]RawInstruction, error) {
	imm := int32(BPF_SUB)
	if as.Fetch {
		imm = int32(BPF_SUB | BPF_FETCH)
	}

	// TODO: only 32-bit and 64-bit sizes are supported

	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(as.Size) | BPF_ATOMIC,
			Reg: NewReg(as.Src, as.Dest),
			Off: as.Offset,
			Imm: imm,
		},
	}, nil
}

func (as *AtomicSub) String() string {
	sign := "+"
	offset := as.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if as.Size == BPF_W {
		reg = "w"
	}

	return fmt.Sprintf("lock *(%s *)(r%s %s %d) -= %s%s", as.Size, as.Dest, sign, offset, reg, as.Src)
}

var _ Instruction = (*AtomicAnd)(nil)

// AtomicAnd atomically ANDs the value of the Src register with the memory at Dest+Offset,
// storing the result back to memory. If Fetch is set, the original value is loaded into Src.
type AtomicAnd struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
	Fetch  bool
}

func (aa *AtomicAnd) Raw() ([]RawInstruction, error) {
	imm := int32(BPF_AND)
	if aa.Fetch {
		imm = int32(BPF_AND | BPF_FETCH)
	}
	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(aa.Size) | BPF_ATOMIC,
			Reg: NewReg(aa.Src, aa.Dest),
			Off: aa.Offset,
			Imm: imm,
		},
	}, nil
}

func (aa *AtomicAnd) String() string {
	sign := "+"
	offset := aa.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if aa.Size == BPF_W {
		reg = "w"
	}

	return fmt.Sprintf("lock *(%s *)(r%s %s %d) &= %s%s", aa.Size, aa.Dest, sign, offset, reg, aa.Src)
}

var _ Instruction = (*AtomicOr)(nil)

// AtomicOr atomically ORs the value of the Src register with the memory at Dest+Offset,
// storing the result back to memory. If Fetch is set, the original value is loaded into Src.
type AtomicOr struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
	Fetch  bool
}

func (ao *AtomicOr) Raw() ([]RawInstruction, error) {
	imm := int32(BPF_OR)
	if ao.Fetch {
		imm = int32(BPF_OR | BPF_FETCH)
	}
	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(ao.Size) | BPF_ATOMIC,
			Reg: NewReg(ao.Src, ao.Dest),
			Off: ao.Offset,
			Imm: imm,
		},
	}, nil
}

func (ao *AtomicOr) String() string {
	sign := "+"
	offset := ao.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if ao.Size == BPF_W {
		reg = "w"
	}

	return fmt.Sprintf("lock *(%s *)(r%s %s %d) |= %s%s", ao.Size, ao.Dest, sign, offset, reg, ao.Src)
}

var _ Instruction = (*AtomicXor)(nil)

// AtomicXor atomically XORs the value of the Src register with the memory at Dest+Offset,
// storing the result back to memory. If Fetch is set, the original value is loaded into Src.
type AtomicXor struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
	Fetch  bool
}

func (ax *AtomicXor) Raw() ([]RawInstruction, error) {
	imm := int32(BPF_XOR)
	if ax.Fetch {
		imm = int32(BPF_XOR | BPF_FETCH)
	}
	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(ax.Size) | BPF_ATOMIC,
			Reg: NewReg(ax.Src, ax.Dest),
			Off: ax.Offset,
			Imm: imm,
		},
	}, nil
}

func (ax *AtomicXor) String() string {
	sign := "+"
	offset := ax.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if ax.Size == BPF_W {
		reg = "w"
	}

	return fmt.Sprintf("lock *(%s *)(r%s %s %d) ^= %s%s", ax.Size, ax.Dest, sign, offset, reg, ax.Src)
}

var _ Instruction = (*AtomicExchange)(nil)

// AtomicExchange atomically exchanges the value of the Src register with the memory at
// Dest+Offset; the value previously stored at that address is loaded into Src.
type AtomicExchange struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
}

func (chg *AtomicExchange) Raw() ([]RawInstruction, error) {
	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(chg.Size) | BPF_ATOMIC,
			Reg: NewReg(chg.Src, chg.Dest),
			Off: chg.Offset,
			Imm: int32(BPF_XCHG),
		},
	}, nil
}

func (chg *AtomicExchange) String() string {
	sign := "+"
	offset := chg.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if chg.Size == BPF_W {
		reg = "w"
	}

	// w1 = xchg(r3 + 456, w1)
	return fmt.Sprintf("%s%s = xchg(r%s %s %d, %s%s)", reg, chg.Src, chg.Dest, sign, offset, reg, chg.Src)
}

var _ Instruction = (*AtomicCompareAndExchange)(nil)

// AtomicCompareAndExchange atomically compares the value in R0 with the memory at Dest+Offset;
// if they are equal, the value of the Src register is written to that address. In either case
// the value previously stored at that address is loaded into R0.
type AtomicCompareAndExchange struct {
	Src    Register
	Dest   Register
	Offset int16
	Size   Size
}

func (chg *AtomicCompareAndExchange) Raw() ([]RawInstruction, error) {
	return []RawInstruction{
		{
			Op:  BPF_STX | uint8(chg.Size) | BPF_ATOMIC,
			Reg: NewReg(chg.Src, chg.Dest),
			Off: chg.Offset,
			Imm: int32(BPF_CMPXCHG),
		},
	}, nil
}

func (chg *AtomicCompareAndExchange) String() string {
	sign := "+"
	offset := chg.Offset
	if offset < 0 {
		sign = "-"
		offset = -offset
	}

	reg := "r"
	if chg.Size == BPF_W {
		reg = "w"
	}

	// r0 = cmpxchg(r3 - 456, r0, r2)
	return fmt.Sprintf(
		"%s0 = cmpxchg(r%s %s %d, %s0, %s%s)",
		reg, chg.Dest, sign, offset, reg, reg, chg.Src,
	)
}
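
// A minimal usage sketch, assuming register constants such as BPF_REG_1 and BPF_REG_2 and the
// 64-bit size constant BPF_DW are defined elsewhere in this package: encoding an atomic
// fetch-and-add of r2 into the memory pointed to by r1.
//
//	addFetch := &AtomicAdd{
//		Src:    BPF_REG_2, // value to add; receives the old memory value because Fetch is set
//		Dest:   BPF_REG_1, // register holding the target memory address
//		Offset: 0,
//		Size:   BPF_DW,
//		Fetch:  true,
//	}
//	raw, err := addFetch.Raw() // one BPF_STX|BPF_DW|BPF_ATOMIC instruction with Imm = BPF_ADD|BPF_FETCH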