github.com/nicocha30/gvisor-ligolo@v0.0.0-20230726075806-989fa2c0a413/pkg/sentry/arch/arch_amd64_abi_autogen_unsafe.go

// Automatically generated marshal implementation. See tools/go_marshal.

// If there are issues with build constraint aggregation, see
// tools/go_marshal/gomarshal/generator.go:writeHeader(). The constraints here
// come from the input set of files used to generate this file. This input set
// is filtered based on pre-defined file suffixes related to build constraints,
// see tools/defs.bzl:calculate_sets().

//go:build amd64 && amd64 && amd64
// +build amd64,amd64,amd64

package arch

import (
	"github.com/nicocha30/gvisor-ligolo/pkg/abi/linux"
	"github.com/nicocha30/gvisor-ligolo/pkg/gohacks"
	"github.com/nicocha30/gvisor-ligolo/pkg/hostarch"
	"github.com/nicocha30/gvisor-ligolo/pkg/marshal"
	"io"
	"reflect"
	"runtime"
	"unsafe"
)

// Marshallable types used by this file.
var _ marshal.Marshallable = (*SignalContext64)(nil)
var _ marshal.Marshallable = (*UContext64)(nil)
var _ marshal.Marshallable = (*linux.SignalSet)(nil)
var _ marshal.Marshallable = (*linux.SignalStack)(nil)

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (s *SignalContext64) SizeBytes() int {
	return 184 +
		(*linux.SignalSet)(nil).SizeBytes() +
		8*8
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (s *SignalContext64) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R8))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R9))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R10))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R11))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R12))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R13))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R14))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R15))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rsi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rbp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rbx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rax))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rcx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rsp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rip))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Eflags))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Cs))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Gs))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Fs))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Ss))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Err))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Trapno))
	dst = dst[8:]
	dst = s.Oldmask.MarshalUnsafe(dst)
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Cr2))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Fpstate))
	dst = dst[8:]
	for idx := 0; idx < 8; idx++ {
		hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Reserved[idx]))
		dst = dst[8:]
	}
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (s *SignalContext64) UnmarshalBytes(src []byte) []byte {
	s.R8 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R9 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R10 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R11 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R12 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R13 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R14 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R15 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rdi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rsi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rbp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rbx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rdx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rax = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rcx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rsp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rip = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Eflags = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Cs = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Gs = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Fs = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Ss = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Err = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Trapno = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.Oldmask.UnmarshalUnsafe(src)
	s.Cr2 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Fpstate = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	for idx := 0; idx < 8; idx++ {
		s.Reserved[idx] = uint64(hostarch.ByteOrder.Uint64(src[:8]))
		src = src[8:]
	}
	return src
}

// Packed implements marshal.Marshallable.Packed.
//go:nosplit
func (s *SignalContext64) Packed() bool {
	return s.Oldmask.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *SignalContext64) MarshalUnsafe(dst []byte) []byte {
	if s.Oldmask.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type SignalContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (s *SignalContext64) UnmarshalUnsafe(src []byte) []byte {
	if s.Oldmask.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size))
		return src[size:]
	}
	// Type SignalContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
	return s.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (s *SignalContext64) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	if !s.Oldmask.Packed() {
		// Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		s.MarshalBytes(buf)                        // escapes: fallback.
		return cc.CopyOutBytes(addr, buf[:limit])  // escapes: okay.
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (s *SignalContext64) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return s.CopyOutN(cc, addr, s.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (s *SignalContext64) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	if !s.Oldmask.Packed() {
		// Type SignalContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		length, err := cc.CopyInBytes(addr, buf)   // escapes: okay.
		// Unmarshal unconditionally. If we had a short copy-in, this results in a
		// partially unmarshalled struct.
		s.UnmarshalBytes(buf) // escapes: fallback.
		return length, err
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (s *SignalContext64) WriteTo(writer io.Writer) (int64, error) {
	if !s.Oldmask.Packed() {
		// Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := make([]byte, s.SizeBytes())
		s.MarshalBytes(buf)
		length, err := writer.Write(buf)
		return int64(length), err
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (u *UContext64) SizeBytes() int {
	return 16 +
		(*linux.SignalStack)(nil).SizeBytes() +
		(*SignalContext64)(nil).SizeBytes() +
		(*linux.SignalSet)(nil).SizeBytes()
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (u *UContext64) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(u.Flags))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(u.Link))
	dst = dst[8:]
	dst = u.Stack.MarshalUnsafe(dst)
	dst = u.MContext.MarshalUnsafe(dst)
	dst = u.Sigset.MarshalUnsafe(dst)
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (u *UContext64) UnmarshalBytes(src []byte) []byte {
	u.Flags = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	u.Link = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = u.Stack.UnmarshalUnsafe(src)
	src = u.MContext.UnmarshalUnsafe(src)
	src = u.Sigset.UnmarshalUnsafe(src)
	return src
}

// Packed implements marshal.Marshallable.Packed.
//go:nosplit
func (u *UContext64) Packed() bool {
	return u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (u *UContext64) MarshalUnsafe(dst []byte) []byte {
	if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
		size := u.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(u), uintptr(size))
		return dst[size:]
	}
	// Type UContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
	return u.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (u *UContext64) UnmarshalUnsafe(src []byte) []byte {
	if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
		size := u.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(u), unsafe.Pointer(&src[0]), uintptr(size))
		return src[size:]
	}
	// Type UContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
	return u.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (u *UContext64) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
		// Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
		u.MarshalBytes(buf)                        // escapes: fallback.
		return cc.CopyOutBytes(addr, buf[:limit])  // escapes: okay.
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
	hdr.Len = u.SizeBytes()
	hdr.Cap = u.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that u
	// must live until the use above.
	runtime.KeepAlive(u) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (u *UContext64) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return u.CopyOutN(cc, addr, u.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (u *UContext64) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
		// Type UContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
		buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
		length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
		// Unmarshal unconditionally. If we had a short copy-in, this results in a
		// partially unmarshalled struct.
		u.UnmarshalBytes(buf) // escapes: fallback.
		return length, err
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
	hdr.Len = u.SizeBytes()
	hdr.Cap = u.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that u
	// must live until the use above.
	runtime.KeepAlive(u) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (u *UContext64) WriteTo(writer io.Writer) (int64, error) {
	if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
		// Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := make([]byte, u.SizeBytes())
		u.MarshalBytes(buf)
		length, err := writer.Write(buf)
		return int64(length), err
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
	hdr.Len = u.SizeBytes()
	hdr.Cap = u.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that u
	// must live until the use above.
	runtime.KeepAlive(u) // escapes: replaced by intrinsic.
	return int64(length), err
}
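
// Usage sketch, not part of the generated file: the methods above give
// SignalContext64 and UContext64 the marshal.Marshallable interface
// (SizeBytes, MarshalBytes/Unsafe, UnmarshalBytes/Unsafe, CopyIn/CopyOut,
// WriteTo). A minimal in-memory round trip through that interface could look
// like the hypothetical helper below; the name roundTripUContext64 and the
// variables orig, buf and out are illustrative only.
func roundTripUContext64(orig *UContext64) UContext64 {
	// Allocate exactly SizeBytes() of scratch space, serialize into it, then
	// deserialize into a fresh value. When the layout is packed, both calls
	// reduce to a single memmove; otherwise they fall back to the
	// field-by-field MarshalBytes/UnmarshalBytes paths.
	buf := make([]byte, orig.SizeBytes())
	orig.MarshalUnsafe(buf)

	var out UContext64
	out.UnmarshalUnsafe(buf)
	return out
}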