// Automatically generated marshal implementation. See tools/go_marshal.

// If there are issues with build constraint aggregation, see
// tools/go_marshal/gomarshal/generator.go:writeHeader(). The constraints here
// come from the input set of files used to generate this file. This input set
// is filtered based on pre-defined file suffixes related to build constraints,
// see tools/defs.bzl:calculate_sets().

//go:build amd64 && amd64 && amd64
// +build amd64,amd64,amd64

package arch

import (
	"github.com/metacubex/gvisor/pkg/abi/linux"
	"github.com/metacubex/gvisor/pkg/gohacks"
	"github.com/metacubex/gvisor/pkg/hostarch"
	"github.com/metacubex/gvisor/pkg/marshal"
	"io"
	"reflect"
	"runtime"
	"unsafe"
)

// Marshallable types used by this file.
var _ marshal.Marshallable = (*SignalContext64)(nil)
var _ marshal.Marshallable = (*UContext64)(nil)
var _ marshal.Marshallable = (*linux.SignalSet)(nil)
var _ marshal.Marshallable = (*linux.SignalStack)(nil)

// SizeBytes implements marshal.Marshallable.SizeBytes.
//
// 184 is the total width of the fixed-size fields emitted by MarshalBytes:
// 18 uint64 registers R8..Eflags (144 bytes), the 4 uint16 segment selectors
// Cs/Gs/Fs/Ss (8 bytes), and the 4 uint64s Err, Trapno, Cr2 and Fpstate
// (32 bytes). Oldmask (a linux.SignalSet) and the 8-element uint64 Reserved
// array are accounted for separately below.
func (s *SignalContext64) SizeBytes() int {
	return 184 +
		(*linux.SignalSet)(nil).SizeBytes() +
		8*8
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
// MarshalBytes writes s's fields into dst in declaration order using the
// host byte order. dst must be at least SizeBytes() long; the unconsumed
// tail of dst is returned.
func (s *SignalContext64) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R8))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R9))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R10))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R11))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R12))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R13))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R14))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.R15))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rsi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rbp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rbx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rax))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rcx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rsp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rip))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Eflags))
	dst = dst[8:]
	// Segment selectors are 16-bit on the wire.
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Cs))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Gs))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Fs))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint16(dst[:2], uint16(s.Ss))
	dst = dst[2:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Err))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Trapno))
	dst = dst[8:]
	// Oldmask marshals itself and returns the remaining buffer.
	dst = s.Oldmask.MarshalUnsafe(dst)
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Cr2))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Fpstate))
	dst = dst[8:]
	for idx := 0; idx < 8; idx++ {
		hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Reserved[idx]))
		dst = dst[8:]
	}
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. It is the
// exact inverse of MarshalBytes: fields are read in the same order, and the
// unconsumed tail of src is returned.
func (s *SignalContext64) UnmarshalBytes(src []byte) []byte {
	s.R8 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R9 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R10 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R11 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R12 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R13 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R14 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.R15 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rdi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rsi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rbp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rbx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rdx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rax = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rcx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rsp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Rip = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Eflags = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Cs = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Gs = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Fs = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Ss = uint16(hostarch.ByteOrder.Uint16(src[:2]))
	src = src[2:]
	s.Err = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Trapno = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.Oldmask.UnmarshalUnsafe(src)
	s.Cr2 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Fpstate = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	for idx := 0; idx < 8; idx++ {
		s.Reserved[idx] = uint64(hostarch.ByteOrder.Uint64(src[:8]))
		src = src[8:]
	}
	return src
}

// Packed implements marshal.Marshallable.Packed. SignalContext64's own
// fields are fixed-width, so packedness reduces to that of the embedded
// Oldmask.
//go:nosplit
func (s *SignalContext64) Packed() bool {
	return s.Oldmask.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. When the
// layout is packed, the struct is memcpy'd directly into dst; otherwise it
// falls back to the field-by-field MarshalBytes.
func (s *SignalContext64) MarshalUnsafe(dst []byte) []byte {
	if s.Oldmask.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type SignalContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. When the
// layout is packed, src is memcpy'd directly over the struct; otherwise it
// falls back to the field-by-field UnmarshalBytes.
func (s *SignalContext64) UnmarshalUnsafe(src []byte) []byte {
	if s.Oldmask.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size))
		return src[size:]
	}
	// Type SignalContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
	return s.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN. It copies the first
// limit bytes of s's wire representation to the task memory at addr.
func (s *SignalContext64) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	if !s.Oldmask.Packed() {
		// Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		s.MarshalBytes(buf)                        // escapes: fallback.
		return cc.CopyOutBytes(addr, buf[:limit])  // escapes: okay.
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut. It copies the complete
// wire representation of s to addr.
func (s *SignalContext64) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return s.CopyOutN(cc, addr, s.SizeBytes())
}

// CopyInN implements marshal.Marshallable.CopyInN. It reads at most limit
// bytes from the task memory at addr into s.
func (s *SignalContext64) CopyInN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	if !s.Oldmask.Packed() {
		// Type SignalContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes())       // escapes: okay.
		length, err := cc.CopyInBytes(addr, buf[:limit]) // escapes: okay.
		// Unmarshal unconditionally. If we had a short copy-in, this results in a
		// partially unmarshalled struct.
		s.UnmarshalBytes(buf) // escapes: fallback.
		return length, err
	}

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// CopyIn implements marshal.Marshallable.CopyIn.
245 func (s *SignalContext64) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 246 return s.CopyInN(cc, addr, s.SizeBytes()) 247 } 248 249 // WriteTo implements io.WriterTo.WriteTo. 250 func (s *SignalContext64) WriteTo(writer io.Writer) (int64, error) { 251 if !s.Oldmask.Packed() { 252 // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes. 253 buf := make([]byte, s.SizeBytes()) 254 s.MarshalBytes(buf) 255 length, err := writer.Write(buf) 256 return int64(length), err 257 } 258 259 // Construct a slice backed by dst's underlying memory. 260 var buf []byte 261 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 262 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 263 hdr.Len = s.SizeBytes() 264 hdr.Cap = s.SizeBytes() 265 266 length, err := writer.Write(buf) 267 // Since we bypassed the compiler's escape analysis, indicate that s 268 // must live until the use above. 269 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 270 return int64(length), err 271 } 272 273 // SizeBytes implements marshal.Marshallable.SizeBytes. 274 func (u *UContext64) SizeBytes() int { 275 return 16 + 276 (*linux.SignalStack)(nil).SizeBytes() + 277 (*SignalContext64)(nil).SizeBytes() + 278 (*linux.SignalSet)(nil).SizeBytes() 279 } 280 281 // MarshalBytes implements marshal.Marshallable.MarshalBytes. 282 func (u *UContext64) MarshalBytes(dst []byte) []byte { 283 hostarch.ByteOrder.PutUint64(dst[:8], uint64(u.Flags)) 284 dst = dst[8:] 285 hostarch.ByteOrder.PutUint64(dst[:8], uint64(u.Link)) 286 dst = dst[8:] 287 dst = u.Stack.MarshalUnsafe(dst) 288 dst = u.MContext.MarshalUnsafe(dst) 289 dst = u.Sigset.MarshalUnsafe(dst) 290 return dst 291 } 292 293 // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
294 func (u *UContext64) UnmarshalBytes(src []byte) []byte { 295 u.Flags = uint64(hostarch.ByteOrder.Uint64(src[:8])) 296 src = src[8:] 297 u.Link = uint64(hostarch.ByteOrder.Uint64(src[:8])) 298 src = src[8:] 299 src = u.Stack.UnmarshalUnsafe(src) 300 src = u.MContext.UnmarshalUnsafe(src) 301 src = u.Sigset.UnmarshalUnsafe(src) 302 return src 303 } 304 305 // Packed implements marshal.Marshallable.Packed. 306 //go:nosplit 307 func (u *UContext64) Packed() bool { 308 return u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() 309 } 310 311 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 312 func (u *UContext64) MarshalUnsafe(dst []byte) []byte { 313 if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() { 314 size := u.SizeBytes() 315 gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(u), uintptr(size)) 316 return dst[size:] 317 } 318 // Type UContext64 doesn't have a packed layout in memory, fallback to MarshalBytes. 319 return u.MarshalBytes(dst) 320 } 321 322 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. 323 func (u *UContext64) UnmarshalUnsafe(src []byte) []byte { 324 if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() { 325 size := u.SizeBytes() 326 gohacks.Memmove(unsafe.Pointer(u), unsafe.Pointer(&src[0]), uintptr(size)) 327 return src[size:] 328 } 329 // Type UContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes. 330 return u.UnmarshalBytes(src) 331 } 332 333 // CopyOutN implements marshal.Marshallable.CopyOutN. 334 func (u *UContext64) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 335 if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() { 336 // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes. 337 buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay. 338 u.MarshalBytes(buf) // escapes: fallback. 339 return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 
340 } 341 342 // Construct a slice backed by dst's underlying memory. 343 var buf []byte 344 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 345 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u))) 346 hdr.Len = u.SizeBytes() 347 hdr.Cap = u.SizeBytes() 348 349 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 350 // Since we bypassed the compiler's escape analysis, indicate that u 351 // must live until the use above. 352 runtime.KeepAlive(u) // escapes: replaced by intrinsic. 353 return length, err 354 } 355 356 // CopyOut implements marshal.Marshallable.CopyOut. 357 func (u *UContext64) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 358 return u.CopyOutN(cc, addr, u.SizeBytes()) 359 } 360 361 // CopyInN implements marshal.Marshallable.CopyInN. 362 func (u *UContext64) CopyInN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 363 if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() { 364 // Type UContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes. 365 buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay. 366 length, err := cc.CopyInBytes(addr, buf[:limit]) // escapes: okay. 367 // Unmarshal unconditionally. If we had a short copy-in, this results in a 368 // partially unmarshalled struct. 369 u.UnmarshalBytes(buf) // escapes: fallback. 370 return length, err 371 } 372 373 // Construct a slice backed by dst's underlying memory. 374 var buf []byte 375 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 376 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u))) 377 hdr.Len = u.SizeBytes() 378 hdr.Cap = u.SizeBytes() 379 380 length, err := cc.CopyInBytes(addr, buf[:limit]) // escapes: okay. 381 // Since we bypassed the compiler's escape analysis, indicate that u 382 // must live until the use above. 383 runtime.KeepAlive(u) // escapes: replaced by intrinsic. 384 return length, err 385 } 386 387 // CopyIn implements marshal.Marshallable.CopyIn. 
388 func (u *UContext64) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 389 return u.CopyInN(cc, addr, u.SizeBytes()) 390 } 391 392 // WriteTo implements io.WriterTo.WriteTo. 393 func (u *UContext64) WriteTo(writer io.Writer) (int64, error) { 394 if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() { 395 // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes. 396 buf := make([]byte, u.SizeBytes()) 397 u.MarshalBytes(buf) 398 length, err := writer.Write(buf) 399 return int64(length), err 400 } 401 402 // Construct a slice backed by dst's underlying memory. 403 var buf []byte 404 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 405 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u))) 406 hdr.Len = u.SizeBytes() 407 hdr.Cap = u.SizeBytes() 408 409 length, err := writer.Write(buf) 410 // Since we bypassed the compiler's escape analysis, indicate that u 411 // must live until the use above. 412 runtime.KeepAlive(u) // escapes: replaced by intrinsic. 413 return int64(length), err 414 } 415