// Automatically generated marshal implementation. See tools/go_marshal.

// If there are issues with build constraint aggregation, see
// tools/go_marshal/gomarshal/generator.go:writeHeader(). The constraints here
// come from the input set of files used to generate this file. This input set
// is filtered based on pre-defined file suffixes related to build constraints,
// see tools/defs.bzl:calculate_sets().

//go:build amd64 && amd64 && amd64 && amd64 && amd64 && amd64
// +build amd64,amd64,amd64,amd64,amd64,amd64

package linux

import (
	"github.com/nicocha30/gvisor-ligolo/pkg/gohacks"
	"github.com/nicocha30/gvisor-ligolo/pkg/hostarch"
	"github.com/nicocha30/gvisor-ligolo/pkg/marshal"
	"io"
	"reflect"
	"runtime"
	"unsafe"
)

// Marshallable types used by this file.
var _ marshal.Marshallable = (*EpollEvent)(nil)
var _ marshal.Marshallable = (*IPCPerm)(nil)
var _ marshal.Marshallable = (*PtraceRegs)(nil)
var _ marshal.Marshallable = (*SemidDS)(nil)
var _ marshal.Marshallable = (*Stat)(nil)
var _ marshal.Marshallable = (*TimeT)(nil)
var _ marshal.Marshallable = (*Timespec)(nil)

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (e *EpollEvent) SizeBytes() int {
	// 4 bytes for Events plus 4*2 bytes for the Data array (12 total).
	return 4 +
		4*2
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
// It serializes e field-by-field into dst in host byte order and returns
// the remainder of dst past the written bytes.
func (e *EpollEvent) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(e.Events))
	dst = dst[4:]
	for idx := 0; idx < 2; idx++ {
		hostarch.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx]))
		dst = dst[4:]
	}
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (e *EpollEvent) UnmarshalBytes(src []byte) []byte {
	e.Events = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	for idx := 0; idx < 2; idx++ {
		e.Data[idx] = int32(hostarch.ByteOrder.Uint32(src[:4]))
		src = src[4:]
	}
	// Returns the remainder of src past the consumed bytes.
	return src
}

// Packed implements marshal.Marshallable.Packed.
// EpollEvent has no padding, so its in-memory layout matches its wire layout.
//go:nosplit
func (e *EpollEvent) Packed() bool {
	return true
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
// Safe because EpollEvent is packed (see Packed above).
func (e *EpollEvent) MarshalUnsafe(dst []byte) []byte {
	size := e.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(e), uintptr(size))
	return dst[size:]
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
// Safe because EpollEvent is packed (see Packed above).
func (e *EpollEvent) UnmarshalUnsafe(src []byte) []byte {
	size := e.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(e), unsafe.Pointer(&src[0]), uintptr(size))
	return src[size:]
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
// limit must be <= e.SizeBytes(); only the first limit bytes are copied out.
func (e *EpollEvent) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (e *EpollEvent) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return e.CopyOutN(cc, addr, e.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (e *EpollEvent) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (e *EpollEvent) WriteTo(writer io.Writer) (int64, error) {
	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return int64(length), err
}

// CopyEpollEventSliceIn copies in a slice of EpollEvent objects from the task's memory.
func CopyEpollEventSliceIn(cc marshal.CopyContext, addr hostarch.Addr, dst []EpollEvent) (int, error) {
	count := len(dst)
	if count == 0 {
		return 0, nil
	}
	size := (*EpollEvent)(nil).SizeBytes()

	// Extract the slice's backing-array pointer without letting dst escape.
	ptr := unsafe.Pointer(&dst)
	val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(val)
	hdr.Len = size * count
	hdr.Cap = size * count

	length, err := cc.CopyInBytes(addr, buf)
	// Since we bypassed the compiler's escape analysis, indicate that dst
	// must live until the use above.
	runtime.KeepAlive(dst) // escapes: replaced by intrinsic.
	return length, err
}

// CopyEpollEventSliceOut copies a slice of EpollEvent objects to the task's memory.
func CopyEpollEventSliceOut(cc marshal.CopyContext, addr hostarch.Addr, src []EpollEvent) (int, error) {
	count := len(src)
	if count == 0 {
		return 0, nil
	}
	size := (*EpollEvent)(nil).SizeBytes()

	// Extract the slice's backing-array pointer without letting src escape.
	ptr := unsafe.Pointer(&src)
	val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(val)
	hdr.Len = size * count
	hdr.Cap = size * count

	length, err := cc.CopyOutBytes(addr, buf)
	// Since we bypassed the compiler's escape analysis, indicate that src
	// must live until the use above.
	runtime.KeepAlive(src) // escapes: replaced by intrinsic.
	return length, err
}

// MarshalUnsafeEpollEventSlice is like EpollEvent.MarshalUnsafe, but for a []EpollEvent.
func MarshalUnsafeEpollEventSlice(src []EpollEvent, dst []byte) []byte {
	count := len(src)
	if count == 0 {
		return dst
	}

	size := (*EpollEvent)(nil).SizeBytes()
	// Bulk-copy the whole slice in one memmove; safe because EpollEvent is packed.
	buf := dst[:size*count]
	gohacks.Memmove(unsafe.Pointer(&buf[0]), unsafe.Pointer(&src[0]), uintptr(len(buf)))
	return dst[size*count:]
}

// UnmarshalUnsafeEpollEventSlice is like EpollEvent.UnmarshalUnsafe, but for a []EpollEvent.
func UnmarshalUnsafeEpollEventSlice(dst []EpollEvent, src []byte) []byte {
	count := len(dst)
	if count == 0 {
		return src
	}

	size := (*EpollEvent)(nil).SizeBytes()
	// Bulk-copy the whole slice in one memmove; safe because EpollEvent is packed.
	buf := src[:size*count]
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(&buf[0]), uintptr(len(buf)))
	return src[size*count:]
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (s *Stat) SizeBytes() int {
	// 72 bytes of fixed-size fields + 3 Timespecs + 3 trailing int64 padding words.
	return 72 +
		(*Timespec)(nil).SizeBytes() +
		(*Timespec)(nil).SizeBytes() +
		(*Timespec)(nil).SizeBytes() +
		8*3
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
// Field order and padding mirror the amd64 struct stat ABI layout.
func (s *Stat) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Dev))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Ino))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Nlink))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.Mode))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.UID))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.GID))
	dst = dst[4:]
	// Padding: dst[:sizeof(int32)] ~= int32(0)
	dst = dst[4:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdev))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Size))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Blksize))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks))
	dst = dst[8:]
	dst = s.ATime.MarshalUnsafe(dst)
	dst = s.MTime.MarshalUnsafe(dst)
	dst = s.CTime.MarshalUnsafe(dst)
	// Padding: dst[:sizeof(int64)*3] ~= [3]int64{0}
	dst = dst[8*(3):]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (s *Stat) UnmarshalBytes(src []byte) []byte {
	s.Dev = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Ino = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Nlink = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Mode = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.UID = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.GID = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	// Padding: var _ int32 ~= src[:sizeof(int32)]
	src = src[4:]
	s.Rdev = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Size = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Blksize = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Blocks = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.ATime.UnmarshalUnsafe(src)
	src = s.MTime.UnmarshalUnsafe(src)
	src = s.CTime.UnmarshalUnsafe(src)
	// Padding: ~ copy([3]int64(s._), src[:sizeof(int64)*3])
	src = src[8*(3):]
	return src
}

// Packed implements marshal.Marshallable.Packed.
// Stat is packed iff all of its embedded Timespec fields are packed.
//go:nosplit
func (s *Stat) Packed() bool {
	return s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *Stat) MarshalUnsafe(dst []byte) []byte {
	if s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type Stat doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
301 func (s *Stat) UnmarshalUnsafe(src []byte) []byte { 302 if s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 303 size := s.SizeBytes() 304 gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size)) 305 return src[size:] 306 } 307 // Type Stat doesn't have a packed layout in memory, fallback to UnmarshalBytes. 308 return s.UnmarshalBytes(src) 309 } 310 311 // CopyOutN implements marshal.Marshallable.CopyOutN. 312 func (s *Stat) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 313 if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 314 // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. 315 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 316 s.MarshalBytes(buf) // escapes: fallback. 317 return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 318 } 319 320 // Construct a slice backed by dst's underlying memory. 321 var buf []byte 322 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 323 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 324 hdr.Len = s.SizeBytes() 325 hdr.Cap = s.SizeBytes() 326 327 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 328 // Since we bypassed the compiler's escape analysis, indicate that s 329 // must live until the use above. 330 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 331 return length, err 332 } 333 334 // CopyOut implements marshal.Marshallable.CopyOut. 335 func (s *Stat) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 336 return s.CopyOutN(cc, addr, s.SizeBytes()) 337 } 338 339 // CopyIn implements marshal.Marshallable.CopyIn. 340 func (s *Stat) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 341 if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 342 // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes. 343 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 
344 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 345 // Unmarshal unconditionally. If we had a short copy-in, this results in a 346 // partially unmarshalled struct. 347 s.UnmarshalBytes(buf) // escapes: fallback. 348 return length, err 349 } 350 351 // Construct a slice backed by dst's underlying memory. 352 var buf []byte 353 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 354 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 355 hdr.Len = s.SizeBytes() 356 hdr.Cap = s.SizeBytes() 357 358 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 359 // Since we bypassed the compiler's escape analysis, indicate that s 360 // must live until the use above. 361 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 362 return length, err 363 } 364 365 // WriteTo implements io.WriterTo.WriteTo. 366 func (s *Stat) WriteTo(writer io.Writer) (int64, error) { 367 if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 368 // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. 369 buf := make([]byte, s.SizeBytes()) 370 s.MarshalBytes(buf) 371 length, err := writer.Write(buf) 372 return int64(length), err 373 } 374 375 // Construct a slice backed by dst's underlying memory. 376 var buf []byte 377 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 378 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 379 hdr.Len = s.SizeBytes() 380 hdr.Cap = s.SizeBytes() 381 382 length, err := writer.Write(buf) 383 // Since we bypassed the compiler's escape analysis, indicate that s 384 // must live until the use above. 385 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 386 return int64(length), err 387 } 388 389 // SizeBytes implements marshal.Marshallable.SizeBytes. 390 func (p *PtraceRegs) SizeBytes() int { 391 return 216 392 } 393 394 // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
// Registers are serialized in the exact order of the amd64 user_regs_struct.
func (p *PtraceRegs) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R15))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R14))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R13))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R12))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rbp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rbx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R11))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R10))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R9))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R8))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rax))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rcx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rdx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rsi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rdi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Orig_rax))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rip))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Cs))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Eflags))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rsp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Ss))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Fs_base))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Gs_base))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Ds))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Es))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Fs))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Gs))
	dst = dst[8:]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
// Field order mirrors MarshalBytes exactly.
func (p *PtraceRegs) UnmarshalBytes(src []byte) []byte {
	p.R15 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R14 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R13 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R12 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rbp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rbx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R11 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R10 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R9 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R8 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rax = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rcx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rdx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rsi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rdi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Orig_rax = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rip = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Cs = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Eflags = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rsp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Ss = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Fs_base = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Gs_base = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Ds = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Es = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Fs = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Gs = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	return src
}

// Packed implements marshal.Marshallable.Packed.
// PtraceRegs is all uint64s, so it has no padding.
//go:nosplit
func (p *PtraceRegs) Packed() bool {
	return true
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
// Safe because PtraceRegs is packed (see Packed above).
func (p *PtraceRegs) MarshalUnsafe(dst []byte) []byte {
	size := p.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(p), uintptr(size))
	return dst[size:]
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
// Safe because PtraceRegs is packed (see Packed above).
func (p *PtraceRegs) UnmarshalUnsafe(src []byte) []byte {
	size := p.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(p), unsafe.Pointer(&src[0]), uintptr(size))
	return src[size:]
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
// limit must be <= p.SizeBytes(); only the first limit bytes are copied out.
func (p *PtraceRegs) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p)))
	hdr.Len = p.SizeBytes()
	hdr.Cap = p.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that p
	// must live until the use above.
	runtime.KeepAlive(p) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (p *PtraceRegs) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return p.CopyOutN(cc, addr, p.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (p *PtraceRegs) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p)))
	hdr.Len = p.SizeBytes()
	hdr.Cap = p.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that p
	// must live until the use above.
	runtime.KeepAlive(p) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (p *PtraceRegs) WriteTo(writer io.Writer) (int64, error) {
	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p)))
	hdr.Len = p.SizeBytes()
	hdr.Cap = p.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that p
	// must live until the use above.
	runtime.KeepAlive(p) // escapes: replaced by intrinsic.
	return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (s *SemidDS) SizeBytes() int {
	// 40 bytes of fixed-size fields + IPCPerm + two TimeT fields.
	return 40 +
		(*IPCPerm)(nil).SizeBytes() +
		(*TimeT)(nil).SizeBytes() +
		(*TimeT)(nil).SizeBytes()
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (s *SemidDS) MarshalBytes(dst []byte) []byte {
	dst = s.SemPerm.MarshalUnsafe(dst)
	dst = s.SemOTime.MarshalUnsafe(dst)
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused1))
	dst = dst[8:]
	dst = s.SemCTime.MarshalUnsafe(dst)
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused2))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.SemNSems))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused3))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused4))
	dst = dst[8:]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
// Field order mirrors MarshalBytes exactly.
func (s *SemidDS) UnmarshalBytes(src []byte) []byte {
	src = s.SemPerm.UnmarshalUnsafe(src)
	src = s.SemOTime.UnmarshalUnsafe(src)
	s.unused1 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.SemCTime.UnmarshalUnsafe(src)
	s.unused2 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.SemNSems = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.unused3 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.unused4 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	return src
}

// Packed implements marshal.Marshallable.Packed.
// SemidDS is packed iff all of its embedded fields are packed.
//go:nosplit
func (s *SemidDS) Packed() bool {
	return s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *SemidDS) MarshalUnsafe(dst []byte) []byte {
	if s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type SemidDS doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
647 func (s *SemidDS) UnmarshalUnsafe(src []byte) []byte { 648 if s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 649 size := s.SizeBytes() 650 gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size)) 651 return src[size:] 652 } 653 // Type SemidDS doesn't have a packed layout in memory, fallback to UnmarshalBytes. 654 return s.UnmarshalBytes(src) 655 } 656 657 // CopyOutN implements marshal.Marshallable.CopyOutN. 658 func (s *SemidDS) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 659 if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 660 // Type SemidDS doesn't have a packed layout in memory, fall back to MarshalBytes. 661 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 662 s.MarshalBytes(buf) // escapes: fallback. 663 return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 664 } 665 666 // Construct a slice backed by dst's underlying memory. 667 var buf []byte 668 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 669 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 670 hdr.Len = s.SizeBytes() 671 hdr.Cap = s.SizeBytes() 672 673 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 674 // Since we bypassed the compiler's escape analysis, indicate that s 675 // must live until the use above. 676 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 677 return length, err 678 } 679 680 // CopyOut implements marshal.Marshallable.CopyOut. 681 func (s *SemidDS) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 682 return s.CopyOutN(cc, addr, s.SizeBytes()) 683 } 684 685 // CopyIn implements marshal.Marshallable.CopyIn. 686 func (s *SemidDS) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 687 if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 688 // Type SemidDS doesn't have a packed layout in memory, fall back to UnmarshalBytes. 
689 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 690 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 691 // Unmarshal unconditionally. If we had a short copy-in, this results in a 692 // partially unmarshalled struct. 693 s.UnmarshalBytes(buf) // escapes: fallback. 694 return length, err 695 } 696 697 // Construct a slice backed by dst's underlying memory. 698 var buf []byte 699 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 700 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 701 hdr.Len = s.SizeBytes() 702 hdr.Cap = s.SizeBytes() 703 704 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 705 // Since we bypassed the compiler's escape analysis, indicate that s 706 // must live until the use above. 707 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 708 return length, err 709 } 710 711 // WriteTo implements io.WriterTo.WriteTo. 712 func (s *SemidDS) WriteTo(writer io.Writer) (int64, error) { 713 if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 714 // Type SemidDS doesn't have a packed layout in memory, fall back to MarshalBytes. 715 buf := make([]byte, s.SizeBytes()) 716 s.MarshalBytes(buf) 717 length, err := writer.Write(buf) 718 return int64(length), err 719 } 720 721 // Construct a slice backed by dst's underlying memory. 722 var buf []byte 723 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 724 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 725 hdr.Len = s.SizeBytes() 726 hdr.Cap = s.SizeBytes() 727 728 length, err := writer.Write(buf) 729 // Since we bypassed the compiler's escape analysis, indicate that s 730 // must live until the use above. 731 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 732 return int64(length), err 733 } 734