// Automatically generated marshal implementation. See tools/go_marshal.
//
// DO NOT EDIT BY HAND: regeneration will overwrite manual changes.

// If there are issues with build constraint aggregation, see
// tools/go_marshal/gomarshal/generator.go:writeHeader(). The constraints here
// come from the input set of files used to generate this file. This input set
// is filtered based on pre-defined file suffixes related to build constraints,
// see tools/defs.bzl:calculate_sets().

//go:build amd64 && amd64 && amd64 && amd64 && amd64 && amd64
// +build amd64,amd64,amd64,amd64,amd64,amd64

package linux

import (
	"github.com/ttpreport/gvisor-ligolo/pkg/gohacks"
	"github.com/ttpreport/gvisor-ligolo/pkg/hostarch"
	"github.com/ttpreport/gvisor-ligolo/pkg/marshal"
	"io"
	"reflect"
	"runtime"
	"unsafe"
)

// Marshallable types used by this file.
// Compile-time assertions that each generated type satisfies
// marshal.Marshallable.
var _ marshal.Marshallable = (*EpollEvent)(nil)
var _ marshal.Marshallable = (*IPCPerm)(nil)
var _ marshal.Marshallable = (*PtraceRegs)(nil)
var _ marshal.Marshallable = (*SemidDS)(nil)
var _ marshal.Marshallable = (*Stat)(nil)
var _ marshal.Marshallable = (*TimeT)(nil)
var _ marshal.Marshallable = (*Timespec)(nil)

// SizeBytes implements marshal.Marshallable.SizeBytes.
//
// Layout: Events (4 bytes) + Data (2 x 4 bytes) = 12 bytes.
func (e *EpollEvent) SizeBytes() int {
	return 4 +
		4*2
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
//
// Serializes e field-by-field into dst using the host byte order and
// returns the unconsumed remainder of dst.
func (e *EpollEvent) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(e.Events))
	dst = dst[4:]
	for idx := 0; idx < 2; idx++ {
		hostarch.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx]))
		dst = dst[4:]
	}
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (e *EpollEvent) UnmarshalBytes(src []byte) []byte {
	// Decode field-by-field in declaration order; returns the unconsumed
	// remainder of src.
	e.Events = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	for idx := 0; idx < 2; idx++ {
		e.Data[idx] = int32(hostarch.ByteOrder.Uint32(src[:4]))
		src = src[4:]
	}
	return src
}

// Packed implements marshal.Marshallable.Packed.
//
// EpollEvent has no implicit padding, so its in-memory layout matches its
// serialized layout and it can be copied with a single memmove.
//
//go:nosplit
func (e *EpollEvent) Packed() bool {
	return true
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
//
// Safe because Packed() is true: the struct's memory image is its wire
// format, so a raw memmove is equivalent to MarshalBytes.
func (e *EpollEvent) MarshalUnsafe(dst []byte) []byte {
	size := e.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(e), uintptr(size))
	return dst[size:]
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (e *EpollEvent) UnmarshalUnsafe(src []byte) []byte {
	size := e.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(e), unsafe.Pointer(&src[0]), uintptr(size))
	return src[size:]
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
//
// Copies up to limit bytes of e's in-memory representation to the task
// memory at addr, without an intermediate buffer.
func (e *EpollEvent) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	// Construct a byte slice aliasing e's underlying memory. Noescape hides
	// the pointer from escape analysis so e is not forced to the heap.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
//
// Convenience wrapper: CopyOutN with the full size as the limit.
func (e *EpollEvent) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return e.CopyOutN(cc, addr, e.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (e *EpollEvent) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// Construct a byte slice aliasing e's underlying memory (the generated
	// comment said "dst's" — the buffer actually points at e itself).
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (e *EpollEvent) WriteTo(writer io.Writer) (int64, error) {
	// Construct a byte slice aliasing e's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return int64(length), err
}

// CopyEpollEventSliceIn copies in a slice of EpollEvent objects from the task's memory.
func CopyEpollEventSliceIn(cc marshal.CopyContext, addr hostarch.Addr, dst []EpollEvent) (int, error) {
	count := len(dst)
	if count == 0 {
		return 0, nil
	}
	size := (*EpollEvent)(nil).SizeBytes()

	// Extract the data pointer of dst's backing array without letting it
	// escape to the heap.
	ptr := unsafe.Pointer(&dst)
	val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))

	// Construct a byte slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(val)
	hdr.Len = size * count
	hdr.Cap = size * count

	length, err := cc.CopyInBytes(addr, buf)
	// Since we bypassed the compiler's escape analysis, indicate that dst
	// must live until the use above.
	runtime.KeepAlive(dst) // escapes: replaced by intrinsic.
	return length, err
}

// CopyEpollEventSliceOut copies a slice of EpollEvent objects to the task's memory.
func CopyEpollEventSliceOut(cc marshal.CopyContext, addr hostarch.Addr, src []EpollEvent) (int, error) {
	count := len(src)
	if count == 0 {
		return 0, nil
	}
	size := (*EpollEvent)(nil).SizeBytes()

	ptr := unsafe.Pointer(&src)
	val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))

	// Construct a byte slice backed by src's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(val)
	hdr.Len = size * count
	hdr.Cap = size * count

	length, err := cc.CopyOutBytes(addr, buf)
	// Since we bypassed the compiler's escape analysis, indicate that src
	// must live until the use above.
	runtime.KeepAlive(src) // escapes: replaced by intrinsic.
	return length, err
}

// MarshalUnsafeEpollEventSlice is like EpollEvent.MarshalUnsafe, but for a []EpollEvent.
//
// Valid because EpollEvent is packed: the whole slice can be copied with one
// memmove. Returns the unconsumed remainder of dst.
func MarshalUnsafeEpollEventSlice(src []EpollEvent, dst []byte) []byte {
	count := len(src)
	if count == 0 {
		return dst
	}

	size := (*EpollEvent)(nil).SizeBytes()
	buf := dst[:size*count]
	gohacks.Memmove(unsafe.Pointer(&buf[0]), unsafe.Pointer(&src[0]), uintptr(len(buf)))
	return dst[size*count:]
}

// UnmarshalUnsafeEpollEventSlice is like EpollEvent.UnmarshalUnsafe, but for a []EpollEvent.
func UnmarshalUnsafeEpollEventSlice(dst []EpollEvent, src []byte) []byte {
	// Bulk-decode len(dst) packed EpollEvents from src with one memmove;
	// returns the unconsumed remainder of src.
	count := len(dst)
	if count == 0 {
		return src
	}

	size := (*EpollEvent)(nil).SizeBytes()
	buf := src[:size*count]
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(&buf[0]), uintptr(len(buf)))
	return src[size*count:]
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
//
// 72 fixed bytes (Dev/Ino/Nlink 3x8, Mode/UID/GID 3x4 + 4 padding,
// Rdev/Size/Blksize/Blocks 4x8) plus three Timespecs and 3x8 bytes of
// trailing reserved padding.
func (s *Stat) SizeBytes() int {
	return 72 +
		(*Timespec)(nil).SizeBytes() +
		(*Timespec)(nil).SizeBytes() +
		(*Timespec)(nil).SizeBytes() +
		8*3
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (s *Stat) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Dev))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Ino))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Nlink))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.Mode))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.UID))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.GID))
	dst = dst[4:]
	// Padding: dst[:sizeof(int32)] ~= int32(0)
	dst = dst[4:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdev))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Size))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Blksize))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks))
	dst = dst[8:]
	// Timestamps are packed Timespecs, marshaled with a direct memmove.
	dst = s.ATime.MarshalUnsafe(dst)
	dst = s.MTime.MarshalUnsafe(dst)
	dst = s.CTime.MarshalUnsafe(dst)
	// Padding: dst[:sizeof(int64)*3] ~= [3]int64{0}
	dst = dst[8*(3):]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (s *Stat) UnmarshalBytes(src []byte) []byte {
	// Decode field-by-field, mirroring MarshalBytes; returns the
	// unconsumed remainder of src.
	s.Dev = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Ino = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Nlink = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Mode = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.UID = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.GID = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	// Padding: var _ int32 ~= src[:sizeof(int32)]
	src = src[4:]
	s.Rdev = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Size = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Blksize = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Blocks = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.ATime.UnmarshalUnsafe(src)
	src = s.MTime.UnmarshalUnsafe(src)
	src = s.CTime.UnmarshalUnsafe(src)
	// Padding: ~ copy([3]int64(s._), src[:sizeof(int64)*3])
	src = src[8*(3):]
	return src
}

// Packed implements marshal.Marshallable.Packed.
//
// Stat is packed iff all of its embedded Timespec fields are packed.
//
//go:nosplit
func (s *Stat) Packed() bool {
	return s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
//
// Fast path: when the layout is packed, a single memmove serializes the
// whole struct; otherwise fall back to the field-by-field MarshalBytes.
func (s *Stat) MarshalUnsafe(dst []byte) []byte {
	if s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type Stat doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (s *Stat) UnmarshalUnsafe(src []byte) []byte {
	// Fast path: packed layout can be decoded with one memmove.
	if s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size))
		return src[size:]
	}
	// Type Stat doesn't have a packed layout in memory, fallback to UnmarshalBytes.
	return s.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (s *Stat) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	// NOTE(review): the `!` binds only to the first term, so this is NOT
	// !(a && b && c). This is the shape go_marshal emits; it is presumably
	// harmless only because Timespec.Packed() is constant true (not visible
	// in this file) — confirm against pkg/marshal before relying on it.
	if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		// Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		s.MarshalBytes(buf)                        // escapes: fallback.
		return cc.CopyOutBytes(addr, buf[:limit])  // escapes: okay.
	}

	// Construct a byte slice aliasing s's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (s *Stat) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return s.CopyOutN(cc, addr, s.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (s *Stat) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// NOTE(review): same single-term negation quirk as CopyOutN above.
	if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		// Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		length, err := cc.CopyInBytes(addr, buf)   // escapes: okay.
		// Unmarshal unconditionally. If we had a short copy-in, this results in a
		// partially unmarshalled struct.
		s.UnmarshalBytes(buf) // escapes: fallback.
		return length, err
	}

	// Construct a byte slice aliasing s's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (s *Stat) WriteTo(writer io.Writer) (int64, error) {
	if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		// Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := make([]byte, s.SizeBytes())
		s.MarshalBytes(buf)
		length, err := writer.Write(buf)
		return int64(length), err
	}

	// Construct a byte slice aliasing s's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
//
// 27 registers x 8 bytes = 216, matching struct user_regs_struct on amd64.
func (p *PtraceRegs) SizeBytes() int {
	return 216
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
// Serializes all 27 registers in ptrace layout order (R15 first, Gs last),
// 8 bytes each, host byte order.
func (p *PtraceRegs) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R15))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R14))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R13))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R12))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rbp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rbx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R11))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R10))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R9))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.R8))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rax))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rcx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rdx))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rsi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rdi))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Orig_rax))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rip))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Cs))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Eflags))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Rsp))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Ss))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Fs_base))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Gs_base))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Ds))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Es))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Fs))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Gs))
	dst = dst[8:]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
//
// Exact inverse of MarshalBytes: decodes 27 registers in the same order.
func (p *PtraceRegs) UnmarshalBytes(src []byte) []byte {
	p.R15 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R14 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R13 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R12 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rbp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rbx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R11 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R10 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R9 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.R8 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rax = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rcx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rdx = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rsi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rdi = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Orig_rax = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rip = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Cs = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Eflags = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Rsp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Ss = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Fs_base = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Gs_base = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Ds = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Es = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Fs = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	p.Gs = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	return src
}

// Packed implements marshal.Marshallable.Packed.
//
// All fields are uint64, so there is no padding and the layout is packed.
//
//go:nosplit
func (p *PtraceRegs) Packed() bool {
	return true
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
//
// Safe because Packed() is true: one memmove serializes the whole struct.
func (p *PtraceRegs) MarshalUnsafe(dst []byte) []byte {
	size := p.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(p), uintptr(size))
	return dst[size:]
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (p *PtraceRegs) UnmarshalUnsafe(src []byte) []byte {
	size := p.SizeBytes()
	gohacks.Memmove(unsafe.Pointer(p), unsafe.Pointer(&src[0]), uintptr(size))
	return src[size:]
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (p *PtraceRegs) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	// Construct a byte slice aliasing p's underlying memory; Noescape keeps
	// p off the heap despite the pointer laundering.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p)))
	hdr.Len = p.SizeBytes()
	hdr.Cap = p.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that p
	// must live until the use above.
	runtime.KeepAlive(p) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (p *PtraceRegs) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return p.CopyOutN(cc, addr, p.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (p *PtraceRegs) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// Construct a byte slice aliasing p's underlying memory so the task
	// copy lands directly in the struct.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p)))
	hdr.Len = p.SizeBytes()
	hdr.Cap = p.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that p
	// must live until the use above.
	runtime.KeepAlive(p) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (p *PtraceRegs) WriteTo(writer io.Writer) (int64, error) {
	// Construct a byte slice aliasing p's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p)))
	hdr.Len = p.SizeBytes()
	hdr.Cap = p.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that p
	// must live until the use above.
	runtime.KeepAlive(p) // escapes: replaced by intrinsic.
	return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
//
// 40 fixed bytes (unused1/unused2/SemNSems/unused3/unused4, 5x8) plus the
// embedded IPCPerm and two TimeT fields.
func (s *SemidDS) SizeBytes() int {
	return 40 +
		(*IPCPerm)(nil).SizeBytes() +
		(*TimeT)(nil).SizeBytes() +
		(*TimeT)(nil).SizeBytes()
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (s *SemidDS) MarshalBytes(dst []byte) []byte {
	// Field order matches the kernel's semid64_ds layout: SemPerm,
	// SemOTime + reserved, SemCTime + reserved, SemNSems, two reserved.
	dst = s.SemPerm.MarshalUnsafe(dst)
	dst = s.SemOTime.MarshalUnsafe(dst)
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused1))
	dst = dst[8:]
	dst = s.SemCTime.MarshalUnsafe(dst)
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused2))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.SemNSems))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused3))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused4))
	dst = dst[8:]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
//
// Exact inverse of MarshalBytes; returns the unconsumed remainder of src.
func (s *SemidDS) UnmarshalBytes(src []byte) []byte {
	src = s.SemPerm.UnmarshalUnsafe(src)
	src = s.SemOTime.UnmarshalUnsafe(src)
	s.unused1 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.SemCTime.UnmarshalUnsafe(src)
	s.unused2 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.SemNSems = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.unused3 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.unused4 = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	return src
}

// Packed implements marshal.Marshallable.Packed.
//
// SemidDS is packed iff all of its embedded fields are packed.
//
//go:nosplit
func (s *SemidDS) Packed() bool {
	return s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
//
// Fast path: packed layout serializes with a single memmove; otherwise
// fall back to the field-by-field MarshalBytes.
func (s *SemidDS) MarshalUnsafe(dst []byte) []byte {
	if s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type SemidDS doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (s *SemidDS) UnmarshalUnsafe(src []byte) []byte {
	// Fast path: packed layout decodes with one memmove.
	if s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size))
		return src[size:]
	}
	// Type SemidDS doesn't have a packed layout in memory, fallback to UnmarshalBytes.
	return s.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (s *SemidDS) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
	// NOTE(review): the `!` binds only to the first term (not !(a && b && c)).
	// This is the shape go_marshal emits; presumably benign only because the
	// embedded Packed() methods are constant true — confirm before relying
	// on it.
	if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() {
		// Type SemidDS doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		s.MarshalBytes(buf)                        // escapes: fallback.
		return cc.CopyOutBytes(addr, buf[:limit])  // escapes: okay.
	}

	// Construct a byte slice aliasing s's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (s *SemidDS) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	return s.CopyOutN(cc, addr, s.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (s *SemidDS) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// NOTE(review): same single-term negation quirk as CopyOutN above.
	if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() {
		// Type SemidDS doesn't have a packed layout in memory, fall back to UnmarshalBytes.
		buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
		length, err := cc.CopyInBytes(addr, buf)   // escapes: okay.
		// Unmarshal unconditionally. If we had a short copy-in, this results in a
		// partially unmarshalled struct.
		s.UnmarshalBytes(buf) // escapes: fallback.
		return length, err
	}

	// Construct a byte slice aliasing s's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (s *SemidDS) WriteTo(writer io.Writer) (int64, error) {
	if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() {
		// Type SemidDS doesn't have a packed layout in memory, fall back to MarshalBytes.
		buf := make([]byte, s.SizeBytes())
		s.MarshalBytes(buf)
		length, err := writer.Write(buf)
		return int64(length), err
	}

	// Construct a byte slice aliasing s's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
	hdr.Len = s.SizeBytes()
	hdr.Cap = s.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that s
	// must live until the use above.
	runtime.KeepAlive(s) // escapes: replaced by intrinsic.
	return int64(length), err
}