inet.af/netstack@v0.0.0-20220214151720-7585b01ddccf/abi/linux/linux_arm64_abi_autogen_unsafe.go (about) 1 // Automatically generated marshal implementation. See tools/go_marshal. 2 3 // If there are issues with build constraint aggregation, see 4 // tools/go_marshal/gomarshal/generator.go:writeHeader(). The constraints here 5 // come from the input set of files used to generate this file. This input set 6 // is filtered based on pre-defined file suffixes related to build constraints, 7 // see tools/defs.bzl:calculate_sets(). 8 9 //go:build arm64 && arm64 && arm64 && arm64 10 // +build arm64,arm64,arm64,arm64 11 12 package linux 13 14 import ( 15 "inet.af/netstack/gohacks" 16 "inet.af/netstack/hostarch" 17 "inet.af/netstack/marshal" 18 "io" 19 "reflect" 20 "runtime" 21 "unsafe" 22 ) 23 24 // Marshallable types used by this file. 25 var _ marshal.Marshallable = (*EpollEvent)(nil) 26 var _ marshal.Marshallable = (*IPCPerm)(nil) 27 var _ marshal.Marshallable = (*PtraceRegs)(nil) 28 var _ marshal.Marshallable = (*SemidDS)(nil) 29 var _ marshal.Marshallable = (*Stat)(nil) 30 var _ marshal.Marshallable = (*TimeT)(nil) 31 var _ marshal.Marshallable = (*Timespec)(nil) 32 33 // SizeBytes implements marshal.Marshallable.SizeBytes. 34 func (e *EpollEvent) SizeBytes() int { 35 return 8 + 36 4*2 37 } 38 39 // MarshalBytes implements marshal.Marshallable.MarshalBytes. 40 func (e *EpollEvent) MarshalBytes(dst []byte) []byte { 41 hostarch.ByteOrder.PutUint32(dst[:4], uint32(e.Events)) 42 dst = dst[4:] 43 // Padding: dst[:sizeof(int32)] ~= int32(0) 44 dst = dst[4:] 45 for idx := 0; idx < 2; idx++ { 46 hostarch.ByteOrder.PutUint32(dst[:4], uint32(e.Data[idx])) 47 dst = dst[4:] 48 } 49 return dst 50 } 51 52 // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 
53 func (e *EpollEvent) UnmarshalBytes(src []byte) []byte { 54 e.Events = uint32(hostarch.ByteOrder.Uint32(src[:4])) 55 src = src[4:] 56 // Padding: var _ int32 ~= src[:sizeof(int32)] 57 src = src[4:] 58 for idx := 0; idx < 2; idx++ { 59 e.Data[idx] = int32(hostarch.ByteOrder.Uint32(src[:4])) 60 src = src[4:] 61 } 62 return src 63 } 64 65 // Packed implements marshal.Marshallable.Packed. 66 //go:nosplit 67 func (e *EpollEvent) Packed() bool { 68 return true 69 } 70 71 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 72 func (e *EpollEvent) MarshalUnsafe(dst []byte) []byte { 73 size := e.SizeBytes() 74 gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(e), uintptr(size)) 75 return dst[size:] 76 } 77 78 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. 79 func (e *EpollEvent) UnmarshalUnsafe(src []byte) []byte { 80 size := e.SizeBytes() 81 gohacks.Memmove(unsafe.Pointer(e), unsafe.Pointer(&src[0]), uintptr(size)) 82 return src[size:] 83 } 84 85 // CopyOutN implements marshal.Marshallable.CopyOutN. 86 //go:nosplit 87 func (e *EpollEvent) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 88 // Construct a slice backed by dst's underlying memory. 89 var buf []byte 90 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 91 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e))) 92 hdr.Len = e.SizeBytes() 93 hdr.Cap = e.SizeBytes() 94 95 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 96 // Since we bypassed the compiler's escape analysis, indicate that e 97 // must live until the use above. 98 runtime.KeepAlive(e) // escapes: replaced by intrinsic. 99 return length, err 100 } 101 102 // CopyOut implements marshal.Marshallable.CopyOut. 103 //go:nosplit 104 func (e *EpollEvent) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 105 return e.CopyOutN(cc, addr, e.SizeBytes()) 106 } 107 108 // CopyIn implements marshal.Marshallable.CopyIn. 
//go:nosplit
func (e *EpollEvent) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
	// Construct a slice backed by e's underlying memory (not a copy), so the
	// copy-in from task memory lands directly in the struct.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (e *EpollEvent) WriteTo(writer io.Writer) (int64, error) {
	// Construct a slice backed by e's underlying memory, avoiding an
	// intermediate serialization buffer.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(e)))
	hdr.Len = e.SizeBytes()
	hdr.Cap = e.SizeBytes()

	length, err := writer.Write(buf)
	// Since we bypassed the compiler's escape analysis, indicate that e
	// must live until the use above.
	runtime.KeepAlive(e) // escapes: replaced by intrinsic.
	return int64(length), err
}

// CopyEpollEventSliceIn copies in a slice of EpollEvent objects from the task's memory.
// On a short copy-in, the prefix of dst that was covered is populated; the
// returned length reports how many bytes were actually copied.
func CopyEpollEventSliceIn(cc marshal.CopyContext, addr hostarch.Addr, dst []EpollEvent) (int, error) {
	count := len(dst)
	if count == 0 {
		return 0, nil
	}
	size := (*EpollEvent)(nil).SizeBytes()

	// Extract dst's data pointer without letting dst escape to the heap.
	ptr := unsafe.Pointer(&dst)
	val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))

	// Construct a slice backed by dst's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(val)
	hdr.Len = size * count
	hdr.Cap = size * count

	length, err := cc.CopyInBytes(addr, buf)
	// Since we bypassed the compiler's escape analysis, indicate that dst
	// must live until the use above.
	runtime.KeepAlive(dst) // escapes: replaced by intrinsic.
	return length, err
}

// CopyEpollEventSliceOut copies a slice of EpollEvent objects to the task's memory.
func CopyEpollEventSliceOut(cc marshal.CopyContext, addr hostarch.Addr, src []EpollEvent) (int, error) {
	count := len(src)
	if count == 0 {
		return 0, nil
	}
	size := (*EpollEvent)(nil).SizeBytes()

	// Extract src's data pointer without letting src escape to the heap.
	ptr := unsafe.Pointer(&src)
	val := gohacks.Noescape(unsafe.Pointer((*reflect.SliceHeader)(ptr).Data))

	// Construct a slice backed by src's underlying memory.
	var buf []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
	hdr.Data = uintptr(val)
	hdr.Len = size * count
	hdr.Cap = size * count

	length, err := cc.CopyOutBytes(addr, buf)
	// Since we bypassed the compiler's escape analysis, indicate that src
	// must live until the use above.
	runtime.KeepAlive(src) // escapes: replaced by intrinsic.
	return length, err
}

// MarshalUnsafeEpollEventSlice is like EpollEvent.MarshalUnsafe, but for a []EpollEvent.
// Valid because EpollEvent.Packed() is unconditionally true. Panics (via the
// slice expression) if dst is shorter than size*count bytes.
func MarshalUnsafeEpollEventSlice(src []EpollEvent, dst []byte) []byte {
	count := len(src)
	if count == 0 {
		return dst
	}

	size := (*EpollEvent)(nil).SizeBytes()
	buf := dst[:size*count]
	gohacks.Memmove(unsafe.Pointer(&buf[0]), unsafe.Pointer(&src[0]), uintptr(len(buf)))
	return dst[size*count:]
}

// UnmarshalUnsafeEpollEventSlice is like EpollEvent.UnmarshalUnsafe, but for a []EpollEvent.
func UnmarshalUnsafeEpollEventSlice(dst []EpollEvent, src []byte) []byte {
	count := len(dst)
	if count == 0 {
		return src
	}

	// Bulk memmove is valid because EpollEvent.Packed() is always true.
	size := (*EpollEvent)(nil).SizeBytes()
	buf := src[:size*count]
	gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(&buf[0]), uintptr(len(buf)))
	return src[size*count:]
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
//
// 72 bytes of scalar fields/padding + three embedded Timespecs + a trailing
// [2]int32 padding block (the arm64 struct stat layout).
func (s *Stat) SizeBytes() int {
	return 72 +
		(*Timespec)(nil).SizeBytes() +
		(*Timespec)(nil).SizeBytes() +
		(*Timespec)(nil).SizeBytes() +
		4*2
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
// Field order and padding mirror the arm64 kernel struct stat exactly.
func (s *Stat) MarshalBytes(dst []byte) []byte {
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Dev))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Ino))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.Mode))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.Nlink))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.UID))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.GID))
	dst = dst[4:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Rdev))
	dst = dst[8:]
	// Padding: dst[:sizeof(uint64)] ~= uint64(0)
	dst = dst[8:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Size))
	dst = dst[8:]
	hostarch.ByteOrder.PutUint32(dst[:4], uint32(s.Blksize))
	dst = dst[4:]
	// Padding: dst[:sizeof(int32)] ~= int32(0)
	dst = dst[4:]
	hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Blocks))
	dst = dst[8:]
	dst = s.ATime.MarshalUnsafe(dst)
	dst = s.MTime.MarshalUnsafe(dst)
	dst = s.CTime.MarshalUnsafe(dst)
	// Padding: dst[:sizeof(int32)*2] ~= [2]int32{0}
	dst = dst[4*(2):]
	return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (s *Stat) UnmarshalBytes(src []byte) []byte {
	// Inverse of MarshalBytes: consume fields in the same order, skipping the
	// same padding regions.
	s.Dev = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Ino = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Mode = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.Nlink = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.UID = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.GID = uint32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	s.Rdev = uint64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	// Padding: var _ uint64 ~= src[:sizeof(uint64)]
	src = src[8:]
	s.Size = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	s.Blksize = int32(hostarch.ByteOrder.Uint32(src[:4]))
	src = src[4:]
	// Padding: var _ int32 ~= src[:sizeof(int32)]
	src = src[4:]
	s.Blocks = int64(hostarch.ByteOrder.Uint64(src[:8]))
	src = src[8:]
	src = s.ATime.UnmarshalUnsafe(src)
	src = s.MTime.UnmarshalUnsafe(src)
	src = s.CTime.UnmarshalUnsafe(src)
	// Padding: ~ copy([2]int32(s._), src[:sizeof(int32)*2])
	src = src[4*(2):]
	return src
}

// Packed implements marshal.Marshallable.Packed.
// Stat is packed iff all of its embedded Timespec fields are packed.
//go:nosplit
func (s *Stat) Packed() bool {
	return s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
// Uses a raw memmove when the layout is packed, otherwise falls back to the
// field-by-field MarshalBytes.
func (s *Stat) MarshalUnsafe(dst []byte) []byte {
	if s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() {
		size := s.SizeBytes()
		gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
		return dst[size:]
	}
	// Type Stat doesn't have a packed layout in memory, fallback to MarshalBytes.
	return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
312 func (s *Stat) UnmarshalUnsafe(src []byte) []byte { 313 if s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 314 size := s.SizeBytes() 315 gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size)) 316 return src[size:] 317 } 318 // Type Stat doesn't have a packed layout in memory, fallback to UnmarshalBytes. 319 return s.UnmarshalBytes(src) 320 } 321 322 // CopyOutN implements marshal.Marshallable.CopyOutN. 323 //go:nosplit 324 func (s *Stat) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 325 if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 326 // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. 327 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 328 s.MarshalBytes(buf) // escapes: fallback. 329 return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 330 } 331 332 // Construct a slice backed by dst's underlying memory. 333 var buf []byte 334 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 335 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 336 hdr.Len = s.SizeBytes() 337 hdr.Cap = s.SizeBytes() 338 339 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 340 // Since we bypassed the compiler's escape analysis, indicate that s 341 // must live until the use above. 342 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 343 return length, err 344 } 345 346 // CopyOut implements marshal.Marshallable.CopyOut. 347 //go:nosplit 348 func (s *Stat) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 349 return s.CopyOutN(cc, addr, s.SizeBytes()) 350 } 351 352 // CopyIn implements marshal.Marshallable.CopyIn. 353 //go:nosplit 354 func (s *Stat) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 355 if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 356 // Type Stat doesn't have a packed layout in memory, fall back to UnmarshalBytes. 
357 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 358 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 359 // Unmarshal unconditionally. If we had a short copy-in, this results in a 360 // partially unmarshalled struct. 361 s.UnmarshalBytes(buf) // escapes: fallback. 362 return length, err 363 } 364 365 // Construct a slice backed by dst's underlying memory. 366 var buf []byte 367 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 368 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 369 hdr.Len = s.SizeBytes() 370 hdr.Cap = s.SizeBytes() 371 372 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 373 // Since we bypassed the compiler's escape analysis, indicate that s 374 // must live until the use above. 375 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 376 return length, err 377 } 378 379 // WriteTo implements io.WriterTo.WriteTo. 380 func (s *Stat) WriteTo(writer io.Writer) (int64, error) { 381 if !s.ATime.Packed() && s.CTime.Packed() && s.MTime.Packed() { 382 // Type Stat doesn't have a packed layout in memory, fall back to MarshalBytes. 383 buf := make([]byte, s.SizeBytes()) 384 s.MarshalBytes(buf) 385 length, err := writer.Write(buf) 386 return int64(length), err 387 } 388 389 // Construct a slice backed by dst's underlying memory. 390 var buf []byte 391 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 392 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 393 hdr.Len = s.SizeBytes() 394 hdr.Cap = s.SizeBytes() 395 396 length, err := writer.Write(buf) 397 // Since we bypassed the compiler's escape analysis, indicate that s 398 // must live until the use above. 399 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 400 return int64(length), err 401 } 402 403 // SizeBytes implements marshal.Marshallable.SizeBytes. 404 func (p *PtraceRegs) SizeBytes() int { 405 return 24 + 406 8*31 407 } 408 409 // MarshalBytes implements marshal.Marshallable.MarshalBytes. 
410 func (p *PtraceRegs) MarshalBytes(dst []byte) []byte { 411 for idx := 0; idx < 31; idx++ { 412 hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Regs[idx])) 413 dst = dst[8:] 414 } 415 hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Sp)) 416 dst = dst[8:] 417 hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Pc)) 418 dst = dst[8:] 419 hostarch.ByteOrder.PutUint64(dst[:8], uint64(p.Pstate)) 420 dst = dst[8:] 421 return dst 422 } 423 424 // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 425 func (p *PtraceRegs) UnmarshalBytes(src []byte) []byte { 426 for idx := 0; idx < 31; idx++ { 427 p.Regs[idx] = uint64(hostarch.ByteOrder.Uint64(src[:8])) 428 src = src[8:] 429 } 430 p.Sp = uint64(hostarch.ByteOrder.Uint64(src[:8])) 431 src = src[8:] 432 p.Pc = uint64(hostarch.ByteOrder.Uint64(src[:8])) 433 src = src[8:] 434 p.Pstate = uint64(hostarch.ByteOrder.Uint64(src[:8])) 435 src = src[8:] 436 return src 437 } 438 439 // Packed implements marshal.Marshallable.Packed. 440 //go:nosplit 441 func (p *PtraceRegs) Packed() bool { 442 return true 443 } 444 445 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 446 func (p *PtraceRegs) MarshalUnsafe(dst []byte) []byte { 447 size := p.SizeBytes() 448 gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(p), uintptr(size)) 449 return dst[size:] 450 } 451 452 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. 453 func (p *PtraceRegs) UnmarshalUnsafe(src []byte) []byte { 454 size := p.SizeBytes() 455 gohacks.Memmove(unsafe.Pointer(p), unsafe.Pointer(&src[0]), uintptr(size)) 456 return src[size:] 457 } 458 459 // CopyOutN implements marshal.Marshallable.CopyOutN. 460 //go:nosplit 461 func (p *PtraceRegs) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 462 // Construct a slice backed by dst's underlying memory. 
463 var buf []byte 464 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 465 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p))) 466 hdr.Len = p.SizeBytes() 467 hdr.Cap = p.SizeBytes() 468 469 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 470 // Since we bypassed the compiler's escape analysis, indicate that p 471 // must live until the use above. 472 runtime.KeepAlive(p) // escapes: replaced by intrinsic. 473 return length, err 474 } 475 476 // CopyOut implements marshal.Marshallable.CopyOut. 477 //go:nosplit 478 func (p *PtraceRegs) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 479 return p.CopyOutN(cc, addr, p.SizeBytes()) 480 } 481 482 // CopyIn implements marshal.Marshallable.CopyIn. 483 //go:nosplit 484 func (p *PtraceRegs) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 485 // Construct a slice backed by dst's underlying memory. 486 var buf []byte 487 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 488 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p))) 489 hdr.Len = p.SizeBytes() 490 hdr.Cap = p.SizeBytes() 491 492 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 493 // Since we bypassed the compiler's escape analysis, indicate that p 494 // must live until the use above. 495 runtime.KeepAlive(p) // escapes: replaced by intrinsic. 496 return length, err 497 } 498 499 // WriteTo implements io.WriterTo.WriteTo. 500 func (p *PtraceRegs) WriteTo(writer io.Writer) (int64, error) { 501 // Construct a slice backed by dst's underlying memory. 502 var buf []byte 503 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 504 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(p))) 505 hdr.Len = p.SizeBytes() 506 hdr.Cap = p.SizeBytes() 507 508 length, err := writer.Write(buf) 509 // Since we bypassed the compiler's escape analysis, indicate that p 510 // must live until the use above. 511 runtime.KeepAlive(p) // escapes: replaced by intrinsic. 
512 return int64(length), err 513 } 514 515 // SizeBytes implements marshal.Marshallable.SizeBytes. 516 func (s *SemidDS) SizeBytes() int { 517 return 24 + 518 (*IPCPerm)(nil).SizeBytes() + 519 (*TimeT)(nil).SizeBytes() + 520 (*TimeT)(nil).SizeBytes() 521 } 522 523 // MarshalBytes implements marshal.Marshallable.MarshalBytes. 524 func (s *SemidDS) MarshalBytes(dst []byte) []byte { 525 dst = s.SemPerm.MarshalUnsafe(dst) 526 dst = s.SemOTime.MarshalUnsafe(dst) 527 dst = s.SemCTime.MarshalUnsafe(dst) 528 hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.SemNSems)) 529 dst = dst[8:] 530 hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused3)) 531 dst = dst[8:] 532 hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.unused4)) 533 dst = dst[8:] 534 return dst 535 } 536 537 // UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes. 538 func (s *SemidDS) UnmarshalBytes(src []byte) []byte { 539 src = s.SemPerm.UnmarshalUnsafe(src) 540 src = s.SemOTime.UnmarshalUnsafe(src) 541 src = s.SemCTime.UnmarshalUnsafe(src) 542 s.SemNSems = uint64(hostarch.ByteOrder.Uint64(src[:8])) 543 src = src[8:] 544 s.unused3 = uint64(hostarch.ByteOrder.Uint64(src[:8])) 545 src = src[8:] 546 s.unused4 = uint64(hostarch.ByteOrder.Uint64(src[:8])) 547 src = src[8:] 548 return src 549 } 550 551 // Packed implements marshal.Marshallable.Packed. 552 //go:nosplit 553 func (s *SemidDS) Packed() bool { 554 return s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() 555 } 556 557 // MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe. 558 func (s *SemidDS) MarshalUnsafe(dst []byte) []byte { 559 if s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 560 size := s.SizeBytes() 561 gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size)) 562 return dst[size:] 563 } 564 // Type SemidDS doesn't have a packed layout in memory, fallback to MarshalBytes. 
565 return s.MarshalBytes(dst) 566 } 567 568 // UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe. 569 func (s *SemidDS) UnmarshalUnsafe(src []byte) []byte { 570 if s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 571 size := s.SizeBytes() 572 gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size)) 573 return src[size:] 574 } 575 // Type SemidDS doesn't have a packed layout in memory, fallback to UnmarshalBytes. 576 return s.UnmarshalBytes(src) 577 } 578 579 // CopyOutN implements marshal.Marshallable.CopyOutN. 580 //go:nosplit 581 func (s *SemidDS) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) { 582 if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 583 // Type SemidDS doesn't have a packed layout in memory, fall back to MarshalBytes. 584 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 585 s.MarshalBytes(buf) // escapes: fallback. 586 return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 587 } 588 589 // Construct a slice backed by dst's underlying memory. 590 var buf []byte 591 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 592 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 593 hdr.Len = s.SizeBytes() 594 hdr.Cap = s.SizeBytes() 595 596 length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay. 597 // Since we bypassed the compiler's escape analysis, indicate that s 598 // must live until the use above. 599 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 600 return length, err 601 } 602 603 // CopyOut implements marshal.Marshallable.CopyOut. 604 //go:nosplit 605 func (s *SemidDS) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 606 return s.CopyOutN(cc, addr, s.SizeBytes()) 607 } 608 609 // CopyIn implements marshal.Marshallable.CopyIn. 
610 //go:nosplit 611 func (s *SemidDS) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) { 612 if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 613 // Type SemidDS doesn't have a packed layout in memory, fall back to UnmarshalBytes. 614 buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay. 615 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 616 // Unmarshal unconditionally. If we had a short copy-in, this results in a 617 // partially unmarshalled struct. 618 s.UnmarshalBytes(buf) // escapes: fallback. 619 return length, err 620 } 621 622 // Construct a slice backed by dst's underlying memory. 623 var buf []byte 624 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 625 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 626 hdr.Len = s.SizeBytes() 627 hdr.Cap = s.SizeBytes() 628 629 length, err := cc.CopyInBytes(addr, buf) // escapes: okay. 630 // Since we bypassed the compiler's escape analysis, indicate that s 631 // must live until the use above. 632 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 633 return length, err 634 } 635 636 // WriteTo implements io.WriterTo.WriteTo. 637 func (s *SemidDS) WriteTo(writer io.Writer) (int64, error) { 638 if !s.SemCTime.Packed() && s.SemOTime.Packed() && s.SemPerm.Packed() { 639 // Type SemidDS doesn't have a packed layout in memory, fall back to MarshalBytes. 640 buf := make([]byte, s.SizeBytes()) 641 s.MarshalBytes(buf) 642 length, err := writer.Write(buf) 643 return int64(length), err 644 } 645 646 // Construct a slice backed by dst's underlying memory. 647 var buf []byte 648 hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf)) 649 hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s))) 650 hdr.Len = s.SizeBytes() 651 hdr.Cap = s.SizeBytes() 652 653 length, err := writer.Write(buf) 654 // Since we bypassed the compiler's escape analysis, indicate that s 655 // must live until the use above. 656 runtime.KeepAlive(s) // escapes: replaced by intrinsic. 
657 return int64(length), err 658 } 659