github.com/nicocha30/gvisor-ligolo@v0.0.0-20230726075806-989fa2c0a413/pkg/sentry/arch/arch_arm64_abi_autogen_unsafe.go

// Automatically generated marshal implementation. See tools/go_marshal.

// If there are issues with build constraint aggregation, see
// tools/go_marshal/gomarshal/generator.go:writeHeader(). The constraints here
// come from the input set of files used to generate this file. This input set
// is filtered based on pre-defined file suffixes related to build constraints,
// see tools/defs.bzl:calculate_sets().

//go:build arm64 && arm64 && arm64
// +build arm64,arm64,arm64

package arch

import (
    "github.com/nicocha30/gvisor-ligolo/pkg/abi/linux"
    "github.com/nicocha30/gvisor-ligolo/pkg/gohacks"
    "github.com/nicocha30/gvisor-ligolo/pkg/hostarch"
    "github.com/nicocha30/gvisor-ligolo/pkg/marshal"
    "io"
    "reflect"
    "runtime"
    "unsafe"
)

// Marshallable types used by this file.
var _ marshal.Marshallable = (*FpsimdContext)(nil)
var _ marshal.Marshallable = (*SignalContext64)(nil)
var _ marshal.Marshallable = (*UContext64)(nil)
var _ marshal.Marshallable = (*aarch64Ctx)(nil)
var _ marshal.Marshallable = (*linux.SignalSet)(nil)
var _ marshal.Marshallable = (*linux.SignalStack)(nil)

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (f *FpsimdContext) SizeBytes() int {
    return 8 +
        (*aarch64Ctx)(nil).SizeBytes() +
        8*64
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (f *FpsimdContext) MarshalBytes(dst []byte) []byte {
    dst = f.Head.MarshalUnsafe(dst)
    hostarch.ByteOrder.PutUint32(dst[:4], uint32(f.Fpsr))
    dst = dst[4:]
    hostarch.ByteOrder.PutUint32(dst[:4], uint32(f.Fpcr))
    dst = dst[4:]
    for idx := 0; idx < 64; idx++ {
        hostarch.ByteOrder.PutUint64(dst[:8], uint64(f.Vregs[idx]))
        dst = dst[8:]
    }
    return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (f *FpsimdContext) UnmarshalBytes(src []byte) []byte {
    src = f.Head.UnmarshalUnsafe(src)
    f.Fpsr = uint32(hostarch.ByteOrder.Uint32(src[:4]))
    src = src[4:]
    f.Fpcr = uint32(hostarch.ByteOrder.Uint32(src[:4]))
    src = src[4:]
    for idx := 0; idx < 64; idx++ {
        f.Vregs[idx] = uint64(hostarch.ByteOrder.Uint64(src[:8]))
        src = src[8:]
    }
    return src
}

// Packed implements marshal.Marshallable.Packed.
//go:nosplit
func (f *FpsimdContext) Packed() bool {
    return f.Head.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (f *FpsimdContext) MarshalUnsafe(dst []byte) []byte {
    if f.Head.Packed() {
        size := f.SizeBytes()
        gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(f), uintptr(size))
        return dst[size:]
    }
    // Type FpsimdContext doesn't have a packed layout in memory, fallback to MarshalBytes.
    return f.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (f *FpsimdContext) UnmarshalUnsafe(src []byte) []byte {
    if f.Head.Packed() {
        size := f.SizeBytes()
        gohacks.Memmove(unsafe.Pointer(f), unsafe.Pointer(&src[0]), uintptr(size))
        return src[size:]
    }
    // Type FpsimdContext doesn't have a packed layout in memory, fallback to UnmarshalBytes.
    return f.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (f *FpsimdContext) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
    if !f.Head.Packed() {
        // Type FpsimdContext doesn't have a packed layout in memory, fall back to MarshalBytes.
        buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay.
        f.MarshalBytes(buf) // escapes: fallback.
        return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(f)))
    hdr.Len = f.SizeBytes()
    hdr.Cap = f.SizeBytes()

    length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that f
    // must live until the use above.
    runtime.KeepAlive(f) // escapes: replaced by intrinsic.
    return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (f *FpsimdContext) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    return f.CopyOutN(cc, addr, f.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (f *FpsimdContext) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    if !f.Head.Packed() {
        // Type FpsimdContext doesn't have a packed layout in memory, fall back to UnmarshalBytes.
        buf := cc.CopyScratchBuffer(f.SizeBytes()) // escapes: okay.
        length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
        // Unmarshal unconditionally. If we had a short copy-in, this results in a
        // partially unmarshalled struct.
        f.UnmarshalBytes(buf) // escapes: fallback.
        return length, err
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(f)))
    hdr.Len = f.SizeBytes()
    hdr.Cap = f.SizeBytes()

    length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that f
    // must live until the use above.
    runtime.KeepAlive(f) // escapes: replaced by intrinsic.
    return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (f *FpsimdContext) WriteTo(writer io.Writer) (int64, error) {
    if !f.Head.Packed() {
        // Type FpsimdContext doesn't have a packed layout in memory, fall back to MarshalBytes.
        buf := make([]byte, f.SizeBytes())
        f.MarshalBytes(buf)
        length, err := writer.Write(buf)
        return int64(length), err
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(f)))
    hdr.Len = f.SizeBytes()
    hdr.Cap = f.SizeBytes()

    length, err := writer.Write(buf)
    // Since we bypassed the compiler's escape analysis, indicate that f
    // must live until the use above.
    runtime.KeepAlive(f) // escapes: replaced by intrinsic.
    return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (s *SignalContext64) SizeBytes() int {
    return 32 +
        8*31 +
        1*8 +
        (*FpsimdContext)(nil).SizeBytes()
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (s *SignalContext64) MarshalBytes(dst []byte) []byte {
    hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.FaultAddr))
    dst = dst[8:]
    for idx := 0; idx < 31; idx++ {
        hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Regs[idx]))
        dst = dst[8:]
    }
    hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Sp))
    dst = dst[8:]
    hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Pc))
    dst = dst[8:]
    hostarch.ByteOrder.PutUint64(dst[:8], uint64(s.Pstate))
    dst = dst[8:]
    for idx := 0; idx < 8; idx++ {
        dst[0] = byte(s._pad[idx])
        dst = dst[1:]
    }
    dst = s.Fpsimd64.MarshalUnsafe(dst)
    return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (s *SignalContext64) UnmarshalBytes(src []byte) []byte {
    s.FaultAddr = uint64(hostarch.ByteOrder.Uint64(src[:8]))
    src = src[8:]
    for idx := 0; idx < 31; idx++ {
        s.Regs[idx] = uint64(hostarch.ByteOrder.Uint64(src[:8]))
        src = src[8:]
    }
    s.Sp = uint64(hostarch.ByteOrder.Uint64(src[:8]))
    src = src[8:]
    s.Pc = uint64(hostarch.ByteOrder.Uint64(src[:8]))
    src = src[8:]
    s.Pstate = uint64(hostarch.ByteOrder.Uint64(src[:8]))
    src = src[8:]
    for idx := 0; idx < 8; idx++ {
        s._pad[idx] = src[0]
        src = src[1:]
    }
    src = s.Fpsimd64.UnmarshalUnsafe(src)
    return src
}

// Packed implements marshal.Marshallable.Packed.
//go:nosplit
func (s *SignalContext64) Packed() bool {
    return s.Fpsimd64.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (s *SignalContext64) MarshalUnsafe(dst []byte) []byte {
    if s.Fpsimd64.Packed() {
        size := s.SizeBytes()
        gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(s), uintptr(size))
        return dst[size:]
    }
    // Type SignalContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
    return s.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (s *SignalContext64) UnmarshalUnsafe(src []byte) []byte {
    if s.Fpsimd64.Packed() {
        size := s.SizeBytes()
        gohacks.Memmove(unsafe.Pointer(s), unsafe.Pointer(&src[0]), uintptr(size))
        return src[size:]
    }
    // Type SignalContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
    return s.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (s *SignalContext64) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
    if !s.Fpsimd64.Packed() {
        // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
        buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
        s.MarshalBytes(buf) // escapes: fallback.
        return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
    hdr.Len = s.SizeBytes()
    hdr.Cap = s.SizeBytes()

    length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that s
    // must live until the use above.
    runtime.KeepAlive(s) // escapes: replaced by intrinsic.
    return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (s *SignalContext64) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    return s.CopyOutN(cc, addr, s.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (s *SignalContext64) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    if !s.Fpsimd64.Packed() {
        // Type SignalContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
        buf := cc.CopyScratchBuffer(s.SizeBytes()) // escapes: okay.
        length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
        // Unmarshal unconditionally. If we had a short copy-in, this results in a
        // partially unmarshalled struct.
        s.UnmarshalBytes(buf) // escapes: fallback.
        return length, err
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
    hdr.Len = s.SizeBytes()
    hdr.Cap = s.SizeBytes()

    length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that s
    // must live until the use above.
    runtime.KeepAlive(s) // escapes: replaced by intrinsic.
    return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (s *SignalContext64) WriteTo(writer io.Writer) (int64, error) {
    if !s.Fpsimd64.Packed() {
        // Type SignalContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
        buf := make([]byte, s.SizeBytes())
        s.MarshalBytes(buf)
        length, err := writer.Write(buf)
        return int64(length), err
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(s)))
    hdr.Len = s.SizeBytes()
    hdr.Cap = s.SizeBytes()

    length, err := writer.Write(buf)
    // Since we bypassed the compiler's escape analysis, indicate that s
    // must live until the use above.
    runtime.KeepAlive(s) // escapes: replaced by intrinsic.
    return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (u *UContext64) SizeBytes() int {
    return 16 +
        (*linux.SignalStack)(nil).SizeBytes() +
        (*linux.SignalSet)(nil).SizeBytes() +
        1*120 +
        1*8 +
        (*SignalContext64)(nil).SizeBytes()
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (u *UContext64) MarshalBytes(dst []byte) []byte {
    hostarch.ByteOrder.PutUint64(dst[:8], uint64(u.Flags))
    dst = dst[8:]
    hostarch.ByteOrder.PutUint64(dst[:8], uint64(u.Link))
    dst = dst[8:]
    dst = u.Stack.MarshalUnsafe(dst)
    dst = u.Sigset.MarshalUnsafe(dst)
    for idx := 0; idx < 120; idx++ {
        dst[0] = byte(u._pad[idx])
        dst = dst[1:]
    }
    for idx := 0; idx < 8; idx++ {
        dst[0] = byte(u._pad2[idx])
        dst = dst[1:]
    }
    dst = u.MContext.MarshalUnsafe(dst)
    return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (u *UContext64) UnmarshalBytes(src []byte) []byte {
    u.Flags = uint64(hostarch.ByteOrder.Uint64(src[:8]))
    src = src[8:]
    u.Link = uint64(hostarch.ByteOrder.Uint64(src[:8]))
    src = src[8:]
    src = u.Stack.UnmarshalUnsafe(src)
    src = u.Sigset.UnmarshalUnsafe(src)
    for idx := 0; idx < 120; idx++ {
        u._pad[idx] = src[0]
        src = src[1:]
    }
    for idx := 0; idx < 8; idx++ {
        u._pad2[idx] = src[0]
        src = src[1:]
    }
    src = u.MContext.UnmarshalUnsafe(src)
    return src
}

// Packed implements marshal.Marshallable.Packed.
//go:nosplit
func (u *UContext64) Packed() bool {
    return u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed()
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (u *UContext64) MarshalUnsafe(dst []byte) []byte {
    if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
        size := u.SizeBytes()
        gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(u), uintptr(size))
        return dst[size:]
    }
    // Type UContext64 doesn't have a packed layout in memory, fallback to MarshalBytes.
    return u.MarshalBytes(dst)
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (u *UContext64) UnmarshalUnsafe(src []byte) []byte {
    if u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
        size := u.SizeBytes()
        gohacks.Memmove(unsafe.Pointer(u), unsafe.Pointer(&src[0]), uintptr(size))
        return src[size:]
    }
    // Type UContext64 doesn't have a packed layout in memory, fallback to UnmarshalBytes.
    return u.UnmarshalBytes(src)
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (u *UContext64) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
    if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
        // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
        buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
        u.MarshalBytes(buf) // escapes: fallback.
        return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
    hdr.Len = u.SizeBytes()
    hdr.Cap = u.SizeBytes()

    length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that u
    // must live until the use above.
    runtime.KeepAlive(u) // escapes: replaced by intrinsic.
    return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (u *UContext64) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    return u.CopyOutN(cc, addr, u.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (u *UContext64) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
        // Type UContext64 doesn't have a packed layout in memory, fall back to UnmarshalBytes.
        buf := cc.CopyScratchBuffer(u.SizeBytes()) // escapes: okay.
        length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
        // Unmarshal unconditionally. If we had a short copy-in, this results in a
        // partially unmarshalled struct.
        u.UnmarshalBytes(buf) // escapes: fallback.
        return length, err
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
    hdr.Len = u.SizeBytes()
    hdr.Cap = u.SizeBytes()

    length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that u
    // must live until the use above.
    runtime.KeepAlive(u) // escapes: replaced by intrinsic.
    return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (u *UContext64) WriteTo(writer io.Writer) (int64, error) {
    if !u.MContext.Packed() && u.Sigset.Packed() && u.Stack.Packed() {
        // Type UContext64 doesn't have a packed layout in memory, fall back to MarshalBytes.
        buf := make([]byte, u.SizeBytes())
        u.MarshalBytes(buf)
        length, err := writer.Write(buf)
        return int64(length), err
    }

    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(u)))
    hdr.Len = u.SizeBytes()
    hdr.Cap = u.SizeBytes()

    length, err := writer.Write(buf)
    // Since we bypassed the compiler's escape analysis, indicate that u
    // must live until the use above.
    runtime.KeepAlive(u) // escapes: replaced by intrinsic.
    return int64(length), err
}

// SizeBytes implements marshal.Marshallable.SizeBytes.
func (a *aarch64Ctx) SizeBytes() int {
    return 8
}

// MarshalBytes implements marshal.Marshallable.MarshalBytes.
func (a *aarch64Ctx) MarshalBytes(dst []byte) []byte {
    hostarch.ByteOrder.PutUint32(dst[:4], uint32(a.Magic))
    dst = dst[4:]
    hostarch.ByteOrder.PutUint32(dst[:4], uint32(a.Size))
    dst = dst[4:]
    return dst
}

// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.
func (a *aarch64Ctx) UnmarshalBytes(src []byte) []byte {
    a.Magic = uint32(hostarch.ByteOrder.Uint32(src[:4]))
    src = src[4:]
    a.Size = uint32(hostarch.ByteOrder.Uint32(src[:4]))
    src = src[4:]
    return src
}

// Packed implements marshal.Marshallable.Packed.
//go:nosplit
func (a *aarch64Ctx) Packed() bool {
    return true
}

// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.
func (a *aarch64Ctx) MarshalUnsafe(dst []byte) []byte {
    size := a.SizeBytes()
    gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(a), uintptr(size))
    return dst[size:]
}

// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.
func (a *aarch64Ctx) UnmarshalUnsafe(src []byte) []byte {
    size := a.SizeBytes()
    gohacks.Memmove(unsafe.Pointer(a), unsafe.Pointer(&src[0]), uintptr(size))
    return src[size:]
}

// CopyOutN implements marshal.Marshallable.CopyOutN.
func (a *aarch64Ctx) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {
    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(a)))
    hdr.Len = a.SizeBytes()
    hdr.Cap = a.SizeBytes()

    length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that a
    // must live until the use above.
    runtime.KeepAlive(a) // escapes: replaced by intrinsic.
    return length, err
}

// CopyOut implements marshal.Marshallable.CopyOut.
func (a *aarch64Ctx) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    return a.CopyOutN(cc, addr, a.SizeBytes())
}

// CopyIn implements marshal.Marshallable.CopyIn.
func (a *aarch64Ctx) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {
    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(a)))
    hdr.Len = a.SizeBytes()
    hdr.Cap = a.SizeBytes()

    length, err := cc.CopyInBytes(addr, buf) // escapes: okay.
    // Since we bypassed the compiler's escape analysis, indicate that a
    // must live until the use above.
    runtime.KeepAlive(a) // escapes: replaced by intrinsic.
    return length, err
}

// WriteTo implements io.WriterTo.WriteTo.
func (a *aarch64Ctx) WriteTo(writer io.Writer) (int64, error) {
    // Construct a slice backed by dst's underlying memory.
    var buf []byte
    hdr := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
    hdr.Data = uintptr(gohacks.Noescape(unsafe.Pointer(a)))
    hdr.Len = a.SizeBytes()
    hdr.Cap = a.SizeBytes()

    length, err := writer.Write(buf)
    // Since we bypassed the compiler's escape analysis, indicate that a
    // must live until the use above.
    runtime.KeepAlive(a) // escapes: replaced by intrinsic.
    return int64(length), err
}
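
The generated methods above all satisfy the marshal.Marshallable interface, so callers can serialize these arm64 signal-frame structures without knowing their layout. As a rough, hypothetical illustration (not part of the generated file; the package path and the choice of UContext64 are assumptions), a MarshalBytes/UnmarshalBytes round trip using only methods defined above would look like this:

package main

import (
    "fmt"

    "github.com/nicocha30/gvisor-ligolo/pkg/sentry/arch"
)

func main() {
    var src, dst arch.UContext64

    // SizeBytes reports the full serialized size, including the nested
    // SignalContext64 and FpsimdContext records.
    buf := make([]byte, src.SizeBytes())

    // MarshalBytes writes the fields in the fixed ABI layout and returns the
    // unused tail of the buffer (empty here, since buf was sized exactly).
    rest := src.MarshalBytes(buf)

    // UnmarshalBytes consumes the same layout in the opposite direction.
    dst.UnmarshalBytes(buf)

    fmt.Println(len(rest), dst.SizeBytes())
}

CopyIn, CopyOut, and CopyOutN follow the same layout but additionally require a marshal.CopyContext (typically a task) to move the bytes to or from a guest address, while WriteTo targets any io.Writer.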