github.com/SagerNet/gvisor@v0.0.0-20210707092255-7731c139d75c/tools/go_marshal/gomarshal/generator_interfaces_struct.go (about) 1 // Copyright 2020 The gVisor Authors. 2 // 3 // Licensed under the Apache License, Version 2.0 (the "License"); 4 // you may not use this file except in compliance with the License. 5 // You may obtain a copy of the License at 6 // 7 // http://www.apache.org/licenses/LICENSE-2.0 8 // 9 // Unless required by applicable law or agreed to in writing, software 10 // distributed under the License is distributed on an "AS IS" BASIS, 11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 // See the License for the specific language governing permissions and 13 // limitations under the License. 14 15 // This file contains the bits of the code generator specific to marshalling 16 // structs. 17 18 package gomarshal 19 20 import ( 21 "fmt" 22 "go/ast" 23 "sort" 24 "strings" 25 ) 26 27 func (g *interfaceGenerator) fieldAccessor(n *ast.Ident) string { 28 return fmt.Sprintf("%s.%s", g.r, n.Name) 29 } 30 31 // areFieldsPackedExpression returns a go expression checking whether g.t's fields are 32 // packed. Returns "", false if g.t has no fields that may be potentially 33 // packed, otherwise returns <clause>, true, where <clause> is an expression 34 // like "t.a.Packed() && t.b.Packed() && t.c.Packed()". 35 func (g *interfaceGenerator) areFieldsPackedExpression() (string, bool) { 36 if len(g.as) == 0 { 37 return "", false 38 } 39 40 cs := make([]string, 0, len(g.as)) 41 for accessor := range g.as { 42 cs = append(cs, fmt.Sprintf("%s.Packed()", accessor)) 43 } 44 // Sort expressions for determinstic build outputs. 45 sort.Strings(cs) 46 return strings.Join(cs, " && "), true 47 } 48 49 // validateStruct ensures the type we're working with can be marshalled. These 50 // checks are done ahead of time and in one place so we can make assumptions 51 // later. 52 func (g *interfaceGenerator) validateStruct(ts *ast.TypeSpec, st *ast.StructType) { 53 forEachStructField(st, func(f *ast.Field) { 54 fieldDispatcher{ 55 primitive: func(_, t *ast.Ident) { 56 g.validatePrimitiveNewtype(t) 57 }, 58 selector: func(_, _, _ *ast.Ident) { 59 // No validation to perform on selector fields. However this 60 // callback must still be provided. 61 }, 62 array: func(n *ast.Ident, a *ast.ArrayType, _ *ast.Ident) { 63 g.validateArrayNewtype(n, a) 64 }, 65 unhandled: func(_ *ast.Ident) { 66 g.abortAt(f.Pos(), fmt.Sprintf("Marshalling not supported for %s fields", kindString(f.Type))) 67 }, 68 }.dispatch(f) 69 }) 70 } 71 72 func (g *interfaceGenerator) isStructPacked(st *ast.StructType) bool { 73 packed := true 74 forEachStructField(st, func(f *ast.Field) { 75 if f.Tag != nil { 76 if f.Tag.Value == "`marshal:\"unaligned\"`" { 77 if packed { 78 debugfAt(g.f.Position(g.t.Pos()), 79 fmt.Sprintf("Marking type '%s' as not packed due to tag `marshal:\"unaligned\"`.\n", g.t.Name)) 80 packed = false 81 } 82 } 83 } 84 }) 85 return packed 86 } 87 88 func (g *interfaceGenerator) emitMarshallableForStruct(st *ast.StructType) { 89 thisPacked := g.isStructPacked(st) 90 91 g.emit("// SizeBytes implements marshal.Marshallable.SizeBytes.\n") 92 g.emit("func (%s *%s) SizeBytes() int {\n", g.r, g.typeName()) 93 g.inIndent(func() { 94 primitiveSize := 0 95 var dynamicSizeTerms []string 96 97 forEachStructField(st, fieldDispatcher{ 98 primitive: func(_, t *ast.Ident) { 99 if size, dynamic := g.scalarSize(t); !dynamic { 100 primitiveSize += size 101 } else { 102 g.recordUsedMarshallable(t.Name) 103 dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", t.Name)) 104 } 105 }, 106 selector: func(_, tX, tSel *ast.Ident) { 107 tName := fmt.Sprintf("%s.%s", tX.Name, tSel.Name) 108 g.recordUsedImport(tX.Name) 109 g.recordUsedMarshallable(tName) 110 dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", tName)) 111 }, 112 array: func(_ *ast.Ident, a *ast.ArrayType, t *ast.Ident) { 113 lenExpr := g.arrayLenExpr(a) 114 if size, dynamic := g.scalarSize(t); !dynamic { 115 dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("%d*%s", size, lenExpr)) 116 } else { 117 g.recordUsedMarshallable(t.Name) 118 dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()*%s", t.Name, lenExpr)) 119 } 120 }, 121 }.dispatch) 122 g.emit("return %d", primitiveSize) 123 if len(dynamicSizeTerms) > 0 { 124 g.incIndent() 125 } 126 { 127 for _, d := range dynamicSizeTerms { 128 g.emitNoIndent(" +\n") 129 g.emit(d) 130 } 131 } 132 if len(dynamicSizeTerms) > 0 { 133 g.decIndent() 134 } 135 }) 136 g.emit("\n}\n\n") 137 138 g.emit("// MarshalBytes implements marshal.Marshallable.MarshalBytes.\n") 139 g.emit("func (%s *%s) MarshalBytes(dst []byte) {\n", g.r, g.typeName()) 140 g.inIndent(func() { 141 forEachStructField(st, fieldDispatcher{ 142 primitive: func(n, t *ast.Ident) { 143 if n.Name == "_" { 144 g.emit("// Padding: dst[:sizeof(%s)] ~= %s(0)\n", t.Name, t.Name) 145 if len, dynamic := g.scalarSize(t); !dynamic { 146 g.shift("dst", len) 147 } else { 148 // We can't use shiftDynamic here because we don't have 149 // an instance of the dynamic type we can reference here 150 // (since the version in this struct is anonymous). Use 151 // a typed nil pointer to call SizeBytes() instead. 152 g.emit("dst = dst[(*%s)(nil).SizeBytes():]\n", t.Name) 153 } 154 return 155 } 156 g.marshalScalar(g.fieldAccessor(n), t.Name, "dst") 157 }, 158 selector: func(n, tX, tSel *ast.Ident) { 159 if n.Name == "_" { 160 g.emit("// Padding: dst[:sizeof(%s)] ~= %s(0)\n", tX.Name, tSel.Name) 161 g.emit("dst = dst[(*%s.%s)(nil).SizeBytes():]\n", tX.Name, tSel.Name) 162 return 163 } 164 g.marshalScalar(g.fieldAccessor(n), fmt.Sprintf("%s.%s", tX.Name, tSel.Name), "dst") 165 }, 166 array: func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident) { 167 lenExpr := g.arrayLenExpr(a) 168 if n.Name == "_" { 169 g.emit("// Padding: dst[:sizeof(%s)*%s] ~= [%s]%s{0}\n", t.Name, lenExpr, lenExpr, t.Name) 170 if size, dynamic := g.scalarSize(t); !dynamic { 171 g.emit("dst = dst[%d*(%s):]\n", size, lenExpr) 172 } else { 173 // We can't use shiftDynamic here because we don't have 174 // an instance of the dynamic type we can reference here 175 // (since the version in this struct is anonymous). Use 176 // a typed nil pointer to call SizeBytes() instead. 177 g.emit("dst = dst[(*%s)(nil).SizeBytes()*(%s):]\n", t.Name, lenExpr) 178 } 179 return 180 } 181 182 g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr) 183 g.inIndent(func() { 184 g.marshalScalar(fmt.Sprintf("%s[idx]", g.fieldAccessor(n)), t.Name, "dst") 185 }) 186 g.emit("}\n") 187 }, 188 }.dispatch) 189 }) 190 g.emit("}\n\n") 191 192 g.emit("// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.\n") 193 g.emit("func (%s *%s) UnmarshalBytes(src []byte) {\n", g.r, g.typeName()) 194 g.inIndent(func() { 195 forEachStructField(st, fieldDispatcher{ 196 primitive: func(n, t *ast.Ident) { 197 if n.Name == "_" { 198 g.emit("// Padding: var _ %s ~= src[:sizeof(%s)]\n", t.Name, t.Name) 199 if len, dynamic := g.scalarSize(t); !dynamic { 200 g.shift("src", len) 201 } else { 202 // We don't have an instance of the dynamic type we can 203 // reference here (since the version in this struct is 204 // anonymous). Use a typed nil pointer to call 205 // SizeBytes() instead. 206 g.shiftDynamic("src", fmt.Sprintf("(*%s)(nil)", t.Name)) 207 g.recordPotentiallyNonPackedField(fmt.Sprintf("(*%s)(nil)", t.Name)) 208 } 209 return 210 } 211 g.unmarshalScalar(g.fieldAccessor(n), t.Name, "src") 212 }, 213 selector: func(n, tX, tSel *ast.Ident) { 214 if n.Name == "_" { 215 g.emit("// Padding: %s ~= src[:sizeof(%s.%s)]\n", g.fieldAccessor(n), tX.Name, tSel.Name) 216 g.emit("src = src[(*%s.%s)(nil).SizeBytes():]\n", tX.Name, tSel.Name) 217 g.recordPotentiallyNonPackedField(fmt.Sprintf("(*%s.%s)(nil)", tX.Name, tSel.Name)) 218 return 219 } 220 g.unmarshalScalar(g.fieldAccessor(n), fmt.Sprintf("%s.%s", tX.Name, tSel.Name), "src") 221 }, 222 array: func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident) { 223 lenExpr := g.arrayLenExpr(a) 224 if n.Name == "_" { 225 g.emit("// Padding: ~ copy([%s]%s(%s), src[:sizeof(%s)*%s])\n", lenExpr, t.Name, g.fieldAccessor(n), t.Name, lenExpr) 226 if size, dynamic := g.scalarSize(t); !dynamic { 227 g.emit("src = src[%d*(%s):]\n", size, lenExpr) 228 } else { 229 // We can't use shiftDynamic here because we don't have 230 // an instance of the dynamic type we can referece here 231 // (since the version in this struct is anonymous). Use 232 // a typed nil pointer to call SizeBytes() instead. 233 g.emit("src = src[(*%s)(nil).SizeBytes()*(%s):]\n", t.Name, lenExpr) 234 } 235 return 236 } 237 238 g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr) 239 g.inIndent(func() { 240 g.unmarshalScalar(fmt.Sprintf("%s[idx]", g.fieldAccessor(n)), t.Name, "src") 241 }) 242 g.emit("}\n") 243 }, 244 }.dispatch) 245 }) 246 g.emit("}\n\n") 247 248 g.emit("// Packed implements marshal.Marshallable.Packed.\n") 249 g.emit("//go:nosplit\n") 250 g.emit("func (%s *%s) Packed() bool {\n", g.r, g.typeName()) 251 g.inIndent(func() { 252 expr, fieldsMaybePacked := g.areFieldsPackedExpression() 253 switch { 254 case !thisPacked: 255 g.emit("return false\n") 256 case fieldsMaybePacked: 257 g.emit("return %s\n", expr) 258 default: 259 g.emit("return true\n") 260 261 } 262 }) 263 g.emit("}\n\n") 264 265 g.emit("// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.\n") 266 g.emit("func (%s *%s) MarshalUnsafe(dst []byte) {\n", g.r, g.typeName()) 267 g.inIndent(func() { 268 fallback := func() { 269 g.emit("// Type %s doesn't have a packed layout in memory, fallback to MarshalBytes.\n", g.typeName()) 270 g.emit("%s.MarshalBytes(dst)\n", g.r) 271 } 272 if thisPacked { 273 g.recordUsedImport("gohacks") 274 g.recordUsedImport("unsafe") 275 if cond, ok := g.areFieldsPackedExpression(); ok { 276 g.emit("if %s {\n", cond) 277 g.inIndent(func() { 278 g.emit("gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(%s), uintptr(%s.SizeBytes()))\n", g.r, g.r) 279 }) 280 g.emit("} else {\n") 281 g.inIndent(fallback) 282 g.emit("}\n") 283 } else { 284 g.emit("gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(%s), uintptr(%s.SizeBytes()))\n", g.r, g.r) 285 } 286 } else { 287 fallback() 288 } 289 }) 290 g.emit("}\n\n") 291 292 g.emit("// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.\n") 293 g.emit("func (%s *%s) UnmarshalUnsafe(src []byte) {\n", g.r, g.typeName()) 294 g.inIndent(func() { 295 fallback := func() { 296 g.emit("// Type %s doesn't have a packed layout in memory, fallback to UnmarshalBytes.\n", g.typeName()) 297 g.emit("%s.UnmarshalBytes(src)\n", g.r) 298 } 299 if thisPacked { 300 g.recordUsedImport("gohacks") 301 if cond, ok := g.areFieldsPackedExpression(); ok { 302 g.emit("if %s {\n", cond) 303 g.inIndent(func() { 304 g.emit("gohacks.Memmove(unsafe.Pointer(%s), unsafe.Pointer(&src[0]), uintptr(%s.SizeBytes()))\n", g.r, g.r) 305 }) 306 g.emit("} else {\n") 307 g.inIndent(fallback) 308 g.emit("}\n") 309 } else { 310 g.emit("gohacks.Memmove(unsafe.Pointer(%s), unsafe.Pointer(&src[0]), uintptr(%s.SizeBytes()))\n", g.r, g.r) 311 } 312 } else { 313 fallback() 314 } 315 }) 316 g.emit("}\n\n") 317 g.emit("// CopyOutN implements marshal.Marshallable.CopyOutN.\n") 318 g.emit("//go:nosplit\n") 319 g.recordUsedImport("marshal") 320 g.recordUsedImport("hostarch") 321 g.emit("func (%s *%s) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {\n", g.r, g.typeName()) 322 g.inIndent(func() { 323 fallback := func() { 324 g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName()) 325 g.emit("buf := cc.CopyScratchBuffer(%s.SizeBytes()) // escapes: okay.\n", g.r) 326 g.emit("%s.MarshalBytes(buf) // escapes: fallback.\n", g.r) 327 g.emit("return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n") 328 } 329 if thisPacked { 330 g.recordUsedImport("reflect") 331 g.recordUsedImport("runtime") 332 g.recordUsedImport("unsafe") 333 if cond, ok := g.areFieldsPackedExpression(); ok { 334 g.emit("if !%s {\n", cond) 335 g.inIndent(fallback) 336 g.emit("}\n\n") 337 } 338 // Fast serialization. 339 g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r)) 340 341 g.emit("length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n") 342 g.emitKeepAlive(g.r) 343 g.emit("return length, err\n") 344 } else { 345 fallback() 346 } 347 }) 348 g.emit("}\n\n") 349 350 g.emit("// CopyOut implements marshal.Marshallable.CopyOut.\n") 351 g.emit("//go:nosplit\n") 352 g.recordUsedImport("marshal") 353 g.recordUsedImport("hostarch") 354 g.emit("func (%s *%s) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {\n", g.r, g.typeName()) 355 g.inIndent(func() { 356 g.emit("return %s.CopyOutN(cc, addr, %s.SizeBytes())\n", g.r, g.r) 357 }) 358 g.emit("}\n\n") 359 360 g.emit("// CopyIn implements marshal.Marshallable.CopyIn.\n") 361 g.emit("//go:nosplit\n") 362 g.recordUsedImport("marshal") 363 g.recordUsedImport("hostarch") 364 g.emit("func (%s *%s) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {\n", g.r, g.typeName()) 365 g.inIndent(func() { 366 fallback := func() { 367 g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName()) 368 g.emit("buf := cc.CopyScratchBuffer(%s.SizeBytes()) // escapes: okay.\n", g.r) 369 g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n") 370 g.emit("// Unmarshal unconditionally. If we had a short copy-in, this results in a\n") 371 g.emit("// partially unmarshalled struct.\n") 372 g.emit("%s.UnmarshalBytes(buf) // escapes: fallback.\n", g.r) 373 g.emit("return length, err\n") 374 } 375 if thisPacked { 376 g.recordUsedImport("reflect") 377 g.recordUsedImport("runtime") 378 g.recordUsedImport("unsafe") 379 if cond, ok := g.areFieldsPackedExpression(); ok { 380 g.emit("if !%s {\n", cond) 381 g.inIndent(fallback) 382 g.emit("}\n\n") 383 } 384 // Fast deserialization. 385 g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r)) 386 387 g.emit("length, err := cc.CopyInBytes(addr, buf) // escapes: okay.\n") 388 g.emitKeepAlive(g.r) 389 g.emit("return length, err\n") 390 } else { 391 fallback() 392 } 393 }) 394 g.emit("}\n\n") 395 396 g.emit("// WriteTo implements io.WriterTo.WriteTo.\n") 397 g.recordUsedImport("io") 398 g.emit("func (%s *%s) WriteTo(writer io.Writer) (int64, error) {\n", g.r, g.typeName()) 399 g.inIndent(func() { 400 fallback := func() { 401 g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName()) 402 g.emit("buf := make([]byte, %s.SizeBytes())\n", g.r) 403 g.emit("%s.MarshalBytes(buf)\n", g.r) 404 g.emit("length, err := writer.Write(buf)\n") 405 g.emit("return int64(length), err\n") 406 } 407 if thisPacked { 408 g.recordUsedImport("reflect") 409 g.recordUsedImport("runtime") 410 g.recordUsedImport("unsafe") 411 if cond, ok := g.areFieldsPackedExpression(); ok { 412 g.emit("if !%s {\n", cond) 413 g.inIndent(fallback) 414 g.emit("}\n\n") 415 } 416 // Fast serialization. 417 g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r)) 418 419 g.emit("length, err := writer.Write(buf)\n") 420 g.emitKeepAlive(g.r) 421 g.emit("return int64(length), err\n") 422 } else { 423 fallback() 424 } 425 }) 426 g.emit("}\n\n") 427 } 428 429 func (g *interfaceGenerator) emitMarshallableSliceForStruct(st *ast.StructType, slice *sliceAPI) { 430 thisPacked := g.isStructPacked(st) 431 432 if slice.inner { 433 abortAt(g.f.Position(slice.comment.Slash), fmt.Sprintf("The ':inner' argument to '+marshal slice:%s:inner' is only applicable to newtypes on primitives. Remove it from this struct declaration.", slice.ident)) 434 } 435 436 g.recordUsedImport("marshal") 437 g.recordUsedImport("hostarch") 438 439 g.emit("// Copy%sIn copies in a slice of %s objects from the task's memory.\n", slice.ident, g.typeName()) 440 g.emit("func Copy%sIn(cc marshal.CopyContext, addr hostarch.Addr, dst []%s) (int, error) {\n", slice.ident, g.typeName()) 441 g.inIndent(func() { 442 g.emit("count := len(dst)\n") 443 g.emit("if count == 0 {\n") 444 g.inIndent(func() { 445 g.emit("return 0, nil\n") 446 }) 447 g.emit("}\n") 448 g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName()) 449 450 fallback := func() { 451 g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName()) 452 g.emit("buf := cc.CopyScratchBuffer(size * count)\n") 453 g.emit("length, err := cc.CopyInBytes(addr, buf)\n\n") 454 455 g.emit("// Unmarshal as much as possible, even on error. First handle full objects.\n") 456 g.emit("limit := length/size\n") 457 g.emit("for idx := 0; idx < limit; idx++ {\n") 458 g.inIndent(func() { 459 g.emit("dst[idx].UnmarshalBytes(buf[size*idx:size*(idx+1)])\n") 460 }) 461 g.emit("}\n\n") 462 463 g.emit("// Handle any final partial object. buf is guaranteed to be long enough for the\n") 464 g.emit("// final element, but may not contain valid data for the entire range. This may\n") 465 g.emit("// result in unmarshalling zero values for some parts of the object.\n") 466 g.emit("if length%size != 0 {\n") 467 g.inIndent(func() { 468 g.emit("idx := limit\n") 469 g.emit("dst[idx].UnmarshalBytes(buf[size*idx:size*(idx+1)])\n") 470 }) 471 g.emit("}\n\n") 472 473 g.emit("return length, err\n") 474 } 475 if thisPacked { 476 g.recordUsedImport("reflect") 477 g.recordUsedImport("runtime") 478 g.recordUsedImport("unsafe") 479 if _, ok := g.areFieldsPackedExpression(); ok { 480 g.emit("if !dst[0].Packed() {\n") 481 g.inIndent(fallback) 482 g.emit("}\n\n") 483 } 484 // Fast deserialization. 485 g.emitCastSliceToByteSlice("&dst", "buf", "size * count") 486 487 g.emit("length, err := cc.CopyInBytes(addr, buf)\n") 488 g.emitKeepAlive("dst") 489 g.emit("return length, err\n") 490 } else { 491 fallback() 492 } 493 }) 494 g.emit("}\n\n") 495 496 g.emit("// Copy%sOut copies a slice of %s objects to the task's memory.\n", slice.ident, g.typeName()) 497 g.emit("func Copy%sOut(cc marshal.CopyContext, addr hostarch.Addr, src []%s) (int, error) {\n", slice.ident, g.typeName()) 498 g.inIndent(func() { 499 g.emit("count := len(src)\n") 500 g.emit("if count == 0 {\n") 501 g.inIndent(func() { 502 g.emit("return 0, nil\n") 503 }) 504 g.emit("}\n") 505 g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName()) 506 507 fallback := func() { 508 g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName()) 509 g.emit("buf := cc.CopyScratchBuffer(size * count)\n") 510 g.emit("for idx := 0; idx < count; idx++ {\n") 511 g.inIndent(func() { 512 g.emit("src[idx].MarshalBytes(buf[size*idx:size*(idx+1)])\n") 513 }) 514 g.emit("}\n") 515 g.emit("return cc.CopyOutBytes(addr, buf)\n") 516 } 517 if thisPacked { 518 g.recordUsedImport("reflect") 519 g.recordUsedImport("runtime") 520 g.recordUsedImport("unsafe") 521 if _, ok := g.areFieldsPackedExpression(); ok { 522 g.emit("if !src[0].Packed() {\n") 523 g.inIndent(fallback) 524 g.emit("}\n\n") 525 } 526 // Fast serialization. 527 g.emitCastSliceToByteSlice("&src", "buf", "size * count") 528 529 g.emit("length, err := cc.CopyOutBytes(addr, buf)\n") 530 g.emitKeepAlive("src") 531 g.emit("return length, err\n") 532 } else { 533 fallback() 534 } 535 }) 536 g.emit("}\n\n") 537 538 g.emit("// MarshalUnsafe%s is like %s.MarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName()) 539 g.emit("func MarshalUnsafe%s(src []%s, dst []byte) (int, error) {\n", slice.ident, g.typeName()) 540 g.inIndent(func() { 541 g.emit("count := len(src)\n") 542 g.emit("if count == 0 {\n") 543 g.inIndent(func() { 544 g.emit("return 0, nil\n") 545 }) 546 g.emit("}\n") 547 g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName()) 548 549 fallback := func() { 550 g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName()) 551 g.emit("for idx := 0; idx < count; idx++ {\n") 552 g.inIndent(func() { 553 g.emit("src[idx].MarshalBytes(dst[size*idx:(size)*(idx+1)])\n") 554 }) 555 g.emit("}\n") 556 g.emit("return size * count, nil\n") 557 } 558 if thisPacked { 559 g.recordUsedImport("reflect") 560 g.recordUsedImport("runtime") 561 g.recordUsedImport("unsafe") 562 g.recordUsedImport("gohacks") 563 if _, ok := g.areFieldsPackedExpression(); ok { 564 g.emit("if !src[0].Packed() {\n") 565 g.inIndent(fallback) 566 g.emit("}\n\n") 567 } 568 g.emit("dst = dst[:size*count]\n") 569 g.emit("gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(&src[0]), uintptr(len(dst)))\n") 570 g.emit("return size * count, nil\n") 571 } else { 572 fallback() 573 } 574 }) 575 g.emit("}\n\n") 576 577 g.emit("// UnmarshalUnsafe%s is like %s.UnmarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName()) 578 g.emit("func UnmarshalUnsafe%s(dst []%s, src []byte) (int, error) {\n", slice.ident, g.typeName()) 579 g.inIndent(func() { 580 g.emit("count := len(dst)\n") 581 g.emit("if count == 0 {\n") 582 g.inIndent(func() { 583 g.emit("return 0, nil\n") 584 }) 585 g.emit("}\n") 586 g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName()) 587 588 fallback := func() { 589 g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName()) 590 g.emit("for idx := 0; idx < count; idx++ {\n") 591 g.inIndent(func() { 592 g.emit("dst[idx].UnmarshalBytes(src[size*idx:size*(idx+1)])\n") 593 }) 594 g.emit("}\n") 595 g.emit("return size * count, nil\n") 596 } 597 if thisPacked { 598 g.recordUsedImport("gohacks") 599 g.recordUsedImport("reflect") 600 g.recordUsedImport("runtime") 601 if _, ok := g.areFieldsPackedExpression(); ok { 602 g.emit("if !dst[0].Packed() {\n") 603 g.inIndent(fallback) 604 g.emit("}\n\n") 605 } 606 607 g.emit("src = src[:(size*count)]\n") 608 g.emit("gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(&src[0]), uintptr(len(src)))\n") 609 610 g.emit("return count*size, nil\n") 611 } else { 612 fallback() 613 } 614 }) 615 g.emit("}\n\n") 616 }