gvisor.dev/gvisor@v0.0.0-20240520182842-f9d4d51c7e0f/tools/go_marshal/gomarshal/generator_interfaces_struct.go (about)

     1  // Copyright 2020 The gVisor Authors.
     2  //
     3  // Licensed under the Apache License, Version 2.0 (the "License");
     4  // you may not use this file except in compliance with the License.
     5  // You may obtain a copy of the License at
     6  //
     7  //     http://www.apache.org/licenses/LICENSE-2.0
     8  //
     9  // Unless required by applicable law or agreed to in writing, software
    10  // distributed under the License is distributed on an "AS IS" BASIS,
    11  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    12  // See the License for the specific language governing permissions and
    13  // limitations under the License.
    14  
    15  // This file contains the bits of the code generator specific to marshalling
    16  // structs.
    17  
    18  package gomarshal
    19  
    20  import (
    21  	"fmt"
    22  	"go/ast"
    23  	"sort"
    24  	"strings"
    25  )
    26  
    27  func (g *interfaceGenerator) fieldAccessor(n *ast.Ident) string {
    28  	return fmt.Sprintf("%s.%s", g.r, n.Name)
    29  }
    30  
    31  // areFieldsPackedExpression returns a go expression checking whether g.t's fields are
    32  // packed. Returns "", false if g.t has no fields that may be potentially not
    33  // packed, otherwise returns <clause>, true, where <clause> is an expression
    34  // like "t.a.Packed() && t.b.Packed() && t.c.Packed()".
    35  func (g *interfaceGenerator) areFieldsPackedExpression() (string, bool) {
    36  	if len(g.as) == 0 {
    37  		return "", false
    38  	}
    39  
    40  	cs := make([]string, 0, len(g.as))
    41  	for accessor := range g.as {
    42  		cs = append(cs, fmt.Sprintf("%s.Packed()", accessor))
    43  	}
    44  	// Sort expressions for determinstic build outputs.
    45  	sort.Strings(cs)
    46  	return strings.Join(cs, " && "), true
    47  }
    48  
    49  // validateStruct ensures the type we're working with can be marshalled. These
    50  // checks are done ahead of time and in one place so we can make assumptions
    51  // later.
    52  func (g *interfaceGenerator) validateStruct(ts *ast.TypeSpec, st *ast.StructType) {
    53  	forEachStructField(st, func(f *ast.Field) {
    54  		fieldDispatcher{
    55  			primitive: func(_, t *ast.Ident) {
    56  				g.validatePrimitiveNewtype(t)
    57  			},
    58  			selector: func(_, _, _ *ast.Ident) {
    59  				// No validation to perform on selector fields. However this
    60  				// callback must still be provided.
    61  			},
    62  			array: func(n *ast.Ident, a *ast.ArrayType, _ *ast.Ident) {
    63  				g.validateArrayNewtype(n, a)
    64  			},
    65  			unhandled: func(_ *ast.Ident) {
    66  				g.abortAt(f.Pos(), fmt.Sprintf("Marshalling not supported for %s fields", kindString(f.Type)))
    67  			},
    68  		}.dispatch(f)
    69  	})
    70  }
    71  
    72  func (g *interfaceGenerator) isStructPacked(st *ast.StructType) bool {
    73  	packed := true
    74  	forEachStructField(st, func(f *ast.Field) {
    75  		if f.Tag != nil {
    76  			if f.Tag.Value == "`marshal:\"unaligned\"`" {
    77  				if packed {
    78  					debugfAt(g.f.Position(g.t.Pos()),
    79  						fmt.Sprintf("Marking type '%s' as not packed due to tag `marshal:\"unaligned\"`.\n", g.t.Name))
    80  					packed = false
    81  				}
    82  			}
    83  		}
    84  	})
    85  	return packed
    86  }
    87  
// emitMarshallableForStruct emits the full marshal.Marshallable implementation
// for a struct type: SizeBytes, MarshalBytes, UnmarshalBytes, Packed,
// MarshalUnsafe, UnmarshalUnsafe, CopyOutN, CopyOut, CopyInN, CopyIn and
// WriteTo. Methods with an "unsafe" fast path (bulk memmove or direct cast to
// a byte slice) only use it when the struct's layout is packed and, at
// runtime, all potentially non-packed fields report Packed(); otherwise they
// fall back to the field-by-field *Bytes variants.
func (g *interfaceGenerator) emitMarshallableForStruct(st *ast.StructType) {
	// Whether this struct itself is packed. Individual fields may still be
	// dynamically non-packed; the generated code re-checks those via the
	// expression from areFieldsPackedExpression.
	thisPacked := g.isStructPacked(st)

	// SizeBytes: statically-sized scalars are constant-folded into a single
	// integer; each dynamically-sized field contributes a
	// "+ (*T)(nil).SizeBytes()" term appended to the return expression.
	g.emit("// SizeBytes implements marshal.Marshallable.SizeBytes.\n")
	g.emit("func (%s *%s) SizeBytes() int {\n", g.r, g.typeName())
	g.inIndent(func() {
		primitiveSize := 0
		var dynamicSizeTerms []string

		forEachStructField(st, fieldDispatcher{
			primitive: func(_, t *ast.Ident) {
				if size, dynamic := g.scalarSize(t); !dynamic {
					primitiveSize += size
				} else {
					g.recordUsedMarshallable(t.Name)
					dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", t.Name))
				}
			},
			selector: func(_, tX, tSel *ast.Ident) {
				tName := fmt.Sprintf("%s.%s", tX.Name, tSel.Name)
				g.recordUsedImport(tX.Name)
				g.recordUsedMarshallable(tName)
				dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()", tName))
			},
			array: func(_ *ast.Ident, a *ast.ArrayType, t *ast.Ident) {
				lenExpr := g.arrayLenExpr(a)
				if size, dynamic := g.scalarSize(t); !dynamic {
					dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("%d*%s", size, lenExpr))
				} else {
					g.recordUsedMarshallable(t.Name)
					dynamicSizeTerms = append(dynamicSizeTerms, fmt.Sprintf("(*%s)(nil).SizeBytes()*%s", t.Name, lenExpr))
				}
			},
		}.dispatch)
		// Note: no trailing newline here; the dynamic terms (if any) are
		// appended to the same return expression, one per line.
		g.emit("return %d", primitiveSize)
		if len(dynamicSizeTerms) > 0 {
			g.incIndent()
		}
		{
			for _, d := range dynamicSizeTerms {
				g.emitNoIndent(" +\n")
				g.emit(d)
			}
		}
		if len(dynamicSizeTerms) > 0 {
			g.decIndent()
		}
	})
	g.emit("\n}\n\n")

	// MarshalBytes: serialize field by field, shifting dst as we go. "_"
	// (padding) fields emit only a buffer shift, no data write.
	g.emit("// MarshalBytes implements marshal.Marshallable.MarshalBytes.\n")
	g.emit("func (%s *%s) MarshalBytes(dst []byte) []byte {\n", g.r, g.typeName())
	g.inIndent(func() {
		forEachStructField(st, fieldDispatcher{
			primitive: func(n, t *ast.Ident) {
				if n.Name == "_" {
					g.emit("// Padding: dst[:sizeof(%s)] ~= %s(0)\n", t.Name, t.Name)
					if len, dynamic := g.scalarSize(t); !dynamic {
						g.shift("dst", len)
					} else {
						// We can't use shiftDynamic here because we don't have
						// an instance of the dynamic type we can reference here
						// (since the version in this struct is anonymous). Use
						// a typed nil pointer to call SizeBytes() instead.
						g.emit("dst = dst[(*%s)(nil).SizeBytes():]\n", t.Name)
					}
					return
				}
				g.marshalScalar(g.fieldAccessor(n), t.Name, "dst")
			},
			selector: func(n, tX, tSel *ast.Ident) {
				if n.Name == "_" {
					g.emit("// Padding: dst[:sizeof(%s)] ~= %s(0)\n", tX.Name, tSel.Name)
					g.emit("dst = dst[(*%s.%s)(nil).SizeBytes():]\n", tX.Name, tSel.Name)
					return
				}
				g.marshalScalar(g.fieldAccessor(n), fmt.Sprintf("%s.%s", tX.Name, tSel.Name), "dst")
			},
			array: func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident) {
				lenExpr := g.arrayLenExpr(a)
				if n.Name == "_" {
					g.emit("// Padding: dst[:sizeof(%s)*%s] ~= [%s]%s{0}\n", t.Name, lenExpr, lenExpr, t.Name)
					if size, dynamic := g.scalarSize(t); !dynamic {
						g.emit("dst = dst[%d*(%s):]\n", size, lenExpr)
					} else {
						// We can't use shiftDynamic here because we don't have
						// an instance of the dynamic type we can reference here
						// (since the version in this struct is anonymous). Use
						// a typed nil pointer to call SizeBytes() instead.
						g.emit("dst = dst[(*%s)(nil).SizeBytes()*(%s):]\n", t.Name, lenExpr)
					}
					return
				}

				g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr)
				g.inIndent(func() {
					g.marshalScalar(fmt.Sprintf("%s[idx]", g.fieldAccessor(n)), t.Name, "dst")
				})
				g.emit("}\n")
			},
		}.dispatch)
		// All cases above shift the buffer appropriately.
		g.emit("return dst\n")
	})
	g.emit("}\n\n")

	// UnmarshalBytes: the mirror of MarshalBytes, deserializing field by
	// field from src.
	g.emit("// UnmarshalBytes implements marshal.Marshallable.UnmarshalBytes.\n")
	g.emit("func (%s *%s) UnmarshalBytes(src []byte) []byte {\n", g.r, g.typeName())
	g.inIndent(func() {
		forEachStructField(st, fieldDispatcher{
			primitive: func(n, t *ast.Ident) {
				if n.Name == "_" {
					g.emit("// Padding: var _ %s ~= src[:sizeof(%s)]\n", t.Name, t.Name)
					if len, dynamic := g.scalarSize(t); !dynamic {
						g.shift("src", len)
					} else {
						// We don't have an instance of the dynamic type we can
						// reference here (since the version in this struct is
						// anonymous). Use a typed nil pointer to call
						// SizeBytes() instead.
						g.shiftDynamic("src", fmt.Sprintf("(*%s)(nil)", t.Name))
						g.recordPotentiallyNonPackedField(fmt.Sprintf("(*%s)(nil)", t.Name))
					}
					return
				}
				g.unmarshalScalar(g.fieldAccessor(n), t.Name, "src")
			},
			selector: func(n, tX, tSel *ast.Ident) {
				if n.Name == "_" {
					g.emit("// Padding: %s ~= src[:sizeof(%s.%s)]\n", g.fieldAccessor(n), tX.Name, tSel.Name)
					g.emit("src = src[(*%s.%s)(nil).SizeBytes():]\n", tX.Name, tSel.Name)
					g.recordPotentiallyNonPackedField(fmt.Sprintf("(*%s.%s)(nil)", tX.Name, tSel.Name))
					return
				}
				g.unmarshalScalar(g.fieldAccessor(n), fmt.Sprintf("%s.%s", tX.Name, tSel.Name), "src")
			},
			array: func(n *ast.Ident, a *ast.ArrayType, t *ast.Ident) {
				lenExpr := g.arrayLenExpr(a)
				if n.Name == "_" {
					g.emit("// Padding: ~ copy([%s]%s(%s), src[:sizeof(%s)*%s])\n", lenExpr, t.Name, g.fieldAccessor(n), t.Name, lenExpr)
					if size, dynamic := g.scalarSize(t); !dynamic {
						g.emit("src = src[%d*(%s):]\n", size, lenExpr)
					} else {
						// We can't use shiftDynamic here because we don't have
						// an instance of the dynamic type we can reference here
						// (since the version in this struct is anonymous). Use
						// a typed nil pointer to call SizeBytes() instead.
						g.emit("src = src[(*%s)(nil).SizeBytes()*(%s):]\n", t.Name, lenExpr)
					}
					return
				}

				g.emit("for idx := 0; idx < %s; idx++ {\n", lenExpr)
				g.inIndent(func() {
					g.unmarshalScalar(fmt.Sprintf("%s[idx]", g.fieldAccessor(n)), t.Name, "src")
				})
				g.emit("}\n")
			},
		}.dispatch)
		// All cases above shift the buffer appropriately.
		g.emit("return src\n")
	})
	g.emit("}\n\n")

	// Packed: false if the struct itself is unpacked; otherwise the
	// conjunction of Packed() over all potentially non-packed fields, or a
	// constant true if there are none.
	g.emit("// Packed implements marshal.Marshallable.Packed.\n")
	g.emit("//go:nosplit\n")
	g.emit("func (%s *%s) Packed() bool {\n", g.r, g.typeName())
	g.inIndent(func() {
		expr, fieldsMaybePacked := g.areFieldsPackedExpression()
		switch {
		case !thisPacked:
			g.emit("return false\n")
		case fieldsMaybePacked:
			g.emit("return %s\n", expr)
		default:
			g.emit("return true\n")

		}
	})
	g.emit("}\n\n")

	// MarshalUnsafe: memmove the struct's memory directly into dst when
	// packed; otherwise delegate to MarshalBytes.
	g.emit("// MarshalUnsafe implements marshal.Marshallable.MarshalUnsafe.\n")
	g.emit("func (%s *%s) MarshalUnsafe(dst []byte) []byte {\n", g.r, g.typeName())
	g.inIndent(func() {
		fallback := func() {
			g.emit("// Type %s doesn't have a packed layout in memory, fallback to MarshalBytes.\n", g.typeName())
			g.emit("return %s.MarshalBytes(dst)\n", g.r)
		}
		if thisPacked {
			g.recordUsedImport("gohacks")
			g.recordUsedImport("unsafe")
			fastMarshal := func() {
				g.emit("size := %s.SizeBytes()\n", g.r)
				g.emit("gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(%s), uintptr(size))\n", g.r)
				g.emit("return dst[size:]\n")
			}
			if cond, ok := g.areFieldsPackedExpression(); ok {
				// Fast path guarded by a runtime Packed() check; the fast
				// path's "return" makes the fallback below the else branch.
				g.emit("if %s {\n", cond)
				g.inIndent(fastMarshal)
				g.emit("}\n")
				fallback()
			} else {
				fastMarshal()
			}
		} else {
			fallback()
		}
	})
	g.emit("}\n\n")

	// UnmarshalUnsafe: the mirror of MarshalUnsafe, memmoving src into the
	// struct's memory when packed.
	g.emit("// UnmarshalUnsafe implements marshal.Marshallable.UnmarshalUnsafe.\n")
	g.emit("func (%s *%s) UnmarshalUnsafe(src []byte) []byte {\n", g.r, g.typeName())
	g.inIndent(func() {
		fallback := func() {
			g.emit("// Type %s doesn't have a packed layout in memory, fallback to UnmarshalBytes.\n", g.typeName())
			g.emit("return %s.UnmarshalBytes(src)\n", g.r)
		}
		if thisPacked {
			g.recordUsedImport("gohacks")
			g.recordUsedImport("unsafe")
			fastUnmarshal := func() {
				g.emit("size := %s.SizeBytes()\n", g.r)
				g.emit("gohacks.Memmove(unsafe.Pointer(%s), unsafe.Pointer(&src[0]), uintptr(size))\n", g.r)
				g.emit("return src[size:]\n")
			}
			if cond, ok := g.areFieldsPackedExpression(); ok {
				g.emit("if %s {\n", cond)
				g.inIndent(fastUnmarshal)
				g.emit("}\n")
				fallback()
			} else {
				fastUnmarshal()
			}
		} else {
			fallback()
		}
	})

	g.emit("}\n\n")
	// CopyOutN: copy up to limit bytes of the serialized struct to task
	// memory, casting the struct to a byte slice when packed to avoid an
	// intermediate buffer.
	g.emit("// CopyOutN implements marshal.Marshallable.CopyOutN.\n")
	g.recordUsedImport("marshal")
	g.recordUsedImport("hostarch")
	g.emit("func (%s *%s) CopyOutN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {\n", g.r, g.typeName())
	g.inIndent(func() {
		fallback := func() {
			g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
			g.emit("buf := cc.CopyScratchBuffer(%s.SizeBytes()) // escapes: okay.\n", g.r)
			g.emit("%s.MarshalBytes(buf) // escapes: fallback.\n", g.r)
			g.emit("return cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n")
		}
		if thisPacked {
			g.recordUsedImport("reflect")
			g.recordUsedImport("runtime")
			g.recordUsedImport("unsafe")
			if cond, ok := g.areFieldsPackedExpression(); ok {
				// The fallback ends in a return, so the fast path below only
				// runs when the condition holds.
				g.emit("if !%s {\n", cond)
				g.inIndent(fallback)
				g.emit("}\n\n")
			}
			// Fast serialization.
			g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))

			g.emit("length, err := cc.CopyOutBytes(addr, buf[:limit]) // escapes: okay.\n")
			g.emitKeepAlive(g.r)
			g.emit("return length, err\n")
		} else {
			fallback()
		}
	})
	g.emit("}\n\n")

	// CopyOut: CopyOutN with the limit set to the full struct size.
	g.emit("// CopyOut implements marshal.Marshallable.CopyOut.\n")
	g.recordUsedImport("marshal")
	g.recordUsedImport("hostarch")
	g.emit("func (%s *%s) CopyOut(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {\n", g.r, g.typeName())
	g.inIndent(func() {
		g.emit("return %s.CopyOutN(cc, addr, %s.SizeBytes())\n", g.r, g.r)
	})
	g.emit("}\n\n")

	// CopyInN: copy up to limit bytes from task memory into the struct,
	// again using a direct byte-slice cast when packed.
	g.emit("// CopyInN implements marshal.Marshallable.CopyInN.\n")
	g.recordUsedImport("marshal")
	g.recordUsedImport("hostarch")
	g.emit("func (%s *%s) CopyInN(cc marshal.CopyContext, addr hostarch.Addr, limit int) (int, error) {\n", g.r, g.typeName())
	g.inIndent(func() {
		fallback := func() {
			g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
			g.emit("buf := cc.CopyScratchBuffer(%s.SizeBytes()) // escapes: okay.\n", g.r)
			g.emit("length, err := cc.CopyInBytes(addr, buf[:limit]) // escapes: okay.\n")
			g.emit("// Unmarshal unconditionally. If we had a short copy-in, this results in a\n")
			g.emit("// partially unmarshalled struct.\n")
			g.emit("%s.UnmarshalBytes(buf) // escapes: fallback.\n", g.r)
			g.emit("return length, err\n")
		}
		if thisPacked {
			g.recordUsedImport("reflect")
			g.recordUsedImport("runtime")
			g.recordUsedImport("unsafe")
			if cond, ok := g.areFieldsPackedExpression(); ok {
				g.emit("if !%s {\n", cond)
				g.inIndent(fallback)
				g.emit("}\n\n")
			}
			// Fast deserialization.
			g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))

			g.emit("length, err := cc.CopyInBytes(addr, buf[:limit]) // escapes: okay.\n")
			g.emitKeepAlive(g.r)
			g.emit("return length, err\n")
		} else {
			fallback()
		}
	})
	g.emit("}\n\n")

	// CopyIn: CopyInN with the limit set to the full struct size.
	g.emit("// CopyIn implements marshal.Marshallable.CopyIn.\n")
	g.recordUsedImport("marshal")
	g.recordUsedImport("hostarch")
	g.emit("func (%s *%s) CopyIn(cc marshal.CopyContext, addr hostarch.Addr) (int, error) {\n", g.r, g.typeName())
	g.inIndent(func() {
		g.emit("return %s.CopyInN(cc, addr, %s.SizeBytes())\n", g.r, g.r)
	})
	g.emit("}\n\n")

	// WriteTo: serialize the struct to an io.Writer, using the byte-slice
	// cast fast path when packed.
	g.emit("// WriteTo implements io.WriterTo.WriteTo.\n")
	g.recordUsedImport("io")
	g.emit("func (%s *%s) WriteTo(writer io.Writer) (int64, error) {\n", g.r, g.typeName())
	g.inIndent(func() {
		fallback := func() {
			g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
			g.emit("buf := make([]byte, %s.SizeBytes())\n", g.r)
			g.emit("%s.MarshalBytes(buf)\n", g.r)
			g.emit("length, err := writer.Write(buf)\n")
			g.emit("return int64(length), err\n")
		}
		if thisPacked {
			g.recordUsedImport("reflect")
			g.recordUsedImport("runtime")
			g.recordUsedImport("unsafe")
			if cond, ok := g.areFieldsPackedExpression(); ok {
				g.emit("if !%s {\n", cond)
				g.inIndent(fallback)
				g.emit("}\n\n")
			}
			// Fast serialization.
			g.emitCastToByteSlice(g.r, "buf", fmt.Sprintf("%s.SizeBytes()", g.r))

			g.emit("length, err := writer.Write(buf)\n")
			g.emitKeepAlive(g.r)
			g.emit("return int64(length), err\n")
		} else {
			fallback()
		}
	})
	g.emit("}\n\n")
}
   444  
   445  func (g *interfaceGenerator) emitCheckedMarshallableForStruct() {
   446  	g.emit("// CheckedMarshal implements marshal.CheckedMarshallable.CheckedMarshal.\n")
   447  	g.emit("func (%s *%s) CheckedMarshal(dst []byte) ([]byte, bool) {\n", g.r, g.typeName())
   448  	g.inIndent(func() {
   449  		g.emit("if %s.SizeBytes() > len(dst) {\n", g.r)
   450  		g.inIndent(func() {
   451  			g.emit("return dst, false\n")
   452  		})
   453  		g.emit("}\n")
   454  		g.emit("return %s.MarshalUnsafe(dst), true\n", g.r)
   455  	})
   456  	g.emit("}\n\n")
   457  
   458  	g.emit("// CheckedUnmarshal implements marshal.CheckedMarshallable.CheckedUnmarshal.\n")
   459  	g.emit("func (%s *%s) CheckedUnmarshal(src []byte) ([]byte, bool) {\n", g.r, g.typeName())
   460  	g.inIndent(func() {
   461  		g.emit("if %s.SizeBytes() > len(src) {\n", g.r)
   462  		g.inIndent(func() {
   463  			g.emit("return src, false\n")
   464  		})
   465  		g.emit("}\n")
   466  		g.emit("return %s.UnmarshalUnsafe(src), true\n", g.r)
   467  	})
   468  	g.emit("}\n\n")
   469  }
   470  
   471  func (g *interfaceGenerator) emitMarshallableSliceForStruct(st *ast.StructType, slice *sliceAPI) {
   472  	thisPacked := g.isStructPacked(st)
   473  
   474  	if slice.inner {
   475  		abortAt(g.f.Position(slice.comment.Slash), fmt.Sprintf("The ':inner' argument to '+marshal slice:%s:inner' is only applicable to newtypes on primitives. Remove it from this struct declaration.", slice.ident))
   476  	}
   477  
   478  	g.recordUsedImport("marshal")
   479  	g.recordUsedImport("hostarch")
   480  
   481  	g.emit("// Copy%sIn copies in a slice of %s objects from the task's memory.\n", slice.ident, g.typeName())
   482  	g.emit("func Copy%sIn(cc marshal.CopyContext, addr hostarch.Addr, dst []%s) (int, error) {\n", slice.ident, g.typeName())
   483  	g.inIndent(func() {
   484  		g.emit("count := len(dst)\n")
   485  		g.emit("if count == 0 {\n")
   486  		g.inIndent(func() {
   487  			g.emit("return 0, nil\n")
   488  		})
   489  		g.emit("}\n")
   490  		g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
   491  
   492  		fallback := func() {
   493  			g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
   494  			g.emit("buf := cc.CopyScratchBuffer(size * count)\n")
   495  			g.emit("length, err := cc.CopyInBytes(addr, buf)\n\n")
   496  
   497  			g.emit("// Unmarshal as much as possible, even on error. First handle full objects.\n")
   498  			g.emit("limit := length/size\n")
   499  			g.emit("for idx := 0; idx < limit; idx++ {\n")
   500  			g.inIndent(func() {
   501  				g.emit("buf = dst[idx].UnmarshalBytes(buf)\n")
   502  			})
   503  			g.emit("}\n\n")
   504  
   505  			g.emit("// Handle any final partial object. buf is guaranteed to be long enough for the\n")
   506  			g.emit("// final element, but may not contain valid data for the entire range. This may\n")
   507  			g.emit("// result in unmarshalling zero values for some parts of the object.\n")
   508  			g.emit("if length%size != 0 {\n")
   509  			g.inIndent(func() {
   510  				g.emit("dst[limit].UnmarshalBytes(buf)\n")
   511  			})
   512  			g.emit("}\n\n")
   513  
   514  			g.emit("return length, err\n")
   515  		}
   516  		if thisPacked {
   517  			g.recordUsedImport("reflect")
   518  			g.recordUsedImport("runtime")
   519  			g.recordUsedImport("unsafe")
   520  			if _, ok := g.areFieldsPackedExpression(); ok {
   521  				g.emit("if !dst[0].Packed() {\n")
   522  				g.inIndent(fallback)
   523  				g.emit("}\n\n")
   524  			}
   525  			// Fast deserialization.
   526  			g.emitCastSliceToByteSlice("&dst", "buf", "size * count")
   527  
   528  			g.emit("length, err := cc.CopyInBytes(addr, buf)\n")
   529  			g.emitKeepAlive("dst")
   530  			g.emit("return length, err\n")
   531  		} else {
   532  			fallback()
   533  		}
   534  	})
   535  	g.emit("}\n\n")
   536  
   537  	g.emit("// Copy%sOut copies a slice of %s objects to the task's memory.\n", slice.ident, g.typeName())
   538  	g.emit("func Copy%sOut(cc marshal.CopyContext, addr hostarch.Addr, src []%s) (int, error) {\n", slice.ident, g.typeName())
   539  	g.inIndent(func() {
   540  		g.emit("count := len(src)\n")
   541  		g.emit("if count == 0 {\n")
   542  		g.inIndent(func() {
   543  			g.emit("return 0, nil\n")
   544  		})
   545  		g.emit("}\n")
   546  		g.emit("size := (*%s)(nil).SizeBytes()\n\n", g.typeName())
   547  
   548  		fallback := func() {
   549  			g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
   550  			g.emit("buf := cc.CopyScratchBuffer(size * count)\n")
   551  			g.emit("curBuf := buf\n")
   552  			g.emit("for idx := 0; idx < count; idx++ {\n")
   553  			g.inIndent(func() {
   554  				g.emit("curBuf = src[idx].MarshalBytes(curBuf)\n")
   555  			})
   556  			g.emit("}\n")
   557  			g.emit("return cc.CopyOutBytes(addr, buf)\n")
   558  		}
   559  		if thisPacked {
   560  			g.recordUsedImport("reflect")
   561  			g.recordUsedImport("runtime")
   562  			g.recordUsedImport("unsafe")
   563  			if _, ok := g.areFieldsPackedExpression(); ok {
   564  				g.emit("if !src[0].Packed() {\n")
   565  				g.inIndent(fallback)
   566  				g.emit("}\n\n")
   567  			}
   568  			// Fast serialization.
   569  			g.emitCastSliceToByteSlice("&src", "buf", "size * count")
   570  
   571  			g.emit("length, err := cc.CopyOutBytes(addr, buf)\n")
   572  			g.emitKeepAlive("src")
   573  			g.emit("return length, err\n")
   574  		} else {
   575  			fallback()
   576  		}
   577  	})
   578  	g.emit("}\n\n")
   579  
   580  	g.emit("// MarshalUnsafe%s is like %s.MarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName())
   581  	g.emit("func MarshalUnsafe%s(src []%s, dst []byte) []byte {\n", slice.ident, g.typeName())
   582  	g.inIndent(func() {
   583  		g.emit("count := len(src)\n")
   584  		g.emit("if count == 0 {\n")
   585  		g.inIndent(func() {
   586  			g.emit("return dst\n")
   587  		})
   588  		g.emit("}\n\n")
   589  
   590  		fallback := func() {
   591  			g.emit("// Type %s doesn't have a packed layout in memory, fall back to MarshalBytes.\n", g.typeName())
   592  			g.emit("for idx := 0; idx < count; idx++ {\n")
   593  			g.inIndent(func() {
   594  				g.emit("dst = src[idx].MarshalBytes(dst)\n")
   595  			})
   596  			g.emit("}\n")
   597  			g.emit("return dst\n")
   598  		}
   599  		if thisPacked {
   600  			g.recordUsedImport("reflect")
   601  			g.recordUsedImport("runtime")
   602  			g.recordUsedImport("unsafe")
   603  			g.recordUsedImport("gohacks")
   604  			if _, ok := g.areFieldsPackedExpression(); ok {
   605  				g.emit("if !src[0].Packed() {\n")
   606  				g.inIndent(fallback)
   607  				g.emit("}\n\n")
   608  			}
   609  			g.emit("size := (*%s)(nil).SizeBytes()\n", g.typeName())
   610  			g.emit("buf := dst[:size*count]\n")
   611  			g.emit("gohacks.Memmove(unsafe.Pointer(&buf[0]), unsafe.Pointer(&src[0]), uintptr(len(buf)))\n")
   612  			g.emit("return dst[size*count:]\n")
   613  		} else {
   614  			fallback()
   615  		}
   616  	})
   617  	g.emit("}\n\n")
   618  
   619  	g.emit("// UnmarshalUnsafe%s is like %s.UnmarshalUnsafe, but for a []%s.\n", slice.ident, g.typeName(), g.typeName())
   620  	g.emit("func UnmarshalUnsafe%s(dst []%s, src []byte) []byte {\n", slice.ident, g.typeName())
   621  	g.inIndent(func() {
   622  		g.emit("count := len(dst)\n")
   623  		g.emit("if count == 0 {\n")
   624  		g.inIndent(func() {
   625  			g.emit("return src\n")
   626  		})
   627  		g.emit("}\n\n")
   628  
   629  		fallback := func() {
   630  			g.emit("// Type %s doesn't have a packed layout in memory, fall back to UnmarshalBytes.\n", g.typeName())
   631  			g.emit("for idx := 0; idx < count; idx++ {\n")
   632  			g.inIndent(func() {
   633  				g.emit("src = dst[idx].UnmarshalBytes(src)\n")
   634  			})
   635  			g.emit("}\n")
   636  			g.emit("return src\n")
   637  		}
   638  		if thisPacked {
   639  			g.recordUsedImport("gohacks")
   640  			g.recordUsedImport("reflect")
   641  			g.recordUsedImport("runtime")
   642  			if _, ok := g.areFieldsPackedExpression(); ok {
   643  				g.emit("if !dst[0].Packed() {\n")
   644  				g.inIndent(fallback)
   645  				g.emit("}\n\n")
   646  			}
   647  
   648  			g.emit("size := (*%s)(nil).SizeBytes()\n", g.typeName())
   649  			g.emit("buf := src[:size*count]\n")
   650  			g.emit("gohacks.Memmove(unsafe.Pointer(&dst[0]), unsafe.Pointer(&buf[0]), uintptr(len(buf)))\n")
   651  			g.emit("return src[size*count:]\n")
   652  		} else {
   653  			fallback()
   654  		}
   655  	})
   656  	g.emit("}\n\n")
   657  }