github.com/cloudwego/dynamicgo@v0.2.6-0.20240519101509-707f41b6b834/testdata/baseline_tg_test.go

/**
 * Copyright 2023 CloudWeGo Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package testdata

import (
	"bytes"
	"context"
	"testing"

	athrift "github.com/apache/thrift/lib/go/thrift"
	"github.com/cloudwego/dynamicgo/testdata/kitex_gen/baseline"
	"github.com/cloudwego/dynamicgo/thrift"
	"github.com/cloudwego/dynamicgo/thrift/generic"
	kg "github.com/cloudwego/kitex/pkg/generic"
	kd "github.com/cloudwego/kitex/pkg/generic/descriptor"
	gthrift "github.com/cloudwego/kitex/pkg/generic/thrift"
	"github.com/cloudwego/kitex/pkg/remote"
	bthrift "github.com/cloudwego/kitex/pkg/remote/codec/thrift"
	"github.com/stretchr/testify/require"
)

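// init prints the encoded sizes of the small/medium (and partial) benchmark payloads,
// so the throughput figures below can be related to payload size.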
func init() {
	sobj := getSimpleValue()
	println("small thrift data size: ", sobj.BLength())

	psobj := getPartialSimpleValue()
	println("partial small thrift data size: ", psobj.BLength())

	nobj := getNestingValue()
	println("medium thrift data size: ", nobj.BLength())

	pnobj := getPartialNestingValue()
	println("partial medium thrift data size: ", pnobj.BLength())
}

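// getKitexGenericDesc parses the benchmark IDL file and returns the Kitex generic-call service descriptor.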
func getKitexGenericDesc() *kd.ServiceDescriptor {
	p, err := kg.NewThriftFileProvider(idlPath)
	if err != nil {
		panic(err.Error())
	}
	return <-p.Provide()
}

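// BenchmarkThriftUnmarshalAll_KitexGeneric measures decoding the full small/medium response payloads
// with the Kitex generic struct reader.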
func BenchmarkThriftUnmarshalAll_KitexGeneric(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericResponsePayload(data)
		svcDsc := getKitexGenericDesc()
		codec := gthrift.NewReadStruct(svcDsc, true)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		bc := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := codec.Read(ctx, "SimpleMethod", bc)
		require.NoError(b, err)
		_ = v
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
			bc := athrift.NewTBinaryProtocol(mm, false, false)
			_, _ = codec.Read(ctx, "SimpleMethod", bc)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericResponsePayload(data)
		svcDsc := getKitexGenericDesc()
		codec := gthrift.NewReadStruct(svcDsc, true)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		bc := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := codec.Read(ctx, "NestingMethod", bc)
		require.NoError(b, err)
		_ = v
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
			bc := athrift.NewTBinaryProtocol(mm, false, false)
			_, _ = codec.Read(ctx, "NestingMethod", bc)
		}
	})
}

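// BenchmarkThriftMarshalAll_KitexGeneric decodes the request payload once into a generic value,
// then measures re-encoding it with the Kitex generic struct writer.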
func BenchmarkThriftMarshalAll_KitexGeneric(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericRequestPayload(data)
		svcDsc := getKitexGenericDesc()
		reader := gthrift.NewReadStruct(svcDsc, false)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		ac := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := reader.Read(ctx, "SimpleMethod", ac)
		require.NoError(b, err)

		ctx = context.Background()
		codec, _ := gthrift.NewWriteStruct(svcDsc, "SimpleMethod", true)
		buf := remote.NewWriterBuffer(BufferSize)
		bc := bthrift.NewBinaryProtocol(buf)
		err = codec.Write(ctx, bc, v, gthrift.NewBase())
		require.NoError(b, err)

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			buf.Flush()
			bc := bthrift.NewBinaryProtocol(buf)
			_ = codec.Write(ctx, bc, v, gthrift.NewBase())
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericRequestPayload(data)
		svcDsc := getKitexGenericDesc()
		reader := gthrift.NewReadStruct(svcDsc, false)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		ac := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := reader.Read(ctx, "NestingMethod", ac)
		require.NoError(b, err)

		ctx = context.Background()
		codec, _ := gthrift.NewWriteStruct(svcDsc, "NestingMethod", true)
		buf := remote.NewWriterBuffer(BufferSize * 10)
		bc := bthrift.NewBinaryProtocol(buf)
		err = codec.Write(ctx, bc, v, gthrift.NewBase())
		require.NoError(b, err)

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			buf.Flush()
			bc := bthrift.NewBinaryProtocol(buf)
			_ = codec.Write(ctx, bc, v, gthrift.NewBase())
		}
	})
}

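// BenchmarkThriftUnmarshalPartial_KitexGeneric is the partial-schema variant of the generic unmarshal benchmark.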
func BenchmarkThriftUnmarshalPartial_KitexGeneric(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getPartialSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericResponsePayload(data)
		svcDsc := getKitexGenericDesc()
		codec := gthrift.NewReadStruct(svcDsc, true)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		bc := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := codec.Read(ctx, "PartialSimpleMethod", bc)
		require.NoError(b, err)
		_ = v
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
			bc := athrift.NewTBinaryProtocol(mm, false, false)
			_, _ = codec.Read(ctx, "PartialSimpleMethod", bc)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getPartialNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericResponsePayload(data)
		svcDsc := getKitexGenericDesc()
		codec := gthrift.NewReadStruct(svcDsc, true)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		bc := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := codec.Read(ctx, "PartialNestingMethod", bc)
		require.NoError(b, err)
		_ = v
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
			bc := athrift.NewTBinaryProtocol(mm, false, false)
			_, _ = codec.Read(ctx, "PartialNestingMethod", bc)
		}
	})
}

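// BenchmarkThriftMarshalPartial_KitexGeneric is the partial-schema variant of the generic marshal benchmark.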
func BenchmarkThriftMarshalPartial_KitexGeneric(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getPartialSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericRequestPayload(data)
		svcDsc := getKitexGenericDesc()
		reader := gthrift.NewReadStruct(svcDsc, false)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		ac := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := reader.Read(ctx, "PartialSimpleMethod", ac)
		require.NoError(b, err)

		ctx = context.Background()
		codec, _ := gthrift.NewWriteStruct(svcDsc, "PartialSimpleMethod", true)
		buf := remote.NewWriterBuffer(BufferSize)
		bc := bthrift.NewBinaryProtocol(buf)
		err = codec.Write(ctx, bc, v, gthrift.NewBase())
		require.NoError(b, err)

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			buf.Flush()
			bc := bthrift.NewBinaryProtocol(buf)
			_ = codec.Write(ctx, bc, v, gthrift.NewBase())
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getPartialNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		data = wrapKitexGenericRequestPayload(data)
		svcDsc := getKitexGenericDesc()
		reader := gthrift.NewReadStruct(svcDsc, false)
		mm := athrift.NewStreamTransportR(bytes.NewBuffer(data))
		ac := athrift.NewTBinaryProtocol(mm, false, false)
		ctx := context.Background()
		v, err := reader.Read(ctx, "PartialNestingMethod", ac)
		require.NoError(b, err)

		ctx = context.Background()
		codec, _ := gthrift.NewWriteStruct(svcDsc, "PartialNestingMethod", true)
		buf := remote.NewWriterBuffer(BufferSize * 10)
		bc := bthrift.NewBinaryProtocol(buf)
		err = codec.Write(ctx, bc, v, gthrift.NewBase())
		require.NoError(b, err)

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			buf.Flush()
			bc := bthrift.NewBinaryProtocol(buf)
			_ = codec.Write(ctx, bc, v, gthrift.NewBase())
		}
	})
}

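// BenchmarkThriftMarshalAll_KitexFast measures encoding the full structs with the kitex-generated FastWriteNocopy code.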
func BenchmarkThriftMarshalAll_KitexFast(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			data = data[:obj.BLength()]
			_ = obj.FastWriteNocopy(data, nil)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			data = data[:obj.BLength()]
			_ = obj.FastWriteNocopy(data, nil)
		}
	})
}

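// BenchmarkThriftUnmarshalAll_KitexFast measures decoding the full structs with the kitex-generated FastRead code.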
func BenchmarkThriftUnmarshalAll_KitexFast(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = obj.FastRead(data)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = obj.FastRead(data)
		}
	})
}

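// BenchmarkThriftMarshalPartial_KitexFast measures encoding the partial structs with the generated FastWriteNocopy code.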
func BenchmarkThriftMarshalPartial_KitexFast(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getPartialSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			data = data[:obj.BLength()]
			_ = obj.FastWriteNocopy(data, nil)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getPartialNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			data = data[:obj.BLength()]
			_ = obj.FastWriteNocopy(data, nil)
		}
	})
}

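// BenchmarkThriftUnmarshalPartial_KitexFast measures decoding the partial structs with the generated FastRead code.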
func BenchmarkThriftUnmarshalPartial_KitexFast(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getPartialSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = obj.FastRead(data)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getPartialNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = obj.FastRead(data)
		}
	})
}

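// BenchmarkThriftMarshalTo_KitexFast converts a full payload into a partial struct by decoding it
// with FastRead and re-encoding the partial struct with FastWriteNocopy.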
func BenchmarkThriftMarshalTo_KitexFast(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		pobj := getPartialSimpleValue()
		_, err := pobj.FastRead(data)
		require.Nil(b, err)
		pdata := make([]byte, pobj.BLength())
		ret = pobj.FastWriteNocopy(pdata, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = pobj.FastRead(data)
			_ = pobj.FastWriteNocopy(pdata, nil)
		}
	})
	b.Run("medium", func(b *testing.B) {
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		pobj := getPartialNestingValue()
		_, err := pobj.FastRead(data)
		require.Nil(b, err)
		pdata := make([]byte, pobj.BLength())
		ret = pobj.FastWriteNocopy(pdata, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = pobj.FastRead(data)
			_ = pobj.FastWriteNocopy(pdata, nil)
		}
	})
}

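// BenchmarkThriftSkip measures skipping over an entire encoded struct with the native (SkipNative)
// and pure-Go (SkipGo) skip implementations.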
func BenchmarkThriftSkip(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		p := thrift.NewBinaryProtocol(data)
		err := p.SkipNative(desc.Type(), 512)
		require.Nil(b, err)
		require.Equal(b, len(data), p.Read)

		b.Run("native", func(b *testing.B) {
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				p.Read = 0
				_ = p.SkipNative(desc.Type(), 512)
			}
		})

		b.Run("go", func(b *testing.B) {
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				p.Read = 0
				_ = p.SkipGo(desc.Type(), 512)
			}
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		p := thrift.NewBinaryProtocol(data)
		err := p.SkipNative(desc.Type(), 512)
		require.Nil(b, err)
		require.Equal(b, len(data), p.Read)

		b.Run("native", func(b *testing.B) {
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				p.Read = 0
				_ = p.SkipNative(desc.Type(), 512)
			}
		})

		b.Run("go", func(b *testing.B) {
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				p.Read = 0
				_ = p.SkipGo(desc.Type(), 512)
			}
		})
	})
}

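// BenchmarkThriftGetOne measures fetching a single (possibly nested) field from the encoded payload via Value.GetByPath.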
func BenchmarkThriftGetOne(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		vv := v.GetByPath(generic.NewPathFieldId(6))
		require.Nil(b, vv.Check())

		b.Run("native", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetByPath(generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})

		b.Run("go", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetByPath(generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		vv := v.GetByPath(generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
		require.Nil(b, vv.Check())

		b.Run("native", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetByPath(generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})

		b.Run("go", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetByPath(generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})
	})
}

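// BenchmarkThriftGetMany measures fetching several top-level fields at once via Value.GetMany.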
func BenchmarkThriftGetMany(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{
			// StopScanOnError: true,
		}
		v := generic.NewValue(desc, data)
		ps := []generic.PathNode{
			{Path: generic.NewPathFieldId(1)},
			{Path: generic.NewPathFieldId(3)},
			{Path: generic.NewPathFieldId(6)},
		}
		err := v.GetMany(ps, &opts)
		require.Nil(b, err)

		b.Run("native", func(b *testing.B) {
			opts.UseNativeSkip = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetMany(ps, &opts)
			}
			opts.UseNativeSkip = false
		})

		b.Run("go", func(b *testing.B) {
			opts.UseNativeSkip = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetMany(ps, &opts)
			}
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{
			// StopScanOnError: true,
		}
		v := generic.NewValue(desc, data)
		ps := []generic.PathNode{
			{Path: generic.NewPathFieldId(2)},
			{Path: generic.NewPathFieldId(8)},
			{Path: generic.NewPathFieldId(15)},
		}
		err := v.GetMany(ps, &opts)
		require.Nil(b, err)

		b.Run("native", func(b *testing.B) {
			opts.UseNativeSkip = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetMany(ps, &opts)
			}
			opts.UseNativeSkip = false
		})

		b.Run("go", func(b *testing.B) {
			opts.UseNativeSkip = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.GetMany(ps, &opts)
			}
		})
	})
}

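// BenchmarkThriftMarshalMany measures re-encoding a PathNode tree whose children are the subset of fields
// previously fetched with GetMany.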
func BenchmarkThriftMarshalMany(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{
			// StopScanOnError: true,
		}
		v := generic.NewValue(desc, data)
		ps := []generic.PathNode{
			{Path: generic.NewPathFieldId(1)},
			{Path: generic.NewPathFieldId(3)},
			{Path: generic.NewPathFieldId(6)},
		}
		err := v.GetMany(ps, &opts)
		require.Nil(b, err)
		n := generic.PathNode{
			Path: generic.NewPathFieldId(1),
			Node: v.Node,
			Next: ps,
		}
		buf, err := n.Marshal(&opts)
		require.Nil(b, err)
		exp := baseline.NewPartialSimple()
		_, err = exp.FastRead(buf)
		require.Nil(b, err)
		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = n.Marshal(&opts)
		}
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{
			// StopScanOnError: true,
		}
		v := generic.NewValue(desc, data)
		ps := []generic.PathNode{
			{Path: generic.NewPathFieldId(2)},
			{Path: generic.NewPathFieldId(8)},
			{Path: generic.NewPathFieldId(15)},
		}
		err := v.GetMany(ps, &opts)
		require.Nil(b, err)
		n := generic.PathNode{
			Path: generic.NewPathFieldId(1),
			Node: v.Node,
			Next: ps,
		}
		buf, err := n.Marshal(&opts)
		require.Nil(b, err)
		exp := baseline.NewPartialNesting()
		_, err = exp.FastRead(buf)
		require.Nil(b, err)

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = n.Marshal(&opts)
		}
	})
}

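// BenchmarkThriftGetAll_New measures loading all children of the root value into a freshly allocated
// PathNode slice on every iteration.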
func BenchmarkThriftGetAll_New(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		out := []generic.PathNode{}
		require.Nil(b, v.Children(&out, false, &generic.Options{UseNativeSkip: true}))

		b.Run("native", func(b *testing.B) {
			opts := &generic.Options{UseNativeSkip: true}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				out := []generic.PathNode{}
				_ = v.Children(&out, true, opts)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts := &generic.Options{
				UseNativeSkip: false,
				// OnlyScanStruct: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				out := []generic.PathNode{}
				_ = v.Children(&out, true, opts)
			}
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		out := make([]generic.PathNode, 0, 16)
		require.Nil(b, v.Children(&out, false, &generic.Options{UseNativeSkip: true}))

		b.Run("native", func(b *testing.B) {
			opts := &generic.Options{UseNativeSkip: true}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				out := []generic.PathNode{}
				_ = v.Children(&out, true, opts)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts := &generic.Options{UseNativeSkip: false}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				out := []generic.PathNode{}
				_ = v.Children(&out, true, opts)
			}
		})
	})
}

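// BenchmarkThriftGetAll_ReuseMemory measures loading the full PathNode tree while recycling nodes
// through NewPathNode/FreePathNode instead of allocating a new slice per iteration.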
func BenchmarkThriftGetAll_ReuseMemory(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		r := generic.NewPathNode()
		r.Node = v.Node
		require.Nil(b, r.Load(true, &generic.Options{UseNativeSkip: true}))
		r.ResetAll()
		generic.FreePathNode(r)

		b.Run("native", func(b *testing.B) {
			opts := &generic.Options{UseNativeSkip: true}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts := &generic.Options{
				UseNativeSkip: false,
				// OnlyScanStruct: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})
		b.Run("not_scan_parent/native", func(b *testing.B) {
			opts := &generic.Options{
				// OnlyScanStruct: true,
				UseNativeSkip:     true,
				NotScanParentNode: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})
		b.Run("not_scan_parent/go", func(b *testing.B) {
			opts := &generic.Options{
				// OnlyScanStruct: true,
				UseNativeSkip:     false,
				NotScanParentNode: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		r := generic.NewPathNode()
		r.Node = v.Node
		require.Nil(b, r.Load(true, &generic.Options{UseNativeSkip: true}))
		r.ResetAll()
		generic.FreePathNode(r)

		b.Run("native", func(b *testing.B) {
			opts := &generic.Options{UseNativeSkip: true}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts := &generic.Options{UseNativeSkip: false}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})

		b.Run("not_scan_parent/native", func(b *testing.B) {
			opts := &generic.Options{
				// OnlyScanStruct: true,
				UseNativeSkip:     true,
				NotScanParentNode: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})
		b.Run("not_scan_parent/go", func(b *testing.B) {
			opts := &generic.Options{
				// OnlyScanStruct: true,
				UseNativeSkip:     false,
				NotScanParentNode: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				r := generic.NewPathNode()
				r.Node = v.Node
				_ = r.Load(true, opts)
				r.ResetAll()
				generic.FreePathNode(r)
			}
		})
	})
}

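// BenchmarkThriftMarshalAll loads the whole payload into a PathNode tree once, then measures marshaling
// it back to Thrift binary.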
func BenchmarkThriftMarshalAll(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		p := generic.PathNode{
			Node: v.Node,
		}
		opts := &generic.Options{}
		require.Nil(b, p.Load(true, opts))
		out, err := p.Marshal(opts)
		require.Nil(b, err)
		off, err := obj.FastRead(out)
		require.Nil(b, err)
		require.Equal(b, off, len(out))

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = p.Marshal(opts)
		}
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		p := generic.PathNode{
			Node: v.Node,
		}
		opts := &generic.Options{}
		require.Nil(b, p.Load(true, opts))
		out, err := p.Marshal(opts)
		require.Nil(b, err)
		off, err := obj.FastRead(out)
		require.Nil(b, err)
		require.Equal(b, off, len(out))

		b.SetBytes(int64(len(data)))
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			_, _ = p.Marshal(opts)
		}
	})
}

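// BenchmarkThriftSetOne measures overwriting a single (possibly nested) field in place via Value.SetByPath.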
func BenchmarkThriftSetOne(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		p := thrift.NewBinaryProtocolBuffer()
		p.WriteBinary(obj.BinaryField)
		n := generic.NewValue(desc.Struct().FieldById(6).Type(), p.Buf)
		_, err := v.SetByPath(n, generic.NewPathFieldId(6))
		require.Nil(b, err)
		nn := v.GetByPath(generic.NewPathFieldId(6))
		require.Equal(b, n.Raw(), nn.Raw())

		b.Run("native", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_, _ = v.SetByPath(n, generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})

		b.Run("go", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_, _ = v.SetByPath(n, generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		v := generic.NewValue(desc, data)
		p := thrift.NewBinaryProtocolBuffer()
		p.WriteBinary(obj.MapStringSimple["15"].BinaryField)
		n := generic.NewValue(desc.Struct().FieldById(15).Type().Elem().Struct().FieldById(6).Type(), p.Buf)
		_, err := v.SetByPath(n, generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
		require.Nil(b, err)
		nn := v.GetByPath(generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
		require.Equal(b, n.Raw(), nn.Raw())

		b.Run("native", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_, _ = v.SetByPath(n, generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})

		b.Run("go", func(b *testing.B) {
			old := generic.UseNativeSkipForGet
			generic.UseNativeSkipForGet = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_, _ = v.SetByPath(n, generic.NewPathFieldId(15), generic.NewPathStrKey("15"), generic.NewPathFieldId(6))
			}
			generic.UseNativeSkipForGet = old
		})
	})
}

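// BenchmarkThriftSetMany measures writing several fields back into the payload via Value.SetMany.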
func BenchmarkThriftSetMany(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{}
		v := generic.NewValue(desc, data)
		ps := []generic.PathNode{
			{Path: generic.NewPathFieldId(1)},
			{Path: generic.NewPathFieldId(3)},
			{Path: generic.NewPathFieldId(6)},
		}
		require.Nil(b, v.GetMany(ps, &opts))
		require.Nil(b, v.SetMany(ps, &opts))

		b.Run("native", func(b *testing.B) {
			opts.UseNativeSkip = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.SetMany(ps, &opts)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts.UseNativeSkip = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.SetMany(ps, &opts)
			}
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{}
		v := generic.NewValue(desc, data)
		ps := []generic.PathNode{
			{Path: generic.NewPathFieldId(2)},
			{Path: generic.NewPathFieldId(8)},
			{Path: generic.NewPathFieldId(15)},
		}
		require.Nil(b, v.GetMany(ps, &opts))
		require.Nil(b, v.SetMany(ps, &opts))

		b.Run("native", func(b *testing.B) {
			opts.UseNativeSkip = true
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.SetMany(ps, &opts)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts.UseNativeSkip = false
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = v.SetMany(ps, &opts)
			}
		})
	})
}

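// BenchmarkThriftMarshalTo measures projecting the full payload onto the partial descriptor via Value.MarshalTo,
// with and without required-ness checking.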
func BenchmarkThriftMarshalTo(b *testing.B) {
	b.Run("small", func(b *testing.B) {
		desc := getSimpleDesc()
		part := getPartialSimpleDesc()
		obj := getSimpleValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{}
		v := generic.NewValue(desc, data)
		out, err := v.MarshalTo(part, &opts)
		require.Nil(b, err)
		exp := baseline.NewPartialSimple()
		_, err = exp.FastRead(out)
		require.Nil(b, err)

		b.Run("native", func(b *testing.B) {
			opts := generic.Options{
				UseNativeSkip: true,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_, _ = v.MarshalTo(part, &opts)
			}
		})

		b.Run("go", func(b *testing.B) {
			opts := generic.Options{
				UseNativeSkip: false,
			}
			b.SetBytes(int64(len(data)))
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_, _ = v.MarshalTo(part, &opts)
			}
		})
	})

	b.Run("medium", func(b *testing.B) {
		desc := getNestingDesc()
		part := getPartialNestingDesc()
		obj := getNestingValue()
		data := make([]byte, obj.BLength())
		ret := obj.FastWriteNocopy(data, nil)
		if ret < 0 {
			b.Fatal(ret)
		}
		opts := generic.Options{}
		v := generic.NewValue(desc, data)
		out, err := v.MarshalTo(part, &opts)
		require.NoError(b, err)
		exp := baseline.NewPartialNesting()
		_, err = exp.FastRead(out)
		require.Nil(b, err)

		b.Run("native", func(b *testing.B) {
			b.Run("not_check_requireness", func(b *testing.B) {
				opts := generic.Options{
					UseNativeSkip:       true,
					NotCheckRequireNess: true,
				}
				b.SetBytes(int64(len(data)))
				b.ResetTimer()
				for i := 0; i < b.N; i++ {
					_, _ = v.MarshalTo(part, &opts)
				}
			})
			b.Run("check_requireness", func(b *testing.B) {
				opts := generic.Options{
					UseNativeSkip: true,
				}
				b.SetBytes(int64(len(data)))
				b.ResetTimer()
				for i := 0; i < b.N; i++ {
					_, _ = v.MarshalTo(part, &opts)
				}
			})
		})

		b.Run("go", func(b *testing.B) {
			b.Run("not_check_requireness", func(b *testing.B) {
				opts := generic.Options{
					UseNativeSkip:       false,
					NotCheckRequireNess: true,
				}
				b.SetBytes(int64(len(data)))
				b.ResetTimer()
				for i := 0; i < b.N; i++ {
					_, _ = v.MarshalTo(part, &opts)
				}
			})
			b.Run("check_requireness", func(b *testing.B) {
				opts := generic.Options{
					UseNativeSkip: false,
				}
				b.SetBytes(int64(len(data)))
				b.ResetTimer()
				for i := 0; i < b.N; i++ {
					_, _ = v.MarshalTo(part, &opts)
				}
			})
		})
	})
}