github.com/m3db/m3@v1.5.1-0.20231129193456-75a402aa583b/src/metrics/encoding/protobuf/unaggregated_encoder_test.go

// Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package protobuf

import (
	"strings"
	"testing"
	"time"

	"github.com/m3db/m3/src/metrics/aggregation"
	"github.com/m3db/m3/src/metrics/encoding"
	"github.com/m3db/m3/src/metrics/generated/proto/aggregationpb"
	"github.com/m3db/m3/src/metrics/generated/proto/metricpb"
	"github.com/m3db/m3/src/metrics/generated/proto/pipelinepb"
	"github.com/m3db/m3/src/metrics/generated/proto/policypb"
	"github.com/m3db/m3/src/metrics/generated/proto/transformationpb"
	"github.com/m3db/m3/src/metrics/metadata"
	"github.com/m3db/m3/src/metrics/metric"
	"github.com/m3db/m3/src/metrics/metric/aggregated"
	"github.com/m3db/m3/src/metrics/metric/unaggregated"
	"github.com/m3db/m3/src/metrics/pipeline"
	"github.com/m3db/m3/src/metrics/pipeline/applied"
	"github.com/m3db/m3/src/metrics/policy"
	"github.com/m3db/m3/src/metrics/transformation"
	xtime "github.com/m3db/m3/src/x/time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/stretchr/testify/require"
)

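// Test fixtures shared across the encoder tests: unaggregated and aggregated
// metrics, their metadata, and the protobuf messages each is expected to
// encode to.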
var (
	testCounter1 = unaggregated.Counter{
		ID:    []byte("testCounter1"),
		Value: 123,
	}
	testCounter2 = unaggregated.Counter{
		ID:    []byte("testCounter2"),
		Value: 456,
	}
	testBatchTimer1 = unaggregated.BatchTimer{
		ID:     []byte("testBatchTimer1"),
		Values: []float64{3.67, -9.38},
	}
	testBatchTimer2 = unaggregated.BatchTimer{
		ID:     []byte("testBatchTimer2"),
		Values: []float64{4.57, 189234.01},
	}
	testGauge1 = unaggregated.Gauge{
		ID:    []byte("testGauge1"),
		Value: 845.23,
	}
	testGauge2 = unaggregated.Gauge{
		ID:    []byte("testGauge2"),
		Value: 234231.345,
	}
	testForwardedMetric1 = aggregated.ForwardedMetric{
		Type:      metric.CounterType,
		ID:        []byte("testForwardedMetric1"),
		TimeNanos: 8259,
		Values:    []float64{1, 3234, -12},
	}
	testForwardedMetric2 = aggregated.ForwardedMetric{
		Type:      metric.TimerType,
		ID:        []byte("testForwardedMetric2"),
		TimeNanos: 145668,
		Values:    []float64{563.875, -23.87},
	}
	testTimedMetric1 = aggregated.Metric{
		Type:      metric.CounterType,
		ID:        []byte("testTimedMetric1"),
		TimeNanos: 8259,
		Value:     3234,
	}
	testTimedMetric2 = aggregated.Metric{
		Type:      metric.GaugeType,
		ID:        []byte("testTimedMetric2"),
		TimeNanos: 82590,
		Value:     0,
	}
	testPassthroughMetric1 = aggregated.Metric{
		Type:      metric.CounterType,
		ID:        []byte("testPassthroughMetric1"),
		TimeNanos: 11111,
		Value:     1,
	}
	testPassthroughMetric2 = aggregated.Metric{
		Type:      metric.GaugeType,
		ID:        []byte("testPassthroughMetric2"),
		TimeNanos: 22222,
		Value:     2,
	}
	testStagedMetadatas1 = metadata.StagedMetadatas{
		{
			CutoverNanos: 1234,
			Tombstoned:   false,
			Metadata: metadata.Metadata{
				Pipelines: []metadata.PipelineMetadata{
					{
						AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
						StoragePolicies: []policy.StoragePolicy{
							policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
						},
					},
					{
						AggregationID: aggregation.DefaultID,
						StoragePolicies: []policy.StoragePolicy{
							policy.NewStoragePolicy(10*time.Second, xtime.Second, time.Hour),
						},
						Pipeline: applied.NewPipeline([]applied.OpUnion{
							{
								Type: pipeline.RollupOpType,
								Rollup: applied.RollupOp{
									ID:            []byte("baz"),
									AggregationID: aggregation.MustCompressTypes(aggregation.Mean),
								},
							},
						}),
					},
				},
			},
		},
	}
	testStagedMetadatas2 = metadata.StagedMetadatas{
		{
			CutoverNanos: 4567,
			Tombstoned:   false,
		},
		{
			CutoverNanos: 7890,
			Tombstoned:   true,
			Metadata: metadata.Metadata{
				Pipelines: []metadata.PipelineMetadata{
					{
						AggregationID: aggregation.MustCompressTypes(aggregation.Count),
						StoragePolicies: []policy.StoragePolicy{
							policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
						},
					},
					{
						AggregationID: aggregation.DefaultID,
						StoragePolicies: []policy.StoragePolicy{
							policy.NewStoragePolicy(time.Minute, xtime.Minute, 6*time.Hour),
							policy.NewStoragePolicy(time.Hour, xtime.Hour, 30*24*time.Hour),
						},
						Pipeline: applied.NewPipeline([]applied.OpUnion{
							{
								Type: pipeline.TransformationOpType,
								Transformation: pipeline.TransformationOp{
									Type: transformation.Absolute,
								},
							},
							{
								Type: pipeline.RollupOpType,
								Rollup: applied.RollupOp{
									ID:            []byte("foo"),
									AggregationID: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum),
								},
							},
						}),
					},
				},
			},
		},
		{
			CutoverNanos: 32768,
			Tombstoned:   false,
			Metadata: metadata.Metadata{
				Pipelines: []metadata.PipelineMetadata{
					{
						AggregationID: aggregation.DefaultID,
						Pipeline: applied.NewPipeline([]applied.OpUnion{
							{
								Type: pipeline.TransformationOpType,
								Transformation: pipeline.TransformationOp{
									Type: transformation.PerSecond,
								},
							},
							{
								Type: pipeline.RollupOpType,
								Rollup: applied.RollupOp{
									ID:            []byte("bar"),
									AggregationID: aggregation.MustCompressTypes(aggregation.P99),
								},
							},
						}),
					},
				},
			},
		},
	}
	testForwardMetadata1 = metadata.ForwardMetadata{
		AggregationID: aggregation.DefaultID,
		StoragePolicy: policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
		Pipeline: applied.NewPipeline([]applied.OpUnion{
			{
				Type: pipeline.RollupOpType,
				Rollup: applied.RollupOp{
					ID:            []byte("foo"),
					AggregationID: aggregation.MustCompressTypes(aggregation.Count),
				},
			},
		}),
		SourceID:          1234,
		NumForwardedTimes: 3,
	}
	testForwardMetadata2 = metadata.ForwardMetadata{
		AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
		StoragePolicy: policy.NewStoragePolicy(10*time.Second, xtime.Second, 6*time.Hour),
		Pipeline: applied.NewPipeline([]applied.OpUnion{
			{
				Type: pipeline.TransformationOpType,
				Transformation: pipeline.TransformationOp{
					Type: transformation.Absolute,
				},
			},
			{
				Type: pipeline.RollupOpType,
				Rollup: applied.RollupOp{
					ID:            []byte("bar"),
					AggregationID: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum),
				},
			},
		}),
		SourceID:          897,
		NumForwardedTimes: 2,
	}
	testTimedMetadata1 = metadata.TimedMetadata{
		AggregationID: aggregation.DefaultID,
		StoragePolicy: policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
	}
	testTimedMetadata2 = metadata.TimedMetadata{
		AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
		StoragePolicy: policy.NewStoragePolicy(10*time.Second, xtime.Second, 6*time.Hour),
	}
	testPassthroughMetadata1 = policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour)
	testPassthroughMetadata2 = policy.NewStoragePolicy(10*time.Second, xtime.Second, 6*time.Hour)
	testCounter1Proto        = metricpb.Counter{
		Id:    []byte("testCounter1"),
		Value: 123,
	}
	testCounter2Proto = metricpb.Counter{
		Id:    []byte("testCounter2"),
		Value: 456,
	}
	testBatchTimer1Proto = metricpb.BatchTimer{
		Id:     []byte("testBatchTimer1"),
		Values: []float64{3.67, -9.38},
	}
	testBatchTimer2Proto = metricpb.BatchTimer{
		Id:     []byte("testBatchTimer2"),
		Values: []float64{4.57, 189234.01},
	}
	testGauge1Proto = metricpb.Gauge{
		Id:    []byte("testGauge1"),
		Value: 845.23,
	}
	testGauge2Proto = metricpb.Gauge{
		Id:    []byte("testGauge2"),
		Value: 234231.345,
	}
	testForwardedMetric1Proto = metricpb.ForwardedMetric{
		Type:      metricpb.MetricType_COUNTER,
		Id:        []byte("testForwardedMetric1"),
		TimeNanos: 8259,
		Values:    []float64{1, 3234, -12},
	}
	testForwardedMetric2Proto = metricpb.ForwardedMetric{
		Type:      metricpb.MetricType_TIMER,
		Id:        []byte("testForwardedMetric2"),
		TimeNanos: 145668,
		Values:    []float64{563.875, -23.87},
	}
	testTimedMetric1Proto = metricpb.TimedMetric{
		Type:      metricpb.MetricType_COUNTER,
		Id:        []byte("testTimedMetric1"),
		TimeNanos: 8259,
		Value:     3234,
	}
	testTimedMetric2Proto = metricpb.TimedMetric{
		Type:      metricpb.MetricType_GAUGE,
		Id:        []byte("testTimedMetric2"),
		TimeNanos: 82590,
		Value:     0,
	}
	testPassthroughMetric1Proto = metricpb.TimedMetric{
		Type:      metricpb.MetricType_COUNTER,
		Id:        []byte("testPassthroughMetric1"),
		TimeNanos: 11111,
		Value:     1,
	}
	testPassthroughMetric2Proto = metricpb.TimedMetric{
		Type:      metricpb.MetricType_GAUGE,
		Id:        []byte("testPassthroughMetric2"),
		TimeNanos: 22222,
		Value:     2,
	}
	testStagedMetadatas1Proto = metricpb.StagedMetadatas{
		Metadatas: []metricpb.StagedMetadata{
			{
				CutoverNanos: 1234,
				Tombstoned:   false,
				Metadata: metricpb.Metadata{
					Pipelines: []metricpb.PipelineMetadata{
						{
							AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Sum)[0]},
							StoragePolicies: []policypb.StoragePolicy{
								{
									Resolution: policypb.Resolution{
										WindowSize: time.Second.Nanoseconds(),
										Precision:  time.Second.Nanoseconds(),
									},
									Retention: policypb.Retention{
										Period: time.Hour.Nanoseconds(),
									},
								},
							},
						},
						{
							AggregationId: aggregationpb.AggregationID{Id: 0},
							StoragePolicies: []policypb.StoragePolicy{
								{
									Resolution: policypb.Resolution{
										WindowSize: 10 * time.Second.Nanoseconds(),
										Precision:  time.Second.Nanoseconds(),
									},
									Retention: policypb.Retention{
										Period: time.Hour.Nanoseconds(),
									},
								},
							},
							Pipeline: pipelinepb.AppliedPipeline{
								Ops: []pipelinepb.AppliedPipelineOp{
									{
										Type: pipelinepb.AppliedPipelineOp_ROLLUP,
										Rollup: pipelinepb.AppliedRollupOp{
											Id:            []byte("baz"),
											AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Mean)[0]},
										},
									},
								},
							},
						},
					},
				},
			},
		},
	}
	testStagedMetadatas2Proto = metricpb.StagedMetadatas{
		Metadatas: []metricpb.StagedMetadata{
			{
				CutoverNanos: 4567,
				Tombstoned:   false,
			},
			{
				CutoverNanos: 7890,
				Tombstoned:   true,
				Metadata: metricpb.Metadata{
					Pipelines: []metricpb.PipelineMetadata{
						{
							AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Count)[0]},
							StoragePolicies: []policypb.StoragePolicy{
								{
									Resolution: policypb.Resolution{
										WindowSize: time.Second.Nanoseconds(),
										Precision:  time.Second.Nanoseconds(),
									},
									Retention: policypb.Retention{
										Period: time.Hour.Nanoseconds(),
									},
								},
							},
						},
						{
							AggregationId: aggregationpb.AggregationID{Id: 0},
							StoragePolicies: []policypb.StoragePolicy{
								{
									Resolution: policypb.Resolution{
										WindowSize: time.Minute.Nanoseconds(),
										Precision:  time.Minute.Nanoseconds(),
									},
									Retention: policypb.Retention{
										Period: (6 * time.Hour).Nanoseconds(),
									},
								},
								{
									Resolution: policypb.Resolution{
										WindowSize: time.Hour.Nanoseconds(),
										Precision:  time.Hour.Nanoseconds(),
									},
									Retention: policypb.Retention{
										Period: (30 * 24 * time.Hour).Nanoseconds(),
									},
								},
							},
							Pipeline: pipelinepb.AppliedPipeline{
								Ops: []pipelinepb.AppliedPipelineOp{
									{
										Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
										Transformation: pipelinepb.TransformationOp{
											Type: transformationpb.TransformationType_ABSOLUTE,
										},
									},
									{
										Type: pipelinepb.AppliedPipelineOp_ROLLUP,
										Rollup: pipelinepb.AppliedRollupOp{
											Id:            []byte("foo"),
											AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum)[0]},
										},
									},
								},
							},
						},
					},
				},
			},
			{
				CutoverNanos: 32768,
				Tombstoned:   false,
				Metadata: metricpb.Metadata{
					Pipelines: []metricpb.PipelineMetadata{
						{
							AggregationId: aggregationpb.AggregationID{Id: 0},
							Pipeline: pipelinepb.AppliedPipeline{
								Ops: []pipelinepb.AppliedPipelineOp{
									{
										Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
										Transformation: pipelinepb.TransformationOp{
											Type: transformationpb.TransformationType_PERSECOND,
										},
									},
									{
										Type: pipelinepb.AppliedPipelineOp_ROLLUP,
										Rollup: pipelinepb.AppliedRollupOp{
											Id:            []byte("bar"),
											AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.P99)[0]},
										},
									},
								},
							},
						},
					},
				},
			},
		},
	}
	testForwardMetadata1Proto = metricpb.ForwardMetadata{
		AggregationId: aggregationpb.AggregationID{Id: 0},
		StoragePolicy: policypb.StoragePolicy{
			Resolution: policypb.Resolution{
				WindowSize: time.Minute.Nanoseconds(),
				Precision:  time.Minute.Nanoseconds(),
			},
			Retention: policypb.Retention{
				Period: (12 * time.Hour).Nanoseconds(),
			},
		},
		Pipeline: pipelinepb.AppliedPipeline{
			Ops: []pipelinepb.AppliedPipelineOp{
				{
					Type: pipelinepb.AppliedPipelineOp_ROLLUP,
					Rollup: pipelinepb.AppliedRollupOp{
						Id:            []byte("foo"),
						AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Count)[0]},
					},
				},
			},
		},
		SourceId:          1234,
		NumForwardedTimes: 3,
	}
	testForwardMetadata2Proto = metricpb.ForwardMetadata{
		AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Sum)[0]},
		StoragePolicy: policypb.StoragePolicy{
			Resolution: policypb.Resolution{
				WindowSize: 10 * time.Second.Nanoseconds(),
				Precision:  time.Second.Nanoseconds(),
			},
			Retention: policypb.Retention{
				Period: (6 * time.Hour).Nanoseconds(),
			},
		},
		Pipeline: pipelinepb.AppliedPipeline{
			Ops: []pipelinepb.AppliedPipelineOp{
				{
					Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
					Transformation: pipelinepb.TransformationOp{
						Type: transformationpb.TransformationType_ABSOLUTE,
					},
				},
				{
					Type: pipelinepb.AppliedPipelineOp_ROLLUP,
					Rollup: pipelinepb.AppliedRollupOp{
						Id:            []byte("bar"),
						AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum)[0]},
					},
				},
			},
		},
		SourceId:          897,
		NumForwardedTimes: 2,
	}
	testTimedMetadata1Proto = metricpb.TimedMetadata{
		AggregationId: aggregationpb.AggregationID{Id: 0},
		StoragePolicy: policypb.StoragePolicy{
			Resolution: policypb.Resolution{
				WindowSize: time.Minute.Nanoseconds(),
				Precision:  time.Minute.Nanoseconds(),
			},
			Retention: policypb.Retention{
				Period: (12 * time.Hour).Nanoseconds(),
			},
		},
	}
	testTimedMetadata2Proto = metricpb.TimedMetadata{
		AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Sum)[0]},
		StoragePolicy: policypb.StoragePolicy{
			Resolution: policypb.Resolution{
				WindowSize: 10 * time.Second.Nanoseconds(),
				Precision:  time.Second.Nanoseconds(),
			},
			Retention: policypb.Retention{
				Period: (6 * time.Hour).Nanoseconds(),
			},
		},
	}
	testPassthroughMetadata1Proto = policypb.StoragePolicy{
		Resolution: policypb.Resolution{
			WindowSize: time.Minute.Nanoseconds(),
			Precision:  time.Minute.Nanoseconds(),
		},
		Retention: policypb.Retention{
			Period: (12 * time.Hour).Nanoseconds(),
		},
	}
	testPassthroughMetadata2Proto = policypb.StoragePolicy{
		Resolution: policypb.Resolution{
			WindowSize: 10 * time.Second.Nanoseconds(),
			Precision:  time.Second.Nanoseconds(),
		},
		Retention: policypb.Retention{
			Period: (6 * time.Hour).Nanoseconds(),
		},
	}
	testCmpOpts = []cmp.Option{
		cmpopts.EquateEmpty(),
		cmp.AllowUnexported(policy.StoragePolicy{}),
	}
)

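// A minimal usage sketch assembled from what the tests below exercise (an
// illustration, not a prescription for production wiring): construct an
// encoder, encode a message union, then take ownership of the encoded bytes.
//
//	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
//	msg := encoding.UnaggregatedMessageUnion{
//		Type: encoding.CounterWithMetadatasType,
//		CounterWithMetadatas: unaggregated.CounterWithMetadatas{
//			Counter:         testCounter1,
//			StagedMetadatas: testStagedMetadatas1,
//		},
//	}
//	if err := enc.EncodeMessage(msg); err != nil {
//		// handle the encode error
//	}
//	buf := enc.Relinquish() // the encoder is empty afterwards
//
// The per-type tests below replace the encoder's encodeMessageSizeFn and
// encodeMessageFn with stubs so they can capture the message size and protobuf
// payload that would be written and compare them against the expected protos.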
func TestUnaggregatedEncoderEncodeCounterWithMetadatas(t *testing.T) {
	inputs := []unaggregated.CounterWithMetadatas{
		{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas1,
		},
		{
			Counter:         testCounter2,
			StagedMetadatas: testStagedMetadatas1,
		},
		{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas2,
		},
		{
			Counter:         testCounter2,
			StagedMetadatas: testStagedMetadatas2,
		},
	}
	expected := []metricpb.CounterWithMetadatas{
		{
			Counter:   testCounter1Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		{
			Counter:   testCounter2Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		{
			Counter:   testCounter1Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
		{
			Counter:   testCounter2Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
	)
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for i, input := range inputs {
		require.NoError(t, enc.EncodeMessage(encoding.UnaggregatedMessageUnion{
			Type:                 encoding.CounterWithMetadatasType,
			CounterWithMetadatas: input,
		}))
		expectedProto := metricpb.MetricWithMetadatas{
			Type:                 metricpb.MetricWithMetadatas_COUNTER_WITH_METADATAS,
			CounterWithMetadatas: &expected[i],
		}
		expectedMsgSize := expectedProto.Size()
		require.Equal(t, expectedMsgSize, sizeRes)
		require.Equal(t, expectedProto, pbRes)
	}
}

func TestUnaggregatedEncoderEncodeBatchTimerWithMetadatas(t *testing.T) {
	inputs := []unaggregated.BatchTimerWithMetadatas{
		{
			BatchTimer:      testBatchTimer1,
			StagedMetadatas: testStagedMetadatas1,
		},
		{
			BatchTimer:      testBatchTimer2,
			StagedMetadatas: testStagedMetadatas1,
		},
		{
			BatchTimer:      testBatchTimer1,
			StagedMetadatas: testStagedMetadatas2,
		},
		{
			BatchTimer:      testBatchTimer2,
			StagedMetadatas: testStagedMetadatas2,
		},
	}
	expected := []metricpb.BatchTimerWithMetadatas{
		{
			BatchTimer: testBatchTimer1Proto,
			Metadatas:  testStagedMetadatas1Proto,
		},
		{
			BatchTimer: testBatchTimer2Proto,
			Metadatas:  testStagedMetadatas1Proto,
		},
		{
			BatchTimer: testBatchTimer1Proto,
			Metadatas:  testStagedMetadatas2Proto,
		},
		{
			BatchTimer: testBatchTimer2Proto,
			Metadatas:  testStagedMetadatas2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
	)
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for i, input := range inputs {
		require.NoError(t, enc.EncodeMessage(encoding.UnaggregatedMessageUnion{
			Type:                    encoding.BatchTimerWithMetadatasType,
			BatchTimerWithMetadatas: input,
		}))
		expectedProto := metricpb.MetricWithMetadatas{
			Type:                    metricpb.MetricWithMetadatas_BATCH_TIMER_WITH_METADATAS,
			BatchTimerWithMetadatas: &expected[i],
		}
		expectedMsgSize := expectedProto.Size()
		require.Equal(t, expectedMsgSize, sizeRes)
		require.Equal(t, expectedProto, pbRes)
	}
}

func TestUnaggregatedEncoderEncodeGaugeWithMetadatas(t *testing.T) {
	inputs := []unaggregated.GaugeWithMetadatas{
		{
			Gauge:           testGauge1,
			StagedMetadatas: testStagedMetadatas1,
		},
		{
			Gauge:           testGauge2,
			StagedMetadatas: testStagedMetadatas1,
		},
		{
			Gauge:           testGauge1,
			StagedMetadatas: testStagedMetadatas2,
		},
		{
			Gauge:           testGauge2,
			StagedMetadatas: testStagedMetadatas2,
		},
	}
	expected := []metricpb.GaugeWithMetadatas{
		{
			Gauge:     testGauge1Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		{
			Gauge:     testGauge2Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		{
			Gauge:     testGauge1Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
		{
			Gauge:     testGauge2Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
	)
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for i, input := range inputs {
		require.NoError(t, enc.EncodeMessage(encoding.UnaggregatedMessageUnion{
			Type:               encoding.GaugeWithMetadatasType,
			GaugeWithMetadatas: input,
		}))
		expectedProto := metricpb.MetricWithMetadatas{
			Type:               metricpb.MetricWithMetadatas_GAUGE_WITH_METADATAS,
			GaugeWithMetadatas: &expected[i],
		}
		expectedMsgSize := expectedProto.Size()
		require.Equal(t, expectedMsgSize, sizeRes)
		require.Equal(t, expectedProto, pbRes)
	}
}

func TestUnaggregatedEncoderEncodeForwardedMetricWithMetadata(t *testing.T) {
	inputs := []aggregated.ForwardedMetricWithMetadata{
		{
			ForwardedMetric: testForwardedMetric1,
			ForwardMetadata: testForwardMetadata1,
		},
		{
			ForwardedMetric: testForwardedMetric1,
			ForwardMetadata: testForwardMetadata2,
		},
		{
			ForwardedMetric: testForwardedMetric2,
			ForwardMetadata: testForwardMetadata1,
		},
		{
			ForwardedMetric: testForwardedMetric2,
			ForwardMetadata: testForwardMetadata2,
		},
	}
	expected := []metricpb.ForwardedMetricWithMetadata{
		{
			Metric:   testForwardedMetric1Proto,
			Metadata: testForwardMetadata1Proto,
		},
		{
			Metric:   testForwardedMetric1Proto,
			Metadata: testForwardMetadata2Proto,
		},
		{
			Metric:   testForwardedMetric2Proto,
			Metadata: testForwardMetadata1Proto,
		},
		{
			Metric:   testForwardedMetric2Proto,
			Metadata: testForwardMetadata2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
	)
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for i, input := range inputs {
		require.NoError(t, enc.EncodeMessage(encoding.UnaggregatedMessageUnion{
			Type:                        encoding.ForwardedMetricWithMetadataType,
			ForwardedMetricWithMetadata: input,
		}))
		expectedProto := metricpb.MetricWithMetadatas{
			Type:                        metricpb.MetricWithMetadatas_FORWARDED_METRIC_WITH_METADATA,
			ForwardedMetricWithMetadata: &expected[i],
		}
		expectedMsgSize := expectedProto.Size()
		require.Equal(t, expectedMsgSize, sizeRes)
		require.Equal(t, expectedProto, pbRes)
	}
}

func TestUnaggregatedEncoderEncodeTimedMetricWithMetadata(t *testing.T) {
	inputs := []aggregated.TimedMetricWithMetadata{
		{
			Metric:        testTimedMetric1,
			TimedMetadata: testTimedMetadata1,
		},
		{
			Metric:        testTimedMetric1,
			TimedMetadata: testTimedMetadata2,
		},
		{
			Metric:        testTimedMetric2,
			TimedMetadata: testTimedMetadata1,
		},
		{
			Metric:        testTimedMetric2,
			TimedMetadata: testTimedMetadata2,
		},
	}
	expected := []metricpb.TimedMetricWithMetadata{
		{
			Metric:   testTimedMetric1Proto,
			Metadata: testTimedMetadata1Proto,
		},
		{
			Metric:   testTimedMetric1Proto,
			Metadata: testTimedMetadata2Proto,
		},
		{
			Metric:   testTimedMetric2Proto,
			Metadata: testTimedMetadata1Proto,
		},
		{
			Metric:   testTimedMetric2Proto,
			Metadata: testTimedMetadata2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
	)
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for i, input := range inputs {
		require.NoError(t, enc.EncodeMessage(encoding.UnaggregatedMessageUnion{
			Type:                    encoding.TimedMetricWithMetadataType,
			TimedMetricWithMetadata: input,
		}))
		expectedProto := metricpb.MetricWithMetadatas{
			Type:                    metricpb.MetricWithMetadatas_TIMED_METRIC_WITH_METADATA,
			TimedMetricWithMetadata: &expected[i],
		}
		expectedMsgSize := expectedProto.Size()
		require.Equal(t, expectedMsgSize, sizeRes)
		require.Equal(t, expectedProto, pbRes)
	}
}

func TestUnaggregatedEncoderEncodePassthroughMetricWithMetadata(t *testing.T) {
	inputs := []aggregated.PassthroughMetricWithMetadata{
		{
			Metric:        testPassthroughMetric1,
			StoragePolicy: testPassthroughMetadata1,
		},
		{
			Metric:        testPassthroughMetric1,
			StoragePolicy: testPassthroughMetadata2,
		},
		{
			Metric:        testPassthroughMetric2,
			StoragePolicy: testPassthroughMetadata1,
		},
		{
			Metric:        testPassthroughMetric2,
			StoragePolicy: testPassthroughMetadata2,
		},
	}
	expected := []metricpb.TimedMetricWithStoragePolicy{
		{
			TimedMetric:   testPassthroughMetric1Proto,
			StoragePolicy: testPassthroughMetadata1Proto,
		},
		{
			TimedMetric:   testPassthroughMetric1Proto,
			StoragePolicy: testPassthroughMetadata2Proto,
		},
		{
			TimedMetric:   testPassthroughMetric2Proto,
			StoragePolicy: testPassthroughMetadata1Proto,
		},
		{
			TimedMetric:   testPassthroughMetric2Proto,
			StoragePolicy: testPassthroughMetadata2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
	)
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for i, input := range inputs {
		require.NoError(t, enc.EncodeMessage(encoding.UnaggregatedMessageUnion{
			Type:                          encoding.PassthroughMetricWithMetadataType,
			PassthroughMetricWithMetadata: input,
		}))
		expectedProto := metricpb.MetricWithMetadatas{
			Type:                         metricpb.MetricWithMetadatas_TIMED_METRIC_WITH_STORAGE_POLICY,
			TimedMetricWithStoragePolicy: &expected[i],
		}
		expectedMsgSize := expectedProto.Size()
		require.Equal(t, expectedMsgSize, sizeRes)
		require.Equal(t, expectedProto, pbRes)
	}
}

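// The stress test interleaves every supported message type over many
// iterations, starting from a deliberately tiny initial buffer so the encoder
// has to grow it repeatedly while encoding.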
func TestUnaggregatedEncoderStress(t *testing.T) {
	inputs := []interface{}{
		unaggregated.CounterWithMetadatas{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas1,
		},
		unaggregated.BatchTimerWithMetadatas{
			BatchTimer:      testBatchTimer1,
			StagedMetadatas: testStagedMetadatas1,
		},
		unaggregated.GaugeWithMetadatas{
			Gauge:           testGauge1,
			StagedMetadatas: testStagedMetadatas1,
		},
		aggregated.ForwardedMetricWithMetadata{
			ForwardedMetric: testForwardedMetric1,
			ForwardMetadata: testForwardMetadata1,
		},
		aggregated.TimedMetricWithMetadata{
			Metric:        testTimedMetric1,
			TimedMetadata: testTimedMetadata1,
		},
		aggregated.PassthroughMetricWithMetadata{
			Metric:        testPassthroughMetric1,
			StoragePolicy: testPassthroughMetadata1,
		},
		unaggregated.CounterWithMetadatas{
			Counter:         testCounter2,
			StagedMetadatas: testStagedMetadatas1,
		},
		unaggregated.BatchTimerWithMetadatas{
			BatchTimer:      testBatchTimer2,
			StagedMetadatas: testStagedMetadatas1,
		},
		unaggregated.GaugeWithMetadatas{
			Gauge:           testGauge2,
			StagedMetadatas: testStagedMetadatas1,
		},
		aggregated.ForwardedMetricWithMetadata{
			ForwardedMetric: testForwardedMetric2,
			ForwardMetadata: testForwardMetadata1,
		},
		unaggregated.CounterWithMetadatas{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas2,
		},
		unaggregated.BatchTimerWithMetadatas{
			BatchTimer:      testBatchTimer1,
			StagedMetadatas: testStagedMetadatas2,
		},
		unaggregated.GaugeWithMetadatas{
			Gauge:           testGauge1,
			StagedMetadatas: testStagedMetadatas2,
		},
		aggregated.ForwardedMetricWithMetadata{
			ForwardedMetric: testForwardedMetric1,
			ForwardMetadata: testForwardMetadata2,
		},
		unaggregated.CounterWithMetadatas{
			Counter:         testCounter2,
			StagedMetadatas: testStagedMetadatas2,
		},
		unaggregated.BatchTimerWithMetadatas{
			BatchTimer:      testBatchTimer2,
			StagedMetadatas: testStagedMetadatas2,
		},
		unaggregated.GaugeWithMetadatas{
			Gauge:           testGauge2,
			StagedMetadatas: testStagedMetadatas2,
		},
		aggregated.ForwardedMetricWithMetadata{
			ForwardedMetric: testForwardedMetric2,
			ForwardMetadata: testForwardMetadata2,
		},
		aggregated.TimedMetricWithMetadata{
			Metric:        testTimedMetric2,
			TimedMetadata: testTimedMetadata2,
		},
		aggregated.PassthroughMetricWithMetadata{
			Metric:        testPassthroughMetric2,
			StoragePolicy: testPassthroughMetadata2,
		},
	}

	expected := []interface{}{
		metricpb.CounterWithMetadatas{
			Counter:   testCounter1Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		metricpb.BatchTimerWithMetadatas{
			BatchTimer: testBatchTimer1Proto,
			Metadatas:  testStagedMetadatas1Proto,
		},
		metricpb.GaugeWithMetadatas{
			Gauge:     testGauge1Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		metricpb.ForwardedMetricWithMetadata{
			Metric:   testForwardedMetric1Proto,
			Metadata: testForwardMetadata1Proto,
		},
		metricpb.TimedMetricWithMetadata{
			Metric:   testTimedMetric1Proto,
			Metadata: testTimedMetadata1Proto,
		},
		metricpb.TimedMetricWithStoragePolicy{
			TimedMetric:   testPassthroughMetric1Proto,
			StoragePolicy: testPassthroughMetadata1Proto,
		},
		metricpb.CounterWithMetadatas{
			Counter:   testCounter2Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		metricpb.BatchTimerWithMetadatas{
			BatchTimer: testBatchTimer2Proto,
			Metadatas:  testStagedMetadatas1Proto,
		},
		metricpb.GaugeWithMetadatas{
			Gauge:     testGauge2Proto,
			Metadatas: testStagedMetadatas1Proto,
		},
		metricpb.ForwardedMetricWithMetadata{
			Metric:   testForwardedMetric2Proto,
			Metadata: testForwardMetadata1Proto,
		},
		metricpb.CounterWithMetadatas{
			Counter:   testCounter1Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
		metricpb.BatchTimerWithMetadatas{
			BatchTimer: testBatchTimer1Proto,
			Metadatas:  testStagedMetadatas2Proto,
		},
		metricpb.GaugeWithMetadatas{
			Gauge:     testGauge1Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
		metricpb.ForwardedMetricWithMetadata{
			Metric:   testForwardedMetric1Proto,
			Metadata: testForwardMetadata2Proto,
		},
		metricpb.CounterWithMetadatas{
			Counter:   testCounter2Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
		metricpb.BatchTimerWithMetadatas{
			BatchTimer: testBatchTimer2Proto,
			Metadatas:  testStagedMetadatas2Proto,
		},
		metricpb.GaugeWithMetadatas{
			Gauge:     testGauge2Proto,
			Metadatas: testStagedMetadatas2Proto,
		},
		metricpb.ForwardedMetricWithMetadata{
			Metric:   testForwardedMetric2Proto,
			Metadata: testForwardMetadata2Proto,
		},
		metricpb.TimedMetricWithMetadata{
			Metric:   testTimedMetric2Proto,
			Metadata: testTimedMetadata2Proto,
		},
		metricpb.TimedMetricWithStoragePolicy{
			TimedMetric:   testPassthroughMetric2Proto,
			StoragePolicy: testPassthroughMetadata2Proto,
		},
	}

	var (
		sizeRes int
		pbRes   metricpb.MetricWithMetadatas
		numIter = 1000
	)
	opts := NewUnaggregatedOptions().SetInitBufferSize(2)
	enc := NewUnaggregatedEncoder(opts)
	enc.(*unaggregatedEncoder).encodeMessageSizeFn = func(size int) { sizeRes = size }
	enc.(*unaggregatedEncoder).encodeMessageFn = func(pb metricpb.MetricWithMetadatas) error { pbRes = pb; return nil }
	for iter := 0; iter < numIter; iter++ {
		for i, input := range inputs {
			var (
				msg           encoding.UnaggregatedMessageUnion
				expectedProto metricpb.MetricWithMetadatas
			)
			switch input := input.(type) {
			case unaggregated.CounterWithMetadatas:
				msg = encoding.UnaggregatedMessageUnion{
					Type:                 encoding.CounterWithMetadatasType,
					CounterWithMetadatas: input,
				}
				res := expected[i].(metricpb.CounterWithMetadatas)
				expectedProto = metricpb.MetricWithMetadatas{
					Type:                 metricpb.MetricWithMetadatas_COUNTER_WITH_METADATAS,
					CounterWithMetadatas: &res,
				}
			case unaggregated.BatchTimerWithMetadatas:
				msg = encoding.UnaggregatedMessageUnion{
					Type:                    encoding.BatchTimerWithMetadatasType,
					BatchTimerWithMetadatas: input,
				}
				res := expected[i].(metricpb.BatchTimerWithMetadatas)
				expectedProto = metricpb.MetricWithMetadatas{
					Type:                    metricpb.MetricWithMetadatas_BATCH_TIMER_WITH_METADATAS,
					BatchTimerWithMetadatas: &res,
				}
			case unaggregated.GaugeWithMetadatas:
				msg = encoding.UnaggregatedMessageUnion{
					Type:               encoding.GaugeWithMetadatasType,
					GaugeWithMetadatas: input,
				}
				res := expected[i].(metricpb.GaugeWithMetadatas)
				expectedProto = metricpb.MetricWithMetadatas{
					Type:               metricpb.MetricWithMetadatas_GAUGE_WITH_METADATAS,
					GaugeWithMetadatas: &res,
				}
			case aggregated.ForwardedMetricWithMetadata:
				msg = encoding.UnaggregatedMessageUnion{
					Type:                        encoding.ForwardedMetricWithMetadataType,
					ForwardedMetricWithMetadata: input,
				}
				res := expected[i].(metricpb.ForwardedMetricWithMetadata)
				expectedProto = metricpb.MetricWithMetadatas{
					Type:                        metricpb.MetricWithMetadatas_FORWARDED_METRIC_WITH_METADATA,
					ForwardedMetricWithMetadata: &res,
				}
			case aggregated.TimedMetricWithMetadata:
				msg = encoding.UnaggregatedMessageUnion{
					Type:                    encoding.TimedMetricWithMetadataType,
					TimedMetricWithMetadata: input,
				}
				res := expected[i].(metricpb.TimedMetricWithMetadata)
				expectedProto = metricpb.MetricWithMetadatas{
					Type:                    metricpb.MetricWithMetadatas_TIMED_METRIC_WITH_METADATA,
					TimedMetricWithMetadata: &res,
				}
			case aggregated.PassthroughMetricWithMetadata:
				msg = encoding.UnaggregatedMessageUnion{
					Type:                          encoding.PassthroughMetricWithMetadataType,
					PassthroughMetricWithMetadata: input,
				}
				res := expected[i].(metricpb.TimedMetricWithStoragePolicy)
				expectedProto = metricpb.MetricWithMetadatas{
					Type:                         metricpb.MetricWithMetadatas_TIMED_METRIC_WITH_STORAGE_POLICY,
					TimedMetricWithStoragePolicy: &res,
				}
			default:
				require.Failf(t, "unrecognized type", "%T", input)
			}
			require.NoError(t, enc.EncodeMessage(msg))
			expectedMsgSize := expectedProto.Size()
			require.Equal(t, expectedMsgSize, sizeRes)
			require.True(t, cmp.Equal(expectedProto, pbRes, testCmpOpts...))
		}
	}
}

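// Encoding a union whose message type the encoder does not recognize must fail
// with a descriptive error.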
func TestUnaggregatedEncoderEncodeMessageInvalidMessageType(t *testing.T) {
	enc := NewUnaggregatedEncoder(NewUnaggregatedOptions())
	msg := encoding.UnaggregatedMessageUnion{Type: encoding.UnknownMessageType}
	err := enc.EncodeMessage(msg)
	require.Error(t, err)
	require.True(t, strings.Contains(err.Error(), "unknown message type"))
}

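// A one-byte maximum message size guarantees that any real message exceeds the
// limit and is rejected.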
func TestUnaggregatedEncoderEncodeMessageTooLarge(t *testing.T) {
	msg := encoding.UnaggregatedMessageUnion{
		Type: encoding.CounterWithMetadatasType,
		CounterWithMetadatas: unaggregated.CounterWithMetadatas{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas1,
		},
	}
	opts := NewUnaggregatedOptions().SetMaxMessageSize(1)
	enc := NewUnaggregatedEncoder(opts)
	err := enc.EncodeMessage(msg)
	require.Error(t, err)
	require.True(t, strings.Contains(err.Error(), "larger than maximum supported size"))
}

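// Truncate shrinks the encoded length to the requested size; the test walks the
// length down from the full buffer to zero and checks the retained prefix each
// time.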
func TestUnaggregatedEncoderTruncate(t *testing.T) {
	opts := NewUnaggregatedOptions().SetInitBufferSize(2)
	enc := NewUnaggregatedEncoder(opts)
	encoder := enc.(*unaggregatedEncoder)
	buf := []byte{1, 2, 3, 4}
	enc.Reset(buf)
	require.Equal(t, 4, enc.Len())

	for i := 4; i >= 0; i-- {
		require.NoError(t, enc.Truncate(i))
		require.Equal(t, i, enc.Len())
		require.Equal(t, buf[:i], encoder.buf[:encoder.used])
	}
}

func TestUnaggregatedEncoderTruncateError(t *testing.T) {
	opts := NewUnaggregatedOptions().SetInitBufferSize(2)
	enc := NewUnaggregatedEncoder(opts)
	buf := []byte{1, 2, 3, 4}
	enc.Reset(buf)
	require.Equal(t, 4, enc.Len())

	invalidTargets := []int{-3, 5}
	for _, target := range invalidTargets {
		err := enc.Truncate(target)
		require.Error(t, err)
		require.True(t, strings.Contains(err.Error(), "truncation out of range"))
	}
}

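// Reset must copy the caller's bytes into the encoder's own buffer, so mutating
// the original slice afterwards does not affect the encoder's data.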
func TestUnaggregatedEncoderEncodeMessageRelinquishReset(t *testing.T) {
	msg := encoding.UnaggregatedMessageUnion{
		Type: encoding.CounterWithMetadatasType,
		CounterWithMetadatas: unaggregated.CounterWithMetadatas{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas1,
		},
	}
	opts := NewUnaggregatedOptions().SetInitBufferSize(2)
	enc := NewUnaggregatedEncoder(opts)
	encoder := enc.(*unaggregatedEncoder)
	require.NoError(t, enc.EncodeMessage(msg))
	require.True(t, enc.Len() > 0)
	require.NotNil(t, encoder.buf)

	initData := []byte{1, 2, 3, 4}
	enc.Reset(initData)
	require.Equal(t, initData, encoder.buf)
	require.Equal(t, 4, enc.Len())

	// Verify the initial data has been copied.
	initData[0] = 123
	require.Equal(t, byte(1), encoder.buf[0])
}

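// Relinquish hands the encoded bytes over to the caller and leaves the encoder
// empty.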
func TestUnaggregatedEncoderRelinquish(t *testing.T) {
	msg := encoding.UnaggregatedMessageUnion{
		Type: encoding.CounterWithMetadatasType,
		CounterWithMetadatas: unaggregated.CounterWithMetadatas{
			Counter:         testCounter1,
			StagedMetadatas: testStagedMetadatas1,
		},
	}
	opts := NewUnaggregatedOptions().SetInitBufferSize(2)
	enc := NewUnaggregatedEncoder(opts)
	encoder := enc.(*unaggregatedEncoder)
	require.NoError(t, enc.EncodeMessage(msg))
	require.True(t, enc.Len() > 0)
	require.NotNil(t, encoder.buf)

	var (
		size    = enc.Len()
		buf     = encoder.buf
		dataBuf = enc.Relinquish()
	)
	require.True(t, enc.Len() == 0)
	require.Nil(t, encoder.buf)
	require.Equal(t, buf[:size], dataBuf.Bytes())
}