github.com/m3db/m3@v1.5.0/src/metrics/metadata/metadata_test.go (about)

     1  // Copyright (c) 2018 Uber Technologies, Inc.
     2  //
     3  // Permission is hereby granted, free of charge, to any person obtaining a copy
     4  // of this software and associated documentation files (the "Software"), to deal
     5  // in the Software without restriction, including without limitation the rights
     6  // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     7  // copies of the Software, and to permit persons to whom the Software is
     8  // furnished to do so, subject to the following conditions:
     9  //
    10  // The above copyright notice and this permission notice shall be included in
    11  // all copies or substantial portions of the Software.
    12  //
    13  // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    14  // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    15  // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    16  // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    17  // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    18  // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    19  // THE SOFTWARE.
    20  
    21  package metadata
    22  
    23  import (
    24  	"encoding/json"
    25  	"fmt"
    26  	"testing"
    27  	"time"
    28  
    29  	"github.com/m3db/m3/src/metrics/aggregation"
    30  	"github.com/m3db/m3/src/metrics/generated/proto/aggregationpb"
    31  	"github.com/m3db/m3/src/metrics/generated/proto/metricpb"
    32  	"github.com/m3db/m3/src/metrics/generated/proto/pipelinepb"
    33  	"github.com/m3db/m3/src/metrics/generated/proto/policypb"
    34  	"github.com/m3db/m3/src/metrics/generated/proto/transformationpb"
    35  	"github.com/m3db/m3/src/metrics/pipeline"
    36  	"github.com/m3db/m3/src/metrics/pipeline/applied"
    37  	"github.com/m3db/m3/src/metrics/policy"
    38  	"github.com/m3db/m3/src/metrics/transformation"
    39  	xtime "github.com/m3db/m3/src/x/time"
    40  
    41  	"github.com/stretchr/testify/require"
    42  )
    43  
    44  var (
    45  	testSmallForwardMetadata = ForwardMetadata{
    46  		AggregationID: aggregation.DefaultID,
    47  		StoragePolicy: policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
    48  		Pipeline: applied.NewPipeline([]applied.OpUnion{
    49  			{
    50  				Type: pipeline.RollupOpType,
    51  				Rollup: applied.RollupOp{
    52  					ID:            []byte("foo"),
    53  					AggregationID: aggregation.MustCompressTypes(aggregation.Count),
    54  				},
    55  			},
    56  		}),
    57  		SourceID:          1234,
    58  		NumForwardedTimes: 3,
    59  	}
    60  	testLargeForwardMetadata = ForwardMetadata{
    61  		AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
    62  		StoragePolicy: policy.NewStoragePolicy(10*time.Second, xtime.Second, 6*time.Hour),
    63  		Pipeline: applied.NewPipeline([]applied.OpUnion{
    64  			{
    65  				Type: pipeline.TransformationOpType,
    66  				Transformation: pipeline.TransformationOp{
    67  					Type: transformation.Absolute,
    68  				},
    69  			},
    70  			{
    71  				Type: pipeline.RollupOpType,
    72  				Rollup: applied.RollupOp{
    73  					ID:            []byte("bar"),
    74  					AggregationID: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum),
    75  				},
    76  			},
    77  		}),
    78  		SourceID:          897,
    79  		NumForwardedTimes: 2,
    80  	}
    81  	testSmallPipelineMetadata = PipelineMetadata{
    82  		AggregationID: aggregation.DefaultID,
    83  		StoragePolicies: []policy.StoragePolicy{
    84  			policy.NewStoragePolicy(time.Minute, xtime.Minute, 6*time.Hour),
    85  		},
    86  		Pipeline: applied.NewPipeline([]applied.OpUnion{
    87  			{
    88  				Type: pipeline.TransformationOpType,
    89  				Transformation: pipeline.TransformationOp{
    90  					Type: transformation.PerSecond,
    91  				},
    92  			},
    93  		}),
    94  	}
    95  	testLargePipelineMetadata = PipelineMetadata{
    96  		AggregationID: aggregation.DefaultID,
    97  		StoragePolicies: []policy.StoragePolicy{
    98  			policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
    99  			policy.NewStoragePolicy(time.Hour, xtime.Hour, 30*24*time.Hour),
   100  		},
   101  		Pipeline: applied.NewPipeline([]applied.OpUnion{
   102  			{
   103  				Type: pipeline.TransformationOpType,
   104  				Transformation: pipeline.TransformationOp{
   105  					Type: transformation.Absolute,
   106  				},
   107  			},
   108  			{
   109  				Type: pipeline.RollupOpType,
   110  				Rollup: applied.RollupOp{
   111  					ID:            []byte("foo"),
   112  					AggregationID: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum),
   113  				},
   114  			},
   115  		}),
   116  	}
   117  	testBadForwardMetadata = ForwardMetadata{
   118  		StoragePolicy: policy.NewStoragePolicy(10*time.Second, xtime.Unit(101), 6*time.Hour),
   119  	}
   120  	testBadPipelineMetadata = PipelineMetadata{
   121  		AggregationID: aggregation.DefaultID,
   122  		StoragePolicies: []policy.StoragePolicy{
   123  			policy.NewStoragePolicy(time.Minute, xtime.Unit(100), 6*time.Hour),
   124  		},
   125  	}
   126  	testSmallStagedMetadatas = StagedMetadatas{
   127  		{
   128  			CutoverNanos: 4567,
   129  			Tombstoned:   true,
   130  			Metadata: Metadata{
   131  				Pipelines: []PipelineMetadata{
   132  					{
   133  						AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
   134  						StoragePolicies: []policy.StoragePolicy{
   135  							policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
   136  						},
   137  					},
   138  					{
   139  						AggregationID: aggregation.DefaultID,
   140  						StoragePolicies: []policy.StoragePolicy{
   141  							policy.NewStoragePolicy(10*time.Second, xtime.Second, time.Hour),
   142  						},
   143  						Pipeline: applied.NewPipeline([]applied.OpUnion{
   144  							{
   145  								Type: pipeline.RollupOpType,
   146  								Rollup: applied.RollupOp{
   147  									ID:            []byte("baz"),
   148  									AggregationID: aggregation.MustCompressTypes(aggregation.Mean),
   149  								},
   150  							},
   151  						}),
   152  					},
   153  				},
   154  			},
   155  		},
   156  	}
   157  	testLargeStagedMetadatas = StagedMetadatas{
   158  		{
   159  			CutoverNanos: 1234,
   160  			Tombstoned:   false,
   161  		},
   162  		{
   163  			CutoverNanos: 4567,
   164  			Tombstoned:   true,
   165  			Metadata: Metadata{
   166  				Pipelines: []PipelineMetadata{
   167  					{
   168  						AggregationID: aggregation.MustCompressTypes(aggregation.Count),
   169  						StoragePolicies: []policy.StoragePolicy{
   170  							policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
   171  						},
   172  					},
   173  					{
   174  						AggregationID: aggregation.DefaultID,
   175  						StoragePolicies: []policy.StoragePolicy{
   176  							policy.NewStoragePolicy(time.Minute, xtime.Minute, 6*time.Hour),
   177  							policy.NewStoragePolicy(time.Hour, xtime.Hour, 30*24*time.Hour),
   178  						},
   179  						Pipeline: applied.NewPipeline([]applied.OpUnion{
   180  							{
   181  								Type: pipeline.TransformationOpType,
   182  								Transformation: pipeline.TransformationOp{
   183  									Type: transformation.Absolute,
   184  								},
   185  							},
   186  							{
   187  								Type: pipeline.RollupOpType,
   188  								Rollup: applied.RollupOp{
   189  									ID:            []byte("foo"),
   190  									AggregationID: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum),
   191  								},
   192  							},
   193  						}),
   194  					},
   195  				},
   196  			},
   197  		},
   198  		{
   199  			CutoverNanos: 32768,
   200  			Tombstoned:   false,
   201  			Metadata: Metadata{
   202  				Pipelines: []PipelineMetadata{
   203  					{
   204  						AggregationID: aggregation.DefaultID,
   205  						Pipeline: applied.NewPipeline([]applied.OpUnion{
   206  							{
   207  								Type: pipeline.TransformationOpType,
   208  								Transformation: pipeline.TransformationOp{
   209  									Type: transformation.PerSecond,
   210  								},
   211  							},
   212  							{
   213  								Type: pipeline.RollupOpType,
   214  								Rollup: applied.RollupOp{
   215  									ID:            []byte("bar"),
   216  									AggregationID: aggregation.MustCompressTypes(aggregation.P99),
   217  								},
   218  							},
   219  						}),
   220  					},
   221  				},
   222  			},
   223  		},
   224  	}
   225  	testBadStagedMetadatas = StagedMetadatas{
   226  		{
   227  			Metadata: Metadata{
   228  				Pipelines: []PipelineMetadata{
   229  					{
   230  						AggregationID: aggregation.DefaultID,
   231  						StoragePolicies: []policy.StoragePolicy{
   232  							policy.NewStoragePolicy(time.Minute, xtime.Unit(100), 6*time.Hour),
   233  						},
   234  					},
   235  				},
   236  			},
   237  		},
   238  	}
   239  	testSmallForwardMetadataProto = metricpb.ForwardMetadata{
   240  		AggregationId: aggregationpb.AggregationID{Id: 0},
   241  		StoragePolicy: policypb.StoragePolicy{
   242  			Resolution: policypb.Resolution{
   243  				WindowSize: time.Minute.Nanoseconds(),
   244  				Precision:  time.Minute.Nanoseconds(),
   245  			},
   246  			Retention: policypb.Retention{
   247  				Period: (12 * time.Hour).Nanoseconds(),
   248  			},
   249  		},
   250  		Pipeline: pipelinepb.AppliedPipeline{
   251  			Ops: []pipelinepb.AppliedPipelineOp{
   252  				{
   253  					Type: pipelinepb.AppliedPipelineOp_ROLLUP,
   254  					Rollup: pipelinepb.AppliedRollupOp{
   255  						Id:            []byte("foo"),
   256  						AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Count)[0]},
   257  					},
   258  				},
   259  			},
   260  		},
   261  		SourceId:          1234,
   262  		NumForwardedTimes: 3,
   263  	}
   264  	testLargeForwardMetadataProto = metricpb.ForwardMetadata{
   265  		AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Sum)[0]},
   266  		StoragePolicy: policypb.StoragePolicy{
   267  			Resolution: policypb.Resolution{
   268  				WindowSize: 10 * time.Second.Nanoseconds(),
   269  				Precision:  time.Second.Nanoseconds(),
   270  			},
   271  			Retention: policypb.Retention{
   272  				Period: (6 * time.Hour).Nanoseconds(),
   273  			},
   274  		},
   275  		Pipeline: pipelinepb.AppliedPipeline{
   276  			Ops: []pipelinepb.AppliedPipelineOp{
   277  				{
   278  					Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
   279  					Transformation: pipelinepb.TransformationOp{
   280  						Type: transformationpb.TransformationType_ABSOLUTE,
   281  					},
   282  				},
   283  				{
   284  					Type: pipelinepb.AppliedPipelineOp_ROLLUP,
   285  					Rollup: pipelinepb.AppliedRollupOp{
   286  						Id:            []byte("bar"),
   287  						AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum)[0]},
   288  					},
   289  				},
   290  			},
   291  		},
   292  		SourceId:          897,
   293  		NumForwardedTimes: 2,
   294  	}
   295  	testBadForwardMetadataProto    = metricpb.ForwardMetadata{}
   296  	testSmallPipelineMetadataProto = metricpb.PipelineMetadata{
   297  		AggregationId: aggregationpb.AggregationID{Id: 0},
   298  		StoragePolicies: []policypb.StoragePolicy{
   299  			{
   300  				Resolution: policypb.Resolution{
   301  					WindowSize: time.Minute.Nanoseconds(),
   302  					Precision:  time.Minute.Nanoseconds(),
   303  				},
   304  				Retention: policypb.Retention{
   305  					Period: (6 * time.Hour).Nanoseconds(),
   306  				},
   307  			},
   308  		},
   309  		Pipeline: pipelinepb.AppliedPipeline{
   310  			Ops: []pipelinepb.AppliedPipelineOp{
   311  				{
   312  					Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
   313  					Transformation: pipelinepb.TransformationOp{
   314  						Type: transformationpb.TransformationType_PERSECOND,
   315  					},
   316  				},
   317  			},
   318  		},
   319  	}
   320  	testLargePipelineMetadataProto = metricpb.PipelineMetadata{
   321  		AggregationId: aggregationpb.AggregationID{Id: 0},
   322  		StoragePolicies: []policypb.StoragePolicy{
   323  			{
   324  				Resolution: policypb.Resolution{
   325  					WindowSize: time.Minute.Nanoseconds(),
   326  					Precision:  time.Minute.Nanoseconds(),
   327  				},
   328  				Retention: policypb.Retention{
   329  					Period: (12 * time.Hour).Nanoseconds(),
   330  				},
   331  			},
   332  			{
   333  				Resolution: policypb.Resolution{
   334  					WindowSize: time.Hour.Nanoseconds(),
   335  					Precision:  time.Hour.Nanoseconds(),
   336  				},
   337  				Retention: policypb.Retention{
   338  					Period: (30 * 24 * time.Hour).Nanoseconds(),
   339  				},
   340  			},
   341  		},
   342  		Pipeline: pipelinepb.AppliedPipeline{
   343  			Ops: []pipelinepb.AppliedPipelineOp{
   344  				{
   345  					Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
   346  					Transformation: pipelinepb.TransformationOp{
   347  						Type: transformationpb.TransformationType_ABSOLUTE,
   348  					},
   349  				},
   350  				{
   351  					Type: pipelinepb.AppliedPipelineOp_ROLLUP,
   352  					Rollup: pipelinepb.AppliedRollupOp{
   353  						Id:            []byte("foo"),
   354  						AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum)[0]},
   355  					},
   356  				},
   357  			},
   358  		},
   359  	}
   360  	testBadPipelineMetadataProto = metricpb.PipelineMetadata{
   361  		StoragePolicies: []policypb.StoragePolicy{
   362  			{},
   363  		},
   364  	}
   365  	testSmallStagedMetadatasProto = metricpb.StagedMetadatas{
   366  		Metadatas: []metricpb.StagedMetadata{
   367  			{
   368  				CutoverNanos: 4567,
   369  				Tombstoned:   true,
   370  				Metadata: metricpb.Metadata{
   371  					Pipelines: []metricpb.PipelineMetadata{
   372  						{
   373  							AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Sum)[0]},
   374  							StoragePolicies: []policypb.StoragePolicy{
   375  								{
   376  									Resolution: policypb.Resolution{
   377  										WindowSize: time.Second.Nanoseconds(),
   378  										Precision:  time.Second.Nanoseconds(),
   379  									},
   380  									Retention: policypb.Retention{
   381  										Period: time.Hour.Nanoseconds(),
   382  									},
   383  								},
   384  							},
   385  						},
   386  						{
   387  							AggregationId: aggregationpb.AggregationID{Id: 0},
   388  							StoragePolicies: []policypb.StoragePolicy{
   389  								{
   390  									Resolution: policypb.Resolution{
   391  										WindowSize: 10 * time.Second.Nanoseconds(),
   392  										Precision:  time.Second.Nanoseconds(),
   393  									},
   394  									Retention: policypb.Retention{
   395  										Period: time.Hour.Nanoseconds(),
   396  									},
   397  								},
   398  							},
   399  							Pipeline: pipelinepb.AppliedPipeline{
   400  								Ops: []pipelinepb.AppliedPipelineOp{
   401  									{
   402  										Type: pipelinepb.AppliedPipelineOp_ROLLUP,
   403  										Rollup: pipelinepb.AppliedRollupOp{
   404  											Id:            []byte("baz"),
   405  											AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Mean)[0]},
   406  										},
   407  									},
   408  								},
   409  							},
   410  						},
   411  					},
   412  				},
   413  			},
   414  		},
   415  	}
   416  	testSmallStagedMetadatasWithLargeStoragePoliciesProto = metricpb.StagedMetadatas{
   417  		Metadatas: []metricpb.StagedMetadata{
   418  			{
   419  				CutoverNanos: 4567,
   420  				Tombstoned:   true,
   421  				Metadata: metricpb.Metadata{
   422  					Pipelines: []metricpb.PipelineMetadata{
   423  						{
   424  							AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Sum)[0]},
   425  							StoragePolicies: []policypb.StoragePolicy{
   426  								{
   427  									Resolution: policypb.Resolution{
   428  										WindowSize: time.Second.Nanoseconds(),
   429  										Precision:  time.Second.Nanoseconds(),
   430  									},
   431  									Retention: policypb.Retention{
   432  										Period: 10 * time.Second.Nanoseconds(),
   433  									},
   434  								},
   435  								{
   436  									Resolution: policypb.Resolution{
   437  										WindowSize: 10 * time.Second.Nanoseconds(),
   438  										Precision:  time.Second.Nanoseconds(),
   439  									},
   440  									Retention: policypb.Retention{
   441  										Period: time.Hour.Nanoseconds(),
   442  									},
   443  								},
   444  								{
   445  									Resolution: policypb.Resolution{
   446  										WindowSize: 10 * time.Minute.Nanoseconds(),
   447  										Precision:  time.Second.Nanoseconds(),
   448  									},
   449  									Retention: policypb.Retention{
   450  										Period: time.Minute.Nanoseconds(),
   451  									},
   452  								},
   453  								{
   454  									Resolution: policypb.Resolution{
   455  										WindowSize: 10 * time.Minute.Nanoseconds(),
   456  										Precision:  time.Second.Nanoseconds(),
   457  									},
   458  									Retention: policypb.Retention{
   459  										Period: time.Second.Nanoseconds(),
   460  									},
   461  								},
   462  								{
   463  									Resolution: policypb.Resolution{
   464  										WindowSize: 10 * time.Hour.Nanoseconds(),
   465  										Precision:  time.Second.Nanoseconds(),
   466  									},
   467  									Retention: policypb.Retention{
   468  										Period: time.Second.Nanoseconds(),
   469  									},
   470  								},
   471  							},
   472  						},
   473  					},
   474  				},
   475  			},
   476  		},
   477  	}
   478  	testLargeStagedMetadatasProto = metricpb.StagedMetadatas{
   479  		Metadatas: []metricpb.StagedMetadata{
   480  			{
   481  				CutoverNanos: 1234,
   482  				Tombstoned:   false,
   483  			},
   484  			{
   485  				CutoverNanos: 4567,
   486  				Tombstoned:   true,
   487  				Metadata: metricpb.Metadata{
   488  					Pipelines: []metricpb.PipelineMetadata{
   489  						{
   490  							AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Count)[0]},
   491  							StoragePolicies: []policypb.StoragePolicy{
   492  								{
   493  									Resolution: policypb.Resolution{
   494  										WindowSize: time.Second.Nanoseconds(),
   495  										Precision:  time.Second.Nanoseconds(),
   496  									},
   497  									Retention: policypb.Retention{
   498  										Period: time.Hour.Nanoseconds(),
   499  									},
   500  								},
   501  							},
   502  						},
   503  						{
   504  							AggregationId: aggregationpb.AggregationID{Id: 0},
   505  							StoragePolicies: []policypb.StoragePolicy{
   506  								{
   507  									Resolution: policypb.Resolution{
   508  										WindowSize: time.Minute.Nanoseconds(),
   509  										Precision:  time.Minute.Nanoseconds(),
   510  									},
   511  									Retention: policypb.Retention{
   512  										Period: (6 * time.Hour).Nanoseconds(),
   513  									},
   514  								},
   515  								{
   516  									Resolution: policypb.Resolution{
   517  										WindowSize: time.Hour.Nanoseconds(),
   518  										Precision:  time.Hour.Nanoseconds(),
   519  									},
   520  									Retention: policypb.Retention{
   521  										Period: (30 * 24 * time.Hour).Nanoseconds(),
   522  									},
   523  								},
   524  							},
   525  							Pipeline: pipelinepb.AppliedPipeline{
   526  								Ops: []pipelinepb.AppliedPipelineOp{
   527  									{
   528  										Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
   529  										Transformation: pipelinepb.TransformationOp{
   530  											Type: transformationpb.TransformationType_ABSOLUTE,
   531  										},
   532  									},
   533  									{
   534  										Type: pipelinepb.AppliedPipelineOp_ROLLUP,
   535  										Rollup: pipelinepb.AppliedRollupOp{
   536  											Id:            []byte("foo"),
   537  											AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.Last, aggregation.Sum)[0]},
   538  										},
   539  									},
   540  								},
   541  							},
   542  						},
   543  					},
   544  				},
   545  			},
   546  			{
   547  				CutoverNanos: 32768,
   548  				Tombstoned:   false,
   549  				Metadata: metricpb.Metadata{
   550  					Pipelines: []metricpb.PipelineMetadata{
   551  						{
   552  							AggregationId: aggregationpb.AggregationID{Id: 0},
   553  							Pipeline: pipelinepb.AppliedPipeline{
   554  								Ops: []pipelinepb.AppliedPipelineOp{
   555  									{
   556  										Type: pipelinepb.AppliedPipelineOp_TRANSFORMATION,
   557  										Transformation: pipelinepb.TransformationOp{
   558  											Type: transformationpb.TransformationType_PERSECOND,
   559  										},
   560  									},
   561  									{
   562  										Type: pipelinepb.AppliedPipelineOp_ROLLUP,
   563  										Rollup: pipelinepb.AppliedRollupOp{
   564  											Id:            []byte("bar"),
   565  											AggregationId: aggregationpb.AggregationID{Id: aggregation.MustCompressTypes(aggregation.P99)[0]},
   566  										},
   567  									},
   568  								},
   569  							},
   570  						},
   571  					},
   572  				},
   573  			},
   574  		},
   575  	}
   576  	testBadStagedMetadatasProto = metricpb.StagedMetadatas{
   577  		Metadatas: []metricpb.StagedMetadata{
   578  			{
   579  				Metadata: metricpb.Metadata{
   580  					Pipelines: []metricpb.PipelineMetadata{
   581  						{
   582  							StoragePolicies: []policypb.StoragePolicy{
   583  								{},
   584  							},
   585  						},
   586  					},
   587  				},
   588  			},
   589  		},
   590  	}
   591  	testDropMustDropPolicyPipelineMetadata = PipelineMetadata{
   592  		AggregationID:   aggregation.DefaultID,
   593  		StoragePolicies: []policy.StoragePolicy{},
   594  		Pipeline:        applied.DefaultPipeline,
   595  		DropPolicy:      policy.DropMust,
   596  	}
   597  	testDropExceptIfMatchOtherDropPolicyPipelineMetadata = PipelineMetadata{
   598  		AggregationID:   aggregation.DefaultID,
   599  		StoragePolicies: []policy.StoragePolicy{},
   600  		Pipeline:        applied.DefaultPipeline,
   601  		DropPolicy:      policy.DropMust,
   602  	}
   603  )
   604  
// TestStagedMetadatasIsDefault enumerates metadata shapes and asserts that
// IsDefault reports true only for a single staged metadata whose every field
// holds its zero/default value.
func TestStagedMetadatasIsDefault(t *testing.T) {
	inputs := []struct {
		metadatas StagedMetadatas
		expected  bool
	}{
		// One staged metadata with a single empty pipeline: default.
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{},
						},
					},
				},
			},
			expected: true,
		},
		{
			metadatas: DefaultStagedMetadatas,
			expected:  true,
		},
		// An empty list is NOT considered default.
		{
			metadatas: StagedMetadatas{},
			expected:  false,
		},
		// Non-zero cutover time.
		{
			metadatas: StagedMetadatas{
				{
					CutoverNanos: 1234,
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{},
						},
					},
				},
			},
			expected: false,
		},
		// Tombstoned entry.
		{
			metadatas: StagedMetadatas{
				{
					Tombstoned: true,
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{},
						},
					},
				},
			},
			expected: false,
		},
		// Non-default aggregation ID.
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{
								AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
							},
						},
					},
				},
			},
			expected: false,
		},
		// Non-empty storage policies.
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{
								StoragePolicies: []policy.StoragePolicy{
									policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
								},
							},
						},
					},
				},
			},
			expected: false,
		},
		// Non-empty pipeline (transformation op).
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{
								Pipeline: applied.NewPipeline([]applied.OpUnion{
									{
										Type:           pipeline.TransformationOpType,
										Transformation: pipeline.TransformationOp{Type: transformation.Absolute},
									},
								}),
							},
						},
					},
				},
			},
			expected: false,
		},
		// Rollup op carrying an ID.
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{
								Pipeline: applied.NewPipeline([]applied.OpUnion{
									{
										Type:   pipeline.RollupOpType,
										Rollup: applied.RollupOp{ID: []byte("foo")},
									},
								}),
							},
						},
					},
				},
			},
			expected: false,
		},
		// Rollup op carrying an aggregation ID.
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{
								Pipeline: applied.NewPipeline([]applied.OpUnion{
									{
										Type:   pipeline.RollupOpType,
										Rollup: applied.RollupOp{AggregationID: aggregation.MustCompressTypes(aggregation.Sum)},
									},
								}),
							},
						},
					},
				},
			},
			expected: false,
		},
		// More than one staged metadata, even if each is individually default.
		{
			metadatas: StagedMetadatas{
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{},
						},
					},
				},
				{
					Metadata: Metadata{
						Pipelines: []PipelineMetadata{
							{},
						},
					},
				},
			},
			expected: false,
		},
	}

	for _, input := range inputs {
		input := input // capture the range variable for the subtest closure
		t.Run(fmt.Sprintf("%v", input.metadatas), func(t *testing.T) {
			require.Equal(t, input.expected, input.metadatas.IsDefault())
		})
	}
}
   771  
   772  func TestForwardMetadataToProto(t *testing.T) {
   773  	inputs := []struct {
   774  		sequence []ForwardMetadata
   775  		expected []metricpb.ForwardMetadata
   776  	}{
   777  		{
   778  			sequence: []ForwardMetadata{
   779  				testSmallForwardMetadata,
   780  				testLargeForwardMetadata,
   781  			},
   782  			expected: []metricpb.ForwardMetadata{
   783  				testSmallForwardMetadataProto,
   784  				testLargeForwardMetadataProto,
   785  			},
   786  		},
   787  		{
   788  			sequence: []ForwardMetadata{
   789  				testLargeForwardMetadata,
   790  				testSmallForwardMetadata,
   791  			},
   792  			expected: []metricpb.ForwardMetadata{
   793  				testLargeForwardMetadataProto,
   794  				testSmallForwardMetadataProto,
   795  			},
   796  		},
   797  	}
   798  
   799  	for _, input := range inputs {
   800  		var pb metricpb.ForwardMetadata
   801  		for i, meta := range input.sequence {
   802  			require.NoError(t, meta.ToProto(&pb))
   803  			require.Equal(t, input.expected[i], pb)
   804  		}
   805  	}
   806  }
   807  
   808  func TestForwardMetadataFromProto(t *testing.T) {
   809  	inputs := []struct {
   810  		sequence []metricpb.ForwardMetadata
   811  		expected []ForwardMetadata
   812  	}{
   813  		{
   814  			sequence: []metricpb.ForwardMetadata{
   815  				testSmallForwardMetadataProto,
   816  				testLargeForwardMetadataProto,
   817  			},
   818  			expected: []ForwardMetadata{
   819  				testSmallForwardMetadata,
   820  				testLargeForwardMetadata,
   821  			},
   822  		},
   823  		{
   824  			sequence: []metricpb.ForwardMetadata{
   825  				testLargeForwardMetadataProto,
   826  				testSmallForwardMetadataProto,
   827  			},
   828  			expected: []ForwardMetadata{
   829  				testLargeForwardMetadata,
   830  				testSmallForwardMetadata,
   831  			},
   832  		},
   833  	}
   834  
   835  	for _, input := range inputs {
   836  		var res ForwardMetadata
   837  		for i, pb := range input.sequence {
   838  			require.NoError(t, res.FromProto(pb))
   839  			require.Equal(t, input.expected[i], res)
   840  		}
   841  	}
   842  }
   843  
   844  func TestForwardMetadataRoundtrip(t *testing.T) {
   845  	inputs := [][]ForwardMetadata{
   846  		{
   847  			testSmallForwardMetadata,
   848  			testLargeForwardMetadata,
   849  		},
   850  		{
   851  			testLargeForwardMetadata,
   852  			testSmallForwardMetadata,
   853  		},
   854  	}
   855  
   856  	for _, input := range inputs {
   857  		var (
   858  			pb  metricpb.ForwardMetadata
   859  			res ForwardMetadata
   860  		)
   861  		for _, metadata := range input {
   862  			require.NoError(t, metadata.ToProto(&pb))
   863  			require.NoError(t, res.FromProto(pb))
   864  			require.Equal(t, metadata, res)
   865  		}
   866  	}
   867  }
   868  
   869  func TestForwardMetadataToProtoBadMetadata(t *testing.T) {
   870  	var pb metricpb.ForwardMetadata
   871  	require.Error(t, testBadForwardMetadata.ToProto(&pb))
   872  }
   873  
   874  func TestForwardMetadataFromProtoBadMetadataProto(t *testing.T) {
   875  	var res ForwardMetadata
   876  	require.Error(t, res.FromProto(testBadForwardMetadataProto))
   877  }
   878  
   879  func TestPipelineMetadataClone(t *testing.T) {
   880  	cloned1 := testLargePipelineMetadata.Clone()
   881  	cloned2 := testLargePipelineMetadata.Clone()
   882  	require.True(t, cloned1.Equal(testLargePipelineMetadata))
   883  	require.True(t, cloned2.Equal(testLargePipelineMetadata))
   884  
   885  	// Assert that modifying the clone does not mutate the original pipeline metadata.
   886  	cloned1.StoragePolicies[0] = policy.MustParseStoragePolicy("1h:1h")
   887  	require.False(t, cloned1.Equal(testLargePipelineMetadata))
   888  	require.True(t, cloned2.Equal(testLargePipelineMetadata))
   889  }
   890  
   891  func TestPipelineMetadataToProto(t *testing.T) {
   892  	inputs := []struct {
   893  		sequence []PipelineMetadata
   894  		expected []metricpb.PipelineMetadata
   895  	}{
   896  		{
   897  			sequence: []PipelineMetadata{
   898  				testSmallPipelineMetadata,
   899  				testLargePipelineMetadata,
   900  			},
   901  			expected: []metricpb.PipelineMetadata{
   902  				testSmallPipelineMetadataProto,
   903  				testLargePipelineMetadataProto,
   904  			},
   905  		},
   906  		{
   907  			sequence: []PipelineMetadata{
   908  				testLargePipelineMetadata,
   909  				testSmallPipelineMetadata,
   910  			},
   911  			expected: []metricpb.PipelineMetadata{
   912  				testLargePipelineMetadataProto,
   913  				testSmallPipelineMetadataProto,
   914  			},
   915  		},
   916  	}
   917  
   918  	for _, input := range inputs {
   919  		var pb metricpb.PipelineMetadata
   920  		for i, meta := range input.sequence {
   921  			require.NoError(t, meta.ToProto(&pb))
   922  			require.Equal(t, input.expected[i], pb)
   923  		}
   924  	}
   925  }
   926  
   927  func TestPipelineMetadataFromProto(t *testing.T) {
   928  	inputs := []struct {
   929  		sequence []metricpb.PipelineMetadata
   930  		expected []PipelineMetadata
   931  	}{
   932  		{
   933  			sequence: []metricpb.PipelineMetadata{
   934  				testSmallPipelineMetadataProto,
   935  				testLargePipelineMetadataProto,
   936  			},
   937  			expected: []PipelineMetadata{
   938  				testSmallPipelineMetadata,
   939  				testLargePipelineMetadata,
   940  			},
   941  		},
   942  		{
   943  			sequence: []metricpb.PipelineMetadata{
   944  				testLargePipelineMetadataProto,
   945  				testSmallPipelineMetadataProto,
   946  			},
   947  			expected: []PipelineMetadata{
   948  				testLargePipelineMetadata,
   949  				testSmallPipelineMetadata,
   950  			},
   951  		},
   952  	}
   953  
   954  	for _, input := range inputs {
   955  		var res PipelineMetadata
   956  		for i, pb := range input.sequence {
   957  			require.NoError(t, res.FromProto(pb))
   958  			require.Equal(t, input.expected[i], res)
   959  		}
   960  	}
   961  }
   962  
   963  func TestPipelineMetadataRoundTrip(t *testing.T) {
   964  	inputs := [][]PipelineMetadata{
   965  		{
   966  			testSmallPipelineMetadata,
   967  			testLargePipelineMetadata,
   968  		},
   969  		{
   970  			testLargePipelineMetadata,
   971  			testSmallPipelineMetadata,
   972  		},
   973  	}
   974  
   975  	for _, input := range inputs {
   976  		var (
   977  			pb  metricpb.PipelineMetadata
   978  			res PipelineMetadata
   979  		)
   980  		for _, metadata := range input {
   981  			require.NoError(t, metadata.ToProto(&pb))
   982  			require.NoError(t, res.FromProto(pb))
   983  			require.Equal(t, metadata, res)
   984  		}
   985  	}
   986  }
   987  
   988  func TestPipelineMetadataToProtoBadMetadata(t *testing.T) {
   989  	var pb metricpb.PipelineMetadata
   990  	require.Error(t, testBadPipelineMetadata.ToProto(&pb))
   991  }
   992  
   993  func TestPipelineMetadataFromProtoBadMetadataProto(t *testing.T) {
   994  	var res PipelineMetadata
   995  	require.Error(t, res.FromProto(testBadPipelineMetadataProto))
   996  }
   997  
   998  func TestPipelineMetadatasClone(t *testing.T) {
   999  	input := PipelineMetadatas{
  1000  		testSmallPipelineMetadata,
  1001  		testLargePipelineMetadata,
  1002  	}
  1003  	cloned1 := input.Clone()
  1004  	cloned2 := input.Clone()
  1005  	require.True(t, cloned1.Equal(input))
  1006  	require.True(t, cloned2.Equal(input))
  1007  
  1008  	// Assert that modifying the clone does not mutate the original pipeline metadata.
  1009  	cloned1[0].StoragePolicies[0] = policy.MustParseStoragePolicy("1h:1h")
  1010  	require.False(t, cloned1.Equal(input))
  1011  	require.True(t, cloned2.Equal(input))
  1012  }
  1013  
  1014  func TestStagedMetadatasToProto(t *testing.T) {
  1015  	inputs := []struct {
  1016  		sequence []StagedMetadatas
  1017  		expected []metricpb.StagedMetadatas
  1018  	}{
  1019  		{
  1020  			sequence: []StagedMetadatas{
  1021  				testSmallStagedMetadatas,
  1022  				testLargeStagedMetadatas,
  1023  			},
  1024  			expected: []metricpb.StagedMetadatas{
  1025  				testSmallStagedMetadatasProto,
  1026  				testLargeStagedMetadatasProto,
  1027  			},
  1028  		},
  1029  		{
  1030  			sequence: []StagedMetadatas{
  1031  				testLargeStagedMetadatas,
  1032  				testSmallStagedMetadatas,
  1033  			},
  1034  			expected: []metricpb.StagedMetadatas{
  1035  				testLargeStagedMetadatasProto,
  1036  				testSmallStagedMetadatasProto,
  1037  			},
  1038  		},
  1039  	}
  1040  
  1041  	for _, input := range inputs {
  1042  		var pb metricpb.StagedMetadatas
  1043  		for i, meta := range input.sequence {
  1044  			require.NoError(t, meta.ToProto(&pb))
  1045  			require.Equal(t, input.expected[i], pb)
  1046  		}
  1047  	}
  1048  }
  1049  
  1050  func TestStagedMetadatasFromProto(t *testing.T) {
  1051  	inputs := []struct {
  1052  		sequence []metricpb.StagedMetadatas
  1053  		expected []StagedMetadatas
  1054  	}{
  1055  		{
  1056  			sequence: []metricpb.StagedMetadatas{
  1057  				testSmallStagedMetadatasProto,
  1058  				testLargeStagedMetadatasProto,
  1059  			},
  1060  			expected: []StagedMetadatas{
  1061  				testSmallStagedMetadatas,
  1062  				testLargeStagedMetadatas,
  1063  			},
  1064  		},
  1065  		{
  1066  			sequence: []metricpb.StagedMetadatas{
  1067  				testLargeStagedMetadatasProto,
  1068  				testSmallStagedMetadatasProto,
  1069  			},
  1070  			expected: []StagedMetadatas{
  1071  				testLargeStagedMetadatas,
  1072  				testSmallStagedMetadatas,
  1073  			},
  1074  		},
  1075  	}
  1076  
  1077  	for _, input := range inputs {
  1078  		var resOpt, resReference StagedMetadatas
  1079  		for i, pb := range input.sequence {
  1080  			require.NoError(t, resReference.fromProto(pb))
  1081  			require.NoError(t, resOpt.FromProto(pb))
  1082  			require.Equal(t, input.expected[i], resOpt)
  1083  			require.Equal(t, input.expected[i], resReference)
  1084  			require.Equal(t, resOpt, resReference)
  1085  		}
  1086  	}
  1087  }
  1088  
  1089  func TestStagedMetadatasRoundTrip(t *testing.T) {
  1090  	inputs := [][]StagedMetadatas{
  1091  		{
  1092  			testSmallStagedMetadatas,
  1093  			testLargeStagedMetadatas,
  1094  		},
  1095  		{
  1096  			testLargeStagedMetadatas,
  1097  			testSmallStagedMetadatas,
  1098  		},
  1099  	}
  1100  
  1101  	for _, input := range inputs {
  1102  		var (
  1103  			pb  metricpb.StagedMetadatas
  1104  			res StagedMetadatas
  1105  		)
  1106  		for _, metadata := range input {
  1107  			require.NoError(t, metadata.ToProto(&pb))
  1108  			require.NoError(t, res.FromProto(pb))
  1109  			require.Equal(t, metadata, res)
  1110  		}
  1111  	}
  1112  }
  1113  
  1114  func TestStagedMetadatasToProtoBadMetadatas(t *testing.T) {
  1115  	var pb metricpb.StagedMetadatas
  1116  	require.Error(t, testBadStagedMetadatas.ToProto(&pb))
  1117  }
  1118  
  1119  func TestStagedMetadatasFromProtoBadMetadatasProto(t *testing.T) {
  1120  	var res StagedMetadatas
  1121  	require.Error(t, res.FromProto(testBadStagedMetadatasProto))
  1122  }
  1123  
  1124  func TestVersionedStagedMetadatasMarshalJSON(t *testing.T) {
  1125  	vs := VersionedStagedMetadatas{
  1126  		Version: 12,
  1127  		StagedMetadatas: StagedMetadatas{
  1128  			{
  1129  				CutoverNanos: 4567,
  1130  				Tombstoned:   true,
  1131  				Metadata: Metadata{
  1132  					Pipelines: []PipelineMetadata{
  1133  						{
  1134  							ResendEnabled: true,
  1135  							AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1136  							StoragePolicies: []policy.StoragePolicy{
  1137  								policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
  1138  								policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1139  							},
  1140  						},
  1141  						{
  1142  							AggregationID: aggregation.DefaultID,
  1143  							StoragePolicies: []policy.StoragePolicy{
  1144  								policy.NewStoragePolicy(10*time.Second, xtime.Second, time.Hour),
  1145  							},
  1146  						},
  1147  					},
  1148  				},
  1149  			},
  1150  		},
  1151  	}
  1152  	res, err := json.Marshal(vs)
  1153  	require.NoError(t, err)
  1154  
  1155  	expected := `{` +
  1156  		`"stagedMetadatas":` +
  1157  		`[{"metadata":{"pipelines":[` +
  1158  		`{"storagePolicies":["1s:1h","1m:12h"],"aggregation":["Sum"],"resendEnabled":true},` +
  1159  		`{"storagePolicies":["10s:1h"],"aggregation":null}]},` +
  1160  		`"cutoverNanos":4567,` +
  1161  		`"tombstoned":true}],` +
  1162  		`"version":12` +
  1163  		`}`
  1164  	require.Equal(t, expected, string(res))
  1165  }
  1166  
  1167  func TestVersionedStagedMetadatasMarshalJSONRoundtrip(t *testing.T) {
  1168  	vs := VersionedStagedMetadatas{
  1169  		Version: 12,
  1170  		StagedMetadatas: StagedMetadatas{
  1171  			{
  1172  				CutoverNanos: 4567,
  1173  				Tombstoned:   true,
  1174  				Metadata: Metadata{
  1175  					Pipelines: []PipelineMetadata{
  1176  						{
  1177  							AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1178  							StoragePolicies: []policy.StoragePolicy{
  1179  								policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
  1180  								policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1181  							},
  1182  							DropPolicy: policy.DropNone,
  1183  						},
  1184  						{
  1185  							AggregationID: aggregation.DefaultID,
  1186  							StoragePolicies: []policy.StoragePolicy{
  1187  								policy.NewStoragePolicy(10*time.Second, xtime.Second, time.Hour),
  1188  							},
  1189  							DropPolicy: policy.DropNone,
  1190  						},
  1191  					},
  1192  				},
  1193  			},
  1194  		},
  1195  	}
  1196  	b, err := json.Marshal(vs)
  1197  	require.NoError(t, err)
  1198  	var res VersionedStagedMetadatas
  1199  	require.NoError(t, json.Unmarshal(b, &res))
  1200  	require.Equal(t, vs, res)
  1201  }
  1202  
  1203  func TestDropMustDropPolicyPipelineMetadata(t *testing.T) {
  1204  	require.True(t, testDropMustDropPolicyPipelineMetadata.IsDropPolicyApplied())
  1205  }
  1206  
  1207  func TestDropExceptIfMatchOtherDropPolicyPipelineMetadata(t *testing.T) {
  1208  	require.True(t, testDropExceptIfMatchOtherDropPolicyPipelineMetadata.IsDropPolicyApplied())
  1209  }
  1210  
  1211  func TestApplyOrRemoveDropPoliciesDropMust(t *testing.T) {
  1212  	input := PipelineMetadatas{
  1213  		{
  1214  			AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1215  			StoragePolicies: []policy.StoragePolicy{
  1216  				policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
  1217  				policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1218  			},
  1219  			DropPolicy: policy.DropNone,
  1220  		},
  1221  		{
  1222  			AggregationID:   aggregation.DefaultID,
  1223  			StoragePolicies: nil,
  1224  			DropPolicy:      policy.DropMust,
  1225  		},
  1226  		{
  1227  			AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1228  			StoragePolicies: []policy.StoragePolicy{
  1229  				policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1230  				policy.NewStoragePolicy(10*time.Minute, xtime.Minute, 24*time.Hour),
  1231  			},
  1232  			DropPolicy: policy.DropNone,
  1233  		},
  1234  	}
  1235  	output, result := input.ApplyOrRemoveDropPolicies()
  1236  	require.Equal(t, AppliedEffectiveDropPolicyResult, result)
  1237  	require.True(t, output.Equal(DropPipelineMetadatas))
  1238  }
  1239  
  1240  func TestApplyOrRemoveDropPoliciesDropIfOnlyMatchEffective(t *testing.T) {
  1241  	input := PipelineMetadatas{
  1242  		{
  1243  			AggregationID:   aggregation.DefaultID,
  1244  			StoragePolicies: nil,
  1245  			DropPolicy:      policy.DropIfOnlyMatch,
  1246  		},
  1247  	}
  1248  	output, result := input.ApplyOrRemoveDropPolicies()
  1249  	require.Equal(t, AppliedEffectiveDropPolicyResult, result)
  1250  	require.True(t, output.Equal(DropPipelineMetadatas))
  1251  
  1252  	// Ensure that modifying output does not affect DropPipelineMetadatas,
  1253  	// to prevent regressions where global variables are returned.
  1254  	output[0].AggregationID = aggregation.MustCompressTypes(aggregation.Count)
  1255  	require.False(t, output.Equal(DropPipelineMetadatas))
  1256  }
  1257  
  1258  func TestApplyOrRemoveDropPoliciesDropIfOnlyMatchMiddleIneffective(t *testing.T) {
  1259  	validRules := PipelineMetadatas{
  1260  		{
  1261  			AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1262  			StoragePolicies: []policy.StoragePolicy{
  1263  				policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
  1264  				policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1265  			},
  1266  			DropPolicy: policy.DropNone,
  1267  		},
  1268  		{
  1269  			AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1270  			StoragePolicies: []policy.StoragePolicy{
  1271  				policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1272  				policy.NewStoragePolicy(10*time.Minute, xtime.Minute, 24*time.Hour),
  1273  			},
  1274  			DropPolicy: policy.DropNone,
  1275  		},
  1276  	}
  1277  
  1278  	// Run test for every single insertion point
  1279  	for i := 0; i < len(validRules)+1; i++ {
  1280  		t.Run(fmt.Sprintf("test insert drop if only rule at %d", i),
  1281  			func(t *testing.T) {
  1282  				var (
  1283  					copy  = append(PipelineMetadatas(nil), validRules...)
  1284  					input PipelineMetadatas
  1285  				)
  1286  				for j := 0; j < len(validRules)+1; j++ {
  1287  					if j == i {
  1288  						// Insert the drop if only match rule at this position
  1289  						input = append(input, PipelineMetadata{
  1290  							AggregationID:   aggregation.DefaultID,
  1291  							StoragePolicies: nil,
  1292  							DropPolicy:      policy.DropIfOnlyMatch,
  1293  						})
  1294  					} else {
  1295  						input = append(input, copy[0])
  1296  						copy = copy[1:]
  1297  					}
  1298  				}
  1299  
  1300  				output, result := input.ApplyOrRemoveDropPolicies()
  1301  				require.Equal(t, RemovedIneffectiveDropPoliciesResult, result)
  1302  				require.True(t, output.Equal(validRules))
  1303  			})
  1304  	}
  1305  }
  1306  
  1307  func TestApplyOrRemoveDropPoliciesDropIfOnlyMatchNone(t *testing.T) {
  1308  	input := PipelineMetadatas{
  1309  		{
  1310  			AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1311  			StoragePolicies: []policy.StoragePolicy{
  1312  				policy.NewStoragePolicy(time.Second, xtime.Second, time.Hour),
  1313  				policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1314  			},
  1315  			DropPolicy: policy.DropNone,
  1316  		},
  1317  		{
  1318  			AggregationID: aggregation.MustCompressTypes(aggregation.Sum),
  1319  			StoragePolicies: []policy.StoragePolicy{
  1320  				policy.NewStoragePolicy(time.Minute, xtime.Minute, 12*time.Hour),
  1321  				policy.NewStoragePolicy(10*time.Minute, xtime.Minute, 24*time.Hour),
  1322  			},
  1323  			DropPolicy: policy.DropNone,
  1324  		},
  1325  	}
  1326  	output, result := input.ApplyOrRemoveDropPolicies()
  1327  	require.Equal(t, NoDropPolicyPresentResult, result)
  1328  	require.True(t, output.Equal(input))
  1329  }
  1330  
  1331  func TestStagedMetadatasDropReturnsIsDropPolicyAppliedTrue(t *testing.T) {
  1332  	require.True(t, StagedMetadatas{
  1333  		StagedMetadata{Metadata: DropMetadata, CutoverNanos: 123},
  1334  	}.IsDropPolicyApplied())
  1335  }