github.com/thanos-io/thanos@v0.32.5/pkg/block/fetcher_test.go

     1  // Copyright (c) The Thanos Authors.
     2  // Licensed under the Apache License 2.0.
     3  
     4  package block
     5  
     6  import (
     7  	"bytes"
     8  	"context"
     9  	"encoding/json"
    10  	"fmt"
    11  	"math/rand"
    12  	"os"
    13  	"path"
    14  	"path/filepath"
    15  	"runtime"
    16  	"sort"
    17  	"testing"
    18  	"time"
    19  
    20  	"github.com/go-kit/log"
    21  	"github.com/oklog/ulid"
    22  	"github.com/pkg/errors"
    23  	"github.com/prometheus/client_golang/prometheus"
    24  	promtest "github.com/prometheus/client_golang/prometheus/testutil"
    25  	"github.com/prometheus/prometheus/tsdb"
    26  	"github.com/thanos-io/objstore"
    27  	"github.com/thanos-io/objstore/objtesting"
    28  
    29  	"github.com/efficientgo/core/testutil"
    30  	"github.com/thanos-io/thanos/pkg/block/metadata"
    31  	"github.com/thanos-io/thanos/pkg/extprom"
    32  	"github.com/thanos-io/thanos/pkg/model"
    33  )
    34  
    35  func newTestFetcherMetrics() *FetcherMetrics {
    36  	return &FetcherMetrics{
    37  		Synced:   extprom.NewTxGaugeVec(nil, prometheus.GaugeOpts{}, []string{"state"}),
    38  		Modified: extprom.NewTxGaugeVec(nil, prometheus.GaugeOpts{}, []string{"modified"}),
    39  	}
    40  }
    41  
    42  type ulidFilter struct {
    43  	ulidToDelete *ulid.ULID
    44  }
    45  
    46  func (f *ulidFilter) Filter(_ context.Context, metas map[ulid.ULID]*metadata.Meta, synced GaugeVec, modified GaugeVec) error {
    47  	if _, ok := metas[*f.ulidToDelete]; ok {
    48  		synced.WithLabelValues("filtered").Inc()
    49  		delete(metas, *f.ulidToDelete)
    50  	}
    51  	return nil
    52  }
    53  
    54  func ULID(i int) ulid.ULID { return ulid.MustNew(uint64(i), nil) }
    55  
    56  func ULIDs(is ...int) []ulid.ULID {
    57  	ret := []ulid.ULID{}
    58  	for _, i := range is {
    59  		ret = append(ret, ulid.MustNew(uint64(i), nil))
    60  	}
    61  
    62  	return ret
    63  }
    64  
    65  func TestMetaFetcher_Fetch(t *testing.T) {
    66  	objtesting.ForeachStore(t, func(t *testing.T, bkt objstore.Bucket) {
    67  		ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
    68  		defer cancel()
    69  
    70  		dir := t.TempDir()
    71  
    72  		var ulidToDelete ulid.ULID
    73  		r := prometheus.NewRegistry()
    74  		baseFetcher, err := NewBaseFetcher(log.NewNopLogger(), 20, objstore.WithNoopInstr(bkt), dir, r)
    75  		testutil.Ok(t, err)
    76  
    77  		fetcher := baseFetcher.NewMetaFetcher(r, []MetadataFilter{
    78  			&ulidFilter{ulidToDelete: &ulidToDelete},
    79  		}, nil)
    80  
    81  		for i, tcase := range []struct {
    82  			name                  string
    83  			do                    func()
    84  			filterULID            ulid.ULID
    85  			expectedMetas         []ulid.ULID
    86  			expectedCorruptedMeta []ulid.ULID
    87  			expectedNoMeta        []ulid.ULID
    88  			expectedFiltered      int
    89  			expectedMetaErr       error
    90  		}{
    91  			{
    92  				name: "empty bucket",
    93  				do:   func() {},
    94  
    95  				expectedMetas:         ULIDs(),
    96  				expectedCorruptedMeta: ULIDs(),
    97  				expectedNoMeta:        ULIDs(),
    98  			},
    99  			{
   100  				name: "3 metas in bucket",
   101  				do: func() {
   102  					var meta metadata.Meta
   103  					meta.Version = 1
   104  					meta.ULID = ULID(1)
   105  
   106  					var buf bytes.Buffer
   107  					testutil.Ok(t, json.NewEncoder(&buf).Encode(&meta))
   108  					testutil.Ok(t, bkt.Upload(ctx, path.Join(meta.ULID.String(), metadata.MetaFilename), &buf))
   109  
   110  					meta.ULID = ULID(2)
   111  					testutil.Ok(t, json.NewEncoder(&buf).Encode(&meta))
   112  					testutil.Ok(t, bkt.Upload(ctx, path.Join(meta.ULID.String(), metadata.MetaFilename), &buf))
   113  
   114  					meta.ULID = ULID(3)
   115  					testutil.Ok(t, json.NewEncoder(&buf).Encode(&meta))
   116  					testutil.Ok(t, bkt.Upload(ctx, path.Join(meta.ULID.String(), metadata.MetaFilename), &buf))
   117  				},
   118  
   119  				expectedMetas:         ULIDs(1, 2, 3),
   120  				expectedCorruptedMeta: ULIDs(),
   121  				expectedNoMeta:        ULIDs(),
   122  			},
   123  			{
   124  				name: "nothing changed",
   125  				do:   func() {},
   126  
   127  				expectedMetas:         ULIDs(1, 2, 3),
   128  				expectedCorruptedMeta: ULIDs(),
   129  				expectedNoMeta:        ULIDs(),
   130  			},
   131  			{
   132  				name: "fresh cache",
   133  				do: func() {
   134  					baseFetcher.cached = map[ulid.ULID]*metadata.Meta{}
   135  				},
   136  
   137  				expectedMetas:         ULIDs(1, 2, 3),
   138  				expectedCorruptedMeta: ULIDs(),
   139  				expectedNoMeta:        ULIDs(),
   140  			},
   141  			{
   142  				name: "fresh cache: meta 2 and 3 have corrupted data on disk",
   143  				do: func() {
   144  					baseFetcher.cached = map[ulid.ULID]*metadata.Meta{}
   145  
   146  					testutil.Ok(t, os.Remove(filepath.Join(dir, "meta-syncer", ULID(2).String(), MetaFilename)))
   147  
   148  					f, err := os.OpenFile(filepath.Join(dir, "meta-syncer", ULID(3).String(), MetaFilename), os.O_WRONLY, os.ModePerm)
   149  					testutil.Ok(t, err)
   150  
   151  					_, err = f.WriteString("{ almost")
   152  					testutil.Ok(t, err)
   153  					testutil.Ok(t, f.Close())
   154  				},
   155  
   156  				expectedMetas:         ULIDs(1, 2, 3),
   157  				expectedCorruptedMeta: ULIDs(),
   158  				expectedNoMeta:        ULIDs(),
   159  			},
   160  			{
   161  				name: "block without meta",
   162  				do: func() {
   163  					testutil.Ok(t, bkt.Upload(ctx, path.Join(ULID(4).String(), "some-file"), bytes.NewBuffer([]byte("something"))))
   164  				},
   165  
   166  				expectedMetas:         ULIDs(1, 2, 3),
   167  				expectedCorruptedMeta: ULIDs(),
   168  				expectedNoMeta:        ULIDs(4),
   169  			},
   170  			{
   171  				name: "corrupted meta.json",
   172  				do: func() {
   173  					testutil.Ok(t, bkt.Upload(ctx, path.Join(ULID(5).String(), MetaFilename), bytes.NewBuffer([]byte("{ not a json"))))
   174  				},
   175  
   176  				expectedMetas:         ULIDs(1, 2, 3),
   177  				expectedCorruptedMeta: ULIDs(5),
   178  				expectedNoMeta:        ULIDs(4),
   179  			},
   180  			{
   181  				name: "some added some deleted",
   182  				do: func() {
   183  					testutil.Ok(t, Delete(ctx, log.NewNopLogger(), bkt, ULID(2)))
   184  
   185  					var meta metadata.Meta
   186  					meta.Version = 1
   187  					meta.ULID = ULID(6)
   188  
   189  					var buf bytes.Buffer
   190  					testutil.Ok(t, json.NewEncoder(&buf).Encode(&meta))
   191  					testutil.Ok(t, bkt.Upload(ctx, path.Join(meta.ULID.String(), metadata.MetaFilename), &buf))
   192  				},
   193  
   194  				expectedMetas:         ULIDs(1, 3, 6),
   195  				expectedCorruptedMeta: ULIDs(5),
   196  				expectedNoMeta:        ULIDs(4),
   197  			},
   198  			{
   199  				name:       "filter not existing ulid",
   200  				do:         func() {},
   201  				filterULID: ULID(10),
   202  
   203  				expectedMetas:         ULIDs(1, 3, 6),
   204  				expectedCorruptedMeta: ULIDs(5),
   205  				expectedNoMeta:        ULIDs(4),
   206  			},
   207  			{
   208  				name:       "filter ulid 1",
   209  				do:         func() {},
   210  				filterULID: ULID(1),
   211  
   212  				expectedMetas:         ULIDs(3, 6),
   213  				expectedCorruptedMeta: ULIDs(5),
   214  				expectedNoMeta:        ULIDs(4),
   215  				expectedFiltered:      1,
   216  			},
   217  			{
   218  				name: "error: not supported meta version",
   219  				do: func() {
   220  					var meta metadata.Meta
   221  					meta.Version = 20
   222  					meta.ULID = ULID(7)
   223  
   224  					var buf bytes.Buffer
   225  					testutil.Ok(t, json.NewEncoder(&buf).Encode(&meta))
   226  					testutil.Ok(t, bkt.Upload(ctx, path.Join(meta.ULID.String(), metadata.MetaFilename), &buf))
   227  				},
   228  
   229  				expectedMetas:         ULIDs(1, 3, 6),
   230  				expectedCorruptedMeta: ULIDs(5),
   231  				expectedNoMeta:        ULIDs(4),
   232  				expectedMetaErr:       errors.New("incomplete view: unexpected meta file: 00000000070000000000000000/meta.json version: 20"),
   233  			},
   234  		} {
   235  			if ok := t.Run(tcase.name, func(t *testing.T) {
   236  				tcase.do()
   237  
   238  				ulidToDelete = tcase.filterULID
   239  				metas, partial, err := fetcher.Fetch(ctx)
   240  				if tcase.expectedMetaErr != nil {
   241  					testutil.NotOk(t, err)
   242  					testutil.Equals(t, tcase.expectedMetaErr.Error(), err.Error())
   243  				} else {
   244  					testutil.Ok(t, err)
   245  				}
   246  
   247  				{
   248  					metasSlice := make([]ulid.ULID, 0, len(metas))
   249  					for id, m := range metas {
   250  						testutil.Assert(t, m != nil, "meta is nil")
   251  						metasSlice = append(metasSlice, id)
   252  					}
   253  					sort.Slice(metasSlice, func(i, j int) bool {
   254  						return metasSlice[i].Compare(metasSlice[j]) < 0
   255  					})
   256  					testutil.Equals(t, tcase.expectedMetas, metasSlice)
   257  				}
   258  
   259  				{
   260  					partialSlice := make([]ulid.ULID, 0, len(partial))
   261  					for id := range partial {
   262  
   263  						partialSlice = append(partialSlice, id)
   264  					}
   265  					sort.Slice(partialSlice, func(i, j int) bool {
   266  						return partialSlice[i].Compare(partialSlice[j]) > 0
   267  					})
   268  					expected := append([]ulid.ULID{}, tcase.expectedCorruptedMeta...)
   269  					expected = append(expected, tcase.expectedNoMeta...)
   270  					sort.Slice(expected, func(i, j int) bool {
   271  						return expected[i].Compare(expected[j]) > 0
   272  					})
   273  					testutil.Equals(t, expected, partialSlice)
   274  				}
   275  
   276  				expectedFailures := 0
   277  				if tcase.expectedMetaErr != nil {
   278  					expectedFailures = 1
   279  				}
   280  				testutil.Equals(t, float64(i+1), promtest.ToFloat64(baseFetcher.syncs))
   281  				testutil.Equals(t, float64(i+1), promtest.ToFloat64(fetcher.metrics.Syncs))
   282  				testutil.Equals(t, float64(len(tcase.expectedMetas)), promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues(LoadedMeta)))
   283  				testutil.Equals(t, float64(len(tcase.expectedNoMeta)), promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues(NoMeta)))
   284  				testutil.Equals(t, float64(tcase.expectedFiltered), promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues("filtered")))
   285  				testutil.Equals(t, 0.0, promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues(labelExcludedMeta)))
   286  				testutil.Equals(t, 0.0, promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues(timeExcludedMeta)))
   287  				testutil.Equals(t, float64(expectedFailures), promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues(FailedMeta)))
   288  				testutil.Equals(t, 0.0, promtest.ToFloat64(fetcher.metrics.Synced.WithLabelValues(tooFreshMeta)))
   289  			}); !ok {
   290  				return
   291  			}
   292  		}
   293  	})
   294  }
   295  
   296  func TestLabelShardedMetaFilter_Filter_Basic(t *testing.T) {
   297  	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
   298  	defer cancel()
   299  
   300  	relabelContentYaml := `
   301      - action: drop
   302        regex: "A"
   303        source_labels:
   304        - cluster
   305      - action: keep
   306        regex: "keepme"
   307        source_labels:
   308        - message
   309      `
   310  	relabelConfig, err := ParseRelabelConfig([]byte(relabelContentYaml), SelectorSupportedRelabelActions)
   311  	testutil.Ok(t, err)
   312  
   313  	f := NewLabelShardedMetaFilter(relabelConfig)
   314  
   315  	input := map[ulid.ULID]*metadata.Meta{
   316  		ULID(1): {
   317  			Thanos: metadata.Thanos{
   318  				Labels: map[string]string{"cluster": "B", "message": "keepme"},
   319  			},
   320  		},
   321  		ULID(2): {
   322  			Thanos: metadata.Thanos{
   323  				Labels: map[string]string{"something": "A", "message": "keepme"},
   324  			},
   325  		},
   326  		ULID(3): {
   327  			Thanos: metadata.Thanos{
   328  				Labels: map[string]string{"cluster": "A", "message": "keepme"},
   329  			},
   330  		},
   331  		ULID(4): {
   332  			Thanos: metadata.Thanos{
   333  				Labels: map[string]string{"cluster": "A", "something": "B", "message": "keepme"},
   334  			},
   335  		},
   336  		ULID(5): {
   337  			Thanos: metadata.Thanos{
   338  				Labels: map[string]string{"cluster": "B"},
   339  			},
   340  		},
   341  		ULID(6): {
   342  			Thanos: metadata.Thanos{
   343  				Labels: map[string]string{"cluster": "B", "message": "keepme"},
   344  			},
   345  		},
   346  	}
   347  	expected := map[ulid.ULID]*metadata.Meta{
   348  		ULID(1): input[ULID(1)],
   349  		ULID(2): input[ULID(2)],
   350  		ULID(6): input[ULID(6)],
   351  	}
   352  
   353  	m := newTestFetcherMetrics()
   354  	testutil.Ok(t, f.Filter(ctx, input, m.Synced, nil))
   355  
   356  	testutil.Equals(t, 3.0, promtest.ToFloat64(m.Synced.WithLabelValues(labelExcludedMeta)))
   357  	testutil.Equals(t, expected, input)
   358  
   359  }
   360  
   361  func TestLabelShardedMetaFilter_Filter_Hashmod(t *testing.T) {
   362  	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
   363  	defer cancel()
   364  
   365  	relabelContentYamlFmt := `
   366      - action: hashmod
   367        source_labels: ["%s"]
   368        target_label: shard
   369        modulus: 3
   370      - action: keep
   371        source_labels: ["shard"]
   372        regex: %d
   373  `
   374  	for i := 0; i < 3; i++ {
   375  		t.Run(fmt.Sprintf("%v", i), func(t *testing.T) {
   376  			relabelConfig, err := ParseRelabelConfig([]byte(fmt.Sprintf(relabelContentYamlFmt, BlockIDLabel, i)), SelectorSupportedRelabelActions)
   377  			testutil.Ok(t, err)
   378  
   379  			f := NewLabelShardedMetaFilter(relabelConfig)
   380  
   381  			input := map[ulid.ULID]*metadata.Meta{
   382  				ULID(1): {
   383  					Thanos: metadata.Thanos{
   384  						Labels: map[string]string{"cluster": "B", "message": "keepme"},
   385  					},
   386  				},
   387  				ULID(2): {
   388  					Thanos: metadata.Thanos{
   389  						Labels: map[string]string{"something": "A", "message": "keepme"},
   390  					},
   391  				},
   392  				ULID(3): {
   393  					Thanos: metadata.Thanos{
   394  						Labels: map[string]string{"cluster": "A", "message": "keepme"},
   395  					},
   396  				},
   397  				ULID(4): {
   398  					Thanos: metadata.Thanos{
   399  						Labels: map[string]string{"cluster": "A", "something": "B", "message": "keepme"},
   400  					},
   401  				},
   402  				ULID(5): {
   403  					Thanos: metadata.Thanos{
   404  						Labels: map[string]string{"cluster": "B"},
   405  					},
   406  				},
   407  				ULID(6): {
   408  					Thanos: metadata.Thanos{
   409  						Labels: map[string]string{"cluster": "B", "message": "keepme"},
   410  					},
   411  				},
   412  				ULID(7):  {},
   413  				ULID(8):  {},
   414  				ULID(9):  {},
   415  				ULID(10): {},
   416  				ULID(11): {},
   417  				ULID(12): {},
   418  				ULID(13): {},
   419  				ULID(14): {},
   420  				ULID(15): {},
   421  			}
   422  			expected := map[ulid.ULID]*metadata.Meta{}
   423  			switch i {
   424  			case 0:
   425  				expected = map[ulid.ULID]*metadata.Meta{
   426  					ULID(2):  input[ULID(2)],
   427  					ULID(6):  input[ULID(6)],
   428  					ULID(11): input[ULID(11)],
   429  					ULID(13): input[ULID(13)],
   430  				}
   431  			case 1:
   432  				expected = map[ulid.ULID]*metadata.Meta{
   433  					ULID(5):  input[ULID(5)],
   434  					ULID(7):  input[ULID(7)],
   435  					ULID(10): input[ULID(10)],
   436  					ULID(12): input[ULID(12)],
   437  					ULID(14): input[ULID(14)],
   438  					ULID(15): input[ULID(15)],
   439  				}
   440  			case 2:
   441  				expected = map[ulid.ULID]*metadata.Meta{
   442  					ULID(1): input[ULID(1)],
   443  					ULID(3): input[ULID(3)],
   444  					ULID(4): input[ULID(4)],
   445  					ULID(8): input[ULID(8)],
   446  					ULID(9): input[ULID(9)],
   447  				}
   448  			}
   449  			deleted := len(input) - len(expected)
   450  
   451  			m := newTestFetcherMetrics()
   452  			testutil.Ok(t, f.Filter(ctx, input, m.Synced, nil))
   453  
   454  			testutil.Equals(t, expected, input)
   455  			testutil.Equals(t, float64(deleted), promtest.ToFloat64(m.Synced.WithLabelValues(labelExcludedMeta)))
   456  
   457  		})
   458  
   459  	}
   460  }
   461  
   462  func TestTimePartitionMetaFilter_Filter(t *testing.T) {
   463  	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
   464  	defer cancel()
   465  
   466  	mint := time.Unix(0, 1*time.Millisecond.Nanoseconds())
   467  	maxt := time.Unix(0, 10*time.Millisecond.Nanoseconds())
   468  	f := NewTimePartitionMetaFilter(model.TimeOrDurationValue{Time: &mint}, model.TimeOrDurationValue{Time: &maxt})
   469  
   470  	input := map[ulid.ULID]*metadata.Meta{
   471  		ULID(1): {
   472  			BlockMeta: tsdb.BlockMeta{
   473  				MinTime: 0,
   474  				MaxTime: 1,
   475  			},
   476  		},
   477  		ULID(2): {
   478  			BlockMeta: tsdb.BlockMeta{
   479  				MinTime: 1,
   480  				MaxTime: 10,
   481  			},
   482  		},
   483  		ULID(3): {
   484  			BlockMeta: tsdb.BlockMeta{
   485  				MinTime: 2,
   486  				MaxTime: 30,
   487  			},
   488  		},
   489  		ULID(4): {
   490  			BlockMeta: tsdb.BlockMeta{
   491  				MinTime: 0,
   492  				MaxTime: 30,
   493  			},
   494  		},
   495  		ULID(5): {
   496  			BlockMeta: tsdb.BlockMeta{
   497  				MinTime: -1,
   498  				MaxTime: 0,
   499  			},
   500  		},
   501  		ULID(6): {
   502  			BlockMeta: tsdb.BlockMeta{
   503  				MinTime: 20,
   504  				MaxTime: 30,
   505  			},
   506  		},
   507  	}
   508  	expected := map[ulid.ULID]*metadata.Meta{
   509  		ULID(1): input[ULID(1)],
   510  		ULID(2): input[ULID(2)],
   511  		ULID(3): input[ULID(3)],
   512  		ULID(4): input[ULID(4)],
   513  	}
   514  
   515  	m := newTestFetcherMetrics()
   516  	testutil.Ok(t, f.Filter(ctx, input, m.Synced, nil))
   517  
   518  	testutil.Equals(t, 2.0, promtest.ToFloat64(m.Synced.WithLabelValues(timeExcludedMeta)))
   519  	testutil.Equals(t, expected, input)
   520  
   521  }
   522  
   523  type sourcesAndResolution struct {
   524  	sources    []ulid.ULID
   525  	resolution int64
   526  }
   527  
   528  func TestDeduplicateFilter_Filter(t *testing.T) {
   529  	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
   530  	defer cancel()
   531  
   532  	for _, tcase := range []struct {
   533  		name     string
   534  		input    map[ulid.ULID]*sourcesAndResolution
   535  		expected []ulid.ULID
   536  	}{
   537  		{
   538  			name: "3 non compacted blocks in bucket",
   539  			input: map[ulid.ULID]*sourcesAndResolution{
   540  				ULID(1): {
   541  					sources:    []ulid.ULID{ULID(1)},
   542  					resolution: 0,
   543  				},
   544  				ULID(2): {
   545  					sources:    []ulid.ULID{ULID(2)},
   546  					resolution: 0,
   547  				},
   548  				ULID(3): {
   549  					sources:    []ulid.ULID{ULID(3)},
   550  					resolution: 0,
   551  				},
   552  			},
   553  			expected: []ulid.ULID{
   554  				ULID(1),
   555  				ULID(2),
   556  				ULID(3),
   557  			},
   558  		},
   559  		{
   560  			name: "compacted block with sources in bucket",
   561  			input: map[ulid.ULID]*sourcesAndResolution{
   562  				ULID(6): {
   563  					sources:    []ulid.ULID{ULID(6)},
   564  					resolution: 0,
   565  				},
   566  				ULID(4): {
   567  					sources:    []ulid.ULID{ULID(1), ULID(3), ULID(2)},
   568  					resolution: 0,
   569  				},
   570  				ULID(5): {
   571  					sources:    []ulid.ULID{ULID(5)},
   572  					resolution: 0,
   573  				},
   574  			},
   575  			expected: []ulid.ULID{
   576  				ULID(4),
   577  				ULID(5),
   578  				ULID(6),
   579  			},
   580  		},
   581  		{
   582  			name: "two compacted blocks with same sources",
   583  			input: map[ulid.ULID]*sourcesAndResolution{
   584  				ULID(5): {
   585  					sources:    []ulid.ULID{ULID(5)},
   586  					resolution: 0,
   587  				},
   588  				ULID(6): {
   589  					sources:    []ulid.ULID{ULID(6)},
   590  					resolution: 0,
   591  				},
   592  				ULID(3): {
   593  					sources:    []ulid.ULID{ULID(1), ULID(2)},
   594  					resolution: 0,
   595  				},
   596  				ULID(4): {
   597  					sources:    []ulid.ULID{ULID(1), ULID(2)},
   598  					resolution: 0,
   599  				},
   600  			},
   601  			expected: []ulid.ULID{
   602  				ULID(3),
   603  				ULID(5),
   604  				ULID(6),
   605  			},
   606  		},
   607  		{
   608  			name: "two compacted blocks with overlapping sources",
   609  			input: map[ulid.ULID]*sourcesAndResolution{
   610  				ULID(4): {
   611  					sources:    []ulid.ULID{ULID(1), ULID(2)},
   612  					resolution: 0,
   613  				},
   614  				ULID(6): {
   615  					sources:    []ulid.ULID{ULID(6)},
   616  					resolution: 0,
   617  				},
   618  				ULID(5): {
   619  					sources:    []ulid.ULID{ULID(1), ULID(3), ULID(2)},
   620  					resolution: 0,
   621  				},
   622  			},
   623  			expected: []ulid.ULID{
   624  				ULID(5),
   625  				ULID(6),
   626  			},
   627  		},
   628  		{
   629  			name: "3 non compacted blocks and compacted block of level 2 in bucket",
   630  			input: map[ulid.ULID]*sourcesAndResolution{
   631  				ULID(6): {
   632  					sources:    []ulid.ULID{ULID(6)},
   633  					resolution: 0,
   634  				},
   635  				ULID(1): {
   636  					sources:    []ulid.ULID{ULID(1)},
   637  					resolution: 0,
   638  				},
   639  				ULID(2): {
   640  					sources:    []ulid.ULID{ULID(2)},
   641  					resolution: 0,
   642  				},
   643  				ULID(3): {
   644  					sources:    []ulid.ULID{ULID(3)},
   645  					resolution: 0,
   646  				},
   647  				ULID(4): {
   648  					sources:    []ulid.ULID{ULID(2), ULID(1), ULID(3)},
   649  					resolution: 0,
   650  				},
   651  			},
   652  			expected: []ulid.ULID{
   653  				ULID(4),
   654  				ULID(6),
   655  			},
   656  		},
   657  		{
   658  			name: "3 compacted blocks of level 2 and one compacted block of level 3 in bucket",
   659  			input: map[ulid.ULID]*sourcesAndResolution{
   660  				ULID(10): {
   661  					sources:    []ulid.ULID{ULID(1), ULID(2), ULID(3)},
   662  					resolution: 0,
   663  				},
   664  				ULID(11): {
   665  					sources:    []ulid.ULID{ULID(6), ULID(4), ULID(5)},
   666  					resolution: 0,
   667  				},
   668  				ULID(14): {
   669  					sources:    []ulid.ULID{ULID(14)},
   670  					resolution: 0,
   671  				},
   672  				ULID(1): {
   673  					sources:    []ulid.ULID{ULID(1)},
   674  					resolution: 0,
   675  				},
   676  				ULID(13): {
   677  					sources:    []ulid.ULID{ULID(1), ULID(6), ULID(2), ULID(3), ULID(5), ULID(7), ULID(4), ULID(8), ULID(9)},
   678  					resolution: 0,
   679  				},
   680  				ULID(12): {
   681  					sources:    []ulid.ULID{ULID(7), ULID(9), ULID(8)},
   682  					resolution: 0,
   683  				},
   684  			},
   685  			expected: []ulid.ULID{
   686  				ULID(14),
   687  				ULID(13),
   688  			},
   689  		},
   690  		{
   691  			name: "compacted blocks with overlapping sources",
   692  			input: map[ulid.ULID]*sourcesAndResolution{
   693  				ULID(8): {
   694  					sources:    []ulid.ULID{ULID(1), ULID(3), ULID(2), ULID(4)},
   695  					resolution: 0,
   696  				},
   697  				ULID(1): {
   698  					sources:    []ulid.ULID{ULID(1)},
   699  					resolution: 0,
   700  				},
   701  				ULID(5): {
   702  					sources:    []ulid.ULID{ULID(1), ULID(2)},
   703  					resolution: 0,
   704  				},
   705  				ULID(6): {
   706  					sources:    []ulid.ULID{ULID(1), ULID(3), ULID(2), ULID(4)},
   707  					resolution: 0,
   708  				},
   709  				ULID(7): {
   710  					sources:    []ulid.ULID{ULID(3), ULID(1), ULID(2)},
   711  					resolution: 0,
   712  				},
   713  			},
   714  			expected: []ulid.ULID{
   715  				ULID(6),
   716  			},
   717  		},
   718  		{
   719  			name: "compacted blocks of level 3 with overlapping sources of equal length",
   720  			input: map[ulid.ULID]*sourcesAndResolution{
   721  				ULID(10): {
   722  					sources:    []ulid.ULID{ULID(1), ULID(2), ULID(6), ULID(7)},
   723  					resolution: 0,
   724  				},
   725  				ULID(1): {
   726  					sources:    []ulid.ULID{ULID(1)},
   727  					resolution: 0,
   728  				},
   729  				ULID(11): {
   730  					sources:    []ulid.ULID{ULID(6), ULID(8), ULID(1), ULID(2)},
   731  					resolution: 0,
   732  				},
   733  			},
   734  			expected: []ulid.ULID{
   735  				ULID(10),
   736  				ULID(11),
   737  			},
   738  		},
   739  		{
   740  			name: "compacted blocks of level 3 with overlapping sources of different length",
   741  			input: map[ulid.ULID]*sourcesAndResolution{
   742  				ULID(10): {
   743  					sources:    []ulid.ULID{ULID(6), ULID(7), ULID(1), ULID(2)},
   744  					resolution: 0,
   745  				},
   746  				ULID(1): {
   747  					sources:    []ulid.ULID{ULID(1)},
   748  					resolution: 0,
   749  				},
   750  				ULID(5): {
   751  					sources:    []ulid.ULID{ULID(1), ULID(2)},
   752  					resolution: 0,
   753  				},
   754  				ULID(11): {
   755  					sources:    []ulid.ULID{ULID(2), ULID(3), ULID(1)},
   756  					resolution: 0,
   757  				},
   758  			},
   759  			expected: []ulid.ULID{
   760  				ULID(10),
   761  				ULID(11),
   762  			},
   763  		},
   764  		{
   765  			name: "blocks with same sources and different resolutions",
   766  			input: map[ulid.ULID]*sourcesAndResolution{
   767  				ULID(1): {
   768  					sources:    []ulid.ULID{ULID(1)},
   769  					resolution: 0,
   770  				},
   771  				ULID(2): {
   772  					sources:    []ulid.ULID{ULID(1)},
   773  					resolution: 1000,
   774  				},
   775  				ULID(3): {
   776  					sources:    []ulid.ULID{ULID(1)},
   777  					resolution: 10000,
   778  				},
   779  			},
   780  			expected: []ulid.ULID{
   781  				ULID(1),
   782  				ULID(2),
   783  				ULID(3),
   784  			},
   785  		},
   786  		{
   787  			name: "compacted blocks with overlapping sources and different resolutions",
   788  			input: map[ulid.ULID]*sourcesAndResolution{
   789  				ULID(1): {
   790  					sources:    []ulid.ULID{ULID(1)},
   791  					resolution: 0,
   792  				},
   793  				ULID(6): {
   794  					sources:    []ulid.ULID{ULID(6)},
   795  					resolution: 10000,
   796  				},
   797  				ULID(4): {
   798  					sources:    []ulid.ULID{ULID(1), ULID(3), ULID(2)},
   799  					resolution: 0,
   800  				},
   801  				ULID(5): {
   802  					sources:    []ulid.ULID{ULID(2), ULID(3), ULID(1)},
   803  					resolution: 1000,
   804  				},
   805  			},
   806  			expected: []ulid.ULID{
   807  				ULID(4),
   808  				ULID(5),
   809  				ULID(6),
   810  			},
   811  		},
   812  		{
   813  			name: "compacted blocks of level 3 with overlapping sources of different length and different resolutions",
   814  			input: map[ulid.ULID]*sourcesAndResolution{
   815  				ULID(10): {
   816  					sources:    []ulid.ULID{ULID(7), ULID(5), ULID(1), ULID(2)},
   817  					resolution: 0,
   818  				},
   819  				ULID(12): {
   820  					sources:    []ulid.ULID{ULID(6), ULID(7), ULID(1)},
   821  					resolution: 10000,
   822  				},
   823  				ULID(1): {
   824  					sources:    []ulid.ULID{ULID(1)},
   825  					resolution: 0,
   826  				},
   827  				ULID(13): {
   828  					sources:    []ulid.ULID{ULID(1)},
   829  					resolution: 10000,
   830  				},
   831  				ULID(5): {
   832  					sources:    []ulid.ULID{ULID(1), ULID(2)},
   833  					resolution: 0,
   834  				},
   835  				ULID(11): {
   836  					sources:    []ulid.ULID{ULID(2), ULID(3), ULID(1)},
   837  					resolution: 0,
   838  				},
   839  			},
   840  			expected: []ulid.ULID{
   841  				ULID(10),
   842  				ULID(11),
   843  				ULID(12),
   844  			},
   845  		},
   846  	} {
   847  		f := NewDeduplicateFilter(1)
   848  		if ok := t.Run(tcase.name, func(t *testing.T) {
   849  			m := newTestFetcherMetrics()
   850  			metas := make(map[ulid.ULID]*metadata.Meta)
   851  			inputLen := len(tcase.input)
   852  			for id, metaInfo := range tcase.input {
   853  				metas[id] = &metadata.Meta{
   854  					BlockMeta: tsdb.BlockMeta{
   855  						ULID: id,
   856  						Compaction: tsdb.BlockMetaCompaction{
   857  							Sources: metaInfo.sources,
   858  						},
   859  					},
   860  					Thanos: metadata.Thanos{
   861  						Downsample: metadata.ThanosDownsample{
   862  							Resolution: metaInfo.resolution,
   863  						},
   864  					},
   865  				}
   866  			}
   867  			testutil.Ok(t, f.Filter(ctx, metas, m.Synced, nil))
   868  			compareSliceWithMapKeys(t, metas, tcase.expected)
   869  			testutil.Equals(t, float64(inputLen-len(tcase.expected)), promtest.ToFloat64(m.Synced.WithLabelValues(duplicateMeta)))
   870  		}); !ok {
   871  			return
   872  		}
   873  	}
   874  }
   875  
   876  func TestReplicaLabelRemover_Modify(t *testing.T) {
   877  	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
   878  	defer cancel()
   879  
   880  	for _, tcase := range []struct {
   881  		name                string
   882  		input               map[ulid.ULID]*metadata.Meta
   883  		expected            map[ulid.ULID]*metadata.Meta
   884  		modified            float64
   885  		replicaLabelRemover *ReplicaLabelRemover
   886  	}{
   887  		{
   888  			name: "without replica labels",
   889  			input: map[ulid.ULID]*metadata.Meta{
   890  				ULID(1): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   891  				ULID(2): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   892  				ULID(3): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something1"}}},
   893  			},
   894  			expected: map[ulid.ULID]*metadata.Meta{
   895  				ULID(1): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   896  				ULID(2): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   897  				ULID(3): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something1"}}},
   898  			},
   899  			modified:            0,
   900  			replicaLabelRemover: NewReplicaLabelRemover(log.NewNopLogger(), []string{"replica", "rule_replica"}),
   901  		},
   902  		{
   903  			name: "with replica labels",
   904  			input: map[ulid.ULID]*metadata.Meta{
   905  				ULID(1): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   906  				ULID(2): {Thanos: metadata.Thanos{Labels: map[string]string{"replica": "cluster1", "message": "something"}}},
   907  				ULID(3): {Thanos: metadata.Thanos{Labels: map[string]string{"replica": "cluster1", "rule_replica": "rule1", "message": "something"}}},
   908  				ULID(4): {Thanos: metadata.Thanos{Labels: map[string]string{"replica": "cluster1", "rule_replica": "rule1"}}},
   909  			},
   910  			expected: map[ulid.ULID]*metadata.Meta{
   911  				ULID(1): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   912  				ULID(2): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   913  				ULID(3): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   914  				ULID(4): {Thanos: metadata.Thanos{Labels: map[string]string{"replica": "deduped"}}},
   915  			},
   916  			modified:            5.0,
   917  			replicaLabelRemover: NewReplicaLabelRemover(log.NewNopLogger(), []string{"replica", "rule_replica"}),
   918  		},
   919  		{
   920  			name: "no replica label specified in the ReplicaLabelRemover",
   921  			input: map[ulid.ULID]*metadata.Meta{
   922  				ULID(1): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   923  				ULID(2): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   924  				ULID(3): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something1"}}},
   925  			},
   926  			expected: map[ulid.ULID]*metadata.Meta{
   927  				ULID(1): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   928  				ULID(2): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something"}}},
   929  				ULID(3): {Thanos: metadata.Thanos{Labels: map[string]string{"message": "something1"}}},
   930  			},
   931  			modified:            0,
   932  			replicaLabelRemover: NewReplicaLabelRemover(log.NewNopLogger(), []string{}),
   933  		},
   934  	} {
   935  		m := newTestFetcherMetrics()
   936  		testutil.Ok(t, tcase.replicaLabelRemover.Filter(ctx, tcase.input, nil, m.Modified))
   937  
   938  		testutil.Equals(t, tcase.modified, promtest.ToFloat64(m.Modified.WithLabelValues(replicaRemovedMeta)))
   939  		testutil.Equals(t, tcase.expected, tcase.input)
   940  	}
   941  }
   942  
   943  func compareSliceWithMapKeys(tb testing.TB, m map[ulid.ULID]*metadata.Meta, s []ulid.ULID) {
   944  	_, file, line, _ := runtime.Caller(1)
   945  	matching := true
   946  	if len(m) != len(s) {
   947  		matching = false
   948  	}
   949  
   950  	for _, val := range s {
   951  		if m[val] == nil {
   952  			matching = false
   953  			break
   954  		}
   955  	}
   956  
   957  	if !matching {
   958  		var mapKeys []ulid.ULID
   959  		for id := range m {
   960  			mapKeys = append(mapKeys, id)
   961  		}
   962  		fmt.Printf("\033[31m%s:%d:\n\n\texp keys: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, s, mapKeys)
   963  		tb.FailNow()
   964  	}
   965  }
   966  
   967  type ulidBuilder struct {
   968  	entropy *rand.Rand
   969  
   970  	created []ulid.ULID
   971  }
   972  
   973  func (u *ulidBuilder) ULID(t time.Time) ulid.ULID {
   974  	if u.entropy == nil {
   975  		source := rand.NewSource(1234)
   976  		u.entropy = rand.New(source)
   977  	}
   978  
   979  	id := ulid.MustNew(ulid.Timestamp(t), u.entropy)
   980  	u.created = append(u.created, id)
   981  	return id
   982  }
   983  
   984  func TestConsistencyDelayMetaFilter_Filter_0(t *testing.T) {
   985  	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
   986  	defer cancel()
   987  
   988  	u := &ulidBuilder{}
   989  	now := time.Now()
   990  
   991  	input := map[ulid.ULID]*metadata.Meta{
   992  		// Fresh blocks.
   993  		u.ULID(now):                       {Thanos: metadata.Thanos{Source: metadata.SidecarSource}},
   994  		u.ULID(now.Add(-1 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.SidecarSource}},
   995  		u.ULID(now.Add(-1 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.ReceiveSource}},
   996  		u.ULID(now.Add(-1 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.RulerSource}},
   997  
   998  		// For now these sources are exempt from the consistency delay, so the filter should ignore them.
   999  		u.ULID(now.Add(-1 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.BucketRepairSource}},
  1000  		u.ULID(now.Add(-1 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.CompactorSource}},
  1001  		u.ULID(now.Add(-1 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.CompactorRepairSource}},
  1002  
  1003  		// 29m.
  1004  		u.ULID(now.Add(-29 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.SidecarSource}},
  1005  		u.ULID(now.Add(-29 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.ReceiveSource}},
  1006  		u.ULID(now.Add(-29 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.RulerSource}},
  1007  
  1008  		// For now these sources are exempt from the consistency delay, so the filter should ignore them.
  1009  		u.ULID(now.Add(-29 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.BucketRepairSource}},
  1010  		u.ULID(now.Add(-29 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.CompactorSource}},
  1011  		u.ULID(now.Add(-29 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.CompactorRepairSource}},
  1012  
  1013  		// 30m.
  1014  		u.ULID(now.Add(-30 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.SidecarSource}},
  1015  		u.ULID(now.Add(-30 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.ReceiveSource}},
  1016  		u.ULID(now.Add(-30 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.RulerSource}},
  1017  		u.ULID(now.Add(-30 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.BucketRepairSource}},
  1018  		u.ULID(now.Add(-30 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.CompactorSource}},
  1019  		u.ULID(now.Add(-30 * time.Minute)): {Thanos: metadata.Thanos{Source: metadata.CompactorRepairSource}},
  1020  
  1021  		// 30m+.
  1022  		u.ULID(now.Add(-20 * time.Hour)): {Thanos: metadata.Thanos{Source: metadata.SidecarSource}},
  1023  		u.ULID(now.Add(-20 * time.Hour)): {Thanos: metadata.Thanos{Source: metadata.ReceiveSource}},
  1024  		u.ULID(now.Add(-20 * time.Hour)): {Thanos: metadata.Thanos{Source: metadata.RulerSource}},
  1025  		u.ULID(now.Add(-20 * time.Hour)): {Thanos: metadata.Thanos{Source: metadata.BucketRepairSource}},
  1026  		u.ULID(now.Add(-20 * time.Hour)): {Thanos: metadata.Thanos{Source: metadata.CompactorSource}},
  1027  		u.ULID(now.Add(-20 * time.Hour)): {Thanos: metadata.Thanos{Source: metadata.CompactorRepairSource}},
  1028  	}
  1029  
  1030  	t.Run("consistency 0 (turned off)", func(t *testing.T) {
  1031  		m := newTestFetcherMetrics()
  1032  		expected := map[ulid.ULID]*metadata.Meta{}
  1033  		// Copy all.
  1034  		for _, id := range u.created {
  1035  			expected[id] = input[id]
  1036  		}
  1037  
  1038  		reg := prometheus.NewRegistry()
  1039  		f := NewConsistencyDelayMetaFilter(nil, 0*time.Second, reg)
  1040  		testutil.Equals(t, map[string]float64{"consistency_delay_seconds{}": 0.0}, extprom.CurrentGaugeValuesFor(t, reg, "consistency_delay_seconds"))
  1041  
  1042  		testutil.Ok(t, f.Filter(ctx, input, m.Synced, nil))
  1043  		testutil.Equals(t, 0.0, promtest.ToFloat64(m.Synced.WithLabelValues(tooFreshMeta)))
  1044  		testutil.Equals(t, expected, input)
  1045  	})
  1046  
  1047  	t.Run("consistency 30m.", func(t *testing.T) {
  1048  		m := newTestFetcherMetrics()
  1049  		expected := map[ulid.ULID]*metadata.Meta{}
  1050  		// Only exempt sources, and blocks that are at least 30m old, go through.
  1051  		for i, id := range u.created {
  1052  			// Younger than 30m.
  1053  			if i < 13 {
  1054  				if input[id].Thanos.Source != metadata.BucketRepairSource &&
  1055  					input[id].Thanos.Source != metadata.CompactorSource &&
  1056  					input[id].Thanos.Source != metadata.CompactorRepairSource {
  1057  					continue
  1058  				}
  1059  			}
  1060  			expected[id] = input[id]
  1061  		}
  1062  
  1063  		reg := prometheus.NewRegistry()
  1064  		f := NewConsistencyDelayMetaFilter(nil, 30*time.Minute, reg)
  1065  		testutil.Equals(t, map[string]float64{"consistency_delay_seconds{}": (30 * time.Minute).Seconds()}, extprom.CurrentGaugeValuesFor(t, reg, "consistency_delay_seconds"))
  1066  
  1067  		testutil.Ok(t, f.Filter(ctx, input, m.Synced, nil))
  1068  		testutil.Equals(t, float64(len(u.created)-len(expected)), promtest.ToFloat64(m.Synced.WithLabelValues(tooFreshMeta)))
  1069  		testutil.Equals(t, expected, input)
  1070  	})
  1071  }
  1072  
  1073  func TestIgnoreDeletionMarkFilter_Filter(t *testing.T) {
  1074  	objtesting.ForeachStore(t, func(t *testing.T, bkt objstore.Bucket) {
  1075  		ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
  1076  		defer cancel()
  1077  
  1078  		now := time.Now()
  1079  		f := NewIgnoreDeletionMarkFilter(log.NewNopLogger(), objstore.WithNoopInstr(bkt), 48*time.Hour, 32)
  1080  
  1081  		shouldFetch := &metadata.DeletionMark{
  1082  			ID:           ULID(1),
  1083  			DeletionTime: now.Add(-15 * time.Hour).Unix(),
  1084  			Version:      1,
  1085  		}
  1086  
  1087  		shouldIgnore := &metadata.DeletionMark{
  1088  			ID:           ULID(2),
  1089  			DeletionTime: now.Add(-60 * time.Hour).Unix(),
  1090  			Version:      1,
  1091  		}
  1092  
  1093  		var buf bytes.Buffer
  1094  		testutil.Ok(t, json.NewEncoder(&buf).Encode(&shouldFetch))
  1095  		testutil.Ok(t, bkt.Upload(ctx, path.Join(shouldFetch.ID.String(), metadata.DeletionMarkFilename), &buf))
  1096  
  1097  		testutil.Ok(t, json.NewEncoder(&buf).Encode(&shouldIgnore))
  1098  		testutil.Ok(t, bkt.Upload(ctx, path.Join(shouldIgnore.ID.String(), metadata.DeletionMarkFilename), &buf))
  1099  
  1100  		testutil.Ok(t, bkt.Upload(ctx, path.Join(ULID(3).String(), metadata.DeletionMarkFilename), bytes.NewBufferString("not a valid deletion-mark.json")))
  1101  
  1102  		input := map[ulid.ULID]*metadata.Meta{
  1103  			ULID(1): {},
  1104  			ULID(2): {},
  1105  			ULID(3): {},
  1106  			ULID(4): {},
  1107  		}
  1108  
  1109  		expected := map[ulid.ULID]*metadata.Meta{
  1110  			ULID(1): {},
  1111  			ULID(3): {},
  1112  			ULID(4): {},
  1113  		}
  1114  
  1115  		m := newTestFetcherMetrics()
  1116  		testutil.Ok(t, f.Filter(ctx, input, m.Synced, nil))
  1117  		testutil.Equals(t, 1.0, promtest.ToFloat64(m.Synced.WithLabelValues(MarkedForDeletionMeta)))
  1118  		testutil.Equals(t, expected, input)
  1119  	})
  1120  }
  1121  
  1122  func BenchmarkDeduplicateFilter_Filter(b *testing.B) {
  1123  
  1124  	var (
  1125  		reg   prometheus.Registerer
  1126  		count uint64
  1127  		cases []map[ulid.ULID]*metadata.Meta
  1128  	)
  1129  
  1130  	dedupFilter := NewDeduplicateFilter(1)
  1131  	synced := extprom.NewTxGaugeVec(reg, prometheus.GaugeOpts{}, []string{"state"})
  1132  
  1133  	for blocksNum := 10; blocksNum <= 10000; blocksNum *= 10 {
  1134  
  1135  		ctx := context.Background()
  1136  		// blocksNum blocks, each with a unique ULID and 100 unique sources.
  1137  		cases = append(cases, make(map[ulid.ULID]*metadata.Meta, blocksNum))
  1138  		for i := 0; i < blocksNum; i++ {
  1139  
  1140  			id := ulid.MustNew(count, nil)
  1141  			count++
  1142  
  1143  			cases[0][id] = &metadata.Meta{
  1144  				BlockMeta: tsdb.BlockMeta{
  1145  					ULID: id,
  1146  				},
  1147  			}
  1148  
  1149  			for j := 0; j < 100; j++ {
  1150  				cases[0][id].Compaction.Sources = append(cases[0][id].Compaction.Sources, ulid.MustNew(count, nil))
  1151  				count++
  1152  			}
  1153  		}
  1154  
  1155  		// Case covering 3 resolutions, as they can be run concurrently.
  1156  		// blocksNum blocks per resolution, each with a unique ULID and 100 unique sources.
  1157  		cases = append(cases, make(map[ulid.ULID]*metadata.Meta, 3*blocksNum))
  1158  
  1159  		for i := 0; i < blocksNum; i++ {
  1160  			for _, res := range []int64{0, 5 * 60 * 1000, 60 * 60 * 1000} {
  1161  
  1162  				id := ulid.MustNew(count, nil)
  1163  				count++
  1164  				cases[1][id] = &metadata.Meta{
  1165  					BlockMeta: tsdb.BlockMeta{
  1166  						ULID: id,
  1167  					},
  1168  					Thanos: metadata.Thanos{
  1169  						Downsample: metadata.ThanosDownsample{Resolution: res},
  1170  					},
  1171  				}
  1172  				for j := 0; j < 100; j++ {
  1173  					cases[1][id].Compaction.Sources = append(cases[1][id].Compaction.Sources, ulid.MustNew(count, nil))
  1174  					count++
  1175  				}
  1176  
  1177  			}
  1178  		}
  1179  
  1180  		b.Run(fmt.Sprintf("Block-%d", blocksNum), func(b *testing.B) {
  1181  			for _, tcase := range cases {
  1182  				b.ResetTimer()
  1183  				b.Run("", func(b *testing.B) {
  1184  					for n := 0; n < b.N; n++ {
  1185  						_ = dedupFilter.Filter(ctx, tcase, synced, nil)
  1186  						testutil.Equals(b, 0, len(dedupFilter.DuplicateIDs()))
  1187  					}
  1188  				})
  1189  			}
  1190  		})
  1191  	}
  1192  }
  1193  
  1194  func Test_ParseRelabelConfig(t *testing.T) {
  1195  	_, err := ParseRelabelConfig([]byte(`
  1196      - action: drop
  1197        regex: "A"
  1198        source_labels:
  1199        - cluster
  1200      `), SelectorSupportedRelabelActions)
  1201  	testutil.Ok(t, err)
  1202  
  1203  	_, err = ParseRelabelConfig([]byte(`
  1204      - action: labelmap
  1205        regex: "A"
  1206      `), SelectorSupportedRelabelActions)
  1207  	testutil.NotOk(t, err)
  1208  	testutil.Equals(t, "unsupported relabel action: labelmap", err.Error())
  1209  }