github.com/thanos-io/thanos@v0.32.5/pkg/compact/downsample/downsample_test.go (about)

     1  // Copyright (c) The Thanos Authors.
     2  // Licensed under the Apache License 2.0.
     3  
     4  package downsample
     5  
     6  import (
     7  	"math"
     8  	"os"
     9  	"path/filepath"
    10  	"sort"
    11  	"testing"
    12  
    13  	"github.com/go-kit/log"
    14  	"github.com/pkg/errors"
    15  	"github.com/prometheus/prometheus/model/histogram"
    16  	"github.com/prometheus/prometheus/model/labels"
    17  	"github.com/prometheus/prometheus/model/value"
    18  	"github.com/prometheus/prometheus/storage"
    19  	"github.com/prometheus/prometheus/tsdb"
    20  	"github.com/prometheus/prometheus/tsdb/chunkenc"
    21  	"github.com/prometheus/prometheus/tsdb/chunks"
    22  	"github.com/prometheus/prometheus/tsdb/index"
    23  	"github.com/prometheus/prometheus/tsdb/tombstones"
    24  	"github.com/prometheus/prometheus/tsdb/tsdbutil"
    25  	"go.uber.org/goleak"
    26  
    27  	"github.com/efficientgo/core/testutil"
    28  
    29  	"github.com/thanos-io/thanos/pkg/block"
    30  	"github.com/thanos-io/thanos/pkg/block/metadata"
    31  )
    32  
// TestMain wraps every test in this package with a goroutine-leak check:
// if any test leaves a goroutine running after completion, the test binary fails.
func TestMain(m *testing.M) {
	goleak.VerifyTestMain(m)
}
    36  
// TestDownsampleCounterBoundaryReset verifies that counter resets occurring at
// aggregation-window boundaries survive both downsampling passes: raw samples ->
// aggregated chunks (downsampleRawLoop) and aggregated -> further aggregated
// chunks (downsampleAggrLoop). Regression test for #1568.
func TestDownsampleCounterBoundaryReset(t *testing.T) {
	// toAggrChunks asserts that every chunk meta wraps an *AggrChunk and returns them.
	toAggrChunks := func(t *testing.T, cm []chunks.Meta) (res []*AggrChunk) {
		for i := range cm {
			achk, ok := cm[i].Chunk.(*AggrChunk)
			testutil.Assert(t, ok, "expected *AggrChunk")
			res = append(res, achk)
		}
		return
	}

	// counterSamples expands the AggrCounter sub-chunk of each aggregated chunk
	// into plain samples, WITHOUT applying counter-reset accumulation.
	counterSamples := func(t *testing.T, achks []*AggrChunk) (res []sample) {
		for _, achk := range achks {
			chk, err := achk.Get(AggrCounter)
			testutil.Ok(t, err)

			iter := chk.Iterator(nil)
			for iter.Next() != chunkenc.ValNone {
				t, v := iter.At()
				res = append(res, sample{t, v})
			}
		}
		return
	}

	// counterIterate reads the AggrCounter sub-chunks through
	// NewApplyCounterResetsIterator — i.e. the way queries consume them — so
	// counter resets between chunks are folded into a cumulative series.
	counterIterate := func(t *testing.T, achks []*AggrChunk) (res []sample) {
		var iters []chunkenc.Iterator
		for _, achk := range achks {
			chk, err := achk.Get(AggrCounter)
			testutil.Ok(t, err)
			iters = append(iters, chk.Iterator(nil))
		}

		citer := NewApplyCounterResetsIterator(iters...)
		for citer.Next() != chunkenc.ValNone {
			t, v := citer.At()
			res = append(res, sample{t: t, v: v})
		}
		return
	}

	// test describes one scenario: raw input, the two resolutions applied, and
	// the expected counter samples/iteration results after each pass.
	type test struct {
		raw                   []sample
		rawAggrResolution     int64
		expectedRawAggrChunks int
		rawCounterSamples     []sample
		rawCounterIterate     []sample
		aggrAggrResolution    int64
		aggrChunks            int
		aggrCounterSamples    []sample
		aggrCounterIterate    []sample
	}

	tests := []test{
		{
			// In this test case, counter resets occur at the
			// boundaries between the t=49,t=99 and t=99,t=149
			// windows, and the values in the t=49, t=99, and
			// t=149 windows are high enough that the resets
			// will only be accounted for if the first raw value
			// of a chunk is maintained during aggregation.
			// See #1568 for more details.
			raw: []sample{
				{t: 10, v: 1}, {t: 20, v: 3}, {t: 30, v: 5},
				{t: 50, v: 1}, {t: 60, v: 8}, {t: 70, v: 10},
				{t: 120, v: 1}, {t: 130, v: 18}, {t: 140, v: 20},
				{t: 160, v: 21}, {t: 170, v: 38}, {t: 180, v: 40},
			},
			rawAggrResolution:     50,
			expectedRawAggrChunks: 4,
			rawCounterSamples: []sample{
				{t: 10, v: 1}, {t: 30, v: 5}, {t: 30, v: 5},
				{t: 50, v: 1}, {t: 70, v: 10}, {t: 70, v: 10},
				{t: 120, v: 1}, {t: 140, v: 20}, {t: 140, v: 20},
				{t: 160, v: 21}, {t: 180, v: 40}, {t: 180, v: 40},
			},
			rawCounterIterate: []sample{
				{t: 10, v: 1}, {t: 30, v: 5},
				{t: 50, v: 6}, {t: 70, v: 15},
				{t: 120, v: 16}, {t: 140, v: 35},
				{t: 160, v: 36}, {t: 180, v: 55},
			},
			aggrAggrResolution: 2 * 50,
			aggrChunks:         2,
			aggrCounterSamples: []sample{
				{t: 10, v: 1}, {t: 70, v: 15}, {t: 70, v: 10},
				{t: 120, v: 1}, {t: 180, v: 40}, {t: 180, v: 40},
			},
			aggrCounterIterate: []sample{
				{t: 10, v: 1}, {t: 70, v: 15},
				{t: 120, v: 16}, {t: 180, v: 55},
			},
		},
	}

	doTest := func(t *testing.T, test *test) {
		// Asking for more chunks than raw samples ensures that downsampleRawLoop
		// will create chunks with samples from a single window.
		cm := downsampleRawLoop(test.raw, test.rawAggrResolution, len(test.raw)+1)
		testutil.Equals(t, test.expectedRawAggrChunks, len(cm))

		// First pass: raw -> aggregated. Check counters both raw and iterated.
		rawAggrChunks := toAggrChunks(t, cm)
		testutil.Equals(t, test.rawCounterSamples, counterSamples(t, rawAggrChunks))
		testutil.Equals(t, test.rawCounterIterate, counterIterate(t, rawAggrChunks))

		// Second pass: aggregated -> further aggregated at double resolution.
		var buf []sample
		acm, err := downsampleAggrLoop(rawAggrChunks, &buf, test.aggrAggrResolution, test.aggrChunks)
		testutil.Ok(t, err)
		testutil.Equals(t, test.aggrChunks, len(acm))

		aggrAggrChunks := toAggrChunks(t, acm)
		testutil.Equals(t, test.aggrCounterSamples, counterSamples(t, aggrAggrChunks))
		testutil.Equals(t, test.aggrCounterIterate, counterIterate(t, aggrAggrChunks))
	}

	doTest(t, &tests[0])
}
   153  
   154  func TestExpandChunkIterator(t *testing.T) {
   155  	// Validate that expanding the chunk iterator filters out-of-order samples
   156  	// and staleness markers.
   157  	// Same timestamps are okay since we use them for counter markers.
   158  	var res []sample
   159  	testutil.Ok(t,
   160  		expandChunkIterator(
   161  			newSampleIterator([]sample{
   162  				{100, 1}, {200, 2}, {200, 3}, {201, 4}, {200, 5},
   163  				{300, 6}, {400, math.Float64frombits(value.StaleNaN)}, {500, 5},
   164  			}), &res,
   165  		),
   166  	)
   167  
   168  	testutil.Equals(t, []sample{{100, 1}, {200, 2}, {200, 3}, {201, 4}, {300, 6}, {500, 5}}, res)
   169  }
   170  
var (
	// Decoded excerpt of pkg/query/testdata/issue2401-seriesresponses.json without overlaps (downsampling works directly on blocks).
	// Two raw chunks of counter samples at roughly 15s intervals. The second
	// chunk contains a staleness marker (StaleNaN at t=1587692495791) followed
	// by a counter reset: values drop from ~496k back down to 75.
	realisticChkDataWithStaleMarker = [][]sample{
		{
			{t: 1587690005791, v: 461968}, {t: 1587690020791, v: 462151}, {t: 1587690035797, v: 462336}, {t: 1587690050791, v: 462650}, {t: 1587690065791, v: 462813}, {t: 1587690080791, v: 462987}, {t: 1587690095791, v: 463095}, {t: 1587690110791, v: 463247}, {t: 1587690125791, v: 463440}, {t: 1587690140791, v: 463642},
			{t: 1587690155791, v: 463811}, {t: 1587690170791, v: 464027}, {t: 1587690185791, v: 464308}, {t: 1587690200791, v: 464514}, {t: 1587690215791, v: 464798}, {t: 1587690230791, v: 465018}, {t: 1587690245791, v: 465215}, {t: 1587690260813, v: 465431}, {t: 1587690275791, v: 465651}, {t: 1587690290791, v: 465870},
			{t: 1587690305791, v: 466070}, {t: 1587690320792, v: 466248}, {t: 1587690335791, v: 466506}, {t: 1587690350791, v: 466766}, {t: 1587690365791, v: 466970}, {t: 1587690380791, v: 467123}, {t: 1587690395791, v: 467265}, {t: 1587690410791, v: 467383}, {t: 1587690425791, v: 467629}, {t: 1587690440791, v: 467931},
			{t: 1587690455791, v: 468097}, {t: 1587690470791, v: 468281}, {t: 1587690485791, v: 468477}, {t: 1587690500791, v: 468649}, {t: 1587690515791, v: 468867}, {t: 1587690530791, v: 469150}, {t: 1587690545791, v: 469268}, {t: 1587690560791, v: 469488}, {t: 1587690575791, v: 469742}, {t: 1587690590791, v: 469951},
			{t: 1587690605791, v: 470131}, {t: 1587690620791, v: 470337}, {t: 1587690635791, v: 470631}, {t: 1587690650791, v: 470832}, {t: 1587690665791, v: 471077}, {t: 1587690680791, v: 471311}, {t: 1587690695791, v: 471473}, {t: 1587690710791, v: 471728}, {t: 1587690725791, v: 472002}, {t: 1587690740791, v: 472158},
			{t: 1587690755791, v: 472329}, {t: 1587690770791, v: 472722}, {t: 1587690785791, v: 472925}, {t: 1587690800791, v: 473220}, {t: 1587690815791, v: 473460}, {t: 1587690830791, v: 473748}, {t: 1587690845791, v: 473968}, {t: 1587690860791, v: 474261}, {t: 1587690875791, v: 474418}, {t: 1587690890791, v: 474726},
			{t: 1587690905791, v: 474913}, {t: 1587690920791, v: 475031}, {t: 1587690935791, v: 475284}, {t: 1587690950791, v: 475563}, {t: 1587690965791, v: 475762}, {t: 1587690980791, v: 475945}, {t: 1587690995791, v: 476302}, {t: 1587691010791, v: 476501}, {t: 1587691025791, v: 476849}, {t: 1587691040800, v: 477020},
			{t: 1587691055791, v: 477280}, {t: 1587691070791, v: 477549}, {t: 1587691085791, v: 477758}, {t: 1587691100817, v: 477960}, {t: 1587691115791, v: 478261}, {t: 1587691130791, v: 478559}, {t: 1587691145791, v: 478704}, {t: 1587691160804, v: 478950}, {t: 1587691175791, v: 479173}, {t: 1587691190791, v: 479368},
			{t: 1587691205791, v: 479625}, {t: 1587691220805, v: 479866}, {t: 1587691235791, v: 480008}, {t: 1587691250791, v: 480155}, {t: 1587691265791, v: 480472}, {t: 1587691280811, v: 480598}, {t: 1587691295791, v: 480771}, {t: 1587691310791, v: 480996}, {t: 1587691325791, v: 481200}, {t: 1587691340803, v: 481381},
			{t: 1587691355791, v: 481584}, {t: 1587691370791, v: 481759}, {t: 1587691385791, v: 482003}, {t: 1587691400803, v: 482189}, {t: 1587691415791, v: 482457}, {t: 1587691430791, v: 482623}, {t: 1587691445791, v: 482768}, {t: 1587691460804, v: 483036}, {t: 1587691475791, v: 483322}, {t: 1587691490791, v: 483566},
			{t: 1587691505791, v: 483709}, {t: 1587691520807, v: 483838}, {t: 1587691535791, v: 484091}, {t: 1587691550791, v: 484236}, {t: 1587691565791, v: 484454}, {t: 1587691580816, v: 484710}, {t: 1587691595791, v: 484978}, {t: 1587691610791, v: 485271}, {t: 1587691625791, v: 485476}, {t: 1587691640792, v: 485640},
			{t: 1587691655791, v: 485921}, {t: 1587691670791, v: 486201}, {t: 1587691685791, v: 486555}, {t: 1587691700791, v: 486691}, {t: 1587691715791, v: 486831}, {t: 1587691730791, v: 487033}, {t: 1587691745791, v: 487268}, {t: 1587691760803, v: 487370}, {t: 1587691775791, v: 487571}, {t: 1587691790791, v: 487787},
		},
		{
			{t: 1587691805791, v: 488036}, {t: 1587691820791, v: 488241}, {t: 1587691835791, v: 488411}, {t: 1587691850791, v: 488625}, {t: 1587691865791, v: 488868}, {t: 1587691880791, v: 489005}, {t: 1587691895791, v: 489237}, {t: 1587691910791, v: 489545}, {t: 1587691925791, v: 489750}, {t: 1587691940791, v: 489899},
			{t: 1587691955791, v: 490048}, {t: 1587691970791, v: 490364}, {t: 1587691985791, v: 490485}, {t: 1587692000791, v: 490722}, {t: 1587692015791, v: 490866}, {t: 1587692030791, v: 491025}, {t: 1587692045791, v: 491286}, {t: 1587692060816, v: 491543}, {t: 1587692075791, v: 491787}, {t: 1587692090791, v: 492065},
			{t: 1587692105791, v: 492223}, {t: 1587692120816, v: 492501}, {t: 1587692135791, v: 492767}, {t: 1587692150791, v: 492955}, {t: 1587692165791, v: 493194}, {t: 1587692180792, v: 493402}, {t: 1587692195791, v: 493647}, {t: 1587692210791, v: 493897}, {t: 1587692225791, v: 494117}, {t: 1587692240805, v: 494356},
			{t: 1587692255791, v: 494620}, {t: 1587692270791, v: 494762}, {t: 1587692285791, v: 495001}, {t: 1587692300805, v: 495222}, {t: 1587692315791, v: 495393}, {t: 1587692330791, v: 495662}, {t: 1587692345791, v: 495875}, {t: 1587692360801, v: 496082}, {t: 1587692375791, v: 496196}, {t: 1587692390791, v: 496245},
			{t: 1587692405791, v: 496295}, {t: 1587692420791, v: 496365}, {t: 1587692435791, v: 496401}, {t: 1587692450791, v: 496452}, {t: 1587692465791, v: 496491}, {t: 1587692480791, v: 496544}, {t: 1587692495791, v: math.Float64frombits(value.StaleNaN)}, {t: 1587692555791, v: 75}, {t: 1587692570791, v: 308}, {t: 1587692585791, v: 508},
			{t: 1587692600791, v: 701}, {t: 1587692615791, v: 985}, {t: 1587692630791, v: 1153}, {t: 1587692645791, v: 1365}, {t: 1587692660791, v: 1612}, {t: 1587692675803, v: 1922}, {t: 1587692690791, v: 2103}, {t: 1587692705791, v: 2261}, {t: 1587692720791, v: 2469}, {t: 1587692735805, v: 2625},
			{t: 1587692750791, v: 2801}, {t: 1587692765791, v: 2955}, {t: 1587692780791, v: 3187}, {t: 1587692795806, v: 3428}, {t: 1587692810791, v: 3657}, {t: 1587692825791, v: 3810}, {t: 1587692840791, v: 3968}, {t: 1587692855791, v: 4195}, {t: 1587692870791, v: 4414}, {t: 1587692885791, v: 4646},
			{t: 1587692900791, v: 4689}, {t: 1587692915791, v: 4847}, {t: 1587692930791, v: 5105}, {t: 1587692945791, v: 5309}, {t: 1587692960791, v: 5521}, {t: 1587692975791, v: 5695}, {t: 1587692990810, v: 6010}, {t: 1587693005791, v: 6210}, {t: 1587693020791, v: 6394}, {t: 1587693035791, v: 6597},
			{t: 1587693050791, v: 6872}, {t: 1587693065791, v: 7098}, {t: 1587693080791, v: 7329}, {t: 1587693095791, v: 7470}, {t: 1587693110791, v: 7634}, {t: 1587693125821, v: 7830}, {t: 1587693140791, v: 8034}, {t: 1587693155791, v: 8209}, {t: 1587693170791, v: 8499}, {t: 1587693185791, v: 8688},
			{t: 1587693200791, v: 8893}, {t: 1587693215791, v: 9052}, {t: 1587693230791, v: 9379}, {t: 1587693245791, v: 9544}, {t: 1587693260791, v: 9763}, {t: 1587693275791, v: 9974}, {t: 1587693290791, v: 10242}, {t: 1587693305791, v: 10464}, {t: 1587693320803, v: 10716}, {t: 1587693335791, v: 10975},
			{t: 1587693350791, v: 11232}, {t: 1587693365791, v: 11459}, {t: 1587693380791, v: 11778}, {t: 1587693395804, v: 12007}, {t: 1587693410791, v: 12206}, {t: 1587693425791, v: 12450}, {t: 1587693440791, v: 12693}, {t: 1587693455791, v: 12908}, {t: 1587693470791, v: 13158}, {t: 1587693485791, v: 13427},
			{t: 1587693500791, v: 13603}, {t: 1587693515791, v: 13927}, {t: 1587693530816, v: 14122}, {t: 1587693545791, v: 14327}, {t: 1587693560791, v: 14579}, {t: 1587693575791, v: 14759}, {t: 1587693590791, v: 14956},
		},
	}
	// Expected 5m-resolution downsampled aggregates for the raw data above.
	// NOTE(review): the last two AggrCounter entries share a timestamp — the
	// trailing {t: 1587693590791, v: 14956} carries the last raw value
	// (presumably the "counter signal" sample; see the comment in TestDownsample
	// about samples with the same timestamp as the previous one).
	realisticChkDataWithCounterResetRes5m = []map[AggrType][]sample{
		{
			AggrCount:   {{t: 1587690299999, v: 20}, {t: 1587690599999, v: 20}, {t: 1587690899999, v: 20}, {t: 1587691199999, v: 20}, {t: 1587691499999, v: 20}, {t: 1587691799999, v: 20}, {t: 1587692099999, v: 20}, {t: 1587692399999, v: 20}, {t: 1587692699999, v: 16}, {t: 1587692999999, v: 20}, {t: 1587693299999, v: 20}, {t: 1587693590791, v: 20}},
			AggrSum:     {{t: 1587690299999, v: 9.276972e+06}, {t: 1587690599999, v: 9.359861e+06}, {t: 1587690899999, v: 9.447457e+06}, {t: 1587691199999, v: 9.542732e+06}, {t: 1587691499999, v: 9.630379e+06}, {t: 1587691799999, v: 9.715631e+06}, {t: 1587692099999, v: 9.799808e+06}, {t: 1587692399999, v: 9.888117e+06}, {t: 1587692699999, v: 2.98928e+06}, {t: 1587692999999, v: 81592}, {t: 1587693299999, v: 163711}, {t: 1587693590791, v: 255746}},
			AggrMin:     {{t: 1587690299999, v: 461968}, {t: 1587690599999, v: 466070}, {t: 1587690899999, v: 470131}, {t: 1587691199999, v: 474913}, {t: 1587691499999, v: 479625}, {t: 1587691799999, v: 483709}, {t: 1587692099999, v: 488036}, {t: 1587692399999, v: 492223}, {t: 1587692699999, v: 75}, {t: 1587692999999, v: 2261}, {t: 1587693299999, v: 6210}, {t: 1587693590791, v: 10464}},
			AggrMax:     {{t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368}, {t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 496544}, {t: 1587692999999, v: 6010}, {t: 1587693299999, v: 10242}, {t: 1587693590791, v: 14956}},
			AggrCounter: {{t: 1587690005791, v: 461968}, {t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368}, {t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 498647}, {t: 1587692999999, v: 502554}, {t: 1587693299999, v: 506786}, {t: 1587693590791, v: 511500}, {t: 1587693590791, v: 14956}},
		},
	}
)
   213  
// TestDownsample drives Downsample through table-driven cases: either raw
// chunks (inRaw) or already-aggregated chunks (inAggr) are written to an
// in-memory block, downsampled to the given resolution, and the aggregated
// chunks of the resulting on-disk block are compared against `expected`
// (or the returned error against `expectedDownsamplingErr`).
func TestDownsample(t *testing.T) {
	type downsampleTestCase struct {
		name string

		// Either inRaw or inAggr should be provided.
		inRaw      [][]sample
		inAggr     []map[AggrType][]sample
		resolution int64

		// Expected output.
		expected                []map[AggrType][]sample
		expectedDownsamplingErr func([]chunks.Meta) error
	}
	for _, tcase := range []*downsampleTestCase{
		{
			name: "single chunk",
			inRaw: [][]sample{
				{{20, 1}, {40, 2}, {60, 3}, {80, 1}, {100, 2}, {101, math.Float64frombits(value.StaleNaN)}, {120, 5}, {180, 10}, {250, 1}},
			},
			resolution: 100,

			expected: []map[AggrType][]sample{
				{
					AggrCount:   {{99, 4}, {199, 3}, {250, 1}},
					AggrSum:     {{99, 7}, {199, 17}, {250, 1}},
					AggrMin:     {{99, 1}, {199, 2}, {250, 1}},
					AggrMax:     {{99, 3}, {199, 10}, {250, 1}},
					AggrCounter: {{20, 1}, {99, 4}, {199, 13}, {250, 14}, {250, 1}},
				},
			},
		},
		{
			name: "three chunks",
			inRaw: [][]sample{
				{{20, 1}, {40, 2}, {60, 3}, {80, 1}, {100, 2}, {101, math.Float64frombits(value.StaleNaN)}, {120, 5}, {180, 10}, {250, 2}},
				{{260, 1}, {300, 10}, {340, 15}, {380, 25}, {420, 35}},
				{{460, math.Float64frombits(value.StaleNaN)}, {500, 10}, {540, 3}},
			},
			resolution: 100,

			expected: []map[AggrType][]sample{
				{
					AggrCount:   {{t: 99, v: 4}, {t: 199, v: 3}, {t: 299, v: 2}, {t: 399, v: 3}, {t: 499, v: 1}, {t: 540, v: 2}},
					AggrSum:     {{t: 99, v: 7}, {t: 199, v: 17}, {t: 299, v: 3}, {t: 399, v: 50}, {t: 499, v: 35}, {t: 540, v: 13}},
					AggrMin:     {{t: 99, v: 1}, {t: 199, v: 2}, {t: 299, v: 1}, {t: 399, v: 10}, {t: 499, v: 35}, {t: 540, v: 3}},
					AggrMax:     {{t: 99, v: 3}, {t: 199, v: 10}, {t: 299, v: 2}, {t: 399, v: 25}, {t: 499, v: 35}, {t: 540, v: 10}},
					AggrCounter: {{t: 20, v: 1}, {t: 99, v: 4}, {t: 199, v: 13}, {t: 299, v: 16}, {t: 399, v: 40}, {t: 499, v: 50}, {t: 540, v: 63}, {t: 540, v: 3}},
				},
			},
		},
		{
			// Overlapping input chunks must be rejected, not aggregated.
			name: "four chunks, two of them overlapping",
			inRaw: [][]sample{
				{{20, 1}, {40, 2}, {60, 3}, {80, 1}, {100, 2}, {101, math.Float64frombits(value.StaleNaN)}, {120, 5}, {180, 10}, {250, 2}},
				{{20, 1}, {40, 2}, {60, 3}, {80, 1}, {100, 2}, {101, math.Float64frombits(value.StaleNaN)}, {120, 5}, {180, 10}, {250, 2}},
				{{260, 1}, {300, 10}, {340, 15}, {380, 25}, {420, 35}},
				{{460, math.Float64frombits(value.StaleNaN)}, {500, 10}, {540, 3}},
			},
			resolution: 100,

			expectedDownsamplingErr: func(chks []chunks.Meta) error {
				return errors.Errorf("found overlapping chunks within series 0. Chunks expected to be ordered by min time and non-overlapping, got: %v", chks)
			},
		},
		{
			name:       "realistic 15s interval raw chunks",
			inRaw:      realisticChkDataWithStaleMarker,
			resolution: ResLevel1, // 5m.

			expected: realisticChkDataWithCounterResetRes5m,
		},
		// Aggregated -> Downsampled Aggregated.
		{
			name: "single aggregated chunks",
			inAggr: []map[AggrType][]sample{
				{
					AggrCount: {{199, 5}, {299, 1}, {399, 10}, {400, 3}, {499, 10}, {699, 0}, {999, 100}},
					AggrSum:   {{199, 5}, {299, 1}, {399, 10}, {400, 3}, {499, 10}, {699, 0}, {999, 100}},
					AggrMin:   {{199, 5}, {299, 1}, {399, 10}, {400, -3}, {499, 10}, {699, 0}, {999, 100}},
					AggrMax:   {{199, 5}, {299, 1}, {399, 10}, {400, -3}, {499, 10}, {699, 0}, {999, 100}},
					AggrCounter: {
						{99, 100}, {299, 150}, {499, 210}, {499, 10}, // Chunk 1.
						{599, 20}, {799, 50}, {999, 120}, {999, 50}, // Chunk 2, no reset.
						{1099, 40}, {1199, 80}, {1299, 110}, // Chunk 3, reset.
					},
				},
			},
			resolution: 500,

			expected: []map[AggrType][]sample{
				{
					AggrCount:   {{499, 29}, {999, 100}},
					AggrSum:     {{499, 29}, {999, 100}},
					AggrMin:     {{499, -3}, {999, 0}},
					AggrMax:     {{499, 10}, {999, 100}},
					AggrCounter: {{99, 100}, {499, 210}, {999, 320}, {1299, 430}, {1299, 110}},
				},
			},
		},
		func() *downsampleTestCase {
			// Input already at the target resolution: downsampling must be a no-op.
			downsample500resolutionChunk := []map[AggrType][]sample{
				{
					AggrCount:   {{499, 29}, {999, 100}},
					AggrSum:     {{499, 29}, {999, 100}},
					AggrMin:     {{499, -3}, {999, 0}},
					AggrMax:     {{499, 10}, {999, 100}},
					AggrCounter: {{99, 100}, {499, 210}, {999, 320}, {1299, 430}, {1299, 110}},
				},
			}
			return &downsampleTestCase{
				name:       "downsampling already downsampled to the same resolution aggregated chunks",
				resolution: 500,

				// Should be the output as input.
				inAggr:   downsample500resolutionChunk,
				expected: downsample500resolutionChunk,
			}
		}(),
		{
			name: "two aggregated chunks",
			inAggr: []map[AggrType][]sample{
				{
					AggrCount: {{199, 5}, {299, 1}, {399, 10}, {400, 3}, {499, 10}, {699, 0}, {999, 100}},
					AggrSum:   {{199, 5}, {299, 1}, {399, 10}, {400, 3}, {499, 10}, {699, 0}, {999, 100}},
					AggrMin:   {{199, 5}, {299, 1}, {399, 10}, {400, -3}, {499, 10}, {699, 0}, {999, 100}},
					AggrMax:   {{199, 5}, {299, 1}, {399, 10}, {400, -3}, {499, 10}, {699, 0}, {999, 100}},
					AggrCounter: {
						{99, 100}, {299, 150}, {499, 210}, {499, 10}, // Chunk 1.
						{599, 20}, {799, 50}, {999, 120}, {999, 50}, // Chunk 2, no reset.
						{1099, 40}, {1199, 80}, {1299, 110}, // Chunk 3, reset.
					},
				},
				{
					AggrCount: {{1399, 10}, {1400, 3}, {1499, 10}, {1699, 0}, {1999, 100}},
					AggrSum:   {{1399, 10}, {1400, 3}, {1499, 10}, {1699, 0}, {1999, 100}},
					AggrMin:   {{1399, 10}, {1400, -3}, {1499, 10}, {1699, 0}, {1999, 100}},
					AggrMax:   {{1399, 10}, {1400, -3}, {1499, 10}, {1699, 0}, {1999, 100}},
					AggrCounter: {
						{1499, 210}, {1499, 10}, // Chunk 1.
						{1599, 20}, {1799, 50}, {1999, 120}, {1999, 50}, // Chunk 2, no reset.
						{2099, 40}, {2199, 80}, {2299, 110}, // Chunk 3, reset.
					},
				},
			},
			resolution: 500,

			expected: []map[AggrType][]sample{
				{
					AggrCount:   {{t: 499, v: 29}, {t: 999, v: 100}, {t: 1499, v: 23}, {t: 1999, v: 100}},
					AggrSum:     {{t: 499, v: 29}, {t: 999, v: 100}, {t: 1499, v: 23}, {t: 1999, v: 100}},
					AggrMin:     {{t: 499, v: -3}, {t: 999, v: 0}, {t: 1499, v: -3}, {t: 1999, v: 0}},
					AggrMax:     {{t: 499, v: 10}, {t: 999, v: 100}, {t: 1499, v: 10}, {t: 1999, v: 100}},
					AggrCounter: {{t: 99, v: 100}, {t: 499, v: 210}, {t: 999, v: 320}, {t: 1499, v: 530}, {t: 1999, v: 640}, {t: 2299, v: 750}, {t: 2299, v: 110}},
				},
			},
		},
		{
			name: "two aggregated, overlapping chunks",
			inAggr: []map[AggrType][]sample{
				{
					AggrCount: {{199, 5}, {299, 1}, {399, 10}, {400, 3}, {499, 10}, {699, 0}, {999, 100}},
				},
				{
					AggrCount: {{199, 5}, {299, 1}, {399, 10}, {400, 3}, {499, 10}, {699, 0}, {999, 100}},
				},
			},
			resolution: 500,

			expectedDownsamplingErr: func(chks []chunks.Meta) error {
				return errors.Errorf("found overlapping chunks within series 0. Chunks expected to be ordered by min time and non-overlapping, got: %v", chks)
			},
		},
		{
			name: "realistic ResLevel1 (5m) downsampled chunks with from counter resets",
			inAggr: []map[AggrType][]sample{
				{
					AggrCount:   {{t: 1587690299999, v: 20}, {t: 1587690599999, v: 20}, {t: 1587690899999, v: 20}, {t: 1587691199999, v: 20}, {t: 1587691499999, v: 20}, {t: 1587691799999, v: 20}, {t: 1587692099999, v: 20}, {t: 1587692399999, v: 20}, {t: 1587692699999, v: 16}, {t: 1587692999999, v: 20}, {t: 1587693299999, v: 20}, {t: 1587693590791, v: 20}},
					AggrSum:     {{t: 1587690299999, v: 9.276972e+06}, {t: 1587690599999, v: 9.359861e+06}, {t: 1587690899999, v: 9.447457e+06}, {t: 1587691199999, v: 9.542732e+06}, {t: 1587691499999, v: 9.630379e+06}, {t: 1587691799999, v: 9.715631e+06}, {t: 1587692099999, v: 9.799808e+06}, {t: 1587692399999, v: 9.888117e+06}, {t: 1587692699999, v: 2.98928e+06}, {t: 1587692999999, v: 81592}, {t: 1587693299999, v: 163711}, {t: 1587693590791, v: 255746}},
					AggrMin:     {{t: 1587690299999, v: 461968}, {t: 1587690599999, v: 466070}, {t: 1587690899999, v: 470131}, {t: 1587691199999, v: 474913}, {t: 1587691499999, v: 479625}, {t: 1587691799999, v: 483709}, {t: 1587692099999, v: 488036}, {t: 1587692399999, v: 492223}, {t: 1587692699999, v: 75}, {t: 1587692999999, v: 2261}, {t: 1587693299999, v: 6210}, {t: 1587693590791, v: 10464}},
					AggrMax:     {{t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368}, {t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 496544}, {t: 1587692999999, v: 6010}, {t: 1587693299999, v: 10242}, {t: 1587693590791, v: 14956}},
					AggrCounter: {{t: 1587690005791, v: 461968}, {t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368}, {t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 498647}, {t: 1587692999999, v: 502554}, {t: 1587693299999, v: 506786}, {t: 1587693590791, v: 511500}, {t: 1587693590791, v: 14956}},
				},
			},
			resolution: ResLevel2,

			expected: []map[AggrType][]sample{
				{
					AggrCount:   {{t: 1587693590791, v: 236}},
					AggrSum:     {{t: 1587693590791, v: 8.0151286e+07}},
					AggrMin:     {{t: 1587693590791, v: 75}},
					AggrMax:     {{t: 1587693590791, v: 496544}},
					AggrCounter: {{t: 1587690005791, v: 461968}, {t: 1587693590791, v: 511500}, {t: 1587693590791, v: 14956}},
				},
			},
		},
		// TODO(bwplotka): This is not very efficient for further query time, we should produce 2 chunks. Fix it https://github.com/thanos-io/thanos/issues/2542.
		func() *downsampleTestCase {
			d := &downsampleTestCase{
				name:       "downsampling four, 120 sample chunks for 2x resolution should result in two chunks, but results in one.",
				resolution: 2,
				inAggr:     []map[AggrType][]sample{{AggrCounter: {}}, {AggrCounter: {}}, {AggrCounter: {}}, {AggrCounter: {}}},
				expected:   []map[AggrType][]sample{{AggrCounter: {}}},
			}

			// Four contiguous 120-sample counter chunks: t == v over [0, 480).
			for i := int64(0); i < 120; i++ {
				d.inAggr[0][AggrCounter] = append(d.inAggr[0][AggrCounter], sample{t: i, v: float64(i)})
				d.inAggr[1][AggrCounter] = append(d.inAggr[1][AggrCounter], sample{t: 120 + i, v: float64(120 + i)})
				d.inAggr[2][AggrCounter] = append(d.inAggr[2][AggrCounter], sample{t: 240 + i, v: float64(240 + i)})
				d.inAggr[3][AggrCounter] = append(d.inAggr[3][AggrCounter], sample{t: 360 + i, v: float64(360 + i)})
			}

			// Expected: first sample kept, every 2nd sample thereafter, and the
			// last value repeated at t=479 (trailing counter sample).
			d.expected[0][AggrCounter] = append(d.expected[0][AggrCounter], sample{t: 0, v: float64(0)})
			for i := int64(0); i < 480; i += 2 {
				d.expected[0][AggrCounter] = append(d.expected[0][AggrCounter], sample{t: 1 + i, v: float64(1 + i)})
			}
			d.expected[0][AggrCounter] = append(d.expected[0][AggrCounter], sample{t: 479, v: 479})

			return d
		}(),
	} {
		t.Run(tcase.name, func(t *testing.T) {
			logger := log.NewLogfmtLogger(os.Stderr)

			dir := t.TempDir()

			// Ideally we would use tsdb.HeadBlock here for less dependency on our own code. However,
			// it cannot accept the counter signal sample with the same timestamp as the previous sample.
			mb := newMemBlock()
			ser := chunksToSeriesIteratable(t, tcase.inRaw, tcase.inAggr)
			mb.addSeries(ser)

			// Mark the fake block as already downsampled (non-zero resolution)
			// when aggregated input is given — presumably so Downsample treats
			// the chunks as aggregates rather than raw samples; confirm in Downsample.
			fakeMeta := &metadata.Meta{}
			if len(tcase.inAggr) > 0 {
				fakeMeta.Thanos.Downsample.Resolution = tcase.resolution - 1
			}

			id, err := Downsample(logger, fakeMeta, mb, dir, tcase.resolution)
			if tcase.expectedDownsamplingErr != nil {
				testutil.NotOk(t, err)
				testutil.Equals(t, tcase.expectedDownsamplingErr(ser.chunks).Error(), err.Error())
				return
			}
			testutil.Ok(t, err)

			// Downsample wrote a block to dir/<id>; read its meta, index and
			// chunks back to verify what was persisted.
			_, err = metadata.ReadFromDir(filepath.Join(dir, id.String()))
			testutil.Ok(t, err)

			indexr, err := index.NewFileReader(filepath.Join(dir, id.String(), block.IndexFilename))
			testutil.Ok(t, err)
			defer func() { testutil.Ok(t, indexr.Close()) }()

			chunkr, err := chunks.NewDirReader(filepath.Join(dir, id.String(), block.ChunksDirname), NewPool())
			testutil.Ok(t, err)
			defer func() { testutil.Ok(t, chunkr.Close()) }()

			pall, err := indexr.Postings(index.AllPostingsKey())
			testutil.Ok(t, err)

			// Exactly one series is expected in the output block.
			var series []storage.SeriesRef
			for pall.Next() {
				series = append(series, pall.At())
			}
			testutil.Ok(t, pall.Err())
			testutil.Equals(t, 1, len(series))

			var builder labels.ScratchBuilder
			var lset labels.Labels
			var chks []chunks.Meta
			testutil.Ok(t, indexr.Series(series[0], &builder, &chks))

			lset = builder.Labels()
			testutil.Equals(t, labels.FromStrings("__name__", "a"), lset)

			// Expand every aggregate sub-chunk of every output chunk into
			// samples; absent aggregates (ErrAggrNotExist) are simply skipped.
			var got []map[AggrType][]sample
			for _, c := range chks {
				chk, err := chunkr.Chunk(c)
				testutil.Ok(t, err)

				m := map[AggrType][]sample{}
				for _, at := range []AggrType{AggrCount, AggrSum, AggrMin, AggrMax, AggrCounter} {
					c, err := chk.(*AggrChunk).Get(at)
					if err == ErrAggrNotExist {
						continue
					}
					testutil.Ok(t, err)

					buf := m[at]
					testutil.Ok(t, expandChunkIterator(c.Iterator(nil), &buf))
					m[at] = buf
				}
				got = append(got, m)
			}
			testutil.Equals(t, tcase.expected, got)
		})
	}
}
   510  
   511  func TestDownsampleAggrAndEmptyXORChunks(t *testing.T) {
   512  	logger := log.NewLogfmtLogger(os.Stderr)
   513  	dir := t.TempDir()
   514  
   515  	ser := &series{lset: labels.FromStrings("__name__", "a")}
   516  	aggr := map[AggrType][]sample{
   517  		AggrCount:   {{t: 1587690299999, v: 20}, {t: 1587690599999, v: 20}, {t: 1587690899999, v: 20}, {t: 1587691199999, v: 20}, {t: 1587691499999, v: 20}, {t: 1587691799999, v: 20}, {t: 1587692099999, v: 20}, {t: 1587692399999, v: 20}, {t: 1587692699999, v: 16}, {t: 1587692999999, v: 20}, {t: 1587693299999, v: 20}, {t: 1587693590791, v: 20}},
   518  		AggrSum:     {{t: 1587690299999, v: 9.276972e+06}, {t: 1587690599999, v: 9.359861e+06}, {t: 1587690899999, v: 9.447457e+06}, {t: 1587691199999, v: 9.542732e+06}, {t: 1587691499999, v: 9.630379e+06}, {t: 1587691799999, v: 9.715631e+06}, {t: 1587692099999, v: 9.799808e+06}, {t: 1587692399999, v: 9.888117e+06}, {t: 1587692699999, v: 2.98928e+06}, {t: 1587692999999, v: 81592}, {t: 1587693299999, v: 163711}, {t: 1587693590791, v: 255746}},
   519  		AggrMin:     {{t: 1587690299999, v: 461968}, {t: 1587690599999, v: 466070}, {t: 1587690899999, v: 470131}, {t: 1587691199999, v: 474913}, {t: 1587691499999, v: 479625}, {t: 1587691799999, v: 483709}, {t: 1587692099999, v: 488036}, {t: 1587692399999, v: 492223}, {t: 1587692699999, v: 75}, {t: 1587692999999, v: 2261}, {t: 1587693299999, v: 6210}, {t: 1587693590791, v: 10464}},
   520  		AggrMax:     {{t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368}, {t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 496544}, {t: 1587692999999, v: 6010}, {t: 1587693299999, v: 10242}, {t: 1587693590791, v: 14956}},
   521  		AggrCounter: {{t: 1587690005791, v: 461968}, {t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368}, {t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 498647}, {t: 1587692999999, v: 502554}, {t: 1587693299999, v: 506786}, {t: 1587693590791, v: 511500}, {t: 1587693590791, v: 14956}},
   522  	}
   523  	raw := chunkenc.NewXORChunk()
   524  	ser.chunks = append(ser.chunks, encodeTestAggrSeries(aggr), chunks.Meta{
   525  		MinTime: math.MaxInt64,
   526  		MaxTime: math.MinInt64,
   527  		Chunk:   raw,
   528  	})
   529  
   530  	mb := newMemBlock()
   531  	mb.addSeries(ser)
   532  
   533  	fakeMeta := &metadata.Meta{}
   534  	fakeMeta.Thanos.Downsample.Resolution = 300_000
   535  	id, err := Downsample(logger, fakeMeta, mb, dir, 3_600_000)
   536  	_ = id
   537  	testutil.Ok(t, err)
   538  }
   539  
   540  func TestDownsampleAggrAndNonEmptyXORChunks(t *testing.T) {
   541  
   542  	logger := log.NewLogfmtLogger(os.Stderr)
   543  	dir := t.TempDir()
   544  	ser := &series{lset: labels.FromStrings("__name__", "a")}
   545  	aggr := map[AggrType][]sample{
   546  		AggrCount:   {{t: 1587690299999, v: 20}, {t: 1587690599999, v: 20}, {t: 1587690899999, v: 20}},
   547  		AggrSum:     {{t: 1587690299999, v: 9.276972e+06}, {t: 1587690599999, v: 9.359861e+06}, {t: 1587693590791, v: 255746}},
   548  		AggrMin:     {{t: 1587690299999, v: 461968}, {t: 1587690599999, v: 466070}, {t: 1587690899999, v: 470131}, {t: 1587691199999, v: 474913}},
   549  		AggrMax:     {{t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}},
   550  		AggrCounter: {{t: 1587690005791, v: 461968}, {t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}},
   551  	}
   552  	raw := chunkenc.NewXORChunk()
   553  	app, err := raw.Appender()
   554  	testutil.Ok(t, err)
   555  
   556  	app.Append(1587690005794, 42.5)
   557  
   558  	ser.chunks = append(ser.chunks, encodeTestAggrSeries(aggr), chunks.Meta{
   559  		MinTime: math.MaxInt64,
   560  		MaxTime: math.MinInt64,
   561  		Chunk:   raw,
   562  	})
   563  
   564  	mb := newMemBlock()
   565  	mb.addSeries(ser)
   566  
   567  	fakeMeta := &metadata.Meta{}
   568  	fakeMeta.Thanos.Downsample.Resolution = 300_000
   569  	id, err := Downsample(logger, fakeMeta, mb, dir, 3_600_000)
   570  	_ = id
   571  	testutil.Ok(t, err)
   572  
   573  	expected := []map[AggrType][]sample{
   574  		{
   575  			AggrCount:   {{1587690005794, 20}, {1587690005794, 20}, {1587690005794, 21}},
   576  			AggrSum:     {{1587690005794, 9.276972e+06}, {1587690005794, 9.359861e+06}, {1587690005794, 255788.5}},
   577  			AggrMin:     {{1587690005794, 461968}, {1587690005794, 466070}, {1587690005794, 470131}, {1587690005794, 42.5}},
   578  			AggrMax:     {{1587690005794, 465870}, {1587690005794, 469951}, {1587690005794, 474726}},
   579  			AggrCounter: {{1587690005791, 461968}, {1587690599999, 469951}, {1587690599999, 469951}},
   580  		},
   581  	}
   582  
   583  	_, err = metadata.ReadFromDir(filepath.Join(dir, id.String()))
   584  	testutil.Ok(t, err)
   585  
   586  	indexr, err := index.NewFileReader(filepath.Join(dir, id.String(), block.IndexFilename))
   587  	testutil.Ok(t, err)
   588  	defer func() { testutil.Ok(t, indexr.Close()) }()
   589  
   590  	chunkr, err := chunks.NewDirReader(filepath.Join(dir, id.String(), block.ChunksDirname), NewPool())
   591  	testutil.Ok(t, err)
   592  	defer func() { testutil.Ok(t, chunkr.Close()) }()
   593  
   594  	pall, err := indexr.Postings(index.AllPostingsKey())
   595  	testutil.Ok(t, err)
   596  
   597  	var series []storage.SeriesRef
   598  	for pall.Next() {
   599  		series = append(series, pall.At())
   600  	}
   601  	testutil.Ok(t, pall.Err())
   602  	testutil.Equals(t, 1, len(series))
   603  
   604  	var builder labels.ScratchBuilder
   605  	var chks []chunks.Meta
   606  	testutil.Ok(t, indexr.Series(series[0], &builder, &chks))
   607  
   608  	var got []map[AggrType][]sample
   609  	for _, c := range chks {
   610  		chk, err := chunkr.Chunk(c)
   611  		testutil.Ok(t, err)
   612  
   613  		m := map[AggrType][]sample{}
   614  		for _, at := range []AggrType{AggrCount, AggrSum, AggrMin, AggrMax, AggrCounter} {
   615  			c, err := chk.(*AggrChunk).Get(at)
   616  			if err == ErrAggrNotExist {
   617  				continue
   618  			}
   619  			testutil.Ok(t, err)
   620  
   621  			buf := m[at]
   622  			testutil.Ok(t, expandChunkIterator(c.Iterator(nil), &buf))
   623  			m[at] = buf
   624  		}
   625  		got = append(got, m)
   626  	}
   627  	testutil.Equals(t, expected, got)
   628  
   629  }
   630  
   631  func chunksToSeriesIteratable(t *testing.T, inRaw [][]sample, inAggr []map[AggrType][]sample) *series {
   632  	if len(inRaw) > 0 && len(inAggr) > 0 {
   633  		t.Fatalf("test must not have raw and aggregate input data at once")
   634  	}
   635  	ser := &series{lset: labels.FromStrings("__name__", "a")}
   636  
   637  	if len(inRaw) > 0 {
   638  		for _, samples := range inRaw {
   639  			chk := chunkenc.NewXORChunk()
   640  			app, _ := chk.Appender()
   641  
   642  			for _, s := range samples {
   643  				app.Append(s.t, s.v)
   644  			}
   645  			ser.chunks = append(ser.chunks, chunks.Meta{
   646  				MinTime: samples[0].t,
   647  				MaxTime: samples[len(samples)-1].t,
   648  				Chunk:   chk,
   649  			})
   650  		}
   651  		return ser
   652  	}
   653  
   654  	for _, chk := range inAggr {
   655  		ser.chunks = append(ser.chunks, encodeTestAggrSeries(chk))
   656  	}
   657  	return ser
   658  }
   659  func encodeTestAggrSeries(v map[AggrType][]sample) chunks.Meta {
   660  	b := newAggrChunkBuilder()
   661  	// we cannot use `b.add` as we have separate samples, do it manually, but make sure to
   662  	// calculate overall chunk time ranges.
   663  	for at, d := range v {
   664  		for _, s := range d {
   665  			if s.t < b.mint {
   666  				b.mint = s.t
   667  			}
   668  			if s.t > b.maxt {
   669  				b.maxt = s.t
   670  			}
   671  			b.apps[at].Append(s.t, s.v)
   672  		}
   673  	}
   674  	return b.encode()
   675  }
   676  
   677  func TestAverageChunkIterator(t *testing.T) {
   678  	sum := []sample{{100, 30}, {200, 40}, {300, 5}, {400, -10}}
   679  	cnt := []sample{{100, 1}, {200, 5}, {300, 2}, {400, 10}}
   680  	exp := []sample{{100, 30}, {200, 8}, {300, 2.5}, {400, -1}}
   681  
   682  	x := NewAverageChunkIterator(newSampleIterator(cnt), newSampleIterator(sum))
   683  
   684  	var res []sample
   685  	for x.Next() != chunkenc.ValNone {
   686  		t, v := x.At()
   687  		res = append(res, sample{t, v})
   688  	}
   689  	testutil.Ok(t, x.Err())
   690  	testutil.Equals(t, exp, res)
   691  }
   692  
var (
	// realisticChkDataWithCounterResetsAfterCounterSeriesIterating is the
	// expected output of running ApplyCounterResetsIterator over
	// realisticChkDataWithStaleMarker (defined elsewhere in this file); see
	// TestApplyCounterResetsIterator. Both timestamps and values are
	// non-decreasing throughout, as asserted by that test.
	realisticChkDataWithCounterResetsAfterCounterSeriesIterating = []sample{
		{t: 1587690005791, v: 461968}, {t: 1587690020791, v: 462151}, {t: 1587690035797, v: 462336}, {t: 1587690050791, v: 462650}, {t: 1587690065791, v: 462813}, {t: 1587690080791, v: 462987}, {t: 1587690095791, v: 463095}, {t: 1587690110791, v: 463247}, {t: 1587690125791, v: 463440}, {t: 1587690140791, v: 463642}, {t: 1587690155791, v: 463811},
		{t: 1587690170791, v: 464027}, {t: 1587690185791, v: 464308}, {t: 1587690200791, v: 464514}, {t: 1587690215791, v: 464798}, {t: 1587690230791, v: 465018}, {t: 1587690245791, v: 465215}, {t: 1587690260813, v: 465431}, {t: 1587690275791, v: 465651}, {t: 1587690290791, v: 465870}, {t: 1587690305791, v: 466070}, {t: 1587690320792, v: 466248},
		{t: 1587690335791, v: 466506}, {t: 1587690350791, v: 466766}, {t: 1587690365791, v: 466970}, {t: 1587690380791, v: 467123}, {t: 1587690395791, v: 467265}, {t: 1587690410791, v: 467383}, {t: 1587690425791, v: 467629}, {t: 1587690440791, v: 467931}, {t: 1587690455791, v: 468097}, {t: 1587690470791, v: 468281}, {t: 1587690485791, v: 468477},
		{t: 1587690500791, v: 468649}, {t: 1587690515791, v: 468867}, {t: 1587690530791, v: 469150}, {t: 1587690545791, v: 469268}, {t: 1587690560791, v: 469488}, {t: 1587690575791, v: 469742}, {t: 1587690590791, v: 469951}, {t: 1587690605791, v: 470131}, {t: 1587690620791, v: 470337}, {t: 1587690635791, v: 470631}, {t: 1587690650791, v: 470832},
		{t: 1587690665791, v: 471077}, {t: 1587690680791, v: 471311}, {t: 1587690695791, v: 471473}, {t: 1587690710791, v: 471728}, {t: 1587690725791, v: 472002}, {t: 1587690740791, v: 472158}, {t: 1587690755791, v: 472329}, {t: 1587690770791, v: 472722}, {t: 1587690785791, v: 472925}, {t: 1587690800791, v: 473220}, {t: 1587690815791, v: 473460},
		{t: 1587690830791, v: 473748}, {t: 1587690845791, v: 473968}, {t: 1587690860791, v: 474261}, {t: 1587690875791, v: 474418}, {t: 1587690890791, v: 474726}, {t: 1587690905791, v: 474913}, {t: 1587690920791, v: 475031}, {t: 1587690935791, v: 475284}, {t: 1587690950791, v: 475563}, {t: 1587690965791, v: 475762}, {t: 1587690980791, v: 475945},
		{t: 1587690995791, v: 476302}, {t: 1587691010791, v: 476501}, {t: 1587691025791, v: 476849}, {t: 1587691040800, v: 477020}, {t: 1587691055791, v: 477280}, {t: 1587691070791, v: 477549}, {t: 1587691085791, v: 477758}, {t: 1587691100817, v: 477960}, {t: 1587691115791, v: 478261}, {t: 1587691130791, v: 478559}, {t: 1587691145791, v: 478704},
		{t: 1587691160804, v: 478950}, {t: 1587691175791, v: 479173}, {t: 1587691190791, v: 479368}, {t: 1587691205791, v: 479625}, {t: 1587691220805, v: 479866}, {t: 1587691235791, v: 480008}, {t: 1587691250791, v: 480155}, {t: 1587691265791, v: 480472}, {t: 1587691280811, v: 480598}, {t: 1587691295791, v: 480771}, {t: 1587691310791, v: 480996},
		{t: 1587691325791, v: 481200}, {t: 1587691340803, v: 481381}, {t: 1587691355791, v: 481584}, {t: 1587691370791, v: 481759}, {t: 1587691385791, v: 482003}, {t: 1587691400803, v: 482189}, {t: 1587691415791, v: 482457}, {t: 1587691430791, v: 482623}, {t: 1587691445791, v: 482768}, {t: 1587691460804, v: 483036}, {t: 1587691475791, v: 483322},
		{t: 1587691490791, v: 483566}, {t: 1587691505791, v: 483709}, {t: 1587691520807, v: 483838}, {t: 1587691535791, v: 484091}, {t: 1587691550791, v: 484236}, {t: 1587691565791, v: 484454}, {t: 1587691580816, v: 484710}, {t: 1587691595791, v: 484978}, {t: 1587691610791, v: 485271}, {t: 1587691625791, v: 485476}, {t: 1587691640792, v: 485640},
		{t: 1587691655791, v: 485921}, {t: 1587691670791, v: 486201}, {t: 1587691685791, v: 486555}, {t: 1587691700791, v: 486691}, {t: 1587691715791, v: 486831}, {t: 1587691730791, v: 487033}, {t: 1587691745791, v: 487268}, {t: 1587691760803, v: 487370}, {t: 1587691775791, v: 487571}, {t: 1587691790791, v: 487787}, {t: 1587691805791, v: 488036},
		{t: 1587691820791, v: 488241}, {t: 1587691835791, v: 488411}, {t: 1587691850791, v: 488625}, {t: 1587691865791, v: 488868}, {t: 1587691880791, v: 489005}, {t: 1587691895791, v: 489237}, {t: 1587691910791, v: 489545}, {t: 1587691925791, v: 489750}, {t: 1587691940791, v: 489899}, {t: 1587691955791, v: 490048}, {t: 1587691970791, v: 490364},
		{t: 1587691985791, v: 490485}, {t: 1587692000791, v: 490722}, {t: 1587692015791, v: 490866}, {t: 1587692030791, v: 491025}, {t: 1587692045791, v: 491286}, {t: 1587692060816, v: 491543}, {t: 1587692075791, v: 491787}, {t: 1587692090791, v: 492065}, {t: 1587692105791, v: 492223}, {t: 1587692120816, v: 492501}, {t: 1587692135791, v: 492767},
		{t: 1587692150791, v: 492955}, {t: 1587692165791, v: 493194}, {t: 1587692180792, v: 493402}, {t: 1587692195791, v: 493647}, {t: 1587692210791, v: 493897}, {t: 1587692225791, v: 494117}, {t: 1587692240805, v: 494356}, {t: 1587692255791, v: 494620}, {t: 1587692270791, v: 494762}, {t: 1587692285791, v: 495001}, {t: 1587692300805, v: 495222},
		{t: 1587692315791, v: 495393}, {t: 1587692330791, v: 495662}, {t: 1587692345791, v: 495875}, {t: 1587692360801, v: 496082}, {t: 1587692375791, v: 496196}, {t: 1587692390791, v: 496245}, {t: 1587692405791, v: 496295}, {t: 1587692420791, v: 496365}, {t: 1587692435791, v: 496401}, {t: 1587692450791, v: 496452}, {t: 1587692465791, v: 496491},
		{t: 1587692480791, v: 496544}, {t: 1587692555791, v: 496619}, {t: 1587692570791, v: 496852}, {t: 1587692585791, v: 497052}, {t: 1587692600791, v: 497245}, {t: 1587692615791, v: 497529}, {t: 1587692630791, v: 497697}, {t: 1587692645791, v: 497909}, {t: 1587692660791, v: 498156}, {t: 1587692675803, v: 498466}, {t: 1587692690791, v: 498647},
		{t: 1587692705791, v: 498805}, {t: 1587692720791, v: 499013}, {t: 1587692735805, v: 499169}, {t: 1587692750791, v: 499345}, {t: 1587692765791, v: 499499}, {t: 1587692780791, v: 499731}, {t: 1587692795806, v: 499972}, {t: 1587692810791, v: 500201}, {t: 1587692825791, v: 500354}, {t: 1587692840791, v: 500512}, {t: 1587692855791, v: 500739},
		{t: 1587692870791, v: 500958}, {t: 1587692885791, v: 501190}, {t: 1587692900791, v: 501233}, {t: 1587692915791, v: 501391}, {t: 1587692930791, v: 501649}, {t: 1587692945791, v: 501853}, {t: 1587692960791, v: 502065}, {t: 1587692975791, v: 502239}, {t: 1587692990810, v: 502554}, {t: 1587693005791, v: 502754}, {t: 1587693020791, v: 502938},
		{t: 1587693035791, v: 503141}, {t: 1587693050791, v: 503416}, {t: 1587693065791, v: 503642}, {t: 1587693080791, v: 503873}, {t: 1587693095791, v: 504014}, {t: 1587693110791, v: 504178}, {t: 1587693125821, v: 504374}, {t: 1587693140791, v: 504578}, {t: 1587693155791, v: 504753}, {t: 1587693170791, v: 505043}, {t: 1587693185791, v: 505232},
		{t: 1587693200791, v: 505437}, {t: 1587693215791, v: 505596}, {t: 1587693230791, v: 505923}, {t: 1587693245791, v: 506088}, {t: 1587693260791, v: 506307}, {t: 1587693275791, v: 506518}, {t: 1587693290791, v: 506786}, {t: 1587693305791, v: 507008}, {t: 1587693320803, v: 507260}, {t: 1587693335791, v: 507519}, {t: 1587693350791, v: 507776},
		{t: 1587693365791, v: 508003}, {t: 1587693380791, v: 508322}, {t: 1587693395804, v: 508551}, {t: 1587693410791, v: 508750}, {t: 1587693425791, v: 508994}, {t: 1587693440791, v: 509237}, {t: 1587693455791, v: 509452}, {t: 1587693470791, v: 509702}, {t: 1587693485791, v: 509971}, {t: 1587693500791, v: 510147}, {t: 1587693515791, v: 510471},
		{t: 1587693530816, v: 510666}, {t: 1587693545791, v: 510871}, {t: 1587693560791, v: 511123}, {t: 1587693575791, v: 511303}, {t: 1587693590791, v: 511500},
	}
)
   719  
// TestApplyCounterResetsIterator checks that iterating over (possibly
// overlapping) counter chunks yields a single series with stale markers
// dropped and counter resets folded in, so that both timestamps and values
// are monotonically non-decreasing.
func TestApplyCounterResetsIterator(t *testing.T) {
	for _, tcase := range []struct {
		name string

		chunks [][]sample

		expected []sample
	}{
		{
			name: "series with stale marker",
			chunks: [][]sample{
				{{100, 10}, {200, 20}, {300, 10}, {400, 20}, {400, 5}},
				{{500, 10}, {600, 20}, {700, 30}, {800, 40}, {800, 10}},                // No reset, just downsampling added sample at the end.
				{{900, 5}, {1000, 10}, {1100, 15}},                                     // Actual reset.
				{{1200, 20}, {1250, math.Float64frombits(value.StaleNaN)}, {1300, 40}}, // No special last sample, no reset.
				{{1400, 30}, {1500, 30}, {1600, 50}},                                   // No special last sample, reset.
			},
			expected: []sample{
				{100, 10}, {200, 20}, {300, 30}, {400, 40}, {500, 45},
				{600, 55}, {700, 65}, {800, 75}, {900, 80}, {1000, 85},
				{1100, 90}, {1200, 95}, {1300, 115}, {1400, 145}, {1500, 145}, {1600, 165},
			},
		},
		{
			name:     "realistic raw data with 2 chunks that have one stale marker",
			chunks:   realisticChkDataWithStaleMarker,
			expected: realisticChkDataWithCounterResetsAfterCounterSeriesIterating,
		},
		{
			// This can easily happen when querying StoreAPI with same data. Counter series should handle this.
			name: "realistic raw data with many overlapping chunks with stale markers",
			chunks: [][]sample{
				realisticChkDataWithStaleMarker[0],
				realisticChkDataWithStaleMarker[0],
				realisticChkDataWithStaleMarker[0],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
				realisticChkDataWithStaleMarker[1],
			},
			expected: realisticChkDataWithCounterResetsAfterCounterSeriesIterating,
		},
		{
			name:   "the same above input (realisticChkDataWithStaleMarker), but after 5m downsampling",
			chunks: [][]sample{realisticChkDataWithCounterResetRes5m[0][AggrCounter]},
			expected: []sample{
				{t: 1587690005791, v: 461968}, {t: 1587690299999, v: 465870}, {t: 1587690599999, v: 469951}, {t: 1587690899999, v: 474726}, {t: 1587691199999, v: 479368},
				{t: 1587691499999, v: 483566}, {t: 1587691799999, v: 487787}, {t: 1587692099999, v: 492065}, {t: 1587692399999, v: 496245}, {t: 1587692699999, v: 498647},
				{t: 1587692999999, v: 502554}, {t: 1587693299999, v: 506786}, {t: 1587693590791, v: 511500},
			},
		},
	} {
		t.Run(tcase.name, func(t *testing.T) {
			var its []chunkenc.Iterator
			for _, c := range tcase.chunks {
				its = append(its, newSampleIterator(c))
			}

			x := NewApplyCounterResetsIterator(its...)

			var res []sample
			for x.Next() != chunkenc.ValNone {
				t, v := x.At()
				res = append(res, sample{t, v})
			}
			testutil.Ok(t, x.Err())
			testutil.Equals(t, tcase.expected, res)

			// Sanity-check the core invariant of the iterator output:
			// timestamps and values must never decrease.
			for i := range res[1:] {
				testutil.Assert(t, res[i+1].t >= res[i].t, "sample time %v is not monotonically increasing. previous sample %v is older", res[i+1], res[i])
				testutil.Assert(t, res[i+1].v >= res[i].v, "sample value %v is not monotonically increasing. previous sample %v is larger", res[i+1], res[i])
			}
		})
	}

}
   801  
// TestApplyCounterResetsIteratorHistograms runs the counter-reset iterator
// over generated native-histogram chunks and expects every histogram to be
// emitted exactly once, in order, with non-decreasing timestamps.
func TestApplyCounterResetsIteratorHistograms(t *testing.T) {
	const lenChunks, lenChunk = 4, 10

	histograms := tsdbutil.GenerateTestHistograms(lenChunks * lenChunk)

	// Split the generated histograms into lenChunks chunks of lenChunk
	// samples each, with timestamps spaced 100 time units apart.
	var chunks [][]*histogramPair
	for i := 0; i < lenChunks; i++ {
		var chunk []*histogramPair
		for j := 0; j < lenChunk; j++ {
			chunk = append(chunk, &histogramPair{t: int64(i*lenChunk+j) * 100, h: histograms[i*lenChunk+j]})
		}
		chunks = append(chunks, chunk)
	}

	// The flat concatenation of all chunks is the expected output.
	var expected []*histogramPair
	for i, h := range histograms {
		expected = append(expected, &histogramPair{t: int64(i * 100), h: h})
	}

	for _, tcase := range []struct {
		name string

		chunks [][]*histogramPair

		expected []*histogramPair
	}{
		{
			name:     "histogram series",
			chunks:   chunks,
			expected: expected,
		},
	} {
		t.Run(tcase.name, func(t *testing.T) {
			var its []chunkenc.Iterator
			for _, c := range tcase.chunks {
				its = append(its, newHistogramIterator(c))
			}

			x := NewApplyCounterResetsIterator(its...)

			var res []*histogramPair
			for x.Next() != chunkenc.ValNone {
				t, h := x.AtHistogram()
				res = append(res, &histogramPair{t, h})
			}
			testutil.Ok(t, x.Err())
			testutil.Equals(t, tcase.expected, res)

			// Timestamps must never decrease across the merged output.
			for i := range res[1:] {
				testutil.Assert(t, res[i+1].t >= res[i].t, "sample time %v is not monotonically increasing. previous sample %v is older", res[i+1], res[i])
			}
		})
	}

}
   857  
   858  func TestCounterSeriesIteratorSeek(t *testing.T) {
   859  	chunks := [][]sample{
   860  		{{100, 10}, {200, 20}, {300, 10}, {400, 20}, {400, 5}},
   861  	}
   862  
   863  	exp := []sample{
   864  		{200, 20}, {300, 30}, {400, 40},
   865  	}
   866  
   867  	var its []chunkenc.Iterator
   868  	for _, c := range chunks {
   869  		its = append(its, newSampleIterator(c))
   870  	}
   871  
   872  	var res []sample
   873  	x := NewApplyCounterResetsIterator(its...)
   874  
   875  	valueType := x.Seek(150)
   876  	testutil.Equals(t, chunkenc.ValFloat, valueType, "Seek should return float value type")
   877  	testutil.Ok(t, x.Err())
   878  	for {
   879  		ts, v := x.At()
   880  		res = append(res, sample{ts, v})
   881  
   882  		if x.Next() == chunkenc.ValNone {
   883  			break
   884  		}
   885  	}
   886  	testutil.Equals(t, exp, res)
   887  }
   888  
   889  func TestCounterSeriesIteratorSeekExtendTs(t *testing.T) {
   890  	chunks := [][]sample{
   891  		{{100, 10}, {200, 20}, {300, 10}, {400, 20}, {400, 5}},
   892  	}
   893  
   894  	var its []chunkenc.Iterator
   895  	for _, c := range chunks {
   896  		its = append(its, newSampleIterator(c))
   897  	}
   898  
   899  	x := NewApplyCounterResetsIterator(its...)
   900  
   901  	valueType := x.Seek(500)
   902  	testutil.Equals(t, chunkenc.ValNone, valueType, "Seek should return none value type")
   903  }
   904  
   905  func TestCounterSeriesIteratorSeekAfterNext(t *testing.T) {
   906  	chunks := [][]sample{
   907  		{{100, 10}},
   908  	}
   909  	exp := []sample{
   910  		{100, 10},
   911  	}
   912  
   913  	var its []chunkenc.Iterator
   914  	for _, c := range chunks {
   915  		its = append(its, newSampleIterator(c))
   916  	}
   917  
   918  	var res []sample
   919  	x := NewApplyCounterResetsIterator(its...)
   920  
   921  	x.Next()
   922  
   923  	valueType := x.Seek(50)
   924  	testutil.Equals(t, chunkenc.ValFloat, valueType, "Seek should return float value type")
   925  	testutil.Ok(t, x.Err())
   926  	for {
   927  		ts, v := x.At()
   928  		res = append(res, sample{ts, v})
   929  
   930  		if x.Next() == chunkenc.ValNone {
   931  			break
   932  		}
   933  	}
   934  	testutil.Equals(t, exp, res)
   935  }
   936  
   937  func TestSamplesFromTSDBSamples(t *testing.T) {
   938  	for _, tcase := range []struct {
   939  		name string
   940  
   941  		input []tsdbutil.Sample
   942  
   943  		expected []sample
   944  	}{
   945  		{
   946  			name:     "empty",
   947  			input:    []tsdbutil.Sample{},
   948  			expected: []sample{},
   949  		},
   950  		{
   951  			name:     "one sample",
   952  			input:    []tsdbutil.Sample{testSample{1, 1}},
   953  			expected: []sample{{1, 1}},
   954  		},
   955  		{
   956  			name:     "multiple samples",
   957  			input:    []tsdbutil.Sample{testSample{1, 1}, testSample{2, 2}, testSample{3, 3}, testSample{4, 4}, testSample{5, 5}},
   958  			expected: []sample{{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}},
   959  		},
   960  	} {
   961  		t.Run(tcase.name, func(t *testing.T) {
   962  			actual := SamplesFromTSDBSamples(tcase.input)
   963  			testutil.Equals(t, tcase.expected, actual)
   964  		})
   965  	}
   966  }
   967  
// testSample implements tsdbutil.Sample interface for plain float samples.
// Only T and F are exercised by the tests; the histogram accessors and Type
// deliberately panic to surface unexpected use.
type testSample struct {
	t int64
	f float64
}

// T returns the sample timestamp.
func (s testSample) T() int64 {
	return s.t
}

// F returns the float sample value.
func (s testSample) F() float64 {
	return s.f
}

// H is unused by these tests and intentionally panics.
func (s testSample) H() *histogram.Histogram {
	panic("not implemented")
}

// FH is unused by these tests and intentionally panics.
func (s testSample) FH() *histogram.FloatHistogram {
	panic("not implemented")
}

// Type is unused by these tests and intentionally panics.
func (s testSample) Type() chunkenc.ValueType {
	panic("not implemented")
}
   993  
   994  type sampleIterator struct {
   995  	l []sample
   996  	i int
   997  }
   998  
   999  func newSampleIterator(l []sample) *sampleIterator {
  1000  	return &sampleIterator{l: l, i: -1}
  1001  }
  1002  
  1003  func (it *sampleIterator) Err() error {
  1004  	return nil
  1005  }
  1006  
  1007  func (it *sampleIterator) Next() chunkenc.ValueType {
  1008  	if it.i >= len(it.l)-1 {
  1009  		return chunkenc.ValNone
  1010  	}
  1011  	it.i++
  1012  	return chunkenc.ValFloat
  1013  }
  1014  
  1015  func (it *sampleIterator) Seek(int64) chunkenc.ValueType {
  1016  	panic("unexpected")
  1017  }
  1018  
  1019  func (it *sampleIterator) At() (t int64, v float64) {
  1020  	return it.l[it.i].t, it.l[it.i].v
  1021  }
  1022  
  1023  func (it *sampleIterator) AtHistogram() (int64, *histogram.Histogram) {
  1024  	panic("not implemented")
  1025  }
  1026  
  1027  func (it *sampleIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
  1028  	panic("not implemented")
  1029  }
  1030  
  1031  func (it *sampleIterator) AtT() int64 {
  1032  	return it.l[it.i].t
  1033  }
  1034  
  1035  type histogramPair struct {
  1036  	t int64
  1037  	h *histogram.Histogram
  1038  }
  1039  
  1040  type histogramIterator struct {
  1041  	l []*histogramPair
  1042  	i int
  1043  }
  1044  
  1045  func newHistogramIterator(l []*histogramPair) *histogramIterator {
  1046  	return &histogramIterator{l: l, i: -1}
  1047  }
  1048  
  1049  func (it *histogramIterator) Err() error {
  1050  	return nil
  1051  }
  1052  
  1053  func (it *histogramIterator) Next() chunkenc.ValueType {
  1054  	if it.i >= len(it.l)-1 {
  1055  		return chunkenc.ValNone
  1056  	}
  1057  	it.i++
  1058  	return chunkenc.ValHistogram
  1059  }
  1060  
  1061  func (it *histogramIterator) Seek(int64) chunkenc.ValueType {
  1062  	panic("unexpected")
  1063  }
  1064  
  1065  func (it *histogramIterator) At() (t int64, v float64) {
  1066  	panic("not implemented")
  1067  }
  1068  
  1069  func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) {
  1070  	return it.l[it.i].t, it.l[it.i].h
  1071  }
  1072  
  1073  func (it *histogramIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
  1074  	panic("not implemented")
  1075  }
  1076  
  1077  func (it *histogramIterator) AtT() int64 {
  1078  	return it.l[it.i].t
  1079  }
  1080  
// memBlock is an in-memory block that implements a subset of the tsdb.BlockReader interface
// to allow tsdb.StreamedBlockWriter to persist the data as a block.
type memBlock struct {
	// Dummies to implement unused methods.
	tsdb.IndexReader

	// symbols collects every label name and value seen across added series.
	symbols map[string]struct{}
	// postings holds one SeriesRef per added series, sorted lazily by Postings().
	postings []storage.SeriesRef
	series   []*series
	// chunks is the flat chunk store; a chunk's index equals its assigned Ref.
	chunks []chunkenc.Chunk

	// numberOfChunks doubles as the next chunk Ref to hand out in addSeries.
	numberOfChunks uint64

	// minTime/maxTime track the block time range; -1 means "unset".
	// NOTE(review): -1 is itself a valid timestamp, so this sentinel assumes
	// test data never uses negative timestamps — confirm if reusing elsewhere.
	minTime, maxTime int64
}

// series is a labelled list of chunk metas held by memBlock.
type series struct {
	lset   labels.Labels
	chunks []chunks.Meta
}

// newMemBlock returns an empty memBlock with an unset (-1) time range.
func newMemBlock() *memBlock {
	return &memBlock{symbols: map[string]struct{}{}, minTime: -1, maxTime: -1}
}
  1105  
  1106  func (b *memBlock) addSeries(s *series) {
  1107  	sid := storage.SeriesRef(len(b.series))
  1108  	b.postings = append(b.postings, sid)
  1109  	b.series = append(b.series, s)
  1110  
  1111  	for _, l := range s.lset {
  1112  		b.symbols[l.Name] = struct{}{}
  1113  		b.symbols[l.Value] = struct{}{}
  1114  	}
  1115  
  1116  	for i, cm := range s.chunks {
  1117  		if b.minTime == -1 || cm.MinTime < b.minTime {
  1118  			b.minTime = cm.MinTime
  1119  		}
  1120  		if b.maxTime == -1 || cm.MaxTime < b.maxTime {
  1121  			b.maxTime = cm.MaxTime
  1122  		}
  1123  		s.chunks[i].Ref = chunks.ChunkRef(b.numberOfChunks)
  1124  		b.chunks = append(b.chunks, cm.Chunk)
  1125  		b.numberOfChunks++
  1126  	}
  1127  }
  1128  
// MinTime implements tsdb.BlockReader; it returns 0 while no chunks have been
// added (the -1 sentinel set by newMemBlock).
func (b *memBlock) MinTime() int64 {
	if b.minTime == -1 {
		return 0
	}

	return b.minTime
}

// MaxTime implements tsdb.BlockReader; it returns 0 while no chunks have been
// added (the -1 sentinel set by newMemBlock).
func (b *memBlock) MaxTime() int64 {
	if b.maxTime == -1 {
		return 0
	}

	return b.maxTime
}
  1144  
// Meta returns an empty block meta; the tests using memBlock do not rely on it.
func (b *memBlock) Meta() tsdb.BlockMeta {
	return tsdb.BlockMeta{}
}

// Postings only supports the "all postings" key and returns the refs of all
// added series sorted by label set. Any other key is rejected with an error.
func (b *memBlock) Postings(name string, val ...string) (index.Postings, error) {
	allName, allVal := index.AllPostingsKey()

	if name != allName || val[0] != allVal {
		return nil, errors.New("unexpected call to Postings() that is not AllVall")
	}
	// Sorts b.postings in place so repeated calls stay consistent.
	sort.Slice(b.postings, func(i, j int) bool {
		return labels.Compare(b.series[b.postings[i]].lset, b.series[b.postings[j]].lset) < 0
	})
	return index.NewListPostings(b.postings), nil
}
  1160  
// Series copies the label set and chunk metas of the series identified by id
// into builder and chks. Unknown ids yield a wrapped storage.ErrNotFound.
func (b *memBlock) Series(id storage.SeriesRef, builder *labels.ScratchBuilder, chks *[]chunks.Meta) error {
	if int(id) >= len(b.series) {
		return errors.Wrapf(storage.ErrNotFound, "series with ID %d does not exist", id)
	}
	s := b.series[id]

	builder.Reset()
	builder.Assign(s.lset)
	// Reuse the caller's slice capacity while replacing its contents.
	*chks = append((*chks)[:0], s.chunks...)

	return nil
}

// Chunk resolves a chunk by the sequential ref assigned in addSeries.
// Out-of-range refs yield a wrapped storage.ErrNotFound.
func (b *memBlock) Chunk(m chunks.Meta) (chunkenc.Chunk, error) {
	if uint64(m.Ref) >= b.numberOfChunks {
		return nil, errors.Wrapf(storage.ErrNotFound, "chunk with ID %d does not exist", m.Ref)
	}

	return b.chunks[m.Ref], nil
}
  1181  
// Symbols returns all collected label names and values in sorted order.
func (b *memBlock) Symbols() index.StringIter {
	res := make([]string, 0, len(b.symbols))
	for s := range b.symbols {
		res = append(res, s)
	}
	sort.Strings(res)
	return index.NewStringListIter(res)
}

// SortedPostings returns p unchanged; Postings already sorts by label set.
func (b *memBlock) SortedPostings(p index.Postings) index.Postings {
	return p
}

// Index returns the block itself, which acts as its own index reader.
func (b *memBlock) Index() (tsdb.IndexReader, error) {
	return b, nil
}

// Chunks returns the block itself, which acts as its own chunk reader.
func (b *memBlock) Chunks() (tsdb.ChunkReader, error) {
	return b, nil
}
  1202  
// Tombstones returns an always-empty tombstone reader: nothing is deleted.
func (b *memBlock) Tombstones() (tombstones.Reader, error) {
	return emptyTombstoneReader{}, nil
}

// Close is a no-op; memBlock holds no OS resources.
func (b *memBlock) Close() error {
	return nil
}

// Size reports 0 since nothing is persisted to disk.
func (b *memBlock) Size() int64 {
	return 0
}

// emptyTombstoneReader is a tombstones.Reader that reports no deletions.
type emptyTombstoneReader struct{}

func (emptyTombstoneReader) Get(storage.SeriesRef) (tombstones.Intervals, error) { return nil, nil }
func (emptyTombstoneReader) Iter(func(storage.SeriesRef, tombstones.Intervals) error) error {
	return nil
}
func (emptyTombstoneReader) Total() uint64 { return 0 }
func (emptyTombstoneReader) Close() error  { return nil }