github.com/m3db/m3@v1.5.1-0.20231129193456-75a402aa583b/src/dbnode/encoding/series_iterator_split_into_blocks_test.go

// Copyright (c) 2016 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

// TODO(arnikola) convert to example
package encoding_test

import (
	"testing"
	"time"

	"github.com/m3db/m3/src/dbnode/encoding"
	"github.com/m3db/m3/src/dbnode/encoding/m3tsz"
	"github.com/m3db/m3/src/dbnode/namespace"
	"github.com/m3db/m3/src/dbnode/ts"
	"github.com/m3db/m3/src/dbnode/x/xio"
	"github.com/m3db/m3/src/x/checked"
	"github.com/m3db/m3/src/x/ident"
	xtime "github.com/m3db/m3/src/x/time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

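// Series is a per-block view of a single series, assembled below by splitting
// a series iterator into the replica readers for each block.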
type Series struct {
	ID     ident.ID
	Blocks []SeriesBlock
}

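// SeriesBlock groups the replica iterators that fall within a single block,
// along with a series iterator over just that block's values.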
type SeriesBlock struct {
	Start          xtime.UnixNano
	BlockSize      time.Duration
	Replicas       []encoding.MultiReaderIterator
	ValuesIterator encoding.SeriesIterator
}

func TestDeconstructAndReconstruct(t *testing.T) {
	blockSize := 2 * time.Hour

	now := xtime.Now()
	start := now.Truncate(time.Hour).Add(2 * time.Minute)
	end := start.Add(30 * time.Minute)

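	// Encode one datapoint per minute over the 30 minute window; with a
	// 2 hour block size the encoded data should all land in a single block.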
	encoder := m3tsz.NewEncoder(start, checked.NewBytes(nil, nil), true, encoding.NewOptions())

	i := 0
	for at := start; at.Before(end); at = at.Add(time.Minute) {
		datapoint := ts.Datapoint{TimestampNanos: at, Value: float64(i + 1)}
		err := encoder.Encode(datapoint, xtime.Second, nil)
		assert.NoError(t, err)
		i++
	}

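	// iterAlloc tells the multi reader iterator how to construct a decoder
	// for each underlying segment it is given.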
	iterAlloc := func(r xio.Reader64, _ namespace.SchemaDescr) encoding.ReaderIterator {
		iter := m3tsz.NewDecoder(true, encoding.NewOptions())
		return iter.Decode(r)
	}

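	// Take the encoded segment, wrap it in a segment reader, then feed it to
	// a multi reader iterator aligned to the start of its 2 hour block.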
	segment := encoder.Discard()

	blockStart := start.Truncate(blockSize)

	reader := xio.NewSegmentReader(segment)

	multiReader := encoding.NewMultiReaderIterator(iterAlloc, nil)
	multiReader.Reset([]xio.SegmentReader{reader}, blockStart, blockSize, nil)

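	// orig is the original, unsplit series iterator over the full
	// [start, end) range, backed here by a single replica.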
	orig := encoding.NewSeriesIterator(encoding.SeriesIteratorOptions{
		ID:             ident.StringID("foo"),
		Namespace:      ident.StringID("namespace"),
		StartInclusive: start,
		EndExclusive:   end,
		Replicas:       []encoding.MultiReaderIterator{multiReader},
	}, nil)

	// Construct a per-block view of the series
	series := Series{
		ID: orig.ID(),
	}

	replicas, err := orig.Replicas()
	require.NoError(t, err)
	// Collect all the replica per-block readers
	for _, replica := range replicas {
		perBlockSliceReaders := replica.Readers()
		next := true
		for next {
			// We are positioned at a block: CurrentReaders reports how many
			// readers make up this block, along with its start and block size.
			l, start, bs := perBlockSliceReaders.CurrentReaders()

			var readers []xio.SegmentReader
			for i := 0; i < l; i++ {
				// Reader over an unmerged (or already merged) block buffer.
				reader := perBlockSliceReaders.CurrentReaderAt(i)

				// It's important to clone the reader so that its position is
				// reset before we use its contents again.
				clonedReader, err := reader.Clone(nil)
				require.NoError(t, err)

				readers = append(readers, clonedReader)
			}

			iter := encoding.NewMultiReaderIterator(iterAlloc, nil)
			iter.Reset(readers, start, bs, nil)

			inserted := false
			for i := range series.Blocks {
				if series.Blocks[i].Start.Equal(start) {
					inserted = true
					series.Blocks[i].Replicas = append(series.Blocks[i].Replicas, iter)
					break
				}
			}
			if !inserted {
				series.Blocks = append(series.Blocks, SeriesBlock{
					Start:     start,
					BlockSize: bs,
					Replicas:  []encoding.MultiReaderIterator{iter},
				})
			}

			next = perBlockSliceReaders.Next()
		}
	}

	// Now that all the per-block readers are collected, construct the
	// per-block values iterator combining the readers from each replica.
	for i, block := range series.Blocks {

		filterValuesStart := orig.Start()
		if block.Start.After(orig.Start()) {
			filterValuesStart = block.Start
		}

		end := block.Start.Add(block.BlockSize)

		filterValuesEnd := orig.End()
		if end.Before(filterValuesEnd) {
			filterValuesEnd = end
		}

		valuesIter := encoding.NewSeriesIterator(encoding.SeriesIteratorOptions{
			ID:             orig.ID(),
			Namespace:      orig.Namespace(),
			Tags:           orig.Tags(),
			StartInclusive: filterValuesStart,
			EndExclusive:   filterValuesEnd,
			Replicas:       block.Replicas,
		}, nil)

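		// Only a single replica was supplied, so each block is expected to
		// carry exactly one replica iterator.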
		require.Len(t, block.Replicas, 1)
		series.Blocks[i].ValuesIterator = valuesIter
	}

	// Now show how we can iterate per block
	for _, block := range series.Blocks {
		iter := block.ValuesIterator
		blockCount := 0
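		// Since the whole 30 minute window fits in a single block, blockCount
		// also indexes the datapoints from the start of the series.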
		for iter.Next() {
			dp, _, _ := iter.Current()

			assert.Equal(t, dp.TimestampNanos, start.Add(time.Minute*time.Duration(blockCount)))
			assert.Equal(t, dp.Value, float64(blockCount+1))
			blockCount++
		}
		assert.NoError(t, iter.Err())
	}

	// Closing the original series iterator once releases all resources.
	orig.Close()
}