github.com/m3db/m3@v1.5.0/src/query/storage/m3/convert_test.go (about)

     1  // Copyright (c) 2018 Uber Technologies, Inc.
     2  //
     3  // Permission is hereby granted, free of charge, to any person obtaining a copy
     4  // of this software and associated documentation files (the "Software"), to deal
     5  // in the Software without restriction, including without limitation the rights
     6  // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     7  // copies of the Software, and to permit persons to whom the Software is
     8  // furnished to do so, subject to the following conditions:
     9  //
    10  // The above copyright notice and this permission notice shall be included in
    11  // all copies or substantial portions of the Software.
    12  //
    13  // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    14  // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    15  // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    16  // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    17  // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    18  // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    19  // THE SOFTWARE.
    20  
    21  package m3
    22  
    23  import (
    24  	"bytes"
    25  	"fmt"
    26  	"math"
    27  	"sort"
    28  	"testing"
    29  	"time"
    30  
    31  	"github.com/m3db/m3/src/dbnode/encoding"
    32  	"github.com/m3db/m3/src/query/block"
    33  	"github.com/m3db/m3/src/query/models"
    34  	"github.com/m3db/m3/src/query/test"
    35  	xtime "github.com/m3db/m3/src/x/time"
    36  
    37  	"github.com/stretchr/testify/assert"
    38  	"github.com/stretchr/testify/require"
    39  )
    40  
var (
	// Start is the shared anchor timestamp for all generated iterators,
	// truncated to the hour so block boundaries are deterministic.
	Start     = time.Now().Truncate(time.Hour)
	// blockSize is the fixed block duration used by the custom iterators
	// built in this file.
	blockSize = time.Minute * 6
	// nan is shorthand for NaN in expected-value tables.
	nan       = math.NaN()
)
    46  
    47  func generateIterators(
    48  	t testing.TB,
    49  	stepSize time.Duration,
    50  ) (
    51  	[]encoding.SeriesIterator,
    52  	models.Bounds,
    53  ) {
    54  	datapoints := [][][]test.Datapoint{
    55  		{
    56  			[]test.Datapoint{},
    57  			[]test.Datapoint{
    58  				{Value: 1, Offset: 0},
    59  				{Value: 2, Offset: (time.Minute * 1) + time.Second},
    60  				{Value: 3, Offset: (time.Minute * 2)},
    61  				{Value: 4, Offset: (time.Minute * 3)},
    62  				{Value: 5, Offset: (time.Minute * 4)},
    63  				{Value: 6, Offset: (time.Minute * 5)},
    64  			},
    65  			[]test.Datapoint{
    66  				{Value: 7, Offset: time.Minute * 0},
    67  				{Value: 8, Offset: time.Minute * 5},
    68  			},
    69  			[]test.Datapoint{
    70  				{Value: 9, Offset: time.Minute * 4},
    71  			},
    72  		},
    73  		{
    74  			[]test.Datapoint{
    75  				{Value: 10, Offset: (time.Minute * 2)},
    76  				{Value: 20, Offset: (time.Minute * 3)},
    77  			},
    78  			[]test.Datapoint{},
    79  			[]test.Datapoint{
    80  				{Value: 30, Offset: time.Minute},
    81  			},
    82  			[]test.Datapoint{
    83  				{Value: 40, Offset: time.Second},
    84  			},
    85  		},
    86  		{
    87  			[]test.Datapoint{
    88  				{Value: 100, Offset: (time.Minute * 3)},
    89  			},
    90  			[]test.Datapoint{
    91  				{Value: 200, Offset: (time.Minute * 3)},
    92  			},
    93  			[]test.Datapoint{
    94  				{Value: 300, Offset: (time.Minute * 3)},
    95  			},
    96  			[]test.Datapoint{
    97  				{Value: 400, Offset: (time.Minute * 3)},
    98  			},
    99  			[]test.Datapoint{
   100  				{Value: 500, Offset: 0},
   101  			},
   102  		},
   103  	}
   104  
   105  	iters := make([]encoding.SeriesIterator, len(datapoints))
   106  	var (
   107  		iter   encoding.SeriesIterator
   108  		bounds models.Bounds
   109  		start  = Start
   110  	)
   111  	for i, dps := range datapoints {
   112  		iter, bounds = buildCustomIterator(t, i, start, stepSize, dps)
   113  		iters[i] = iter
   114  	}
   115  
   116  	return iters, bounds
   117  }
   118  
   119  func buildCustomIterator(
   120  	t testing.TB,
   121  	i int,
   122  	start time.Time,
   123  	stepSize time.Duration,
   124  	dps [][]test.Datapoint,
   125  ) (
   126  	encoding.SeriesIterator,
   127  	models.Bounds,
   128  ) {
   129  	iter, bounds, err := test.BuildCustomIterator(
   130  		dps,
   131  		map[string]string{"a": "b", "c": fmt.Sprint(i)},
   132  		fmt.Sprintf("abc%d", i), "namespace",
   133  		xtime.ToUnixNano(start),
   134  		blockSize, stepSize,
   135  	)
   136  	require.NoError(t, err)
   137  	return iter, bounds
   138  }
   139  
   140  // verifies that given sub-bounds are valid, and returns the index of the
   141  // generated block being checked.
   142  func verifyBoundsAndGetBlockIndex(t *testing.T, bounds, sub models.Bounds) int {
   143  	require.Equal(t, bounds.StepSize, sub.StepSize)
   144  	require.Equal(t, blockSize, sub.Duration)
   145  	diff := sub.Start.Sub(bounds.Start)
   146  	require.Equal(t, 0, int(diff%blockSize))
   147  	return int(diff / blockSize)
   148  }
   149  
   150  type ascByName []block.SeriesMeta
   151  
   152  func (m ascByName) Len() int { return len(m) }
   153  func (m ascByName) Less(i, j int) bool {
   154  	return bytes.Compare(m[i].Name, m[j].Name) == -1
   155  }
   156  func (m ascByName) Swap(i, j int) { m[i], m[j] = m[j], m[i] }
   157  
   158  func verifyMetas(
   159  	t *testing.T,
   160  	_ int,
   161  	meta block.Metadata,
   162  	metas []block.SeriesMeta,
   163  ) {
   164  	require.Equal(t, 0, meta.Tags.Len())
   165  	sort.Sort(ascByName(metas))
   166  	for i, m := range metas {
   167  		assert.Equal(t, fmt.Sprintf("abc%d", i), string(m.Name))
   168  		require.Equal(t, 2, m.Tags.Len())
   169  
   170  		val, found := m.Tags.Get([]byte("a"))
   171  		assert.True(t, found)
   172  		assert.Equal(t, "b", string(val))
   173  
   174  		val, found = m.Tags.Get([]byte("c"))
   175  		assert.True(t, found)
   176  		require.Equal(t, fmt.Sprint(i), string(val))
   177  	}
   178  }
   179  
   180  // NB: blocks are not necessarily generated in order; last block may be the first
   181  // one in the returned array. This is fine since they are processed independently
   182  // and are put back together at the read step, or at any temporal functions in
   183  // the execution pipeline.
   184  func generateBlocks(
   185  	t testing.TB,
   186  	stepSize time.Duration,
   187  	opts Options,
   188  ) ([]block.Block, models.Bounds) {
   189  	iterators, bounds := generateIterators(t, stepSize)
   190  	res, err := iterToFetchResult(iterators)
   191  	require.NoError(t, err)
   192  	blocks, err := ConvertM3DBSeriesIterators(
   193  		res,
   194  		bounds,
   195  		opts,
   196  	)
   197  	require.NoError(t, err)
   198  	return blocks, bounds
   199  }
   200  
   201  func TestUpdateTimeBySteps(t *testing.T) {
   202  	var tests = []struct {
   203  		stepSize, blockSize, expected time.Duration
   204  	}{
   205  		{time.Minute * 15, time.Hour, time.Hour},
   206  		{time.Minute * 14, time.Hour, time.Minute * 70},
   207  		{time.Minute * 13, time.Hour, time.Minute * 65},
   208  		{time.Minute * 59, time.Hour, time.Minute * 118},
   209  	}
   210  
   211  	for _, tt := range tests {
   212  		updateDuration := blockDuration(tt.blockSize, tt.stepSize)
   213  		assert.Equal(t, tt.expected/time.Minute, updateDuration/time.Minute)
   214  	}
   215  }
   216  
   217  func TestPadSeriesBlocks(t *testing.T) {
   218  	blockSize := time.Hour
   219  	start := xtime.Now().Truncate(blockSize)
   220  	readOffset := time.Minute
   221  	itStart := start.Add(readOffset)
   222  
   223  	var tests = []struct {
   224  		blockStart       xtime.UnixNano
   225  		stepSize         time.Duration
   226  		expectedStart    xtime.UnixNano
   227  		expectedStartTwo xtime.UnixNano
   228  	}{
   229  		{start, time.Minute * 30, itStart, start.Add(61 * time.Minute)},
   230  		{
   231  			start.Add(blockSize),
   232  			time.Minute * 30,
   233  			start.Add(61 * time.Minute),
   234  			start.Add(121 * time.Minute),
   235  		},
   236  		// step 0: start + 1  , start + 37
   237  		// step 1: start + 73 , start + 109
   238  		// step 2: start + 145
   239  		// step 3: start + 181, start + 217
   240  		// step 4: start + 253, ...
   241  		{
   242  			start.Add(blockSize * 3),
   243  			time.Minute * 36,
   244  			start.Add(181 * time.Minute),
   245  			start.Add(253 * time.Minute),
   246  		},
   247  		// step 0: start + 1  , start + 38
   248  		// step 1: start + 75 , start + 112
   249  		// step 2: start + 149
   250  		// step 3: start + 186
   251  		{
   252  			start.Add(blockSize * 2),
   253  			time.Minute * 37,
   254  			start.Add(149 * time.Minute),
   255  			start.Add(186 * time.Minute),
   256  		},
   257  		// step 0: start + 1  , start + 12 , start + 23,
   258  		//         start + 34 , start + 45 , start + 56
   259  		// step 1: start + 67 , start + 78 , start + 89
   260  		//         start + 100, start + 111
   261  		// step 2: start + 122 ...
   262  		{
   263  			start.Add(blockSize * 1),
   264  			time.Minute * 11,
   265  			start.Add(67 * time.Minute),
   266  			start.Add(122 * time.Minute),
   267  		},
   268  	}
   269  
   270  	for _, tt := range tests {
   271  		blocks := seriesBlocks{
   272  			{
   273  				blockStart: tt.blockStart,
   274  				blockSize:  blockSize,
   275  				replicas:   []encoding.MultiReaderIterator{},
   276  			},
   277  			{
   278  				blockStart: tt.blockStart.Add(blockSize),
   279  				blockSize:  blockSize,
   280  				replicas:   []encoding.MultiReaderIterator{},
   281  			},
   282  		}
   283  
   284  		updated := updateSeriesBlockStarts(blocks, tt.stepSize, itStart)
   285  		require.Equal(t, 2, len(updated))
   286  		assert.Equal(t, tt.blockStart, updated[0].blockStart)
   287  		assert.Equal(t, tt.expectedStart, updated[0].readStart)
   288  
   289  		assert.Equal(t, tt.blockStart.Add(blockSize), updated[1].blockStart)
   290  		assert.Equal(t, tt.expectedStartTwo, updated[1].readStart)
   291  	}
   292  }