github.com/m3db/m3@v1.5.0/src/query/test/test_series_iterator.go

// Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package test

import (
	"fmt"
	"sort"
	"time"

	"github.com/m3db/m3/src/dbnode/encoding"
	"github.com/m3db/m3/src/dbnode/encoding/m3tsz"
	"github.com/m3db/m3/src/dbnode/namespace"
	"github.com/m3db/m3/src/dbnode/ts"
	"github.com/m3db/m3/src/dbnode/x/xio"
	"github.com/m3db/m3/src/query/models"
	"github.com/m3db/m3/src/x/checked"
	"github.com/m3db/m3/src/x/ident"
	xtime "github.com/m3db/m3/src/x/time"
)

var (
	// SeriesNamespace is the expected namespace for the generated series
	SeriesNamespace string
	// TestTags is the expected tags for the generated series
	TestTags map[string]string
	// BlockSize is the expected block size for the generated series
	BlockSize time.Duration
	// Start is the expected start time for the first block in the generated series
	Start xtime.UnixNano
	// SeriesStart is the expected start time for the generated series
	SeriesStart xtime.UnixNano
	// Middle is the expected end for the first block, and start of the second block
	Middle xtime.UnixNano
	// End is the expected end time for the generated series
	End xtime.UnixNano

	testIterAlloc func(r xio.Reader64, d namespace.SchemaDescr) encoding.ReaderIterator
)

func init() {
	SeriesNamespace = "namespace"

	TestTags = map[string]string{"foo": "bar", "baz": "qux"}

	BlockSize = time.Hour / 2

	Start = xtime.Now().Truncate(time.Hour)
	SeriesStart = Start.Add(2 * time.Minute)
	Middle = Start.Add(BlockSize)
	End = Middle.Add(BlockSize)

	testIterAlloc = m3tsz.DefaultReaderIteratorAllocFn(encoding.NewOptions())
}

// buildReplica builds a MultiReaderIterator representing a single replica
// with two blocks: the first is merged, with values 1->30; the second is
// unmerged, holding values 101->130 split across two segments, one containing
// the even points and the other the odd points.
func buildReplica() (encoding.MultiReaderIterator, error) {
	// Build a merged BlockReader
	encoder := m3tsz.NewEncoder(Start, checked.NewBytes(nil, nil), true, encoding.NewOptions())
	i := 0
	for at := time.Duration(0); at < BlockSize; at += time.Minute {
		i++
		datapoint := ts.Datapoint{TimestampNanos: Start.Add(at), Value: float64(i)}
		err := encoder.Encode(datapoint, xtime.Second, nil)
		if err != nil {
			return nil, err
		}
	}
	segment := encoder.Discard()
	mergedReader := xio.BlockReader{
		SegmentReader: xio.NewSegmentReader(segment),
		Start:         Start,
		BlockSize:     BlockSize,
	}

	// Build two unmerged BlockReaders
	i = 100
	encoder = m3tsz.NewEncoder(Middle, checked.NewBytes(nil, nil), true, encoding.NewOptions())
	encoderTwo := m3tsz.NewEncoder(Middle, checked.NewBytes(nil, nil), true, encoding.NewOptions())
	useFirstEncoder := true

	for at := time.Duration(0); at < BlockSize; at += time.Minute {
		i++
		datapoint := ts.Datapoint{TimestampNanos: Middle.Add(at), Value: float64(i)}
		var err error
		if useFirstEncoder {
			err = encoder.Encode(datapoint, xtime.Second, nil)
		} else {
			err = encoderTwo.Encode(datapoint, xtime.Second, nil)
		}
		if err != nil {
			return nil, err
		}
		useFirstEncoder = !useFirstEncoder
	}

	segment = encoder.Discard()
	segmentTwo := encoderTwo.Discard()
	unmergedReaders := []xio.BlockReader{
		{
			SegmentReader: xio.NewSegmentReader(segment),
			Start:         Middle,
			BlockSize:     BlockSize,
		},
		{
			SegmentReader: xio.NewSegmentReader(segmentTwo),
			Start:         Middle,
			BlockSize:     BlockSize,
		},
	}

	multiReader := encoding.NewMultiReaderIterator(testIterAlloc, nil)
	sliceOfSlicesIter := xio.NewReaderSliceOfSlicesFromBlockReadersIterator([][]xio.BlockReader{
		{mergedReader},
		unmergedReaders,
	})
	multiReader.ResetSliceOfSlices(sliceOfSlicesIter, nil)
	return multiReader, nil
}

type sortableTags []ident.Tag

func (a sortableTags) Len() int           { return len(a) }
func (a sortableTags) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a sortableTags) Less(i, j int) bool { return a[i].Name.String() < a[j].Name.String() }

// BuildTestSeriesIterator creates a sample SeriesIterator.
// The series iterator has two identical replicas, and each replica has two blocks.
// The first block in each replica is merged and has values 1->30; values 1 and 2
// fall before the SeriesIterator start time and are not expected to appear when
// reading through the iterator.
// The second block is unmerged; once merged it has values 101->130, drawn from
// two readers, one holding the even values and the other the odd values.
// Expected datapoints when reading through the iterator: [3..30, 101..130], 58 in total.
// The SeriesIterator uses the given id, the namespace "namespace", and the tags
// "foo": "bar" and "baz": "qux".
func BuildTestSeriesIterator(id string) (encoding.SeriesIterator, error) {
	replicaOne, err := buildReplica()
	if err != nil {
		return nil, err
	}
	replicaTwo, err := buildReplica()
	if err != nil {
		return nil, err
	}

	sortTags := make(sortableTags, 0, len(TestTags))
	for name, value := range TestTags {
		sortTags = append(sortTags, ident.StringTag(name, value))
	}

	sort.Sort(sortTags)
	tags := ident.Tags{}
	for _, t := range sortTags {
		tags.Append(t)
	}

	return encoding.NewSeriesIterator(
		encoding.SeriesIteratorOptions{
			ID:             ident.StringID(id),
			Namespace:      ident.StringID(SeriesNamespace),
			Tags:           ident.NewTagsIterator(tags),
			StartInclusive: SeriesStart,
			EndExclusive:   End,
			Replicas: []encoding.MultiReaderIterator{
				replicaOne,
				replicaTwo,
			},
		}, nil), nil
}
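
// collectSeriesValues is an illustrative sketch, not part of the original
// helpers: it shows how a test might drain an iterator returned by
// BuildTestSeriesIterator, assuming the usual encoding.SeriesIterator
// Next/Current/Err/Close contract. For the iterator built above it should
// yield the 58 values [3..30, 101..130] described in the comment.
func collectSeriesValues(iter encoding.SeriesIterator) ([]float64, error) {
	defer iter.Close()
	var values []float64
	for iter.Next() {
		// Current returns the datapoint, its time unit, and any annotation;
		// only the value is collected here.
		dp, _, _ := iter.Current()
		values = append(values, dp.Value)
	}
	if err := iter.Err(); err != nil {
		return nil, err
	}
	return values, nil
}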

// Datapoint is a datapoint with a value and an offset for building a custom iterator
type Datapoint struct {
	Value  float64
	Offset time.Duration
}

// BuildCustomIterator builds a custom iterator with bounds
func BuildCustomIterator(
	dps [][]Datapoint,
	testTags map[string]string,
	seriesID, seriesNamespace string,
	start xtime.UnixNano,
	blockSize, stepSize time.Duration,
) (encoding.SeriesIterator, models.Bounds, error) {
	// Build a merged BlockReader for each block of datapoints
	readers := make([][]xio.BlockReader, 0, len(dps))
	currentStart := start
	for _, datapoints := range dps {
		encoder := m3tsz.NewEncoder(currentStart, checked.NewBytes(nil, nil), true, encoding.NewOptions())
		// NB: empty datapoints should skip this block reader but still increase time
		if len(datapoints) > 0 {
			for _, dp := range datapoints {
				offset := dp.Offset
				if offset > blockSize {
					return nil, models.Bounds{},
						fmt.Errorf("custom series iterator offset is larger than blockSize")
				}

				if offset < 0 {
					return nil, models.Bounds{},
						fmt.Errorf("custom series iterator offset is negative")
				}

				tsDp := ts.Datapoint{
					Value:          dp.Value,
					TimestampNanos: currentStart.Add(offset),
				}

				err := encoder.Encode(tsDp, xtime.Second, nil)
				if err != nil {
					return nil, models.Bounds{}, err
				}
			}

			segment := encoder.Discard()
			readers = append(readers, []xio.BlockReader{{
				SegmentReader: xio.NewSegmentReader(segment),
				Start:         currentStart,
				BlockSize:     blockSize,
			}})
		}

		currentStart = currentStart.Add(blockSize)
	}

	multiReader := encoding.NewMultiReaderIterator(testIterAlloc, nil)
	sliceOfSlicesIter := xio.NewReaderSliceOfSlicesFromBlockReadersIterator(readers)
	multiReader.ResetSliceOfSlices(sliceOfSlicesIter, nil)

	tags := ident.Tags{}
	for name, value := range testTags {
		tags.Append(ident.StringTag(name, value))
	}

	return encoding.NewSeriesIterator(
			encoding.SeriesIteratorOptions{
				ID:             ident.StringID(seriesID),
				Namespace:      ident.StringID(seriesNamespace),
				Tags:           ident.NewTagsIterator(tags),
				StartInclusive: start,
				EndExclusive:   currentStart.Add(blockSize),
				Replicas: []encoding.MultiReaderIterator{
					multiReader,
				},
			}, nil),
		models.Bounds{
			Start:    start,
			Duration: blockSize * time.Duration(len(dps)),
			StepSize: stepSize,
		},
		nil
}
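
// buildExampleCustomIterator is an illustrative sketch, not part of the
// original helpers: it shows one way BuildCustomIterator might be called. The
// datapoint values, offsets, tag, ID, namespace, block size, and step size
// below are arbitrary assumptions for demonstration only.
func buildExampleCustomIterator() (encoding.SeriesIterator, models.Bounds, error) {
	// Two blocks: the first holds two points at offsets 0 and 1 minute into
	// the block, the second holds a single point at the block start.
	dps := [][]Datapoint{
		{{Value: 1, Offset: 0}, {Value: 2, Offset: time.Minute}},
		{{Value: 3, Offset: 0}},
	}
	start := xtime.Now().Truncate(time.Hour)
	return BuildCustomIterator(
		dps,
		map[string]string{"foo": "bar"},
		"example-id",
		"example-namespace",
		start,
		time.Hour,   // block size
		time.Minute, // step size
	)
}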