github.com/siglens/siglens@v0.0.0-20240328180423-f7ce9ae441ed/pkg/segment/writer/metrics/metricssegment_test.go

/*
Copyright 2023.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package metrics

import (
	"bytes"
	"math/rand"
	"os"
	"sync"
	"testing"
	"time"

	fuzz "github.com/google/gofuzz"
	"github.com/siglens/siglens/pkg/segment/reader/metrics/series"
	"github.com/siglens/siglens/pkg/segment/reader/microreader"
	"github.com/siglens/siglens/pkg/segment/structs"
	"github.com/siglens/siglens/pkg/segment/utils"
	"github.com/siglens/siglens/pkg/segment/writer/metrics/compress"
	log "github.com/sirupsen/logrus"
	"github.com/stretchr/testify/assert"
)

// writeMockMetricsBlockSummaryFile writes the block summary version byte
// followed by each mock block summary to the given file.
func writeMockMetricsBlockSummaryFile(file string, blockSums []*structs.MBlockSummary) error {
	fd, err := os.OpenFile(file, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
	if err != nil {
		log.Errorf("writeMockMetricsBlockSummaryFile: open failed blockSummaryFname=%v, err=%v", file, err)
		return err
	}
	defer fd.Close()

	if _, err := fd.Write(utils.VERSION_MBLOCKSUMMARY); err != nil {
		log.Errorf("writeMockMetricsBlockSummaryFile: Cannot write version byte for filename=%v, err=%v", file, err)
		return err
	}

	for _, block := range blockSums {
		mbs := structs.MBlockSummary{
			HighTs: block.HighTs,
			LowTs:  block.LowTs,
			Blknum: block.Blknum,
		}
		_, err = mbs.FlushSummary(file)
		if err != nil {
			log.Errorf("writeMockMetricsBlockSummaryFile: Failed to write in file at %v, err: %v", file, err)
			return err
		}
	}
	err = fd.Sync()
	if err != nil {
		log.Errorf("writeMockMetricsBlockSummaryFile: Failed to sync file at %v, err: %v", file, err)
		return err
	}
	return nil
}

// Test_ReadMetricsBlockSummary writes a set of mock block summaries to disk
// and verifies they are read back unchanged.
func Test_ReadMetricsBlockSummary(t *testing.T) {
	entryCount := 10
	dir := "data/"
	err := os.MkdirAll(dir, os.FileMode(0755))
	if err != nil {
		log.Fatal(err)
	}

	blockSumFile := dir + "query_test.mbsu"
	blockSummaries := make([]*structs.MBlockSummary, entryCount)
	for i := 0; i < entryCount; i++ {
		blockSummaries[i] = &structs.MBlockSummary{
			Blknum: uint16(i),
			HighTs: 1676089340,
			LowTs:  1676089310,
		}
	}
	err = writeMockMetricsBlockSummaryFile(blockSumFile, blockSummaries)
	if err != nil {
		log.Errorf("Failed to write mock block summary at %v", blockSumFile)
	}

	blockSums, err := microreader.ReadMetricsBlockSummaries(blockSumFile)
	if err != nil {
		os.RemoveAll(dir)
		log.Errorf("Failed to read mock block summary at %v", blockSumFile)
	}

	for i := 0; i < len(blockSums); i++ {
		assert.Equal(t, blockSums[i].HighTs, blockSummaries[i].HighTs)
		assert.Equal(t, blockSums[i].LowTs, blockSummaries[i].LowTs)
		assert.Equal(t, blockSums[i].Blknum, blockSummaries[i].Blknum)
	}
	os.RemoveAll(dir)
}

// generateRandomTsid returns a random 64-bit TSID built from two random
// 32-bit halves.
func generateRandomTsid() uint64 {
	return uint64(rand.Uint32())<<32 + uint64(rand.Uint32())
}

type data struct {
	t uint32
	v float64
}

// generateFakeTimeSeries returns 5000 fuzzed data points whose timestamps
// mostly increase but occasionally step backwards, along with the header
// timestamp the series starts from.
func generateFakeTimeSeries() ([]data, uint32) {
	header := uint32(time.Now().Unix())

	const dataLen = 5000
	series := make([]data, dataLen)
	valueFuzz := fuzz.New().NilChance(0)
	ts := header
	for i := 0; i < dataLen; i++ {
		if 0 < i && i%10 == 0 {
			ts -= uint32(rand.Intn(100))
		} else {
			ts += uint32(rand.Int31n(100))
		}
		var v float64
		valueFuzz.Fuzz(&v)
		series[i] = data{ts, v}
	}
	return series, header
}

// Test_ReadWriteTsoTsgFiles compresses two mock time series into a metrics
// block, flushes the TSO/TSG files, and reads both series back through the
// time series reader to verify every timestamp/value pair survives the
// round trip.
func Test_ReadWriteTsoTsgFiles(t *testing.T) {
	dir := "data/"
	err := os.MkdirAll(dir, os.FileMode(0755))
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(dir)

	tsid_1 := generateRandomTsid()
	tsid_2 := generateRandomTsid()

	mb := initFakeMetricsBlock()

	writeToTsidLookup(mb, 0, tsid_1)
	writeToTsidLookup(mb, 1, tsid_2)

	writeSortedTsids(mb, tsid_1, tsid_2)

	series_1 := writeToTimeSeries(mb, 0)
	series_2 := writeToTimeSeries(mb, 1)

	err = mb.FlushTSOAndTSGFiles("data/mock_0")
	if err != nil {
		log.Errorf("Test_ReadWriteTsoTsgFiles: Error writing mock metrics block %v", err)
	}

	tssr, err := series.InitTimeSeriesReader("data/mock")
	if err != nil {
		log.Errorf("Test_ReadWriteTsoTsgFiles: Error initialising a time series reader. Err %v", err)
	}
	queryMetrics := &structs.MetricsQueryProcessingMetrics{
		UpdateLock: &sync.Mutex{},
	}
	tssr_block, err := tssr.InitReaderForBlock(uint16(0), queryMetrics)
	if err != nil {
		log.Errorf("Test_ReadWriteTsoTsgFiles: Error initialising a time series reader for block. Err %v", err)
	}

	// verify series 1
	ts_itr, exists, err := tssr_block.GetTimeSeriesIterator(tsid_1)
	if err != nil {
		log.Errorf("Test_ReadWriteTsoTsgFiles: Error initialising a time series iterator for tsid %v", tsid_1)
	}
	assert.True(t, exists)
	count_1 := 0
	for ts_itr.Next() {
		ts, val := ts_itr.At()
		assert.Equal(t, series_1[count_1].t, ts)
		assert.Equal(t, series_1[count_1].v, val)
		count_1++
	}

	// verify series 2
	ts_itr, exists, err = tssr_block.GetTimeSeriesIterator(tsid_2)
	if err != nil {
		assert.NoError(t, err)
		log.Errorf("Test_ReadWriteTsoTsgFiles: Error initialising a time series iterator for tsid %v", tsid_2)
	}
	assert.True(t, exists)
	count_2 := 0
	for ts_itr.Next() {
		ts, val := ts_itr.At()
		assert.Equal(t, series_2[count_2].t, ts)
		assert.Equal(t, series_2[count_2].v, val)
		count_2++
	}

	assert.Equal(t, count_1, 5000)
	assert.Equal(t, count_2, 5000)
}

// initFakeMetricsBlock returns a MetricsBlock with room for two series and a
// placeholder block summary.
func initFakeMetricsBlock() *MetricsBlock {
	mb := &MetricsBlock{
		tsidLookup:  make(map[uint64]int),
		sortedTsids: make([]uint64, 2),
		allSeries:   make([]*TimeSeries, 2),
		mBlockSummary: &structs.MBlockSummary{
			// garbage values
			Blknum: 0,
			HighTs: 1676089340,
			LowTs:  1676089310,
		},
		encodedSize: 0,
	}
	return mb
}

// writeSortedTsids stores the two TSIDs in ascending order.
func writeSortedTsids(mb *MetricsBlock, tsid_1, tsid_2 uint64) {
	mb.sortedTsids[0] = tsid_2
	mb.sortedTsids[1] = tsid_1
	if tsid_1 < tsid_2 {
		mb.sortedTsids[0] = tsid_1
		mb.sortedTsids[1] = tsid_2
	}
}

func writeToTsidLookup(mb *MetricsBlock, i int, tsid uint64) {
	mb.tsidLookup[tsid] = i
}

// writeToTimeSeries generates a fake series, compresses it into the block's
// series slot at the given index, and returns the raw data for later
// verification.
func writeToTimeSeries(mb *MetricsBlock, index int) []data {
	series, header := generateFakeTimeSeries()
	buf := new(bytes.Buffer)
	c, finish, err := compress.NewCompressor(buf, header)
	if err != nil {
		log.Error("writeToTimeSeries: Error creating compressor for mock metrics time series")
	}
	mb.allSeries[index] = &TimeSeries{
		lock:        &sync.Mutex{},
		rawEncoding: buf,
		cFinishFn:   finish,
		compressor:  c,
	}
	for _, data := range series {
		_, err := mb.allSeries[index].compressor.Compress(data.t, data.v)
		if err != nil {
			log.Error("writeToTimeSeries: Error writing mock metrics time series")
		}
	}
	return series
}
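
// The test below is an illustrative sketch, not part of the original suite:
// it round-trips a single mock block summary through the same
// writeMockMetricsBlockSummaryFile / microreader.ReadMetricsBlockSummaries
// pair exercised above, using t.TempDir() so no manual cleanup is needed.
// The test name and the Blknum value 42 are arbitrary.
func Test_ReadMetricsBlockSummary_SingleBlock(t *testing.T) {
	blockSumFile := t.TempDir() + "/single_block.mbsu"
	want := []*structs.MBlockSummary{{
		Blknum: 42,
		HighTs: 1676089340,
		LowTs:  1676089310,
	}}

	err := writeMockMetricsBlockSummaryFile(blockSumFile, want)
	assert.NoError(t, err)

	got, err := microreader.ReadMetricsBlockSummaries(blockSumFile)
	assert.NoError(t, err)
	assert.Len(t, got, 1)
	if len(got) == 1 {
		assert.Equal(t, want[0].Blknum, got[0].Blknum)
		assert.Equal(t, want[0].HighTs, got[0].HighTs)
		assert.Equal(t, want[0].LowTs, got[0].LowTs)
	}
}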