github.com/m3db/m3@v1.5.0/src/dbnode/integration/integration_data_verify.go

// Copyright (c) 2016 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package integration

import (
    "bytes"
    "encoding/json"
    "fmt"
    "os"
    "reflect"
    "sort"
    "testing"
    "time"

    "github.com/m3db/m3/src/dbnode/generated/thrift/rpc"
    "github.com/m3db/m3/src/dbnode/integration/generate"
    "github.com/m3db/m3/src/dbnode/storage"
    "github.com/m3db/m3/src/dbnode/storage/block"
    "github.com/m3db/m3/src/dbnode/ts"
    "github.com/m3db/m3/src/x/context"
    "github.com/m3db/m3/src/x/ident"
    xtime "github.com/m3db/m3/src/x/time"

    "github.com/stretchr/testify/assert"
    "go.uber.org/zap"
)

// readableSeries is a JSON-friendly representation of a series used for
// debug output.
type readableSeries struct {
    ID   string
    Tags []readableSeriesTag
    Data []generate.TestValue
}

type readableSeriesTag struct {
    Name  string
    Value string
}

type readableSeriesList []readableSeries

// toDatapoints converts an RPC fetch result into the test values used for
// comparison.
func toDatapoints(fetched *rpc.FetchResult_) []generate.TestValue {
    converted := make([]generate.TestValue, len(fetched.Datapoints))
    for i, dp := range fetched.Datapoints {
        converted[i] = generate.TestValue{
            Datapoint: ts.Datapoint{
                TimestampNanos: xtime.FromNormalizedTime(dp.Timestamp, time.Second),
                Value:          dp.Value,
            },
            Annotation: dp.Annotation,
        }
    }
    return converted
}

// verifySeriesMapForRange fetches every series in the input block over the
// given time range and asserts that both the returned data and the block
// metadata (tags) match what was written.
func verifySeriesMapForRange(
    t *testing.T,
    ts TestSetup,
    start, end xtime.UnixNano,
    namespace ident.ID,
    input generate.SeriesBlock,
    expectedDebugFilePath string,
    actualDebugFilePath string,
) bool {
    // Construct a copy of the input that we will use to compare
    // with only the fields we need to compare against (fetch doesn't
    // return the tags for a series ID)
    expected := make(generate.SeriesBlock, len(input))
    actual := make(generate.SeriesBlock, len(input))

    expectedMetadata := map[string]generate.Series{}
    req := rpc.NewFetchRequest()
    for i := range input {
        idString := input[i].ID.String()
        req.NameSpace = namespace.String()
        req.ID = idString
        req.RangeStart = start.Seconds()
        req.RangeEnd = end.Seconds()
        req.ResultTimeType = rpc.TimeType_UNIX_SECONDS
        fetched, err := ts.Fetch(req)

        if !assert.NoError(t, err) {
            return false
        }
        expected[i] = generate.Series{
            ID:   input[i].ID,
            Data: input[i].Data,
        }
        actual[i] = generate.Series{
            ID:   input[i].ID,
            Data: fetched,
        }

        // Build expected metadata map at the same time
        expectedMetadata[idString] = input[i]
    }

    if len(expectedDebugFilePath) > 0 {
        if !writeVerifyDebugOutput(t, expectedDebugFilePath, start, end, expected) {
            return false
        }
    }
    if len(actualDebugFilePath) > 0 {
        if !writeVerifyDebugOutput(t, actualDebugFilePath, start, end, actual) {
            return false
        }
    }

    if !assert.Equal(t, len(expected), len(actual)) {
        return false
    }
    for i, series := range actual {
        if ts.ShouldBeEqual() {
            if !assert.Equal(t, expected[i], series) {
                return false
            }
        } else {
            assert.Equal(t, expected[i].ID, series.ID)
            if !ts.AssertEqual(t, expected[i].Data, series.Data) {
                return false
            }
        }
    }

    // Now check the metadata of all the series match
    ctx := context.NewBackground()
    defer ctx.Close()
    for _, shard := range ts.DB().ShardSet().AllIDs() {
        var (
            opts      block.FetchBlocksMetadataOptions
            pageToken storage.PageToken
            first     = true
        )
        for {
            if first {
                first = false
            } else if pageToken == nil {
                // Done, next shard
                break
            }

            results, nextPageToken, err := ts.DB().FetchBlocksMetadataV2(ctx,
                namespace, shard, start, end, 4096, pageToken, opts)
            assert.NoError(t, err)

            // Use the next one for the next iteration
            pageToken = nextPageToken

            for _, actual := range results.Results() {
                id := actual.ID.String()
                expected, ok := expectedMetadata[id]
                if !assert.True(t, ok, fmt.Sprintf("unexpected ID: %s", id)) {
                    return false
                }

                expectedTagsIter := ident.NewTagsIterator(expected.Tags)
                actualTagsIter := actual.Tags.Duplicate()
                tagMatcher := ident.NewTagIterMatcher(expectedTagsIter)
                tagsMatch := tagMatcher.Matches(actualTagsIter)
                if !tagsMatch {
                    // Render both tag sets as "name=value" lists for the error log.
                    expectedTagsIter.Reset(expected.Tags)
                    actualTagsIter = actual.Tags.Duplicate()
                    var expected, actual string
                    for expectedTagsIter.Next() {
                        tag := expectedTagsIter.Current()
                        entry := ""
                        if expected != "" {
                            entry += ", "
                        }
                        entry += tag.Name.String() + "=" + tag.Value.String()
                        expected += entry
                    }
                    for actualTagsIter.Next() {
                        tag := actualTagsIter.Current()
                        entry := ""
                        if actual != "" {
                            entry += ", "
                        }
                        entry += tag.Name.String() + "=" + tag.Value.String()
                        actual += entry
                    }
                    ts.StorageOpts().InstrumentOptions().Logger().
                        Error("series does not match expected tags",
                            zap.String("id", id),
                            zap.String("expectedTags", expected),
                            zap.String("actualTags", actual),
                            zap.Any("expectedTagsErr", expectedTagsIter.Err()),
                            zap.Any("actualTagsErr", actualTagsIter.Err()),
                        )
                }

                if !assert.True(t, tagsMatch) {
                    return false
                }
            }
        }
    }

    return true
}

// containsSeries returns whether the setup serves any datapoints for the
// given series ID in the given time range.
func containsSeries(ts TestSetup, namespace, seriesID ident.ID, start, end xtime.UnixNano) (bool, error) {
    req := rpc.NewFetchRequest()
    req.NameSpace = namespace.String()
    req.ID = seriesID.String()
    req.RangeStart = start.Seconds()
    req.RangeEnd = end.Seconds()
    req.ResultTimeType = rpc.TimeType_UNIX_SECONDS
    fetched, err := ts.Fetch(req)
    return len(fetched) != 0, err
}

// writeVerifyDebugOutput appends a JSON dump of the given series block and
// time range to the debug file at filePath.
func writeVerifyDebugOutput(
    t *testing.T, filePath string, start, end xtime.UnixNano, series generate.SeriesBlock) bool {
    w, err := os.OpenFile(filePath, os.O_APPEND|os.O_WRONLY, os.ModeAppend)
    if !assert.NoError(t, err) {
        return false
    }

    list := make(readableSeriesList, 0, len(series))
    for i := range series {
        tags := make([]readableSeriesTag, 0, len(series[i].Tags.Values()))
        for _, tag := range series[i].Tags.Values() {
            tags = append(tags, readableSeriesTag{
                Name:  tag.Name.String(),
                Value: tag.Value.String(),
            })
        }
        list = append(list, readableSeries{
            ID:   series[i].ID.String(),
            Tags: tags,
            Data: series[i].Data,
        })
    }

    data, err := json.MarshalIndent(struct {
        Start  time.Time
        End    time.Time
        Series readableSeriesList
    }{
        Start:  start.ToTime(),
        End:    end.ToTime(),
        Series: list,
    }, "", " ")
    if !assert.NoError(t, err) {
        return false
    }

    _, err = w.Write(data)
    if !assert.NoError(t, err) {
        return false
    }
    return assert.NoError(t, w.Close())
}

// verifySeriesMaps verifies that every block-sized series map written to the
// namespace can be read back and matches what was written.
func verifySeriesMaps(
    t *testing.T,
    ts TestSetup,
    namespace ident.ID,
    seriesMaps map[xtime.UnixNano]generate.SeriesBlock,
) bool {
    debugFilePathPrefix := ts.Opts().VerifySeriesDebugFilePathPrefix()
    expectedDebugFilePath, ok := createFileIfPrefixSet(t, debugFilePathPrefix, fmt.Sprintf("%s-expected.log", namespace.String()))
    if !ok {
        return false
    }
    actualDebugFilePath, ok := createFileIfPrefixSet(t, debugFilePathPrefix, fmt.Sprintf("%s-actual.log", namespace.String()))
    if !ok {
        return false
    }

    nsMetadata, ok := ts.DB().Namespace(namespace)
    if !assert.True(t, ok) {
        return false
    }
    nsOpts := nsMetadata.Options()

    for timestamp, sm := range seriesMaps {
        start := timestamp
        end := start.Add(nsOpts.RetentionOptions().BlockSize())
        matches := verifySeriesMapForRange(
            t, ts, start, end, namespace, sm,
            expectedDebugFilePath, actualDebugFilePath)
        if !matches {
            return false
        }
    }

    return true
}

// createFileIfPrefixSet creates an empty debug file named prefix_suffix and
// returns its path, or an empty path if no prefix is configured.
func createFileIfPrefixSet(t *testing.T, prefix, suffix string) (string, bool) {
    if len(prefix) == 0 {
        return "", true
    }
    filePath := prefix + "_" + suffix
    w, err := os.Create(filePath)
    if !assert.NoError(t, err) {
        return "", false
    }
    if !assert.NoError(t, w.Close()) {
        return "", false
    }
    return filePath, true
}

// compareSeriesList sorts both series blocks and returns an error describing
// the first difference in series IDs or datapoints, or nil if they are equal.
func compareSeriesList(
    expected generate.SeriesBlock,
    actual generate.SeriesBlock,
) error {
    sort.Sort(expected)
    sort.Sort(actual)

    if len(expected) != len(actual) {
        return fmt.Errorf(
            "number of expected series: %d did not match actual: %d",
            len(expected), len(actual))
    }

    for i := range expected {
        if !bytes.Equal(expected[i].ID.Bytes(), actual[i].ID.Bytes()) {
            return fmt.Errorf(
                "series ID did not match, expected: %s, actual: %s",
                expected[i].ID.String(), actual[i].ID.String())
        }
        if len(expected[i].Data) != len(actual[i].Data) {
            return fmt.Errorf(
                "data for series: %s did not match, expected: %d data points, actual: %d",
                expected[i].ID.String(), len(expected[i].Data), len(actual[i].Data))
        }
        if !reflect.DeepEqual(expected[i].Data, actual[i].Data) {
            return fmt.Errorf(
                "data for series: %s did not match, expected: %v, actual: %v",
                expected[i].ID.String(), expected[i].Data, actual[i].Data)
        }
    }

    return nil
}
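
// The example below is an editor-added sketch and is not part of the upstream
// file. It shows how compareSeriesList reports a mismatch between two
// hand-built series blocks; the series ID "foo" and the datapoint values are
// assumptions made purely for illustration, using only types already imported
// above (generate.SeriesBlock, generate.TestValue, ts.Datapoint, ident, xtime).
func exampleCompareSeriesListUsage() {
    at := xtime.ToUnixNano(time.Now().Truncate(time.Hour))
    expected := generate.SeriesBlock{
        {
            ID: ident.StringID("foo"),
            Data: []generate.TestValue{
                {Datapoint: ts.Datapoint{TimestampNanos: at, Value: 1.0}},
            },
        },
    }
    actual := generate.SeriesBlock{
        {
            ID: ident.StringID("foo"),
            Data: []generate.TestValue{
                // Same timestamp but a different value, so the comparison fails.
                {Datapoint: ts.Datapoint{TimestampNanos: at, Value: 2.0}},
            },
        },
    }
    if err := compareSeriesList(expected, actual); err != nil {
        // Prints the reflect.DeepEqual mismatch produced by compareSeriesList.
        fmt.Println("series mismatch:", err)
    }
}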