github.com/yankunsam/loki/v2@v2.6.3-0.20220817130409-389df5235c27/pkg/storage/stores/tsdb/compact_test.go

package tsdb

import (
	"context"
	"testing"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/stretchr/testify/require"

	"github.com/grafana/loki/pkg/storage/stores/tsdb/index"
)

// TestCompactor builds one or more TSDB indexes from fixture series, compacts
// them into a single index, and verifies the chunk refs returned by the
// compacted index, including de-duplication of identical entries across files.
func TestCompactor(t *testing.T) {
	for _, tc := range []struct {
		desc  string
		err   bool
		input [][]LoadableSeries
		exp   []ChunkRef
	}{
		{
			desc: "errors no file",
			err:  true,
		},
		{
			desc: "single file",
			err:  false,
			input: [][]LoadableSeries{
				{
					{
						Labels: mustParseLabels(`{foo="bar"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  0,
								MaxTime:  3,
								Checksum: 0,
							},
							{
								MinTime:  1,
								MaxTime:  4,
								Checksum: 1,
							},
							{
								MinTime:  2,
								MaxTime:  5,
								Checksum: 2,
							},
						},
					},
					{
						Labels: mustParseLabels(`{foo="bar", bazz="buzz"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  1,
								MaxTime:  10,
								Checksum: 3,
							},
						},
					},
				},
			},
			exp: []ChunkRef{
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       0,
					End:         3,
					Checksum:    0,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       1,
					End:         4,
					Checksum:    1,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       2,
					End:         5,
					Checksum:    2,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", bazz="buzz"}`).Hash()),
					Start:       1,
					End:         10,
					Checksum:    3,
				},
			},
		},
		{
			desc: "multi file",
			err:  false,
			input: [][]LoadableSeries{
				{
					{
						Labels: mustParseLabels(`{foo="bar"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  0,
								MaxTime:  3,
								Checksum: 0,
							},
							{
								MinTime:  1,
								MaxTime:  4,
								Checksum: 1,
							},
							{
								MinTime:  2,
								MaxTime:  5,
								Checksum: 2,
							},
						},
					},
					{
						Labels: mustParseLabels(`{foo="bar", bazz="buzz"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  1,
								MaxTime:  10,
								Checksum: 3,
							},
						},
					},
				},
				{
					{
						Labels: mustParseLabels(`{foo="bar", a="b"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  10,
								MaxTime:  11,
								Checksum: 0,
							},
							{
								MinTime:  11,
								MaxTime:  14,
								Checksum: 1,
							},
							{
								MinTime:  12,
								MaxTime:  15,
								Checksum: 2,
							},
						},
					},
				},
			},
			exp: []ChunkRef{
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       0,
					End:         3,
					Checksum:    0,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       1,
					End:         4,
					Checksum:    1,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       2,
					End:         5,
					Checksum:    2,
				},

				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", a="b"}`).Hash()),
					Start:       10,
					End:         11,
					Checksum:    0,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", a="b"}`).Hash()),
					Start:       11,
					End:         14,
					Checksum:    1,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", a="b"}`).Hash()),
					Start:       12,
					End:         15,
					Checksum:    2,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", bazz="buzz"}`).Hash()),
					Start:       1,
					End:         10,
					Checksum:    3,
				},
			},
		},
		{
			desc: "multi file dedupe",
			err:  false,
			input: [][]LoadableSeries{
				{
					{
						Labels: mustParseLabels(`{foo="bar"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  0,
								MaxTime:  3,
								Checksum: 0,
							},
							{
								MinTime:  1,
								MaxTime:  4,
								Checksum: 1,
							},
							{
								MinTime:  2,
								MaxTime:  5,
								Checksum: 2,
							},
						},
					},
					{
						Labels: mustParseLabels(`{foo="bar", bazz="buzz"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  1,
								MaxTime:  10,
								Checksum: 3,
							},
						},
					},
				},
				// duplicate of first index
				{
					{
						Labels: mustParseLabels(`{foo="bar"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  0,
								MaxTime:  3,
								Checksum: 0,
							},
							{
								MinTime:  1,
								MaxTime:  4,
								Checksum: 1,
							},
							{
								MinTime:  2,
								MaxTime:  5,
								Checksum: 2,
							},
						},
					},
					{
						Labels: mustParseLabels(`{foo="bar", bazz="buzz"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  1,
								MaxTime:  10,
								Checksum: 3,
							},
						},
					},
				},
				{
					{
						Labels: mustParseLabels(`{foo="bar", a="b"}`),
						Chunks: []index.ChunkMeta{
							{
								MinTime:  10,
								MaxTime:  11,
								Checksum: 0,
							},
							{
								MinTime:  11,
								MaxTime:  14,
								Checksum: 1,
							},
							{
								MinTime:  12,
								MaxTime:  15,
								Checksum: 2,
							},
						},
					},
				},
			},
			exp: []ChunkRef{
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       0,
					End:         3,
					Checksum:    0,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       1,
					End:         4,
					Checksum:    1,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar"}`).Hash()),
					Start:       2,
					End:         5,
					Checksum:    2,
				},

				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", a="b"}`).Hash()),
					Start:       10,
					End:         11,
					Checksum:    0,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", a="b"}`).Hash()),
					Start:       11,
					End:         14,
					Checksum:    1,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", a="b"}`).Hash()),
					Start:       12,
					End:         15,
					Checksum:    2,
				},
				{
					User:        "fake",
					Fingerprint: model.Fingerprint(mustParseLabels(`{foo="bar", bazz="buzz"}`).Hash()),
					Start:       1,
					End:         10,
					Checksum:    3,
				},
			},
		},
	} {
		t.Run(tc.desc, func(t *testing.T) {
			dir := t.TempDir()
			c := NewCompactor("fake", dir)

			// Build one TSDB index on disk per input group.
			var indices []*TSDBIndex
			for _, cases := range tc.input {
				idx := BuildIndex(t, dir, "fake", cases)
				defer idx.Close()
				casted, ok := idx.Index.(*TSDBIndex)
				require.Equal(t, true, ok)
				indices = append(indices, casted)
			}

			out, err := c.Compact(context.Background(), indices...)
			if tc.err {
				require.NotNil(t, err)
				return
			}
			require.Nil(t, err)

			idx, err := NewShippableTSDBFile(out, false)
			require.Nil(t, err)
			defer idx.Close()

			// Query the compacted index over its full time range and compare
			// the returned chunk refs against the expectation.
			from, through := inclusiveBounds(idx)
			res, err := idx.GetChunkRefs(
				context.Background(),
				"fake",
				from,
				through,
				nil,
				nil,
				labels.MustNewMatcher(labels.MatchEqual, "", ""),
			)

			require.Nil(t, err)
			require.Equal(t, tc.exp, res)
		})
	}
}