github.com/m3db/m3@v1.5.1-0.20231129193456-75a402aa583b/src/dbnode/storage/index/aggregate_results_test.go

// Copyright (c) 2019 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package index

import (
	"sort"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/uber-go/tally"

	"github.com/m3db/m3/src/x/ident"
	"github.com/m3db/m3/src/x/instrument"
	xtest "github.com/m3db/m3/src/x/test"
)

// entries is a variadic convenience helper for building a slice of
// AggregateResultsEntry values.
func entries(entries ...AggregateResultsEntry) []AggregateResultsEntry { return entries }

// genResultsEntry builds an AggregateResultsEntry for a field and its terms.
func genResultsEntry(field string, terms ...string) AggregateResultsEntry {
	entryTerms := make([]ident.ID, 0, len(terms))
	for _, term := range terms {
		entryTerms = append(entryTerms, ident.StringID(term))
	}

	return AggregateResultsEntry{
		Field: ident.StringID(field),
		Terms: entryTerms,
	}
}

// toMap flattens an AggregateResults into a plain map of field name to sorted
// term names, for easy comparison in assertions.
func toMap(res AggregateResults) map[string][]string {
	entries := res.Map().Iter()
	resultMap := make(map[string][]string, len(entries))
	for _, entry := range entries { //nolint:gocritic
		terms := entry.value.Map().Iter()
		resultTerms := make([]string, 0, len(terms))
		for _, term := range terms {
			resultTerms = append(resultTerms, term.Key().String())
		}

		sort.Strings(resultTerms)
		resultMap[entry.Key().String()] = resultTerms
	}

	return resultMap
}

func TestWithLimits(t *testing.T) {
	tests := []struct {
		name      string
		entries   []AggregateResultsEntry
		sizeLimit int
		docLimit  int
		exSeries  int
		exDocs    int
		expected  map[string][]string
		exMetrics map[string]int64
	}{
		{
			name:     "single term",
			entries:  entries(genResultsEntry("foo")),
			exSeries: 1,
			exDocs:   1,
			expected: map[string][]string{"foo": {}},

			exMetrics: map[string]int64{
				"total": 1, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 0, "deduped-terms": 0,
			},
		},
		{
			name:     "same term",
			entries:  entries(genResultsEntry("foo"), genResultsEntry("foo")),
			exSeries: 1,
			exDocs:   2,
			expected: map[string][]string{"foo": {}},
			exMetrics: map[string]int64{
				"total": 2, "total-fields": 2, "deduped-fields": 1,
				"total-terms": 0, "deduped-terms": 0,
			},
		},
		{
			name:     "multiple terms",
			entries:  entries(genResultsEntry("foo"), genResultsEntry("bar")),
			exSeries: 2,
			exDocs:   2,
			expected: map[string][]string{"foo": {}, "bar": {}},
			exMetrics: map[string]int64{
				"total": 2, "total-fields": 2, "deduped-fields": 2,
				"total-terms": 0, "deduped-terms": 0,
			},
		},
		{
			name:     "single entry",
			entries:  entries(genResultsEntry("foo", "bar")),
			exSeries: 2,
			exDocs:   2,
			expected: map[string][]string{"foo": {"bar"}},
			exMetrics: map[string]int64{
				"total": 2, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 1, "deduped-terms": 1,
			},
		},
		{
			name:     "single entry multiple fields",
			entries:  entries(genResultsEntry("foo", "bar", "baz", "baz", "baz", "qux")),
			exSeries: 4,
			exDocs:   6,
			expected: map[string][]string{"foo": {"bar", "baz", "qux"}},
			exMetrics: map[string]int64{
				"total": 6, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 5, "deduped-terms": 3,
			},
		},
		{
			name: "multiple entry multiple fields",
			entries: entries(
				genResultsEntry("foo", "bar", "baz"),
				genResultsEntry("foo", "baz", "baz", "qux")),
			exSeries: 4,
			exDocs:   7,
			expected: map[string][]string{"foo": {"bar", "baz", "qux"}},
			exMetrics: map[string]int64{
				"total": 7, "total-fields": 2, "deduped-fields": 1,
				"total-terms": 5, "deduped-terms": 3,
			},
		},
		{
			name:     "multiple entries",
			entries:  entries(genResultsEntry("foo", "baz"), genResultsEntry("bar", "baz", "qux")),
			exSeries: 5,
			exDocs:   5,
			expected: map[string][]string{"foo": {"baz"}, "bar": {"baz", "qux"}},
			exMetrics: map[string]int64{
				"total": 5, "total-fields": 2, "deduped-fields": 2,
				"total-terms": 3, "deduped-terms": 3,
			},
		},

		{
			name:      "single entry query at size limit",
			entries:   entries(genResultsEntry("foo", "bar", "baz", "baz", "qux")),
			sizeLimit: 4,
			exSeries:  4,
			exDocs:    5,
			expected:  map[string][]string{"foo": {"bar", "baz", "qux"}},
			exMetrics: map[string]int64{
				"total": 5, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 4, "deduped-terms": 3,
			},
		},
		{
			name:     "single entry query at doc limit",
			entries:  entries(genResultsEntry("foo", "bar", "baz", "baz", "qux")),
			docLimit: 5,
			exSeries: 4,
			exDocs:   5,
			expected: map[string][]string{"foo": {"bar", "baz", "qux"}},
			exMetrics: map[string]int64{
				"total": 5, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 4, "deduped-terms": 3,
			},
		},

		{
			name:      "single entry query below size limit",
			entries:   entries(genResultsEntry("foo", "bar", "baz", "qux")),
			sizeLimit: 3,
			exSeries:  3,
			exDocs:    4,
			expected:  map[string][]string{"foo": {"bar", "baz"}},
			exMetrics: map[string]int64{
				"total": 4, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 3, "deduped-terms": 2,
			},
		},
		{
			name:     "single entry query below doc limit",
			entries:  entries(genResultsEntry("foo", "bar", "bar", "bar", "baz")),
			docLimit: 3,
			exSeries: 2,
			exDocs:   3,
			expected: map[string][]string{"foo": {"bar"}},
			exMetrics: map[string]int64{
				"total": 5, "total-fields": 1, "deduped-fields": 1,
				"total-terms": 4, "deduped-terms": 1,
			},
		},
		{
			name:      "multiple entry query below series limit",
			entries:   entries(genResultsEntry("foo", "bar"), genResultsEntry("baz", "qux")),
			sizeLimit: 3,
			exSeries:  3,
			exDocs:    4,
			expected:  map[string][]string{"foo": {"bar"}, "baz": {}},
			exMetrics: map[string]int64{
				"total": 4, "total-fields": 2, "deduped-fields": 2,
				"total-terms": 2, "deduped-terms": 1,
			},
		},
		{
			name: "multiple entry query below doc limit",
"multiple entry query below doc limit", 222 entries: entries(genResultsEntry("foo", "bar"), genResultsEntry("baz", "qux")), 223 docLimit: 3, 224 exSeries: 3, 225 exDocs: 3, 226 expected: map[string][]string{"foo": {"bar"}, "baz": {}}, 227 exMetrics: map[string]int64{ 228 "total": 4, "total-fields": 2, "deduped-fields": 2, 229 "total-terms": 2, "deduped-terms": 1, 230 }, 231 }, 232 { 233 name: "multiple entry query both limits", 234 entries: entries(genResultsEntry("foo", "bar"), genResultsEntry("baz", "qux")), 235 docLimit: 3, 236 sizeLimit: 10, 237 exSeries: 3, 238 exDocs: 3, 239 expected: map[string][]string{"foo": {"bar"}, "baz": {}}, 240 exMetrics: map[string]int64{ 241 "total": 4, "total-fields": 2, "deduped-fields": 2, 242 "total-terms": 2, "deduped-terms": 1, 243 }, 244 }, 245 } 246 247 for _, tt := range tests { 248 t.Run(tt.name, func(t *testing.T) { 249 scope := tally.NewTestScope("", nil) 250 iOpts := instrument.NewOptions().SetMetricsScope(scope) 251 res := NewAggregateResults(ident.StringID("ns"), AggregateResultsOptions{ 252 SizeLimit: tt.sizeLimit, 253 DocsLimit: tt.docLimit, 254 AggregateUsageMetrics: NewAggregateUsageMetrics(ident.StringID("ns"), iOpts), 255 }, testOpts) 256 257 size, docsCount := res.AddFields(tt.entries) 258 assert.Equal(t, tt.exSeries, size) 259 assert.Equal(t, tt.exDocs, docsCount) 260 assert.Equal(t, tt.exSeries, res.Size()) 261 assert.Equal(t, tt.exDocs, res.TotalDocsCount()) 262 263 assert.Equal(t, tt.expected, toMap(res)) 264 265 counters := scope.Snapshot().Counters() 266 actualCounters := make(map[string]int64, len(counters)) 267 for _, v := range counters { 268 actualCounters[v.Tags()["type"]] = v.Value() 269 } 270 271 assert.Equal(t, tt.exMetrics, actualCounters) 272 }) 273 } 274 } 275 276 func TestAggResultsReset(t *testing.T) { 277 res := NewAggregateResults(ident.StringID("qux"), 278 AggregateResultsOptions{}, testOpts) 279 size, docsCount := res.AddFields(entries(genResultsEntry("foo", "bar"))) 280 require.Equal(t, 2, size) 281 require.Equal(t, 2, docsCount) 282 283 aggVals, ok := res.Map().Get(ident.StringID("foo")) 284 require.True(t, ok) 285 require.Equal(t, 1, aggVals.Size()) 286 287 // Check result options correct. 288 aggResults, ok := res.(*aggregatedResults) 289 require.True(t, ok) 290 require.Equal(t, 0, aggResults.aggregateOpts.SizeLimit) 291 require.Equal(t, ident.StringID("qux"), aggResults.nsID) 292 293 newID := ident.StringID("qaz") 294 res.Reset(newID, AggregateResultsOptions{SizeLimit: 100}) 295 _, ok = res.Map().Get(ident.StringID("foo")) 296 require.False(t, ok) 297 require.Equal(t, 0, aggVals.Size()) 298 require.Equal(t, 0, res.Size()) 299 300 // Check result options change. 
	aggResults, ok = res.(*aggregatedResults)
	require.True(t, ok)
	require.Equal(t, 100, aggResults.aggregateOpts.SizeLimit)
	require.Equal(t, newID.Bytes(), aggResults.nsID.Bytes())

	// Ensure new NS is cloned
	require.False(t,
		xtest.ByteSlicesBackedBySameData(newID.Bytes(), aggResults.nsID.Bytes()))
}

func TestAggResultsResetNamespaceClones(t *testing.T) {
	res := NewAggregateResults(nil, AggregateResultsOptions{}, testOpts)
	require.Equal(t, nil, res.Namespace())
	nsID := ident.StringID("something")
	res.Reset(nsID, AggregateResultsOptions{})
	nsID.Finalize()
	require.Equal(t, nsID.Bytes(), res.Namespace().Bytes())

	// Ensure new NS is cloned
	require.False(t,
		xtest.ByteSlicesBackedBySameData(nsID.Bytes(), res.Namespace().Bytes()))
}

func TestAggResultFinalize(t *testing.T) {
	// Create a Results and insert some data.
	res := NewAggregateResults(nil, AggregateResultsOptions{}, testOpts)
	size, docsCount := res.AddFields(entries(genResultsEntry("foo", "bar")))
	require.Equal(t, 2, size)
	require.Equal(t, 2, docsCount)

	// Ensure the data is present.
	rMap := res.Map()
	aggVals, ok := rMap.Get(ident.StringID("foo"))
	require.True(t, ok)
	require.Equal(t, 1, aggVals.Size())

	// Call Finalize() to reset the Results.
	res.Finalize()

	// Ensure data was removed by call to Finalize().
	aggVals, ok = rMap.Get(ident.StringID("foo"))
	require.False(t, ok)
	require.Nil(t, aggVals.Map())
	require.Equal(t, 0, res.Size())

	for _, entry := range rMap.Iter() {
		id := entry.Key()
		require.False(t, id.IsNoFinalize())
	}
}

func TestResetUpdatesMetrics(t *testing.T) {
	scope := tally.NewTestScope("", nil)
	iOpts := instrument.NewOptions().SetMetricsScope(scope)
	testOpts = testOpts.SetInstrumentOptions(iOpts)
	res := NewAggregateResults(nil, AggregateResultsOptions{
		AggregateUsageMetrics: NewAggregateUsageMetrics(ident.StringID("ns1"), iOpts),
	}, testOpts)
	res.AddFields(entries(genResultsEntry("foo")))
	res.Reset(ident.StringID("ns2"), AggregateResultsOptions{})
	res.AddFields(entries(genResultsEntry("bar")))

	counters := scope.Snapshot().Counters()
	seenNamespaces := make(map[string]struct{})
	for _, v := range counters {
		ns := v.Tags()["namespace"]
		seenNamespaces[ns] = struct{}{}
	}

	assert.Equal(t, map[string]struct{}{
		"ns1": {},
		"ns2": {},
	}, seenNamespaces)

	res.Finalize()
}
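
// The table-driven cases above exercise limits and metrics in bulk. The test
// below is a minimal standalone sketch added for illustration (it is not part
// of the original suite, and the name TestAggregateResultsUsageSketch is ours)
// that walks one AddFields call end to end using only APIs already exercised
// in this file: NewAggregateResults, AddFields, the toMap helper, and
// Finalize. It assumes the counting semantics demonstrated by the cases above:
// the returned size is deduplicated fields plus deduplicated terms, and the
// docs count is raw field entries plus raw terms.
func TestAggregateResultsUsageSketch(t *testing.T) {
	res := NewAggregateResults(ident.StringID("sketch-ns"), AggregateResultsOptions{}, testOpts)

	// Two entries for the same field should dedupe into a single field whose
	// terms are the union of both entries' terms.
	size, docsCount := res.AddFields(entries(
		genResultsEntry("city", "sf", "nyc"),
		genResultsEntry("city", "nyc"),
	))
	require.Equal(t, 3, size)      // 1 deduped field + 2 deduped terms
	require.Equal(t, 5, docsCount) // 2 field entries + 3 raw terms

	// toMap sorts terms, so the expected slice is in lexical order.
	assert.Equal(t, map[string][]string{"city": {"nyc", "sf"}}, toMap(res))

	res.Finalize()
}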