// github.com/yankunsam/loki/v2@v2.6.3-0.20220817130409-389df5235c27/pkg/logqlmodel/stats/context_test.go

package stats

import (
	"context"
	"testing"
	"time"

	"github.com/stretchr/testify/require"

	util_log "github.com/grafana/loki/pkg/util/log"
)

func TestResult(t *testing.T) {
	stats, ctx := NewContext(context.Background())

	stats.AddHeadChunkBytes(10)
	stats.AddHeadChunkLines(20)
	stats.AddDecompressedBytes(40)
	stats.AddDecompressedLines(20)
	stats.AddCompressedBytes(30)
	stats.AddDuplicates(10)
	stats.AddChunksRef(50)
	stats.AddChunksDownloaded(60)
	stats.AddChunksDownloadTime(time.Second)
	stats.AddCacheRequest(ChunkCache, 3)
	stats.AddCacheRequest(IndexCache, 4)
	stats.AddCacheRequest(ResultCache, 1)

	fakeIngesterQuery(ctx)
	fakeIngesterQuery(ctx)

	res := stats.Result(2*time.Second, 2*time.Nanosecond, 10)
	res.Log(util_log.Logger)
	expected := Result{
		// The ingester section is the sum of the two fakeIngesterQuery calls above.
		Ingester: Ingester{
			TotalChunksMatched: 200,
			TotalBatches:       50,
			TotalLinesSent:     60,
			TotalReached:       2,
			Store: Store{
				Chunk: Chunk{
					HeadChunkBytes:    10,
					HeadChunkLines:    20,
					DecompressedBytes: 24,
					DecompressedLines: 40,
					CompressedBytes:   60,
					TotalDuplicates:   2,
				},
			},
		},
		// The querier section reflects the stats recorded directly on the context.
		Querier: Querier{
			Store: Store{
				TotalChunksRef:        50,
				TotalChunksDownloaded: 60,
				ChunksDownloadTime:    time.Second.Nanoseconds(),
				Chunk: Chunk{
					HeadChunkBytes:    10,
					HeadChunkLines:    20,
					DecompressedBytes: 40,
					DecompressedLines: 20,
					CompressedBytes:   30,
					TotalDuplicates:   10,
				},
			},
		},
		Caches: Caches{
			Chunk: Cache{
				Requests: 3,
			},
			Index: Cache{
				Requests: 4,
			},
			Result: Cache{
				Requests: 1,
			},
		},
		Summary: Summary{
			ExecTime:                2 * time.Second.Seconds(),
			QueueTime:               2 * time.Nanosecond.Seconds(),
			BytesProcessedPerSecond: int64(42),
			LinesProcessedPerSecond: int64(50),
			TotalBytesProcessed:     int64(84),
			TotalLinesProcessed:     int64(100),
			TotalEntriesReturned:    int64(10),
			Subqueries:              1,
		},
	}
	require.Equal(t, expected, res)
}

func TestSnapshot_JoinResults(t *testing.T) {
	statsCtx, ctx := NewContext(context.Background())
	expected := Result{
		Ingester: Ingester{
			TotalChunksMatched: 200,
			TotalBatches:       50,
			TotalLinesSent:     60,
			TotalReached:       2,
			Store: Store{
				Chunk: Chunk{
					HeadChunkBytes:    10,
					HeadChunkLines:    20,
					DecompressedBytes: 24,
					DecompressedLines: 40,
					CompressedBytes:   60,
					TotalDuplicates:   2,
				},
			},
		},
		Querier: Querier{
			Store: Store{
				TotalChunksRef:        50,
				TotalChunksDownloaded: 60,
				ChunksDownloadTime:    time.Second.Nanoseconds(),
				Chunk: Chunk{
					HeadChunkBytes:    10,
					HeadChunkLines:    20,
					DecompressedBytes: 40,
					DecompressedLines: 20,
					CompressedBytes:   30,
					TotalDuplicates:   10,
				},
			},
		},
		Summary: Summary{
			ExecTime:                2 * time.Second.Seconds(),
			QueueTime:               2 * time.Nanosecond.Seconds(),
			BytesProcessedPerSecond: int64(42),
			LinesProcessedPerSecond: int64(50),
			TotalBytesProcessed:     int64(84),
			TotalLinesProcessed:     int64(100),
			TotalEntriesReturned:    int64(10),
			Subqueries:              2,
		},
	}

	JoinResults(ctx, expected)
	res := statsCtx.Result(2*time.Second, 2*time.Nanosecond, 10)
	require.Equal(t, expected, res)
}

// fakeIngesterQuery simulates one ingester responding to a query: it bumps
// the reached counter and joins a fixed set of ingester stats into ctx.
func fakeIngesterQuery(ctx context.Context) {
	FromContext(ctx).AddIngesterReached(1)
	JoinIngesters(ctx, Ingester{
		TotalChunksMatched: 100,
		TotalBatches:       25,
		TotalLinesSent:     30,
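		// Chunk-level stats for this response; TestResult expects them
		// summed across the two calls (e.g. DecompressedBytes 2*12 = 24).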
		Store: Store{
			Chunk: Chunk{
				HeadChunkBytes:    5,
				HeadChunkLines:    10,
				DecompressedBytes: 12,
				DecompressedLines: 20,
				CompressedBytes:   30,
				TotalDuplicates:   1,
			},
		},
	})
}

func TestResult_Merge(t *testing.T) {
	var res Result

	res.Merge(res) // testing zero.
	require.Equal(t, res, res)

	toMerge := Result{
		Ingester: Ingester{
			TotalChunksMatched: 200,
			TotalBatches:       50,
			TotalLinesSent:     60,
			TotalReached:       2,
			Store: Store{
				Chunk: Chunk{
					HeadChunkBytes:    10,
					HeadChunkLines:    20,
					DecompressedBytes: 24,
					DecompressedLines: 40,
					CompressedBytes:   60,
					TotalDuplicates:   2,
				},
			},
		},
		Querier: Querier{
			Store: Store{
				TotalChunksRef:        50,
				TotalChunksDownloaded: 60,
				ChunksDownloadTime:    time.Second.Nanoseconds(),
				Chunk: Chunk{
					HeadChunkBytes:    10,
					HeadChunkLines:    20,
					DecompressedBytes: 40,
					DecompressedLines: 20,
					CompressedBytes:   30,
					TotalDuplicates:   10,
				},
			},
		},
		Caches: Caches{
			Chunk: Cache{
				Requests:      5,
				BytesReceived: 1024,
				BytesSent:     512,
			},
			Index: Cache{
				EntriesRequested: 22,
				EntriesFound:     2,
			},
			Result: Cache{
				EntriesStored: 3,
			},
		},
		Summary: Summary{
			ExecTime:                2 * time.Second.Seconds(),
			QueueTime:               2 * time.Nanosecond.Seconds(),
			BytesProcessedPerSecond: int64(42),
			LinesProcessedPerSecond: int64(50),
			TotalBytesProcessed:     int64(84),
			TotalLinesProcessed:     int64(100),
		},
	}

	res.Merge(toMerge)
	toMerge.Summary.Subqueries = 2
	require.Equal(t, toMerge, res)

	// Merge again: additive counters double, per-second rates stay the same,
	// and Subqueries increments once more.
	res.Merge(toMerge)
	require.Equal(t, Result{
		Ingester: Ingester{
			TotalChunksMatched: 2 * 200,
			TotalBatches:       2 * 50,
			TotalLinesSent:     2 * 60,
			Store: Store{
				Chunk: Chunk{
					HeadChunkBytes:    2 * 10,
					HeadChunkLines:    2 * 20,
					DecompressedBytes: 2 * 24,
					DecompressedLines: 2 * 40,
					CompressedBytes:   2 * 60,
					TotalDuplicates:   2 * 2,
				},
			},
			TotalReached: 2 * 2,
		},
		Querier: Querier{
			Store: Store{
				TotalChunksRef:        2 * 50,
				TotalChunksDownloaded: 2 * 60,
				ChunksDownloadTime:    2 * time.Second.Nanoseconds(),
				Chunk: Chunk{
					HeadChunkBytes:    2 * 10,
					HeadChunkLines:    2 * 20,
					DecompressedBytes: 2 * 40,
					DecompressedLines: 2 * 20,
					CompressedBytes:   2 * 30,
					TotalDuplicates:   2 * 10,
				},
			},
		},
		Caches: Caches{
			Chunk: Cache{
				Requests:      2 * 5,
				BytesReceived: 2 * 1024,
				BytesSent:     2 * 512,
			},
			Index: Cache{
				EntriesRequested: 2 * 22,
				EntriesFound:     2 * 2,
			},
			Result: Cache{
				EntriesStored: 2 * 3,
			},
		},
		Summary: Summary{
			ExecTime:                2 * 2 * time.Second.Seconds(),
			QueueTime:               2 * 2 * time.Nanosecond.Seconds(),
			BytesProcessedPerSecond: int64(42), // two requests at the same pace should give the same bytes/lines per second
			LinesProcessedPerSecond: int64(50),
			TotalBytesProcessed:     2 * int64(84),
			TotalLinesProcessed:     2 * int64(100),
			Subqueries:              3,
		},
	}, res)
}

func TestReset(t *testing.T) {
	statsCtx, ctx := NewContext(context.Background())
	fakeIngesterQuery(ctx)
	res := statsCtx.Result(2*time.Second, 2*time.Millisecond, 10)
	require.NotEmpty(t, res)
	statsCtx.Reset()
	res = statsCtx.Result(0, 0, 0)
	res.Summary.Subqueries = 0
	require.Empty(t, res)
}

func TestIngester(t *testing.T) {
	statsCtx, ctx := NewContext(context.Background())
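	// Chunk stats recorded directly on this context are folded into the
	// Ingester() snapshot alongside the totals joined by fakeIngesterQuery.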
	fakeIngesterQuery(ctx)
	statsCtx.AddCompressedBytes(100)
	statsCtx.AddDuplicates(10)
	statsCtx.AddHeadChunkBytes(200)
	require.Equal(t, Ingester{
		TotalReached:       1,
		TotalChunksMatched: 100,
		TotalBatches:       25,
		TotalLinesSent:     30,
		Store: Store{
			Chunk: Chunk{
				HeadChunkBytes:  200,
				CompressedBytes: 100,
				TotalDuplicates: 10,
			},
		},
	}, statsCtx.Ingester())
}

func TestCaches(t *testing.T) {
	statsCtx, _ := NewContext(context.Background())

	statsCtx.AddCacheRequest(ChunkCache, 5)
	statsCtx.AddCacheEntriesStored(ResultCache, 3)
	statsCtx.AddCacheEntriesRequested(IndexCache, 22)
	statsCtx.AddCacheBytesRetrieved(ChunkCache, 1024)
	statsCtx.AddCacheBytesSent(ChunkCache, 512)
	statsCtx.AddCacheEntriesFound(IndexCache, 2)

	require.Equal(t, Caches{
		Chunk: Cache{
			Requests:      5,
			BytesReceived: 1024,
			BytesSent:     512,
		},
		Index: Cache{
			EntriesRequested: 22,
			EntriesFound:     2,
		},
		Result: Cache{
			EntriesStored: 3,
		},
	}, statsCtx.Caches())
}
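
// The example below is not part of the upstream file; it is a minimal sketch
// of the pattern the tests above exercise: NewContext returns both a stats
// collector and a context.Context that carries it, so any callee holding the
// ctx can record stats via FromContext without a reference to the collector,
// and the caller snapshots everything with Result. The function name is
// hypothetical.
func exampleStatsFlow() Result {
	statsCtx, ctx := NewContext(context.Background())

	// A callee records stats through the context alone.
	FromContext(ctx).AddDecompressedBytes(40)
	FromContext(ctx).AddDecompressedLines(20)

	// The caller supplies wall-clock execution time, queue time, and the
	// number of entries returned; everything else comes from the context.
	return statsCtx.Result(2*time.Second, time.Millisecond, 10)
}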