go.temporal.io/server@v1.23.0/common/cache/lru_test.go

// The MIT License
//
// Copyright (c) 2020 Temporal Technologies Inc. All rights reserved.
//
// Copyright (c) 2020 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package cache

import (
	"math/rand"
	"sync"
	"testing"
	"time"

	"github.com/google/uuid"
	"github.com/stretchr/testify/assert"
	"go.temporal.io/server/common/clock"
)

type (
	keyType struct {
		dummyString string
		dummyInt    int
	}

	testEntryWithCacheSize struct {
		cacheSize int
	}
)

func (c *testEntryWithCacheSize) CacheSize() int {
	return c.cacheSize
}

func TestLRU(t *testing.T) {
	t.Parallel()

	cache := NewLRU(4)

	cache.Put("A", "Foo")
	assert.Equal(t, "Foo", cache.Get("A"))
	assert.Nil(t, cache.Get("B"))
	assert.Equal(t, 1, cache.Size())

	cache.Put("B", "Bar")
	cache.Put("C", "Cid")
	cache.Put("D", "Delt")
	assert.Equal(t, 4, cache.Size())

	assert.Equal(t, "Bar", cache.Get("B"))
	assert.Equal(t, "Cid", cache.Get("C"))
	assert.Equal(t, "Delt", cache.Get("D"))

	cache.Put("A", "Foo2")
	assert.Equal(t, "Foo2", cache.Get("A"))
	assert.Equal(t, 4, cache.Size())

	cache.Put("E", "Epsi")
	assert.Equal(t, "Epsi", cache.Get("E"))
	assert.Equal(t, "Foo2", cache.Get("A"))
	assert.Nil(t, cache.Get("B")) // Oldest, should be evicted
	assert.Equal(t, 4, cache.Size())

	// Access C, D is now LRU
	cache.Get("C")
	cache.Put("F", "Felp")
	assert.Nil(t, cache.Get("D"))
	assert.Equal(t, 4, cache.Size())

	cache.Delete("A")
	assert.Nil(t, cache.Get("A"))
	assert.Equal(t, 3, cache.Size())
}

func TestGenerics(t *testing.T) {
	t.Parallel()

	key := keyType{
		dummyString: "some random key",
		dummyInt:    59,
	}
	value := "some random value"

	cache := NewLRU(5)
	cache.Put(key, value)

	assert.Equal(t, value, cache.Get(key))
	assert.Equal(t, value, cache.Get(keyType{
		dummyString: "some random key",
		dummyInt:    59,
	}))
	assert.Nil(t, cache.Get(keyType{
		dummyString: "some other random key",
		dummyInt:    56,
	}))
	assert.Equal(t, 1, cache.Size())

	cache.Put(key, "some other random value")
	assert.Equal(t, "some other random value", cache.Get(key))
	assert.Equal(t, 1, cache.Size())
}

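// TestLRUWithTTL checks that an entry is no longer returned and is removed from
// the cache once the fake time source advances past the configured TTL.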
func TestLRUWithTTL(t *testing.T) {
	t.Parallel()

	timeSource := clock.NewEventTimeSource()
	cache := New(5, &Options{
		TTL:        time.Millisecond * 100,
		TimeSource: timeSource,
	})
	cache.Put("A", "foo")
	assert.Equal(t, "foo", cache.Get("A"))
	timeSource.Advance(time.Millisecond * 300)
	assert.Nil(t, cache.Get("A"))
	assert.Equal(t, 0, cache.Size())
}

func TestLRUCacheConcurrentAccess(t *testing.T) {
	t.Parallel()

	cache := NewLRU(5)
	values := map[string]string{
		"A": "foo",
		"B": "bar",
		"C": "zed",
		"D": "dank",
		"E": "ezpz",
	}

	for k, v := range values {
		cache.Put(k, v)
	}

	start := make(chan struct{})
	var wg sync.WaitGroup
	for i := 0; i < 20; i++ {
		wg.Add(2)

		// concurrent get and put
		go func() {
			defer wg.Done()

			<-start

			for j := 0; j < 1000; j++ {
				cache.Get("A")
				cache.Put("A", "fooo")
			}
		}()

		// concurrent iteration
		go func() {
			defer wg.Done()

			<-start

			for j := 0; j < 50; j++ {
				it := cache.Iterator()
				for it.HasNext() {
					_ = it.Next()
				}
				it.Close()
			}
		}()
	}

	close(start)
	wg.Wait()
}

func TestTTL(t *testing.T) {
	t.Parallel()

	timeSource := clock.NewEventTimeSource()
	cache := New(5, &Options{
		TTL:        time.Millisecond * 50,
		TimeSource: timeSource,
	})

	cache.Put("A", t)
	assert.Equal(t, t, cache.Get("A"))
	timeSource.Advance(time.Millisecond * 100)
	assert.Nil(t, cache.Get("A"))
}

func TestTTLWithPin(t *testing.T) {
	t.Parallel()

	timeSource := clock.NewEventTimeSource()
	cache := New(5, &Options{
		TTL:        time.Millisecond * 50,
		Pin:        true,
		TimeSource: timeSource,
	})

	_, err := cache.PutIfNotExist("A", t)
	assert.NoError(t, err)
	assert.Equal(t, t, cache.Get("A"))
	assert.Equal(t, 1, cache.Size())
	timeSource.Advance(time.Millisecond * 100)
	assert.Equal(t, t, cache.Get("A"))
	assert.Equal(t, 1, cache.Size())
	// release 3 times since PutIfNotExist also increases the ref count
	cache.Release("A")
	cache.Release("A")
	cache.Release("A")
	assert.Nil(t, cache.Get("A"))
	assert.Equal(t, 0, cache.Size())
}

func TestMaxSizeWithPin_MidItem(t *testing.T) {
	t.Parallel()

	timeSource := clock.NewEventTimeSource()
	cache := New(2, &Options{
		TTL:        time.Millisecond * 50,
		Pin:        true,
		TimeSource: timeSource,
	})

	_, err := cache.PutIfNotExist("A", t)
	assert.NoError(t, err)
	assert.Equal(t, 1, cache.Size())

	_, err = cache.PutIfNotExist("B", t)
	assert.NoError(t, err)
	assert.Equal(t, 2, cache.Size())

	_, err = cache.PutIfNotExist("C", t)
	assert.Error(t, err)
	assert.Equal(t, 2, cache.Size())

	assert.Equal(t, t, cache.Get("A"))
	cache.Release("A") // get will also increase the ref count
	assert.Equal(t, t, cache.Get("B"))
	cache.Release("B") // get will also increase the ref count
	assert.Equal(t, 2, cache.Size())

	cache.Release("B") // B's ref count is 0
	_, err = cache.PutIfNotExist("C", t)
	assert.NoError(t, err)
	assert.Equal(t, t, cache.Get("C"))
	cache.Release("C") // get will also increase the ref count
	assert.Equal(t, 2, cache.Size())

	cache.Release("A") // A's ref count is 0
	cache.Release("C") // C's ref count is 0
	assert.Equal(t, 2, cache.Size())

	timeSource.Advance(time.Millisecond * 100)
	assert.Nil(t, cache.Get("A"))
	assert.Nil(t, cache.Get("B"))
	assert.Nil(t, cache.Get("C"))
	assert.Equal(t, 0, cache.Size())
}

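// TestMaxSizeWithPin_LastItem covers the same pinned, at-capacity scenario as
// TestMaxSizeWithPin_MidItem, but releases the entries in a different order
// before the third key is inserted.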
func TestMaxSizeWithPin_LastItem(t *testing.T) {
	t.Parallel()

	timeSource := clock.NewEventTimeSource()
	cache := New(2, &Options{
		TTL:        time.Millisecond * 50,
		Pin:        true,
		TimeSource: timeSource,
	})

	_, err := cache.PutIfNotExist("A", t)
	assert.NoError(t, err)
	assert.Equal(t, 1, cache.Size())

	_, err = cache.PutIfNotExist("B", t)
	assert.NoError(t, err)
	assert.Equal(t, 2, cache.Size())

	_, err = cache.PutIfNotExist("C", t)
	assert.Error(t, err)
	assert.Equal(t, 2, cache.Size())

	assert.Equal(t, t, cache.Get("A"))
	cache.Release("A") // get will also increase the ref count
	assert.Equal(t, t, cache.Get("B"))
	cache.Release("B") // get will also increase the ref count
	assert.Equal(t, 2, cache.Size())

	cache.Release("A") // A's ref count is 0
	_, err = cache.PutIfNotExist("C", t)
	assert.NoError(t, err)
	assert.Equal(t, t, cache.Get("C"))
	cache.Release("C") // get will also increase the ref count
	assert.Equal(t, 2, cache.Size())

	cache.Release("B") // B's ref count is 0
	cache.Release("C") // C's ref count is 0
	assert.Equal(t, 2, cache.Size())

	timeSource.Advance(time.Millisecond * 100)
	assert.Nil(t, cache.Get("A"))
	assert.Nil(t, cache.Get("B"))
	assert.Nil(t, cache.Get("C"))
	assert.Equal(t, 0, cache.Size())
}

func TestIterator(t *testing.T) {
	t.Parallel()

	expected := map[string]string{
		"A": "Alpha",
		"B": "Beta",
		"G": "Gamma",
		"D": "Delta",
	}

	cache := NewLRU(5)

	for k, v := range expected {
		cache.Put(k, v)
	}

	actual := map[string]string{}

	it := cache.Iterator()
	for it.HasNext() {
		entry := it.Next()
		actual[entry.Key().(string)] = entry.Value().(string)
	}
	it.Close()
	assert.Equal(t, expected, actual)

	it = cache.Iterator()
	for i := 0; i < len(expected); i++ {
		entry := it.Next()
		actual[entry.Key().(string)] = entry.Value().(string)
	}
	it.Close()
	assert.Equal(t, expected, actual)
}

func TestZeroSizeCache(t *testing.T) {
	t.Parallel()

	cache := NewLRU(0)
	_, err := cache.PutIfNotExist("A", t)
	assert.NoError(t, err)
	assert.Equal(t, nil, cache.Get("A"))
	assert.Equal(t, 0, cache.Size())
	it := cache.Iterator()
	assert.False(t, it.HasNext())
	it.Close()
	cache.Release("A")
	cache.Delete("A")
	v, err := cache.PutIfNotExist("A", t)
	assert.Equal(t, v, t)
	assert.Nil(t, err)
	assert.Equal(t, 0, cache.Size())
}

func TestCache_ItemSizeTooLarge(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)

	res := cache.Put(uuid.New(), &testEntryWithCacheSize{maxTotalBytes})
	assert.Equal(t, res, nil)
	assert.Equal(t, 10, cache.Size())

	res, err := cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{maxTotalBytes + 1})
	assert.Equal(t, err, ErrCacheItemTooLarge)
	assert.Equal(t, res, nil)
	assert.Equal(t, 10, cache.Size())
}

func TestCache_ItemHasCacheSizeDefined(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)

	numPuts := rand.Intn(1024)

	startWG := sync.WaitGroup{}
	endWG := sync.WaitGroup{}

	startWG.Add(numPuts)
	endWG.Add(numPuts)

	go func() {
		startWG.Wait()
		assert.True(t, cache.Size() < maxTotalBytes)
	}()
	for i := 0; i < numPuts; i++ {
		go func() {
			defer endWG.Done()

			startWG.Wait()
			key := uuid.New()
			cache.Put(key, &testEntryWithCacheSize{rand.Int()})
		}()
		startWG.Done()
	}

	endWG.Wait()
}

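// TestCache_ItemHasCacheSizeDefined_PutWithNewKeys checks that Put accounts for
// each entry's reported CacheSize: a value larger than the cache is not added,
// and existing entries are evicted until a new value fits.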
func TestCache_ItemHasCacheSizeDefined_PutWithNewKeys(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)

	// Put with new key and value size greater than cache size, should not be added to cache
	cache.Put(uuid.New(), &testEntryWithCacheSize{15})
	assert.Equal(t, 0, cache.Size())

	// Put with new key and value size less than cache size, should be added to cache
	cache.Put(uuid.New(), &testEntryWithCacheSize{5})
	assert.Equal(t, 5, cache.Size())

	// Put with new key and value size less than cache size, should evict items with 0 refs and add to cache
	cache.Put(uuid.New(), &testEntryWithCacheSize{10})
	assert.Equal(t, 10, cache.Size())

	// Put with new key and value size less than cache size, should evict items with 0 refs until there is enough space, then add to cache
	cache.Put(uuid.New(), &testEntryWithCacheSize{3})
	assert.Equal(t, 3, cache.Size())
	cache.Put(uuid.New(), &testEntryWithCacheSize{7})
	assert.Equal(t, 10, cache.Size())
}

func TestCache_ItemHasCacheSizeDefined_PutWithSameKeyAndDifferentSizes(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)

	key1 := "A"
	cache.Put(key1, &testEntryWithCacheSize{4})
	assert.Equal(t, 4, cache.Size())

	key2 := "B"
	cache.Put(key2, &testEntryWithCacheSize{4})
	// 4 + 4 = 8 < 10, should not evict any items
	assert.Equal(t, 8, cache.Size())
	// put the same key with a smaller size, should not evict any items
	cache.Put(key2, &testEntryWithCacheSize{3})
	assert.Equal(t, cache.Get(key1), &testEntryWithCacheSize{4})
	// 8 - 4 + 3 = 7 < 10, should not evict any items
	assert.Equal(t, 7, cache.Size())

	// put the same key with a larger size that still fits in the cache, should not evict any items
	cache.Put(key2, &testEntryWithCacheSize{6})
	// 7 - 3 + 6 = 10 <= 10, should not evict any items
	assert.Equal(t, 10, cache.Size())
	// get key1 last so that it becomes the most recently used
	assert.Equal(t, cache.Get(key2), &testEntryWithCacheSize{6})
	assert.Equal(t, cache.Get(key1), &testEntryWithCacheSize{4})

	// put the same key with a size that takes the entire cache, should evict all other items
	cache.Put(key2, &testEntryWithCacheSize{10})
	// 10 - 4 - 6 + 10 = 10 <= 10, should evict all other items
	assert.Equal(t, 10, cache.Size())
	assert.Equal(t, cache.Get(key1), nil)
	assert.Equal(t, cache.Get(key2), &testEntryWithCacheSize{10})
}

func TestCache_ItemHasCacheSizeDefined_PutWithSameKey(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)

	key := uuid.New()

	// Put with same key and value size greater than cache size, should not be added to cache
	cache.Put(key, &testEntryWithCacheSize{15})
	assert.Equal(t, 0, cache.Size())

	// Put with same key and value size less than cache size, should be added to cache
	cache.Put(key, &testEntryWithCacheSize{5})
	assert.Equal(t, 5, cache.Size())

	// Put with same key and value size less than cache size, should evict entries until there is enough space, then add to cache
	cache.Put(key, &testEntryWithCacheSize{10})
	assert.Equal(t, 10, cache.Size())

	// Put with same key and value size less than cache size, should evict entries until there is enough space, then add to cache
	cache.Put(key, &testEntryWithCacheSize{3})
	assert.Equal(t, 3, cache.Size())
	cache.Put(key, &testEntryWithCacheSize{7})
	assert.Equal(t, 7, cache.Size())
}

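// TestCache_ItemHasCacheSizeDefined_PutIfNotExistWithNewKeys checks the same size
// accounting for PutIfNotExist: an oversized value fails with ErrCacheItemTooLarge,
// and existing entries are evicted to make room for new keys.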
func TestCache_ItemHasCacheSizeDefined_PutIfNotExistWithNewKeys(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)

	// PutIfNotExist with a new key and size greater than cache size, should return error and not add to cache
	val, err := cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{15})
	assert.Equal(t, ErrCacheItemTooLarge, err)
	assert.Nil(t, val)
	assert.Equal(t, 0, cache.Size())

	// PutIfNotExist with a new key and size less than cache size, should add to cache
	val, err = cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{5})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{5}, val)
	assert.Equal(t, 5, cache.Size())

	// PutIfNotExist with a new key and size less than cache size, should evict the existing item and add to cache
	val, err = cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{10})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{10}, val)
	assert.Equal(t, 10, cache.Size())

	// PutIfNotExist with a new key and size less than cache size, should evict the existing item and add to cache
	val, err = cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{5})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{5}, val)
	assert.Equal(t, 5, cache.Size())
}

func TestCache_ItemHasCacheSizeDefined_PutIfNotExistWithSameKey(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := NewLRU(maxTotalBytes)
	key := uuid.New().String()

	// PutIfNotExist with a new key and size greater than cache size, should return error and not add to cache
	val, err := cache.PutIfNotExist(key, &testEntryWithCacheSize{15})
	assert.Equal(t, ErrCacheItemTooLarge, err)
	assert.Nil(t, val)
	assert.Equal(t, 0, cache.Size())

	// PutIfNotExist with a new key and size less than cache size, should add to cache
	val, err = cache.PutIfNotExist(key, &testEntryWithCacheSize{5})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{5}, val)
	assert.Equal(t, 5, cache.Size())

	// PutIfNotExist with the same key and size less than cache size, should not be added to cache
	val, err = cache.PutIfNotExist(key, &testEntryWithCacheSize{10})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{5}, val)
	assert.Equal(t, 5, cache.Size())
}

func TestCache_PutIfNotExistWithNewKeys_Pin(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := New(maxTotalBytes, &Options{Pin: true})

	val, err := cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{15})
	assert.Equal(t, ErrCacheItemTooLarge, err)
	assert.Nil(t, val)
	assert.Equal(t, 0, cache.Size())

	val, err = cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{3})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{3}, val)
	assert.Equal(t, 3, cache.Size())

	val, err = cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{7})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{7}, val)
	assert.Equal(t, 10, cache.Size())

	val, err = cache.PutIfNotExist(uuid.New(), &testEntryWithCacheSize{8})
	assert.Equal(t, ErrCacheFull, err)
	assert.Nil(t, val)
	assert.Equal(t, 10, cache.Size())
}

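// TestCache_PutIfNotExistWithSameKeys_Pin checks that, with pinning enabled, a
// second PutIfNotExist for an existing key returns the value already in the
// cache and leaves the cache size unchanged.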
func TestCache_PutIfNotExistWithSameKeys_Pin(t *testing.T) {
	t.Parallel()

	maxTotalBytes := 10
	cache := New(maxTotalBytes, &Options{Pin: true})

	key := uuid.New()
	val, err := cache.PutIfNotExist(key, &testEntryWithCacheSize{15})
	assert.Equal(t, ErrCacheItemTooLarge, err)
	assert.Nil(t, val)
	assert.Equal(t, 0, cache.Size())

	val, err = cache.PutIfNotExist(key, &testEntryWithCacheSize{3})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{3}, val)
	assert.Equal(t, 3, cache.Size())

	val, err = cache.PutIfNotExist(key, &testEntryWithCacheSize{7})
	assert.NoError(t, err)
	assert.Equal(t, &testEntryWithCacheSize{3}, val)
	assert.Equal(t, 3, cache.Size())
}