github.com/koko1123/flow-go-1@v0.29.6/storage/badger/chunk_data_pack_test.go

package badger_test

import (
	"errors"
	"sync"
	"testing"
	"time"

	"github.com/dgraph-io/badger/v3"

	"github.com/koko1123/flow-go-1/model/flow"
	"github.com/koko1123/flow-go-1/module/metrics"
	"github.com/koko1123/flow-go-1/storage"
	badgerstorage "github.com/koko1123/flow-go-1/storage/badger"
	"github.com/koko1123/flow-go-1/utils/unittest"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// TestChunkDataPacks_Store evaluates correct storage and retrieval of chunk data packs in the storage.
// It also evaluates that re-inserting is idempotent.
func TestChunkDataPacks_Store(t *testing.T) {
	WithChunkDataPacks(t, 100, func(t *testing.T, chunkDataPacks []*flow.ChunkDataPack, chunkDataPackStore *badgerstorage.ChunkDataPacks, _ *badger.DB) {
		wg := sync.WaitGroup{}
		wg.Add(len(chunkDataPacks))
		for _, chunkDataPack := range chunkDataPacks {
			go func(cdp flow.ChunkDataPack) {
				err := chunkDataPackStore.Store(&cdp)
				require.NoError(t, err)

				wg.Done()
			}(*chunkDataPack)
		}

		unittest.RequireReturnsBefore(t, wg.Wait, 1*time.Second, "could not store chunk data packs on time")

		// re-insert - should be idempotent
		for _, chunkDataPack := range chunkDataPacks {
			err := chunkDataPackStore.Store(chunkDataPack)
			require.NoError(t, err)
		}
	})
}

// TestChunkDataPacks_BatchStore evaluates correct batch storage and retrieval of chunk data packs in the storage.
func TestChunkDataPacks_BatchStore(t *testing.T) {
	WithChunkDataPacks(t, 100, func(t *testing.T, chunkDataPacks []*flow.ChunkDataPack, chunkDataPackStore *badgerstorage.ChunkDataPacks, db *badger.DB) {
		batch := badgerstorage.NewBatch(db)

		wg := sync.WaitGroup{}
		wg.Add(len(chunkDataPacks))
		for _, chunkDataPack := range chunkDataPacks {
			go func(cdp flow.ChunkDataPack) {
				err := chunkDataPackStore.BatchStore(&cdp, batch)
				require.NoError(t, err)

				wg.Done()
			}(*chunkDataPack)
		}

		unittest.RequireReturnsBefore(t, wg.Wait, 1*time.Second, "could not store chunk data packs on time")

		err := batch.Flush()
		require.NoError(t, err)
	})
}

// TestChunkDataPacks_MissingItem evaluates that querying a missing item returns a storage.ErrNotFound error.
func TestChunkDataPacks_MissingItem(t *testing.T) {
	unittest.RunWithBadgerDB(t, func(db *badger.DB) {
		transactions := badgerstorage.NewTransactions(&metrics.NoopCollector{}, db)
		collections := badgerstorage.NewCollections(db, transactions)
		store := badgerstorage.NewChunkDataPacks(&metrics.NoopCollector{}, db, collections, 1)

		// attempt to get a chunk data pack for a chunk ID that was never stored
		_, err := store.ByChunkID(unittest.IdentifierFixture())
		assert.True(t, errors.Is(err, storage.ErrNotFound))
	})
}

// WithChunkDataPacks is a test helper that generates the specified number of chunk data packs, stores them using the
// storeFunc, and then evaluates whether they are successfully retrieved from storage.
func WithChunkDataPacks(t *testing.T, chunks int, storeFunc func(*testing.T, []*flow.ChunkDataPack, *badgerstorage.ChunkDataPacks, *badger.DB)) {
	unittest.RunWithBadgerDB(t, func(db *badger.DB) {
		transactions := badgerstorage.NewTransactions(&metrics.NoopCollector{}, db)
		collections := badgerstorage.NewCollections(db, transactions)
		// keep the cache size at 1 to make sure that entries are written to and read from storage itself.
		store := badgerstorage.NewChunkDataPacks(&metrics.NoopCollector{}, db, collections, 1)

		chunkDataPacks := unittest.ChunkDataPacksFixture(chunks)
		for _, chunkDataPack := range chunkDataPacks {
			// stores collection in Collections storage (which ChunkDataPacks store uses internally)
			err := collections.Store(chunkDataPack.Collection)
			require.NoError(t, err)
		}

		// stores chunk data packs using the provided store function.
		storeFunc(t, chunkDataPacks, store, db)

		// stored chunk data packs should be retrieved successfully.
		for _, expected := range chunkDataPacks {
			actual, err := store.ByChunkID(expected.ChunkID)
			require.NoError(t, err)

			assert.Equal(t, expected, actual)
		}
	})
}