github.com/onflow/flow-go@v0.35.7-crescendo-preview.23-atree-inlining/module/executiondatasync/provider/provider_test.go

package provider_test

import (
	"context"
	"testing"
	"time"

	"github.com/ipfs/go-cid"
	"github.com/ipfs/go-datastore"
	dssync "github.com/ipfs/go-datastore/sync"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
	goassert "gotest.tools/assert"

	"github.com/onflow/flow-go/ledger"
	"github.com/onflow/flow-go/model/flow"
	"github.com/onflow/flow-go/module/blobs"
	"github.com/onflow/flow-go/module/executiondatasync/execution_data"
	"github.com/onflow/flow-go/module/executiondatasync/provider"
	mocktracker "github.com/onflow/flow-go/module/executiondatasync/tracker/mock"
	"github.com/onflow/flow-go/module/metrics"
	"github.com/onflow/flow-go/network"
	"github.com/onflow/flow-go/network/mocknetwork"
	"github.com/onflow/flow-go/utils/unittest"
)

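// getDatastore returns an in-memory map datastore wrapped with a mutex so it is safe for concurrent use.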
func getDatastore() datastore.Batching {
	return dssync.MutexWrap(datastore.NewMapDatastore())
}

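// getExecutionDataStore wraps ds in a blobstore and returns an ExecutionDataStore using the default serializer.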
func getExecutionDataStore(ds datastore.Batching) execution_data.ExecutionDataStore {
	return execution_data.NewExecutionDataStore(blobs.NewBlobstore(ds), execution_data.DefaultSerializer)
}

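// getBlobservice returns a mock BlobService whose AddBlobs call writes blobs directly into a blobstore backed by ds.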
func getBlobservice(t *testing.T, ds datastore.Batching) network.BlobService {
	blobstore := blobs.NewBlobstore(ds)
	blobService := mocknetwork.NewBlobService(t)
	blobService.On("AddBlobs", mock.Anything, mock.AnythingOfType("[]blocks.Block")).Return(blobstore.PutMany)
	return blobService
}

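// getProvider constructs an execution data provider that publishes blobs through the given BlobService,
// using a no-op metrics collector and an in-memory mock tracker storage.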
func getProvider(blobService network.BlobService) provider.Provider {
	trackerStorage := mocktracker.NewMockStorage()

	return provider.NewProvider(
		unittest.Logger(),
		metrics.NewNoopCollector(),
		execution_data.DefaultSerializer,
		blobService,
		trackerStorage,
	)
}

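// generateBlockExecutionData builds a BlockExecutionData fixture with numChunks chunks,
// each produced by unittest.ChunkExecutionDataFixture with the given minimum serialized size.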
func generateBlockExecutionData(t *testing.T, numChunks int, minSerializedSizePerChunk uint64) *execution_data.BlockExecutionData {
	chunkData := make([]*execution_data.ChunkExecutionData, 0, numChunks)
	for i := 0; i < numChunks; i++ {
		chunkData = append(chunkData, unittest.ChunkExecutionDataFixture(t, int(minSerializedSizePerChunk)))
	}

	return unittest.BlockExecutionDataFixture(unittest.WithChunkExecutionDatas(chunkData...))
}

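// deepEqual asserts that two BlockExecutionData values match: same block ID, same number of chunks,
// and per-chunk equality of collections, events, and trie updates.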
func deepEqual(t *testing.T, expected, actual *execution_data.BlockExecutionData) {
	assert.Equal(t, expected.BlockID, actual.BlockID)
	assert.Equal(t, len(expected.ChunkExecutionDatas), len(actual.ChunkExecutionDatas))

	for i, expectedChunk := range expected.ChunkExecutionDatas {
		actualChunk := actual.ChunkExecutionDatas[i]

		goassert.DeepEqual(t, expectedChunk.Collection, actualChunk.Collection)
		goassert.DeepEqual(t, expectedChunk.Events, actualChunk.Events)
		assert.True(t, expectedChunk.TrieUpdate.Equals(actualChunk.TrieUpdate))
	}
}

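// TestHappyPath provides execution data through the provider and verifies that it can be read back
// from the local store and that the returned root references the block and every chunk.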
func TestHappyPath(t *testing.T) {
	t.Parallel()

	ds := getDatastore()
	provider := getProvider(getBlobservice(t, ds))
	store := getExecutionDataStore(ds)

	test := func(numChunks int, minSerializedSizePerChunk uint64) {
		expected := generateBlockExecutionData(t, numChunks, minSerializedSizePerChunk)
		executionDataID, executionDataRoot, err := provider.Provide(context.Background(), 0, expected)
		require.NoError(t, err)

		actual, err := store.Get(context.Background(), executionDataID)
		require.NoError(t, err)
		deepEqual(t, expected, actual)

		assert.Equal(t, expected.BlockID, executionDataRoot.BlockID)
		assert.Len(t, executionDataRoot.ChunkExecutionDataIDs, numChunks)
	}

	test(1, 0)                                   // small execution data (single level blob tree)
	test(5, 5*execution_data.DefaultMaxBlobSize) // large execution data (multi level blob tree)
}

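// TestProvideContextCanceled verifies that Provide returns the context's error
// when the blob service blocks until the context expires.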
func TestProvideContextCanceled(t *testing.T) {
	t.Parallel()

	bed := generateBlockExecutionData(t, 5, 5*execution_data.DefaultMaxBlobSize)

	provider := getProvider(getBlobservice(t, getDatastore()))
	_, _, err := provider.Provide(context.Background(), 0, bed)
	require.NoError(t, err)

	blobService := mocknetwork.NewBlobService(t)
	blobService.On("AddBlobs", mock.Anything, mock.AnythingOfType("[]blocks.Block")).
		Return(func(ctx context.Context, blobs []blobs.Blob) error {
			<-ctx.Done()
			return ctx.Err()
		})
	provider = getProvider(blobService)
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	_, _, err = provider.Provide(ctx, 0, bed)
	assert.ErrorIs(t, err, ctx.Err())
}

// TestCalculateExecutionDataRootID tests that CalculateExecutionDataRootID calculates the correct ID given a static BlockExecutionDataRoot
func TestCalculateExecutionDataRootID(t *testing.T) {
	t.Parallel()

	expected := flow.MustHexStringToIdentifier("ae80bb200545de7ff009d2f3e20970643198be635a9b90fffb9da1198a988deb")
	edRoot := flow.BlockExecutionDataRoot{
		BlockID: flow.MustHexStringToIdentifier("2b31c5e26b999a41d18dc62584ac68476742b071fc9412d68be9e516e1dfc79e"),
		ChunkExecutionDataIDs: []cid.Cid{
			cid.MustParse("QmcA2h3jARWXkCc9VvpR4jvt9cNc7RdiqSMvPJ1TU69Xvw"),
			cid.MustParse("QmQN81Y7KdHWNdsLthDxtdf2dCHLb3ddjDWmDZQ4Znqfs4"),
			cid.MustParse("QmcfMmNPa8jFN64t1Hu7Afk7Trx8a6dg7gZfEEUqzC827b"),
			cid.MustParse("QmYTooZGux6epKrdHbzgubUN4JFHkLK9hw6Z6F3fAMEDH5"),
			cid.MustParse("QmXxYakkZKZEoCVdLLzVisctMxyiWQSfassMMzvCdaCjAj"),
		},
	}

	cidProvider := provider.NewExecutionDataCIDProvider(execution_data.DefaultSerializer)
	actual, err := cidProvider.CalculateExecutionDataRootID(edRoot)
	require.NoError(t, err)

	assert.Equal(t, expected, actual)
}

// TestCalculateChunkExecutionDataID tests that CalculateChunkExecutionDataID calculates the correct ID given a static ChunkExecutionData
// This is used to ensure library updates or modifications to the provider do not change the ID calculation logic
func TestCalculateChunkExecutionDataID(t *testing.T) {
	t.Parallel()

	rootHash, err := ledger.ToRootHash([]byte("0123456789acbdef0123456789acbdef"))
	require.NoError(t, err)

	expected := cid.MustParse("QmWJsC7DTufdGijftpphuxZ6EbNsDar1knP2BnvgBaMf9n")

	ced := execution_data.ChunkExecutionData{
		Collection: &flow.Collection{
			Transactions: []*flow.TransactionBody{
				{Script: []byte("access(all) fun main() {}")},
			},
		},
		Events: []flow.Event{
			unittest.EventFixture(
				"A.0123456789abcdef.SomeContract.SomeEvent",
				1,
				2,
				flow.MustHexStringToIdentifier("95e0929839063afbe334a3d175bea0775cdf5d93f64299e369d16ce21aa423d3"),
				0,
			),
		},
		TrieUpdate: &ledger.TrieUpdate{
			RootHash: rootHash,
		},
		TransactionResults: []flow.LightTransactionResult{
			{
				TransactionID:   flow.MustHexStringToIdentifier("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"),
				ComputationUsed: 100,
				Failed:          true,
			},
		},
	}

	cidProvider := provider.NewExecutionDataCIDProvider(execution_data.DefaultSerializer)
	actual, err := cidProvider.CalculateChunkExecutionDataID(ced)
	require.NoError(t, err)

	// This can be used for updating the expected ID when the format is *intentionally* updated
	t.Log(actual)

	assert.Equal(t,
		expected, actual,
		"expected and actual CID do not match: expected %s, actual %s",
		expected,
		actual,
	)
}