github.com/onflow/flow-go@v0.33.17/engine/access/state_stream/backend/backend_events_test.go

package backend

import (
	"bytes"
	"context"
	"fmt"
	"sort"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	"github.com/onflow/flow-go/engine/access/state_stream"
	"github.com/onflow/flow-go/model/flow"
	syncmock "github.com/onflow/flow-go/module/state_synchronization/mock"
	"github.com/onflow/flow-go/utils/unittest"
	"github.com/onflow/flow-go/utils/unittest/mocks"
)

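// BackendEventsSuite runs the SubscribeEvents tests on top of the fixtures
// provided by the embedded BackendExecutionDataSuite.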
type BackendEventsSuite struct {
	BackendExecutionDataSuite
}

func TestBackendEventsSuite(t *testing.T) {
	suite.Run(t, new(BackendEventsSuite))
}

func (s *BackendEventsSuite) SetupTest() {
	s.BackendExecutionDataSuite.SetupTest()
}

// TestSubscribeEventsFromExecutionData tests the SubscribeEvents method happy path for events
// extracted from ExecutionData
func (s *BackendEventsSuite) TestSubscribeEventsFromExecutionData() {
	s.runTestSubscribeEvents()
}

// TestSubscribeEventsFromLocalStorage tests the SubscribeEvents method happy path for events
// extracted from local storage
func (s *BackendEventsSuite) TestSubscribeEventsFromLocalStorage() {
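	// serve events from the local storage index instead of from execution data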
	s.backend.useIndex = true

	// events returned from the db are sorted by txID, txIndex, then eventIndex.
	// reproduce that here to ensure output order works as expected
	blockEvents := make(map[flow.Identifier][]flow.Event)
	for _, b := range s.blocks {
		events := make([]flow.Event, len(s.blockEvents[b.ID()]))
		for i, event := range s.blockEvents[b.ID()] {
			events[i] = event
		}
		sort.Slice(events, func(i, j int) bool {
			cmp := bytes.Compare(events[i].TransactionID[:], events[j].TransactionID[:])
			if cmp == 0 {
				if events[i].TransactionIndex == events[j].TransactionIndex {
					return events[i].EventIndex < events[j].EventIndex
				}
				return events[i].TransactionIndex < events[j].TransactionIndex
			}
			return cmp < 0
		})
		blockEvents[b.ID()] = events
	}

	s.events.On("ByBlockID", mock.AnythingOfType("flow.Identifier")).Return(
		mocks.StorageMapGetter(blockEvents),
	)

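	// report the full range of test blocks as indexed, so every block's events
	// are served from the storage mock configured above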
	reporter := syncmock.NewIndexReporter(s.T())
	reporter.On("LowestIndexedHeight").Return(s.blocks[0].Header.Height, nil)
	reporter.On("HighestIndexedHeight").Return(s.blocks[len(s.blocks)-1].Header.Height, nil)
	err := s.eventsIndex.Initialize(reporter)
	s.Require().NoError(err)

	s.runTestSubscribeEvents()
}

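// runTestSubscribeEvents is the shared happy-path body for the SubscribeEvents tests.
// It subscribes using combinations of start block ID, start height, backfill depth, and
// event filter, then verifies that each block's matching events are delivered in order
// and that the subscription shuts down cleanly when the context is canceled.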
func (s *BackendEventsSuite) runTestSubscribeEvents() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	var err error

	type testType struct {
		name            string
		highestBackfill int
		startBlockID    flow.Identifier
		startHeight     uint64
		filters         state_stream.EventFilter
	}

	baseTests := []testType{
		{
			name:            "happy path - all new blocks",
			highestBackfill: -1, // no backfill
			startBlockID:    flow.ZeroID,
			startHeight:     0,
		},
		{
			name:            "happy path - partial backfill",
			highestBackfill: 2, // backfill the first 3 blocks
			startBlockID:    flow.ZeroID,
			startHeight:     s.blocks[0].Header.Height,
		},
		{
			name:            "happy path - complete backfill",
			highestBackfill: len(s.blocks) - 1, // backfill all blocks
			startBlockID:    s.blocks[0].ID(),
			startHeight:     0,
		},
		{
			name:            "happy path - start from root block by height",
			highestBackfill: len(s.blocks) - 1, // backfill all blocks
			startBlockID:    flow.ZeroID,
			startHeight:     s.backend.rootBlockHeight, // start from root block
		},
		{
			name:            "happy path - start from root block by id",
			highestBackfill: len(s.blocks) - 1,     // backfill all blocks
			startBlockID:    s.backend.rootBlockID, // start from root block
			startHeight:     0,
		},
	}

	// create variations for each of the base tests
	tests := make([]testType, 0, len(baseTests)*3)
	for _, test := range baseTests {
		t1 := test
		t1.name = fmt.Sprintf("%s - all events", test.name)
		t1.filters = state_stream.EventFilter{}
		tests = append(tests, t1)

		t2 := test
		t2.name = fmt.Sprintf("%s - some events", test.name)
		t2.filters, err = state_stream.NewEventFilter(state_stream.DefaultEventFilterConfig, chainID.Chain(), []string{string(testEventTypes[0])}, nil, nil)
		require.NoError(s.T(), err)
		tests = append(tests, t2)

		t3 := test
		t3.name = fmt.Sprintf("%s - no events", test.name)
		t3.filters, err = state_stream.NewEventFilter(state_stream.DefaultEventFilterConfig, chainID.Chain(), []string{"A.0x1.NonExistent.Event"}, nil, nil)
		require.NoError(s.T(), err)
		tests = append(tests, t3)
	}

	for _, test := range tests {
		s.Run(test.name, func() {
			s.T().Logf("len(s.execDataMap) %d", len(s.execDataMap))

			// add "backfill" blocks - blocks that are already in the database before the test starts
			// this simulates a subscription on a past block
			for i := 0; i <= test.highestBackfill; i++ {
				s.T().Logf("backfilling block %d", i)
				s.backend.setHighestHeight(s.blocks[i].Header.Height)
			}

			subCtx, subCancel := context.WithCancel(ctx)
			sub := s.backend.SubscribeEvents(subCtx, test.startBlockID, test.startHeight, test.filters)

			// loop over all of the blocks
			for i, b := range s.blocks {
				s.T().Logf("checking block %d %v", i, b.ID())

				// simulate new exec data received.
				// exec data for all blocks with index <= highestBackfill were already received
				if i > test.highestBackfill {
					s.backend.setHighestHeight(b.Header.Height)
					s.broadcaster.Publish()
				}

				var expectedEvents flow.EventsList
				for _, event := range s.blockEvents[b.ID()] {
					if test.filters.Match(event) {
						expectedEvents = append(expectedEvents, event)
					}
				}

				// consume the events response from the subscription
				unittest.RequireReturnsBefore(s.T(), func() {
					v, ok := <-sub.Channel()
					require.True(s.T(), ok, "channel closed while waiting for exec data for block %d %v: err: %v", b.Header.Height, b.ID(), sub.Err())

					resp, ok := v.(*EventsResponse)
					require.True(s.T(), ok, "unexpected response type: %T", v)

					assert.Equal(s.T(), b.Header.ID(), resp.BlockID)
					assert.Equal(s.T(), b.Header.Height, resp.Height)
					assert.Equal(s.T(), expectedEvents, resp.Events)
				}, time.Second, fmt.Sprintf("timed out waiting for exec data for block %d %v", b.Header.Height, b.ID()))
			}

			// make sure there are no new messages waiting. the channel should be open with nothing waiting
			unittest.RequireNeverReturnBefore(s.T(), func() {
				<-sub.Channel()
			}, 100*time.Millisecond, "received unexpected message from subscription")

			// stop the subscription
			subCancel()

			// ensure subscription shuts down gracefully
			unittest.RequireReturnsBefore(s.T(), func() {
				v, ok := <-sub.Channel()
				assert.Nil(s.T(), v)
				assert.False(s.T(), ok)
				assert.ErrorIs(s.T(), sub.Err(), context.Canceled)
			}, 100*time.Millisecond, "timed out waiting for subscription to shutdown")
		})
	}
}

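// TestSubscribeEventsHandlesErrors verifies that SubscribeEvents returns the expected
// gRPC status codes for invalid arguments, unknown start blocks and heights, and an
// events index that is uninitialized or does not cover the requested start height.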
func (s *BackendExecutionDataSuite) TestSubscribeEventsHandlesErrors() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	s.Run("returns error if both start blockID and start height are provided", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		sub := s.backend.SubscribeEvents(subCtx, unittest.IdentifierFixture(), 1, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.InvalidArgument, status.Code(sub.Err()))
	})

	s.Run("returns error for start height before root height", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		sub := s.backend.SubscribeEvents(subCtx, flow.ZeroID, s.backend.rootBlockHeight-1, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.InvalidArgument, status.Code(sub.Err()), "expected InvalidArgument, got %v: %v", status.Code(sub.Err()).String(), sub.Err())
	})

	s.Run("returns error for unindexed start blockID", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		sub := s.backend.SubscribeEvents(subCtx, unittest.IdentifierFixture(), 0, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.NotFound, status.Code(sub.Err()), "expected NotFound, got %v: %v", status.Code(sub.Err()).String(), sub.Err())
	})

	// make sure we're starting with a fresh cache
	s.execDataHeroCache.Clear()

	s.Run("returns error for unindexed start height", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		sub := s.backend.SubscribeEvents(subCtx, flow.ZeroID, s.blocks[len(s.blocks)-1].Header.Height+10, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.NotFound, status.Code(sub.Err()), "expected NotFound, got %v: %v", status.Code(sub.Err()).String(), sub.Err())
	})

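	// the remaining cases exercise the local storage index path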
	s.backend.useIndex = true

	s.Run("returns error for uninitialized index", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		// Note: eventsIndex.Initialize() is not called in this test
		sub := s.backend.SubscribeEvents(subCtx, flow.ZeroID, 0, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.FailedPrecondition, status.Code(sub.Err()), "expected FailedPrecondition, got %v: %v", status.Code(sub.Err()).String(), sub.Err())
	})

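	// initialize the index with a range that excludes the first and last test blocks,
	// so start heights outside that range are rejected in the cases below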
	reporter := syncmock.NewIndexReporter(s.T())
	reporter.On("LowestIndexedHeight").Return(s.blocks[1].Header.Height, nil)
	reporter.On("HighestIndexedHeight").Return(s.blocks[len(s.blocks)-2].Header.Height, nil)
	err := s.eventsIndex.Initialize(reporter)
	s.Require().NoError(err)

	s.Run("returns error for start below lowest indexed", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		sub := s.backend.SubscribeEvents(subCtx, flow.ZeroID, s.blocks[0].Header.Height, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.InvalidArgument, status.Code(sub.Err()), "expected InvalidArgument, got %v: %v", status.Code(sub.Err()).String(), sub.Err())
	})

	s.Run("returns error for start above highest indexed", func() {
		subCtx, subCancel := context.WithCancel(ctx)
		defer subCancel()

		sub := s.backend.SubscribeEvents(subCtx, flow.ZeroID, s.blocks[len(s.blocks)-1].Header.Height, state_stream.EventFilter{})
		assert.Equal(s.T(), codes.InvalidArgument, status.Code(sub.Err()), "expected InvalidArgument, got %v: %v", status.Code(sub.Err()).String(), sub.Err())
	})
}