github.com/osdi23p228/fabric@v0.0.0-20221218062954-77808885f5db/gossip/privdata/coordinator_test.go (about)

     1  /*
     2  Copyright IBM Corp. All Rights Reserved.
     3  
     4  SPDX-License-Identifier: Apache-2.0
     5  */
     6  
     7  package privdata
     8  
     9  import (
    10  	"encoding/asn1"
    11  	"encoding/hex"
    12  	"errors"
    13  	"fmt"
    14  	"io/ioutil"
    15  	"os"
    16  	"reflect"
    17  	"testing"
    18  	"time"
    19  
    20  	pb "github.com/golang/protobuf/proto"
    21  	"github.com/hyperledger/fabric-protos-go/common"
    22  	proto "github.com/hyperledger/fabric-protos-go/gossip"
    23  	"github.com/hyperledger/fabric-protos-go/ledger/rwset"
    24  	"github.com/hyperledger/fabric-protos-go/ledger/rwset/kvrwset"
    25  	mspproto "github.com/hyperledger/fabric-protos-go/msp"
    26  	"github.com/hyperledger/fabric-protos-go/peer"
    27  	tspb "github.com/hyperledger/fabric-protos-go/transientstore"
    28  	"github.com/osdi23p228/fabric/bccsp/factory"
    29  	"github.com/osdi23p228/fabric/common/metrics/disabled"
    30  	util2 "github.com/osdi23p228/fabric/common/util"
    31  	"github.com/osdi23p228/fabric/core/common/privdata"
    32  	"github.com/osdi23p228/fabric/core/ledger"
    33  	"github.com/osdi23p228/fabric/core/ledger/kvledger/txmgmt/rwsetutil"
    34  	"github.com/osdi23p228/fabric/core/transientstore"
    35  	"github.com/osdi23p228/fabric/gossip/metrics"
    36  	gmetricsmocks "github.com/osdi23p228/fabric/gossip/metrics/mocks"
    37  	privdatacommon "github.com/osdi23p228/fabric/gossip/privdata/common"
    38  	"github.com/osdi23p228/fabric/gossip/privdata/mocks"
    39  	capabilitymock "github.com/osdi23p228/fabric/gossip/privdata/mocks"
    40  	"github.com/osdi23p228/fabric/gossip/util"
    41  	"github.com/osdi23p228/fabric/msp"
    42  	"github.com/osdi23p228/fabric/msp/mgmt"
    43  	mspmgmt "github.com/osdi23p228/fabric/msp/mgmt"
    44  	msptesttools "github.com/osdi23p228/fabric/msp/mgmt/testtools"
    45  	"github.com/osdi23p228/fabric/protoutil"
    46  	"github.com/stretchr/testify/assert"
    47  	"github.com/stretchr/testify/mock"
    48  	"github.com/stretchr/testify/require"
    49  )
    50  
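         // testConfig is the coordinator configuration shared by the tests in this
         // file; individual scenarios flip SkipPullingInvalidTransactions in place.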
    51  var testConfig = CoordinatorConfig{
    52  	PullRetryThreshold:             time.Second * 3,
    53  	TransientBlockRetention:        1000,
    54  	SkipPullingInvalidTransactions: false,
    55  }
    56  
    57  // CollectionCriteria aggregates criteria of
    58  // a collection
    59  type CollectionCriteria struct {
    60  	Channel    string
    61  	Collection string
    62  	Namespace  string
    63  }
    64  
    65  func fromCollectionCriteria(criteria privdata.CollectionCriteria) CollectionCriteria {
    66  	return CollectionCriteria{
    67  		Collection: criteria.Collection,
    68  		Namespace:  criteria.Namespace,
    69  		Channel:    criteria.Channel,
    70  	}
    71  }
    72  
    73  type validatorMock struct {
    74  	err error
    75  }
    76  
    77  func (v *validatorMock) Validate(block *common.Block) error {
    78  	if v.err != nil {
    79  		return v.err
    80  	}
    81  	return nil
    82  }
    83  
    84  type digests []privdatacommon.DigKey
    85  
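         // Equal reports whether two digest slices contain the same keys, regardless of order.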
    86  func (d digests) Equal(other digests) bool {
    87  	flatten := func(d digests) map[privdatacommon.DigKey]struct{} {
    88  		m := map[privdatacommon.DigKey]struct{}{}
    89  		for _, dig := range d {
    90  			m[dig] = struct{}{}
    91  		}
    92  		return m
    93  	}
    94  	return reflect.DeepEqual(flatten(d), flatten(other))
    95  }
    96  
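         // fetchCall wraps a testify mock.Call and lets a test declare which digests
         // and endorsers the fetcher is expected to be invoked with.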
    97  type fetchCall struct {
    98  	fetcher *fetcherMock
    99  	*mock.Call
   100  }
   101  
   102  func (fc *fetchCall) expectingEndorsers(orgs ...string) *fetchCall {
   103  	if fc.fetcher.expectedEndorsers == nil {
   104  		fc.fetcher.expectedEndorsers = make(map[string]struct{})
   105  	}
   106  	for _, org := range orgs {
   107  		sID := &mspproto.SerializedIdentity{Mspid: org, IdBytes: []byte(fmt.Sprintf("p0%s", org))}
   108  		b, _ := pb.Marshal(sID)
   109  		fc.fetcher.expectedEndorsers[string(b)] = struct{}{}
   110  	}
   111  
   112  	return fc
   113  }
   114  
   115  func (fc *fetchCall) expectingDigests(digests []privdatacommon.DigKey) *fetchCall {
   116  	fc.fetcher.expectedDigests = digests
   117  	return fc
   118  }
   119  
   120  func (fc *fetchCall) Return(returnArguments ...interface{}) *mock.Call {
   121  
   122  	return fc.Call.Return(returnArguments...)
   123  }
   124  
   125  type fetcherMock struct {
   126  	t *testing.T
   127  	mock.Mock
   128  	expectedDigests   []privdatacommon.DigKey
   129  	expectedEndorsers map[string]struct{}
   130  }
   131  
   132  func (f *fetcherMock) On(methodName string, arguments ...interface{}) *fetchCall {
   133  	return &fetchCall{
   134  		fetcher: f,
   135  		Call:    f.Mock.On(methodName, arguments...),
   136  	}
   137  }
   138  
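         // fetch asserts that the digests and endorsers the coordinator passes in match
         // the expectations recorded via expectingDigests/expectingEndorsers, then
         // delegates to the underlying testify mock.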
   139  func (f *fetcherMock) fetch(dig2src dig2sources) (*privdatacommon.FetchedPvtDataContainer, error) {
   140  	uniqueEndorsements := make(map[string]interface{})
   141  	for _, endorsements := range dig2src {
   142  		for _, endorsement := range endorsements {
   143  			_, exists := f.expectedEndorsers[string(endorsement.Endorser)]
   144  			if !exists {
   145  				f.t.Fatalf("Encountered a non-expected endorser: %s", string(endorsement.Endorser))
   146  			}
   147  			uniqueEndorsements[string(endorsement.Endorser)] = struct{}{}
   148  		}
   149  	}
   150  	assert.True(f.t, digests(f.expectedDigests).Equal(digests(dig2src.keys())))
   151  	assert.Equal(f.t, len(f.expectedEndorsers), len(uniqueEndorsements))
   152  	args := f.Called(dig2src)
   153  	if args.Get(1) == nil {
   154  		return args.Get(0).(*privdatacommon.FetchedPvtDataContainer), nil
   155  	}
   156  	return nil, args.Get(1).(error)
   157  }
   158  
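         // testTransientStore wraps a real transient store backed by a temporary directory.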
   159  type testTransientStore struct {
   160  	storeProvider transientstore.StoreProvider
   161  	store         *transientstore.Store
   162  	tempdir       string
   163  }
   164  
   165  func newTransientStore(t *testing.T) *testTransientStore {
   166  	s := &testTransientStore{}
   167  	var err error
   168  	s.tempdir, err = ioutil.TempDir("", "ts")
   169  	if err != nil {
   170  		t.Fatalf("Failed to create test directory, got err %s", err)
   171  		return s
   172  	}
   173  	s.storeProvider, err = transientstore.NewStoreProvider(s.tempdir)
   174  	if err != nil {
   175  		t.Fatalf("Failed to open store, got err %s", err)
   176  		return s
   177  	}
   178  	s.store, err = s.storeProvider.OpenStore("testchannelid")
   179  	if err != nil {
   180  		t.Fatalf("Failed to open store, got err %s", err)
   181  		return s
   182  	}
   183  	return s
   184  }
   185  
   186  func (s *testTransientStore) tearDown() {
   187  	s.storeProvider.Close()
   188  	os.RemoveAll(s.tempdir)
   189  }
   190  
   191  func (s *testTransientStore) Persist(txid string, blockHeight uint64,
   192  	privateSimulationResultsWithConfig *tspb.TxPvtReadWriteSetWithConfigInfo) error {
   193  	return s.store.Persist(txid, blockHeight, privateSimulationResultsWithConfig)
   194  }
   195  
   196  func (s *testTransientStore) GetTxPvtRWSetByTxid(txid string, filter ledger.PvtNsCollFilter) (RWSetScanner, error) {
   197  	return s.store.GetTxPvtRWSetByTxid(txid, filter)
   198  }
   199  
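         // createcollectionStore builds a fake collection store whose access filter is
         // evaluated against the given peer self-signed data.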
   200  func createcollectionStore(expectedSignedData protoutil.SignedData) *collectionStore {
   201  	return &collectionStore{
   202  		expectedSignedData: expectedSignedData,
   203  		policies:           make(map[collectionAccessPolicy]CollectionCriteria),
   204  		store:              make(map[CollectionCriteria]collectionAccessPolicy),
   205  	}
   206  }
   207  
   208  type collectionStore struct {
   209  	expectedSignedData protoutil.SignedData
   210  	acceptsAll         bool
   211  	acceptsNone        bool
   212  	lenient            bool
   213  	mspIdentifier      string
   214  	store              map[CollectionCriteria]collectionAccessPolicy
   215  	policies           map[collectionAccessPolicy]CollectionCriteria
   216  }
   217  
   218  func (cs *collectionStore) thatAcceptsAll() *collectionStore {
   219  	cs.acceptsAll = true
   220  	return cs
   221  }
   222  
   223  func (cs *collectionStore) thatAcceptsNone() *collectionStore {
   224  	cs.acceptsNone = true
   225  	return cs
   226  }
   227  
   228  func (cs *collectionStore) thatAccepts(cc CollectionCriteria) *collectionStore {
   229  	sp := collectionAccessPolicy{
   230  		cs: cs,
   231  		n:  util.RandomUInt64(),
   232  	}
   233  	cs.store[cc] = sp
   234  	cs.policies[sp] = cc
   235  	return cs
   236  }
   237  
   238  func (cs *collectionStore) withMSPIdentity(identifier string) *collectionStore {
   239  	cs.mspIdentifier = identifier
   240  	return cs
   241  }
   242  
   243  func (cs *collectionStore) RetrieveCollectionAccessPolicy(cc privdata.CollectionCriteria) (privdata.CollectionAccessPolicy, error) {
   244  	if sp, exists := cs.store[fromCollectionCriteria(cc)]; exists {
   245  		return &sp, nil
   246  	}
   247  	if cs.acceptsAll || cs.acceptsNone || cs.lenient {
   248  		return &collectionAccessPolicy{
   249  			cs: cs,
   250  			n:  util.RandomUInt64(),
   251  		}, nil
   252  	}
   253  	return nil, privdata.NoSuchCollectionError{}
   254  }
   255  
   256  func (cs *collectionStore) RetrieveCollection(privdata.CollectionCriteria) (privdata.Collection, error) {
   257  	panic("implement me")
   258  }
   259  
   260  func (cs *collectionStore) RetrieveCollectionConfig(cc privdata.CollectionCriteria) (*peer.StaticCollectionConfig, error) {
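         	// Unless the criteria were explicitly accepted (or the store accepts all),
         	// attribute the member policy to a different MSP, which makes the local
         	// peer ineligible for the collection.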
   261  	mspIdentifier := "different-org"
   262  	if _, exists := cs.store[fromCollectionCriteria(cc)]; exists || cs.acceptsAll {
   263  		mspIdentifier = cs.mspIdentifier
   264  	}
   265  	return &peer.StaticCollectionConfig{
   266  		Name:           cc.Collection,
   267  		MemberOnlyRead: true,
   268  		MemberOrgsPolicy: &peer.CollectionPolicyConfig{
   269  			Payload: &peer.CollectionPolicyConfig_SignaturePolicy{
   270  				SignaturePolicy: &common.SignaturePolicyEnvelope{
   271  					Rule: &common.SignaturePolicy{
   272  						Type: &common.SignaturePolicy_SignedBy{
   273  							SignedBy: 0,
   274  						},
   275  					},
   276  					Identities: []*mspproto.MSPPrincipal{
   277  						{
   278  							PrincipalClassification: mspproto.MSPPrincipal_ROLE,
   279  							Principal: protoutil.MarshalOrPanic(&mspproto.MSPRole{
   280  								MspIdentifier: mspIdentifier,
   281  								Role:          mspproto.MSPRole_MEMBER,
   282  							}),
   283  						},
   284  					},
   285  				},
   286  			},
   287  		},
   288  	}, nil
   289  }
   290  
   291  func (cs *collectionStore) RetrieveReadWritePermission(cc privdata.CollectionCriteria, sp *peer.SignedProposal, qe ledger.QueryExecutor) (bool, bool, error) {
   292  	panic("implement me")
   293  }
   294  
   295  func (cs *collectionStore) RetrieveCollectionConfigPackage(cc privdata.CollectionCriteria) (*peer.CollectionConfigPackage, error) {
   296  	return &peer.CollectionConfigPackage{
   297  		Config: []*peer.CollectionConfig{
   298  			{
   299  				Payload: &peer.CollectionConfig_StaticCollectionConfig{
   300  					StaticCollectionConfig: &peer.StaticCollectionConfig{
   301  						Name:              cc.Collection,
   302  						MaximumPeerCount:  1,
   303  						RequiredPeerCount: 1,
   304  					},
   305  				},
   306  			},
   307  		},
   308  	}, nil
   309  }
   310  
   311  func (cs *collectionStore) RetrieveCollectionPersistenceConfigs(cc privdata.CollectionCriteria) (privdata.CollectionPersistenceConfigs, error) {
   312  	panic("implement me")
   313  }
   314  
   315  func (cs *collectionStore) AccessFilter(channelName string, collectionPolicyConfig *peer.CollectionPolicyConfig) (privdata.Filter, error) {
   316  	panic("implement me")
   317  }
   318  
   319  type collectionAccessPolicy struct {
   320  	cs *collectionStore
   321  	n  uint64
   322  }
   323  
   324  func (cap *collectionAccessPolicy) MemberOrgs() map[string]struct{} {
   325  	return map[string]struct{}{
   326  		"org0": {},
   327  		"org1": {},
   328  	}
   329  }
   330  
   331  func (cap *collectionAccessPolicy) RequiredPeerCount() int {
   332  	return 1
   333  }
   334  
   335  func (cap *collectionAccessPolicy) MaximumPeerCount() int {
   336  	return 2
   337  }
   338  
   339  func (cap *collectionAccessPolicy) IsMemberOnlyRead() bool {
   340  	return false
   341  }
   342  
   343  func (cap *collectionAccessPolicy) IsMemberOnlyWrite() bool {
   344  	return false
   345  }
   346  
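         // AccessFilter returns a filter that panics if the evaluated signed data differs
         // from the peer's expected self-signed data, and otherwise applies the
         // acceptsAll/acceptsNone flags or the per-collection policies of the store.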
   347  func (cap *collectionAccessPolicy) AccessFilter() privdata.Filter {
   348  	return func(sd protoutil.SignedData) bool {
   349  		that, _ := asn1.Marshal(sd)
   350  		this, _ := asn1.Marshal(cap.cs.expectedSignedData)
   351  		if hex.EncodeToString(that) != hex.EncodeToString(this) {
   352  			panic(fmt.Errorf("self signed data passed isn't equal to expected:%v, %v", sd, cap.cs.expectedSignedData))
   353  		}
   354  
   355  		if cap.cs.acceptsNone {
   356  			return false
   357  		} else if cap.cs.acceptsAll {
   358  			return true
   359  		}
   360  
   361  		_, exists := cap.cs.policies[*cap]
   362  		return exists
   363  	}
   364  }
   365  
   366  func TestPvtDataCollections_FailOnEmptyPayload(t *testing.T) {
   367  	collection := &util.PvtDataCollections{
   368  		&ledger.TxPvtData{
   369  			SeqInBlock: uint64(1),
   370  			WriteSet: &rwset.TxPvtReadWriteSet{
   371  				DataModel: rwset.TxReadWriteSet_KV,
   372  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   373  					{
   374  						Namespace: "ns1",
   375  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   376  							{
   377  								CollectionName: "secretCollection",
   378  								Rwset:          []byte{1, 2, 3, 4, 5, 6, 7},
   379  							},
   380  						},
   381  					},
   382  				},
   383  			},
   384  		},
   385  
   386  		nil,
   387  	}
   388  
   389  	_, err := collection.Marshal()
   390  	assertion := assert.New(t)
   391  	assertion.Error(err, "Expected to fail since second item has nil payload")
   392  	assertion.Equal("Mallformed private data payload, rwset index 1 is nil", fmt.Sprintf("%s", err))
   393  }
   394  
   395  func TestPvtDataCollections_FailMarshalingWriteSet(t *testing.T) {
   396  	collection := &util.PvtDataCollections{
   397  		&ledger.TxPvtData{
   398  			SeqInBlock: uint64(1),
   399  			WriteSet:   nil,
   400  		},
   401  	}
   402  
   403  	_, err := collection.Marshal()
   404  	assertion := assert.New(t)
   405  	assertion.Error(err, "Expected to fail since first item has nil writeset")
   406  	assertion.Contains(fmt.Sprintf("%s", err), "Could not marshal private rwset index 0")
   407  }
   408  
   409  func TestPvtDataCollections_Marshal(t *testing.T) {
   410  	collection := &util.PvtDataCollections{
   411  		&ledger.TxPvtData{
   412  			SeqInBlock: uint64(1),
   413  			WriteSet: &rwset.TxPvtReadWriteSet{
   414  				DataModel: rwset.TxReadWriteSet_KV,
   415  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   416  					{
   417  						Namespace: "ns1",
   418  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   419  							{
   420  								CollectionName: "secretCollection",
   421  								Rwset:          []byte{1, 2, 3, 4, 5, 6, 7},
   422  							},
   423  						},
   424  					},
   425  				},
   426  			},
   427  		},
   428  
   429  		&ledger.TxPvtData{
   430  			SeqInBlock: uint64(2),
   431  			WriteSet: &rwset.TxPvtReadWriteSet{
   432  				DataModel: rwset.TxReadWriteSet_KV,
   433  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   434  					{
   435  						Namespace: "ns1",
   436  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   437  							{
   438  								CollectionName: "secretCollection",
   439  								Rwset:          []byte{42, 42, 42, 42, 42, 42, 42},
   440  							},
   441  						},
   442  					},
   443  					{
   444  						Namespace: "ns2",
   445  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   446  							{
   447  								CollectionName: "otherCollection",
   448  								Rwset:          []byte{10, 9, 8, 7, 6, 5, 4, 3, 2, 1},
   449  							},
   450  						},
   451  					},
   452  				},
   453  			},
   454  		},
   455  	}
   456  
   457  	bytes, err := collection.Marshal()
   458  
   459  	assertion := assert.New(t)
   460  	assertion.NoError(err)
   461  	assertion.NotNil(bytes)
   462  	assertion.Equal(2, len(bytes))
   463  }
   464  
   465  func TestPvtDataCollections_Unmarshal(t *testing.T) {
   466  	collection := util.PvtDataCollections{
   467  		&ledger.TxPvtData{
   468  			SeqInBlock: uint64(1),
   469  			WriteSet: &rwset.TxPvtReadWriteSet{
   470  				DataModel: rwset.TxReadWriteSet_KV,
   471  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   472  					{
   473  						Namespace: "ns1",
   474  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   475  							{
   476  								CollectionName: "secretCollection",
   477  								Rwset:          []byte{1, 2, 3, 4, 5, 6, 7},
   478  							},
   479  						},
   480  					},
   481  				},
   482  			},
   483  		},
   484  	}
   485  
   486  	bytes, err := collection.Marshal()
   487  
   488  	assertion := assert.New(t)
   489  	assertion.NoError(err)
   490  	assertion.NotNil(bytes)
   491  	assertion.Equal(1, len(bytes))
   492  
   493  	var newCol util.PvtDataCollections
   494  
   495  	err = newCol.Unmarshal(bytes)
   496  	assertion.NoError(err)
   497  	assertion.Equal(1, len(newCol))
   498  	assertion.Equal(newCol[0].SeqInBlock, collection[0].SeqInBlock)
   499  	assertion.True(pb.Equal(newCol[0].WriteSet, collection[0].WriteSet))
   500  }
   501  
   502  type rwsTriplet struct {
   503  	namespace  string
   504  	collection string
   505  	rwset      string
   506  }
   507  
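         // flattenTxPvtDataMap converts private data into per-transaction sets of
         // (namespace, collection, rwset) triplets so it can be compared irrespective of ordering.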
   508  func flattenTxPvtDataMap(pd ledger.TxPvtDataMap) map[uint64]map[rwsTriplet]struct{} {
   509  	m := make(map[uint64]map[rwsTriplet]struct{})
   510  	for seqInBlock, namespaces := range pd {
   511  		triplets := make(map[rwsTriplet]struct{})
   512  		for _, namespace := range namespaces.WriteSet.NsPvtRwset {
   513  			for _, col := range namespace.CollectionPvtRwset {
   514  				triplets[rwsTriplet{
   515  					namespace:  namespace.Namespace,
   516  					collection: col.CollectionName,
   517  					rwset:      hex.EncodeToString(col.Rwset),
   518  				}] = struct{}{}
   519  			}
   520  		}
   521  		m[seqInBlock] = triplets
   522  	}
   523  	return m
   524  }
   525  
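         // The fixtures below capture the private data the committer mocks expect to be
         // handed to the ledger in the StoreBlock scenarios further down.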
   526  var expectedCommittedPrivateData1 = map[uint64]*ledger.TxPvtData{
   527  	0: {SeqInBlock: 0, WriteSet: &rwset.TxPvtReadWriteSet{
   528  		DataModel: rwset.TxReadWriteSet_KV,
   529  		NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   530  			{
   531  				Namespace: "ns1",
   532  				CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   533  					{
   534  						CollectionName: "c1",
   535  						Rwset:          []byte("rws-pre-image"),
   536  					},
   537  					{
   538  						CollectionName: "c2",
   539  						Rwset:          []byte("rws-pre-image"),
   540  					},
   541  				},
   542  			},
   543  		},
   544  	}},
   545  	1: {SeqInBlock: 1, WriteSet: &rwset.TxPvtReadWriteSet{
   546  		DataModel: rwset.TxReadWriteSet_KV,
   547  		NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   548  			{
   549  				Namespace: "ns2",
   550  				CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   551  					{
   552  						CollectionName: "c1",
   553  						Rwset:          []byte("rws-pre-image"),
   554  					},
   555  				},
   556  			},
   557  		},
   558  	}},
   559  }
   560  
   561  var expectedCommittedPrivateData2 = map[uint64]*ledger.TxPvtData{
   562  	0: {SeqInBlock: 0, WriteSet: &rwset.TxPvtReadWriteSet{
   563  		DataModel: rwset.TxReadWriteSet_KV,
   564  		NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   565  			{
   566  				Namespace: "ns3",
   567  				CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   568  					{
   569  						CollectionName: "c3",
   570  						Rwset:          []byte("rws-pre-image"),
   571  					},
   572  				},
   573  			},
   574  		},
   575  	}},
   576  }
   577  
   578  var expectedCommittedPrivateData3 = map[uint64]*ledger.TxPvtData{}
   579  
   580  func TestCoordinatorStoreInvalidBlock(t *testing.T) {
   581  	err := msptesttools.LoadMSPSetupForTesting()
   582  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   583  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
   584  	serializedID, err := identity.Serialize()
   585  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   586  	data := []byte{1, 2, 3}
   587  	signature, err := identity.Sign(data)
   588  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   589  	mspID := "Org1MSP"
   590  	peerSelfSignedData := protoutil.SignedData{
   591  		Identity:  serializedID,
   592  		Signature: signature,
   593  		Data:      data,
   594  	}
   595  
   596  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
   597  
   598  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
   599  	committer := &mocks.Committer{}
   600  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   601  		t.Fatal("Shouldn't have committed")
   602  	}).Return(nil)
   603  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
   604  
   605  	store := newTransientStore(t)
   606  	defer store.tearDown()
   607  
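         	// assertPurged verifies that the transient store no longer holds private
         	// rwsets for any of the given transaction IDs.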
   608  	assertPurged := func(txns ...string) {
   609  		for _, txn := range txns {
   610  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
   611  			if err != nil {
   612  				t.Fatalf("Failed iterating, got err %s", err)
   613  				iterator.Close()
   614  				return
   615  			}
   616  			res, err := iterator.Next()
   617  			if err != nil {
   618  				t.Fatalf("Failed iterating, got err %s", err)
   619  				iterator.Close()
   620  				return
   621  			}
   622  			assert.Nil(t, res)
   623  			iterator.Close()
   624  		}
   625  	}
   626  	fetcher := &fetcherMock{t: t}
   627  	pdFactory := &pvtDataFactory{}
   628  	bf := &blockFactory{
   629  		channelID: "testchannelid",
   630  	}
   631  
   632  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
   633  		return mgmt.GetManagerForChain("testchannelid")
   634  	})
   635  	block := bf.withoutMetadata().create()
   636  	// Scenario I: Block we got doesn't have any metadata with it
   637  	pvtData := pdFactory.create()
   638  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   639  	capabilityProvider := &capabilitymock.CapabilityProvider{}
   640  	appCapability := &capabilitymock.AppCapabilities{}
   641  	capabilityProvider.On("Capabilities").Return(appCapability)
   642  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
   643  	coordinator := NewCoordinator(mspID, Support{
   644  		ChainID:            "testchannelid",
   645  		CollectionStore:    cs,
   646  		Committer:          committer,
   647  		Fetcher:            fetcher,
   648  		Validator:          &validatorMock{},
   649  		CapabilityProvider: capabilityProvider,
   650  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   651  	err = coordinator.StoreBlock(block, pvtData)
   652  	assert.Error(t, err)
   653  	assert.Contains(t, err.Error(), "Block.Metadata is nil or Block.Metadata lacks a Tx filter bitmap")
   654  
   655  	// Scenario II: Validator has an error while validating the block
   656  	block = bf.create()
   657  	pvtData = pdFactory.create()
   658  	coordinator = NewCoordinator(mspID, Support{
   659  		ChainID:            "testchannelid",
   660  		CollectionStore:    cs,
   661  		Committer:          committer,
   662  		Fetcher:            fetcher,
   663  		Validator:          &validatorMock{fmt.Errorf("failed validating block")},
   664  		CapabilityProvider: capabilityProvider,
   665  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   666  	err = coordinator.StoreBlock(block, pvtData)
   667  	assert.Error(t, err)
   668  	assert.Contains(t, err.Error(), "failed validating block")
   669  
    670  	// Scenario III: The block we got contains a Tx filter of inadequate length in its metadata
   671  	block = bf.withMetadataSize(100).create()
   672  	pvtData = pdFactory.create()
   673  	coordinator = NewCoordinator(mspID, Support{
   674  		ChainID:            "testchannelid",
   675  		CollectionStore:    cs,
   676  		Committer:          committer,
   677  		Fetcher:            fetcher,
   678  		Validator:          &validatorMock{},
   679  		CapabilityProvider: capabilityProvider,
   680  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   681  	err = coordinator.StoreBlock(block, pvtData)
   682  	assert.Error(t, err)
   683  	assert.Contains(t, err.Error(), "block data size")
   684  	assert.Contains(t, err.Error(), "is different from Tx filter size")
   685  
    686  	// Scenario IV: The second transaction in the block we got is invalid, and we have no private data for it.
    687  	// As StorePvtDataOfInvalidTx is set to false, if the coordinator tried to fetch private data, the
    688  	// test would fail because we haven't defined the mock operations for the transient store (or for gossip)
    689  	// in this test.
   690  	var commitHappened bool
   691  	assertCommitHappened := func() {
   692  		assert.True(t, commitHappened)
   693  		commitHappened = false
   694  	}
   695  	digKeys := []privdatacommon.DigKey{
   696  		{
   697  			TxId:       "tx2",
   698  			Namespace:  "ns2",
   699  			Collection: "c1",
   700  			BlockSeq:   1,
   701  			SeqInBlock: 1,
   702  		},
   703  	}
   704  	fetcher = &fetcherMock{t: t}
   705  	fetcher.On("fetch", mock.Anything).expectingDigests(digKeys).expectingEndorsers(identity.GetMSPIdentifier()).Return(&privdatacommon.FetchedPvtDataContainer{
   706  		AvailableElements: nil,
   707  	}, nil)
   708  	committer = &mocks.Committer{}
   709  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   710  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
   711  		commitHappened = true
   712  		// Only the first transaction's private data is passed to the ledger
   713  		assert.Len(t, privateDataPassed2Ledger, 1)
   714  		assert.Equal(t, 0, int(privateDataPassed2Ledger[0].SeqInBlock))
   715  		// The private data passed to the ledger contains "ns1" and has 2 collections in it
   716  		assert.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset, 1)
   717  		assert.Equal(t, "ns1", privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].Namespace)
   718  		assert.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 2)
   719  	}).Return(nil)
   720  	block = bf.withInvalidTxns(1).AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create()
   721  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").create()
   722  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   723  
   724  	capabilityProvider = &capabilitymock.CapabilityProvider{}
   725  	appCapability = &capabilitymock.AppCapabilities{}
   726  	capabilityProvider.On("Capabilities").Return(appCapability)
   727  	appCapability.On("StorePvtDataOfInvalidTx").Return(false)
   728  	coordinator = NewCoordinator(mspID, Support{
   729  		ChainID:            "testchannelid",
   730  		CollectionStore:    cs,
   731  		Committer:          committer,
   732  		Fetcher:            fetcher,
   733  		Validator:          &validatorMock{},
   734  		CapabilityProvider: capabilityProvider,
   735  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   736  	err = coordinator.StoreBlock(block, pvtData)
   737  	assert.NoError(t, err)
   738  	assertCommitHappened()
    739  	// Ensure the 2nd transaction, which is invalid and wasn't committed, is still purged.
    740  	// This is so that if we get a transaction via dissemination from an endorser, we purge it
    741  	// when its block arrives.
   742  	assertPurged("tx1", "tx2")
   743  
    744  	// Scenario V: The second transaction in the block we got is invalid, and we have no private
    745  	// data for it in the transient store. As we have set StorePvtDataOfInvalidTx to true and
    746  	// configured the coordinator to skip pulling pvtData of invalid transactions from other peers,
    747  	// it should not store the pvtData of the invalid transaction in the ledger; instead, it records a missing data entry.
   748  	testConfig.SkipPullingInvalidTransactions = true
   749  	assertCommitHappened = func() {
   750  		assert.True(t, commitHappened)
   751  		commitHappened = false
   752  	}
   753  	committer = &mocks.Committer{}
   754  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   755  		blockAndPvtData := args.Get(0).(*ledger.BlockAndPvtData)
   756  		commitHappened = true
   757  		// Only the first transaction's private data is passed to the ledger
   758  		privateDataPassed2Ledger := blockAndPvtData.PvtData
   759  		assert.Len(t, privateDataPassed2Ledger, 1)
   760  		assert.Equal(t, 0, int(privateDataPassed2Ledger[0].SeqInBlock))
   761  		// The private data passed to the ledger contains "ns1" and has 2 collections in it
   762  		assert.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset, 1)
   763  		assert.Equal(t, "ns1", privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].Namespace)
   764  		assert.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 2)
   765  
   766  		missingPrivateDataPassed2Ledger := blockAndPvtData.MissingPvtData
   767  		assert.Len(t, missingPrivateDataPassed2Ledger, 1)
   768  		assert.Len(t, missingPrivateDataPassed2Ledger[1], 1)
   769  		assert.Equal(t, missingPrivateDataPassed2Ledger[1][0].Namespace, "ns2")
   770  		assert.Equal(t, missingPrivateDataPassed2Ledger[1][0].Collection, "c1")
   771  		assert.Equal(t, missingPrivateDataPassed2Ledger[1][0].IsEligible, true)
   772  
   773  		commitOpts := args.Get(1).(*ledger.CommitOptions)
   774  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
   775  		assert.Equal(t, expectedCommitOpts, commitOpts)
   776  	}).Return(nil)
   777  
   778  	block = bf.withInvalidTxns(1).AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create()
   779  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").create()
   780  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   781  	capabilityProvider = &capabilitymock.CapabilityProvider{}
   782  	appCapability = &capabilitymock.AppCapabilities{}
   783  	capabilityProvider.On("Capabilities").Return(appCapability)
   784  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
   785  	digKeys = []privdatacommon.DigKey{}
   786  	fetcher = &fetcherMock{t: t}
   787  	fetcher.On("fetch", mock.Anything).expectingDigests(digKeys).Return(&privdatacommon.FetchedPvtDataContainer{
   788  		AvailableElements: nil,
   789  	}, nil)
   790  	coordinator = NewCoordinator(mspID, Support{
   791  		ChainID:            "testchannelid",
   792  		CollectionStore:    cs,
   793  		Committer:          committer,
   794  		Fetcher:            fetcher,
   795  		Validator:          &validatorMock{},
   796  		CapabilityProvider: capabilityProvider,
   797  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   798  	err = coordinator.StoreBlock(block, pvtData)
   799  	assert.NoError(t, err)
   800  	assertCommitHappened()
   801  	assertPurged("tx1", "tx2")
   802  
   803  	// Scenario VI: The second transaction in the block we got is invalid. As we have set the
   804  	// StorePvtDataOfInvalidTx to true and configured the coordinator to pull pvtData of invalid
   805  	// transactions, it should store the pvtData of invalid transactions in the ledger.
   806  	testConfig.SkipPullingInvalidTransactions = false
   807  	committer = &mocks.Committer{}
   808  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   809  		blockAndPvtData := args.Get(0).(*ledger.BlockAndPvtData)
   810  		commitHappened = true
   811  		// pvtData of both transactions must be present though the second transaction
   812  		// is invalid.
   813  		privateDataPassed2Ledger := blockAndPvtData.PvtData
   814  		assert.Len(t, privateDataPassed2Ledger, 2)
   815  		assert.Equal(t, 0, int(privateDataPassed2Ledger[0].SeqInBlock))
   816  		assert.Equal(t, 1, int(privateDataPassed2Ledger[1].SeqInBlock))
   817  		// The private data passed to the ledger for tx1 contains "ns1" and has 2 collections in it
   818  		assert.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset, 1)
   819  		assert.Equal(t, "ns1", privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].Namespace)
   820  		assert.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 2)
   821  		// The private data passed to the ledger for tx2 contains "ns2" and has 1 collection in it
   822  		assert.Len(t, privateDataPassed2Ledger[1].WriteSet.NsPvtRwset, 1)
   823  		assert.Equal(t, "ns2", privateDataPassed2Ledger[1].WriteSet.NsPvtRwset[0].Namespace)
   824  		assert.Len(t, privateDataPassed2Ledger[1].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 1)
   825  
   826  		missingPrivateDataPassed2Ledger := blockAndPvtData.MissingPvtData
   827  		assert.Len(t, missingPrivateDataPassed2Ledger, 0)
   828  
   829  		commitOpts := args.Get(1).(*ledger.CommitOptions)
   830  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
   831  		assert.Equal(t, expectedCommitOpts, commitOpts)
   832  	}).Return(nil)
   833  
   834  	fetcher = &fetcherMock{t: t}
   835  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
   836  		{
   837  			TxId: "tx2", Namespace: "ns2", Collection: "c1", BlockSeq: 1, SeqInBlock: 1,
   838  		},
   839  	}).Return(&privdatacommon.FetchedPvtDataContainer{
   840  		AvailableElements: []*proto.PvtDataElement{
   841  			{
   842  				Digest: &proto.PvtDataDigest{
   843  					SeqInBlock: 1,
   844  					BlockSeq:   1,
   845  					Collection: "c1",
   846  					Namespace:  "ns2",
   847  					TxId:       "tx2",
   848  				},
   849  				Payload: [][]byte{[]byte("rws-pre-image")},
   850  			},
   851  		},
   852  	}, nil)
   853  
   854  	block = bf.withInvalidTxns(1).AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
   855  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").create()
   856  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").create()
   857  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   858  	coordinator = NewCoordinator(mspID, Support{
   859  		ChainID:            "testchannelid",
   860  		CollectionStore:    cs,
   861  		Committer:          committer,
   862  		Fetcher:            fetcher,
   863  		Validator:          &validatorMock{},
   864  		CapabilityProvider: capabilityProvider,
   865  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   866  	err = coordinator.StoreBlock(block, pvtData)
   867  	assert.NoError(t, err)
   868  	assertCommitHappened()
   869  	assertPurged("tx1", "tx2")
   870  
   871  	// Scenario VII: Block doesn't contain a header
   872  	block.Header = nil
   873  	err = coordinator.StoreBlock(block, pvtData)
   874  	assert.Error(t, err)
   875  	assert.Contains(t, err.Error(), "Block header is nil")
   876  
   877  	// Scenario VIII: Block doesn't contain Data
   878  	block.Data = nil
   879  	err = coordinator.StoreBlock(block, pvtData)
   880  	assert.Error(t, err)
   881  	assert.Contains(t, err.Error(), "Block data is empty")
   882  }
   883  
   884  func TestCoordinatorToFilterOutPvtRWSetsWithWrongHash(t *testing.T) {
    885  	/*
    886  		Test case where the peer receives a new block for commit.
    887  		It has ns1:c1 in the transient store, but with the wrong
    888  		hash, hence it will fetch ns1:c1 from other peers.
    889  	*/
   890  	err := msptesttools.LoadMSPSetupForTesting()
   891  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   892  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
   893  	serializedID, err := identity.Serialize()
   894  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   895  	data := []byte{1, 2, 3}
   896  	signature, err := identity.Sign(data)
   897  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   898  	mspID := "Org1MSP"
   899  	peerSelfSignedData := protoutil.SignedData{
   900  		Identity:  serializedID,
   901  		Signature: signature,
   902  		Data:      data,
   903  	}
   904  
   905  	expectedPvtData := map[uint64]*ledger.TxPvtData{
   906  		0: {SeqInBlock: 0, WriteSet: &rwset.TxPvtReadWriteSet{
   907  			DataModel: rwset.TxReadWriteSet_KV,
   908  			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   909  				{
   910  					Namespace: "ns1",
   911  					CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   912  						{
   913  							CollectionName: "c1",
   914  							Rwset:          []byte("rws-original"),
   915  						},
   916  					},
   917  				},
   918  			},
   919  		}},
   920  	}
   921  
   922  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
   923  	committer := &mocks.Committer{}
   924  
   925  	store := newTransientStore(t)
   926  	defer store.tearDown()
   927  
   928  	assertPurged := func(txns ...string) {
   929  		for _, txn := range txns {
   930  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
   931  			if err != nil {
   932  				t.Fatalf("Failed iterating, got err %s", err)
   933  				iterator.Close()
   934  				return
   935  			}
   936  			res, err := iterator.Next()
   937  			if err != nil {
   938  				t.Fatalf("Failed iterating, got err %s", err)
   939  				iterator.Close()
   940  				return
   941  			}
   942  			assert.Nil(t, res)
   943  			iterator.Close()
   944  		}
   945  	}
   946  
   947  	fetcher := &fetcherMock{t: t}
   948  
   949  	var commitHappened bool
   950  
   951  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   952  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
   953  		assert.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
   954  			flattenTxPvtDataMap(expectedPvtData)))
   955  		commitHappened = true
   956  
   957  		commitOpts := args.Get(1).(*ledger.CommitOptions)
   958  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
   959  		assert.Equal(t, expectedCommitOpts, commitOpts)
   960  	}).Return(nil)
   961  
   962  	hash := util2.ComputeSHA256([]byte("rws-original"))
   963  	bf := &blockFactory{
   964  		channelID: "testchannelid",
   965  	}
   966  
   967  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
   968  		return mgmt.GetManagerForChain("testchannelid")
   969  	})
   970  
   971  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1").create()
   972  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   973  
   974  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
   975  
   976  	capabilityProvider := &capabilitymock.CapabilityProvider{}
   977  	appCapability := &capabilitymock.AppCapabilities{}
   978  	capabilityProvider.On("Capabilities").Return(appCapability)
   979  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
   980  	coordinator := NewCoordinator(mspID, Support{
   981  		ChainID:            "testchannelid",
   982  		CollectionStore:    cs,
   983  		Committer:          committer,
   984  		Fetcher:            fetcher,
   985  		Validator:          &validatorMock{},
   986  		CapabilityProvider: capabilityProvider,
   987  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   988  
   989  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
   990  		{
   991  			TxId: "tx1", Namespace: "ns1", Collection: "c1", BlockSeq: 1,
   992  		},
   993  	}).Return(&privdatacommon.FetchedPvtDataContainer{
   994  		AvailableElements: []*proto.PvtDataElement{
   995  			{
   996  				Digest: &proto.PvtDataDigest{
   997  					BlockSeq:   1,
   998  					Collection: "c1",
   999  					Namespace:  "ns1",
  1000  					TxId:       "tx1",
  1001  				},
  1002  				Payload: [][]byte{[]byte("rws-original")},
  1003  			},
  1004  		},
  1005  	}, nil)
  1006  
  1007  	coordinator.StoreBlock(block, nil)
   1008  	// Assert the block was eventually committed
  1009  	assert.True(t, commitHappened)
  1010  
  1011  	// Assert transaction has been purged
  1012  	assertPurged("tx1")
  1013  }
  1014  
  1015  func TestCoordinatorStoreBlock(t *testing.T) {
  1016  	err := msptesttools.LoadMSPSetupForTesting()
  1017  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1018  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1019  	serializedID, err := identity.Serialize()
  1020  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1021  	data := []byte{1, 2, 3}
  1022  	signature, err := identity.Sign(data)
  1023  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1024  	mspID := "Org1MSP"
  1025  	peerSelfSignedData := protoutil.SignedData{
  1026  		Identity:  serializedID,
  1027  		Signature: signature,
  1028  		Data:      data,
  1029  	}
   1030  	// Green path test: all private data should be obtained successfully
  1031  
  1032  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1033  
  1034  	var commitHappened bool
  1035  	assertCommitHappened := func() {
  1036  		assert.True(t, commitHappened)
  1037  		commitHappened = false
  1038  	}
  1039  	committer := &mocks.Committer{}
  1040  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1041  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1042  		assert.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1043  			flattenTxPvtDataMap(expectedCommittedPrivateData1)))
  1044  		commitHappened = true
  1045  
  1046  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1047  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1048  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1049  	}).Return(nil)
  1050  
  1051  	store := newTransientStore(t)
  1052  	defer store.tearDown()
  1053  
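         	// assertPurged reports whether the transient store no longer holds private
         	// rwsets for the given transactions; purging is asynchronous, so callers poll
         	// it via require.Eventually.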
  1054  	assertPurged := func(txns ...string) bool {
  1055  		for _, txn := range txns {
  1056  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
  1057  			if err != nil {
  1058  				iterator.Close()
  1059  				t.Fatalf("Failed iterating, got err %s", err)
  1060  			}
  1061  			res, err := iterator.Next()
  1062  			iterator.Close()
  1063  			if err != nil {
  1064  				t.Fatalf("Failed iterating, got err %s", err)
  1065  			}
  1066  			if res != nil {
  1067  				return false
  1068  			}
  1069  		}
  1070  		return true
  1071  	}
  1072  
  1073  	fetcher := &fetcherMock{t: t}
  1074  
  1075  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1076  	pdFactory := &pvtDataFactory{}
  1077  	bf := &blockFactory{
  1078  		channelID: "testchannelid",
  1079  	}
  1080  
  1081  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1082  		return mgmt.GetManagerForChain("testchannelid")
  1083  	})
  1084  
  1085  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
  1086  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").create()
  1087  
  1088  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1089  
  1090  	fmt.Println("Scenario I")
  1091  	// Scenario I: Block we got has sufficient private data alongside it.
   1092  	// If the coordinator tried fetching from the transient store or peers, it would panic,
   1093  	// because we haven't yet defined the "On(...)" invocations for the transient store or other peers.
  1094  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1").create()
  1095  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1096  
  1097  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1098  	appCapability := &capabilitymock.AppCapabilities{}
  1099  	capabilityProvider.On("Capabilities").Return(appCapability)
  1100  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1101  	coordinator := NewCoordinator(mspID, Support{
  1102  		ChainID:            "testchannelid",
  1103  		CollectionStore:    cs,
  1104  		Committer:          committer,
  1105  		Fetcher:            fetcher,
  1106  		Validator:          &validatorMock{},
  1107  		CapabilityProvider: capabilityProvider,
  1108  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1109  	err = coordinator.StoreBlock(block, pvtData)
  1110  	assert.NoError(t, err)
  1111  	assertCommitHappened()
  1112  	assertPurgeTxs := func() bool {
  1113  		return assertPurged("tx1", "tx2")
  1114  	}
  1115  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1116  
  1117  	fmt.Println("Scenario II")
   1118  	// Scenario II: The block we got doesn't have sufficient private data alongside it;
   1119  	// it is missing ns1:c2, but the data exists in the transient store
  1120  	store.Persist("tx1", 1, &tspb.TxPvtReadWriteSetWithConfigInfo{
  1121  		PvtRwset: &rwset.TxPvtReadWriteSet{
  1122  			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1123  				{
  1124  					Namespace: "ns1",
  1125  					CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
  1126  						{
  1127  							CollectionName: "c2",
  1128  							Rwset:          []byte("rws-pre-image"),
  1129  						},
  1130  					},
  1131  				},
  1132  			},
  1133  		},
  1134  		CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1135  	})
  1136  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1").addRWSet().addNSRWSet("ns2", "c1").create()
  1137  	err = coordinator.StoreBlock(block, pvtData)
  1138  	assert.NoError(t, err)
  1139  	assertCommitHappened()
  1140  	assertPurgeTxs = func() bool {
  1141  		return assertPurged("tx1", "tx2")
  1142  	}
  1143  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1144  
  1145  	fmt.Println("Scenario III")
   1146  	// Scenario III: The block doesn't have sufficient private data alongside it;
   1147  	// it is missing ns1:c2, which exists in the transient store,
   1148  	// and it is also missing ns2:c1, which doesn't exist in the transient store but can be found in a peer.
   1149  	// Additionally, the coordinator should pass an endorser identity of org1, but not of org2, since
   1150  	// the MemberOrgs() call doesn't return org2 but only org0 and org1.
  1151  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1152  		{
  1153  			TxId: "tx1", Namespace: "ns1", Collection: "c2", BlockSeq: 1,
  1154  		},
  1155  		{
  1156  			TxId: "tx2", Namespace: "ns2", Collection: "c1", BlockSeq: 1, SeqInBlock: 1,
  1157  		},
  1158  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1159  		AvailableElements: []*proto.PvtDataElement{
  1160  			{
  1161  				Digest: &proto.PvtDataDigest{
  1162  					BlockSeq:   1,
  1163  					Collection: "c2",
  1164  					Namespace:  "ns1",
  1165  					TxId:       "tx1",
  1166  				},
  1167  				Payload: [][]byte{[]byte("rws-pre-image")},
  1168  			},
  1169  			{
  1170  				Digest: &proto.PvtDataDigest{
  1171  					SeqInBlock: 1,
  1172  					BlockSeq:   1,
  1173  					Collection: "c1",
  1174  					Namespace:  "ns2",
  1175  					TxId:       "tx2",
  1176  				},
  1177  				Payload: [][]byte{[]byte("rws-pre-image")},
  1178  			},
  1179  		},
  1180  	}, nil)
  1181  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1").create()
  1182  	err = coordinator.StoreBlock(block, pvtData)
  1183  	assert.NoError(t, err)
  1184  	assertCommitHappened()
  1185  	assertPurgeTxs = func() bool {
  1186  		return assertPurged("tx1", "tx2")
  1187  	}
  1188  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1189  
  1190  	fmt.Println("Scenario IV")
   1191  	// Scenario IV: Block came with more than sufficient private data alongside it; some of it is redundant.
  1192  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2", "c3").
  1193  		addRWSet().addNSRWSet("ns2", "c1", "c3").addRWSet().addNSRWSet("ns1", "c4").create()
  1194  	err = coordinator.StoreBlock(block, pvtData)
  1195  	assert.NoError(t, err)
  1196  	assertCommitHappened()
  1197  	assertPurgeTxs = func() bool {
  1198  		return assertPurged("tx1", "tx2")
  1199  	}
  1200  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1201  
  1202  	fmt.Println("Scenario V")
   1203  	// Scenario V: Block we got has private data alongside it, but the coordinator cannot retrieve the collection
   1204  	// access policy of the collections due to a database unavailability error.
   1205  	// We verify that the error propagates properly.
  1206  	mockCs := &mocks.CollectionStore{}
  1207  	mockCs.On("RetrieveCollectionConfig", mock.Anything).Return(nil, errors.New("test error"))
  1208  	coordinator = NewCoordinator(mspID, Support{
  1209  		ChainID:            "testchannelid",
  1210  		CollectionStore:    mockCs,
  1211  		Committer:          committer,
  1212  		Fetcher:            fetcher,
  1213  		Validator:          &validatorMock{},
  1214  		CapabilityProvider: capabilityProvider,
  1215  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1216  	err = coordinator.StoreBlock(block, nil)
  1217  	assert.Error(t, err)
  1218  	assert.Equal(t, "test error", err.Error())
  1219  
  1220  	fmt.Println("Scenario VI")
   1221  	// Scenario VI: Block didn't come with any private data alongside it, and the transient store
   1222  	// has some problem.
   1223  	// In this case, we should try to fetch the data from peers.
  1224  	block = bf.AddTxn("tx3", "ns3", hash, "c3").create()
  1225  	fetcher = &fetcherMock{t: t}
  1226  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1227  		{
  1228  			TxId: "tx3", Namespace: "ns3", Collection: "c3", BlockSeq: 1,
  1229  		},
  1230  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1231  		AvailableElements: []*proto.PvtDataElement{
  1232  			{
  1233  				Digest: &proto.PvtDataDigest{
  1234  					BlockSeq:   1,
  1235  					Collection: "c3",
  1236  					Namespace:  "ns3",
  1237  					TxId:       "tx3",
  1238  				},
  1239  				Payload: [][]byte{[]byte("rws-pre-image")},
  1240  			},
  1241  		},
  1242  	}, nil)
  1243  	committer = &mocks.Committer{}
  1244  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1245  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1246  		assert.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1247  			flattenTxPvtDataMap(expectedCommittedPrivateData2)))
  1248  		commitHappened = true
  1249  
  1250  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1251  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1252  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1253  	}).Return(nil)
  1254  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1255  	coordinator = NewCoordinator(mspID, Support{
  1256  		ChainID:            "testchannelid",
  1257  		CollectionStore:    cs,
  1258  		Committer:          committer,
  1259  		Fetcher:            fetcher,
  1260  		Validator:          &validatorMock{},
  1261  		CapabilityProvider: capabilityProvider,
  1262  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1263  	err = coordinator.StoreBlock(block, nil)
  1264  	assert.NoError(t, err)
  1265  	assertCommitHappened()
  1266  	assertPurgeTxs = func() bool {
  1267  		return assertPurged("tx3")
  1268  	}
  1269  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1270  
  1271  	fmt.Println("Scenario VII")
  1272  	// Scenario VII: Block contains 2 transactions, and the peer is eligible for only tx3-ns3-c3.
   1273  	// Also, the block comes with private data for tx3-ns3-c3, so the peer won't have to fetch the
   1274  	// private data from the transient store or peers; in fact, if it attempted to fetch data it's not eligible
   1275  	// for from the transient store or from peers, the test would fail because the mock wasn't initialized.
  1276  	block = bf.AddTxn("tx3", "ns3", hash, "c3", "c2", "c1").AddTxn("tx1", "ns1", hash, "c1").create()
  1277  	cs = createcollectionStore(peerSelfSignedData).thatAccepts(CollectionCriteria{
  1278  		Collection: "c3",
  1279  		Namespace:  "ns3",
  1280  		Channel:    "testchannelid",
  1281  	}).withMSPIdentity(identity.GetMSPIdentifier())
  1282  	fetcher = &fetcherMock{t: t}
  1283  	committer = &mocks.Committer{}
  1284  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1285  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1286  		assert.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1287  			flattenTxPvtDataMap(expectedCommittedPrivateData2)))
  1288  		commitHappened = true
  1289  
  1290  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1291  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1292  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1293  	}).Return(nil)
  1294  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1295  	coordinator = NewCoordinator(mspID, Support{
  1296  		ChainID:            "testchannelid",
  1297  		CollectionStore:    cs,
  1298  		Committer:          committer,
  1299  		Fetcher:            fetcher,
  1300  		Validator:          &validatorMock{},
  1301  		CapabilityProvider: capabilityProvider,
  1302  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1303  
  1304  	pvtData = pdFactory.addRWSet().addNSRWSet("ns3", "c3").create()
  1305  	err = coordinator.StoreBlock(block, pvtData)
  1306  	assert.NoError(t, err)
  1307  	assertCommitHappened()
  1308  	// In any case, all transactions in the block are purged from the transient store
  1309  	assertPurgeTxs = func() bool {
  1310  		return assertPurged("tx3", "tx1")
  1311  	}
  1312  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1313  }
  1314  
  1315  func TestCoordinatorStoreBlockWhenPvtDataExistInLedger(t *testing.T) {
  1316  	err := msptesttools.LoadMSPSetupForTesting()
  1317  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1318  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1319  	serializedID, err := identity.Serialize()
  1320  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1321  	data := []byte{1, 2, 3}
  1322  	signature, err := identity.Sign(data)
  1323  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1324  	mspID := "Org1MSP"
  1325  	peerSelfSignedData := protoutil.SignedData{
  1326  		Identity:  serializedID,
  1327  		Signature: signature,
  1328  		Data:      data,
  1329  	}
  1330  
  1331  	var commitHappened bool
  1332  	assertCommitHappened := func() {
  1333  		assert.True(t, commitHappened)
  1334  		commitHappened = false
  1335  	}
  1336  	committer := &mocks.Committer{}
  1337  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1338  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1339  		assert.Equal(t, ledger.TxPvtDataMap{}, privateDataPassed2Ledger)
  1340  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1341  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: true}
  1342  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1343  		commitHappened = true
  1344  	}).Return(nil)
  1345  
  1346  	fetcher := &fetcherMock{t: t}
  1347  
  1348  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1349  	pdFactory := &pvtDataFactory{}
  1350  	bf := &blockFactory{
  1351  		channelID: "testchannelid",
  1352  	}
  1353  
  1354  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1355  		return mgmt.GetManagerForChain("testchannelid")
  1356  	})
  1357  
  1358  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
  1359  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").create()
  1360  
  1361  	// Scenario: The block we got has been reprocessed, hence sufficient pvtData is already present
  1362  	// in the local pvtdataStore and would be fetched from there.
  1363  	// If the coordinator tried fetching from the transient store or from peers, it would panic,
  1364  	// because we haven't defined the "On(...)" invocations for the transient store or other peers.
  1365  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1").create()
  1366  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(true, nil)
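	// Returning true simulates a block whose private data is already stored in the ledger.
	// The coordinator is then expected to skip the transient store and remote peers entirely
	// and delegate retrieval to the ledger itself, which is what the CommitLegacy expectation
	// above verifies via &ledger.CommitOptions{FetchPvtDataFromLedger: true}.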
  1367  
  1368  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1369  
  1370  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1371  	appCapability := &capabilitymock.AppCapabilities{}
  1372  	capabilityProvider.On("Capabilities").Return(appCapability)
  1373  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1374  	coordinator := NewCoordinator(mspID, Support{
  1375  		ChainID:            "testchannelid",
  1376  		CollectionStore:    nil,
  1377  		Committer:          committer,
  1378  		Fetcher:            fetcher,
  1379  		Validator:          &validatorMock{},
  1380  		CapabilityProvider: capabilityProvider,
  1381  	}, nil, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
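	// Both the CollectionStore and the transient store are nil on purpose: since the private
	// data already exists in the ledger, the coordinator should never consult either of them,
	// and any attempt to do so would surface as a nil dereference or an uninitialized mock call.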
  1382  	err = coordinator.StoreBlock(block, pvtData)
  1383  	assert.NoError(t, err)
  1384  	assertCommitHappened()
  1385  }
  1386  
  1387  func TestProceedWithoutPrivateData(t *testing.T) {
  1388  	// Scenario: we are missing private data (c2 in ns3) and it cannot be obtained from any peer.
  1389  	// The block needs to be committed with the private data recorded as missing.
  1390  	err := msptesttools.LoadMSPSetupForTesting()
  1391  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1392  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1393  	serializedID, err := identity.Serialize()
  1394  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1395  	data := []byte{1, 2, 3}
  1396  	signature, err := identity.Sign(data)
  1397  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1398  	mspID := "Org1MSP"
  1399  	peerSelfSignedData := protoutil.SignedData{
  1400  		Identity:  serializedID,
  1401  		Signature: signature,
  1402  		Data:      data,
  1403  	}
  1404  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1405  	var commitHappened bool
  1406  	assertCommitHappened := func() {
  1407  		assert.True(t, commitHappened)
  1408  		commitHappened = false
  1409  	}
  1410  	committer := &mocks.Committer{}
  1411  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1412  		blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
  1413  		privateDataPassed2Ledger := blockAndPrivateData.PvtData
  1414  		assert.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1415  			flattenTxPvtDataMap(expectedCommittedPrivateData2)))
  1416  		missingPrivateData := blockAndPrivateData.MissingPvtData
  1417  		expectedMissingPvtData := make(ledger.TxMissingPvtDataMap)
  1418  		expectedMissingPvtData.Add(0, "ns3", "c2", true)
  1419  		assert.Equal(t, expectedMissingPvtData, missingPrivateData)
  1420  		commitHappened = true
  1421  
  1422  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1423  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1424  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1425  	}).Return(nil)
  1426  
  1427  	store := newTransientStore(t)
  1428  	defer store.tearDown()
  1429  
  1430  	assertPurged := func(txns ...string) {
  1431  		for _, txn := range txns {
  1432  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
  1433  			if err != nil {
  1434  				t.Fatalf("Failed iterating, got err %s", err)
  1435  				iterator.Close()
  1436  				return
  1437  			}
  1438  			res, err := iterator.Next()
  1439  			if err != nil {
  1440  				t.Fatalf("Failed iterating, got err %s", err)
  1441  				iterator.Close()
  1442  				return
  1443  			}
  1444  			assert.Nil(t, res)
  1445  			iterator.Close()
  1446  		}
  1447  	}
  1448  
  1449  	fetcher := &fetcherMock{t: t}
  1450  	// Have the peer return, in response to the pull, private data with a non-matching hash
  1451  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1452  		{
  1453  			TxId: "tx1", Namespace: "ns3", Collection: "c2", BlockSeq: 1,
  1454  		},
  1455  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1456  		AvailableElements: []*proto.PvtDataElement{
  1457  			{
  1458  				Digest: &proto.PvtDataDigest{
  1459  					BlockSeq:   1,
  1460  					Collection: "c2",
  1461  					Namespace:  "ns3",
  1462  					TxId:       "tx1",
  1463  				},
  1464  				Payload: [][]byte{[]byte("wrong pre-image")},
  1465  			},
  1466  		},
  1467  	}, nil)
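	// The payload above ("wrong pre-image") does not hash to the value the block expects for
	// ns3/c2, so the fetched element should be discarded and the collection committed as
	// missing-but-eligible (the `true` flag passed to expectedMissingPvtData in the
	// CommitLegacy expectation).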
  1468  
  1469  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1470  	pdFactory := &pvtDataFactory{}
  1471  	bf := &blockFactory{
  1472  		channelID: "testchannelid",
  1473  	}
  1474  
  1475  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1476  		return mgmt.GetManagerForChain("testchannelid")
  1477  	})
  1478  
  1479  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1480  
  1481  	block := bf.AddTxn("tx1", "ns3", hash, "c3", "c2").create()
  1482  	pvtData := pdFactory.addRWSet().addNSRWSet("ns3", "c3").create()
  1483  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1484  
  1485  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1486  	appCapability := &capabilitymock.AppCapabilities{}
  1487  	capabilityProvider.On("Capabilities").Return(appCapability)
  1488  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1489  	coordinator := NewCoordinator(mspID, Support{
  1490  		ChainID:            "testchannelid",
  1491  		CollectionStore:    cs,
  1492  		Committer:          committer,
  1493  		Fetcher:            fetcher,
  1494  		Validator:          &validatorMock{},
  1495  		CapabilityProvider: capabilityProvider,
  1496  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1497  	err = coordinator.StoreBlock(block, pvtData)
  1498  	assert.NoError(t, err)
  1499  	assertCommitHappened()
  1500  	assertPurged("tx1")
  1501  }
  1502  
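// A note on the previous test and the next one: TestProceedWithoutPrivateData and
// TestProceedWithInEligiblePrivateData differ mainly in the eligibility flag recorded for the
// missing collection. An illustrative sketch of that distinction, using the same ledger API
// the tests use (for illustration only, not part of the test setup):
//
//	missing := make(ledger.TxMissingPvtDataMap)
//	missing.Add(0, "ns3", "c2", true)  // eligible: the peer tried to fetch it and failed
//	missing.Add(0, "ns3", "c2", false) // ineligible: the peer never attempts to fetch it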
  1503  func TestProceedWithInEligiblePrivateData(t *testing.T) {
  1504  	// Scenario: the peer is not eligible for the private data (c2 in ns3), so it must not try to fetch it
  1505  	// from the transient store or from peers. The block is committed with the private data recorded as missing and ineligible.
  1506  	err := msptesttools.LoadMSPSetupForTesting()
  1507  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1508  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1509  	serializedID, err := identity.Serialize()
  1510  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1511  	data := []byte{1, 2, 3}
  1512  	signature, err := identity.Sign(data)
  1513  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1514  	mspID := "Org1MSP"
  1515  	peerSelfSignedData := protoutil.SignedData{
  1516  		Identity:  serializedID,
  1517  		Signature: signature,
  1518  		Data:      data,
  1519  	}
  1520  
  1521  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsNone().withMSPIdentity(identity.GetMSPIdentifier())
  1522  
  1523  	var commitHappened bool
  1524  	assertCommitHappened := func() {
  1525  		assert.True(t, commitHappened)
  1526  		commitHappened = false
  1527  	}
  1528  	committer := &mocks.Committer{}
  1529  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1530  		blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
  1531  		privateDataPassed2Ledger := blockAndPrivateData.PvtData
  1532  		assert.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1533  			flattenTxPvtDataMap(expectedCommittedPrivateData3)))
  1534  		missingPrivateData := blockAndPrivateData.MissingPvtData
  1535  		expectedMissingPvtData := make(ledger.TxMissingPvtDataMap)
  1536  		expectedMissingPvtData.Add(0, "ns3", "c2", false)
  1537  		assert.Equal(t, expectedMissingPvtData, missingPrivateData)
  1538  		commitHappened = true
  1539  
  1540  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1541  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1542  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1543  	}).Return(nil)
  1544  
  1545  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1546  	bf := &blockFactory{
  1547  		channelID: "testchannelid",
  1548  	}
  1549  
  1550  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1551  		return mgmt.GetManagerForChain("testchannelid")
  1552  	})
  1553  
  1554  	block := bf.AddTxn("tx1", "ns3", hash, "c2").create()
  1555  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1556  
  1557  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1558  
  1559  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1560  	appCapability := &capabilitymock.AppCapabilities{}
  1561  	capabilityProvider.On("Capabilities").Return(appCapability)
  1562  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1563  	coordinator := NewCoordinator(mspID, Support{
  1564  		ChainID:            "testchannelid",
  1565  		CollectionStore:    cs,
  1566  		Committer:          committer,
  1567  		Fetcher:            nil,
  1568  		Validator:          &validatorMock{},
  1569  		CapabilityProvider: capabilityProvider,
  1570  	}, nil, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1571  	err = coordinator.StoreBlock(block, nil)
  1572  	assert.NoError(t, err)
  1573  	assertCommitHappened()
  1574  }
  1575  
  1576  func TestCoordinatorGetBlocks(t *testing.T) {
  1577  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1578  	err := msptesttools.LoadMSPSetupForTesting()
  1579  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1580  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1581  	serializedID, err := identity.Serialize()
  1582  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1583  	data := []byte{1, 2, 3}
  1584  	signature, err := identity.Sign(data)
  1585  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1586  	mspID := "Org1MSP"
  1587  	peerSelfSignedData := protoutil.SignedData{
  1588  		Identity:  serializedID,
  1589  		Signature: signature,
  1590  		Data:      data,
  1591  	}
  1592  
  1593  	store := newTransientStore(t)
  1594  	defer store.tearDown()
  1595  
  1596  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1597  		return mgmt.GetManagerForChain("testchannelid")
  1598  	})
  1599  
  1600  	fetcher := &fetcherMock{t: t}
  1601  
  1602  	committer := &mocks.Committer{}
  1603  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1604  
  1605  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1606  	appCapability := &capabilitymock.AppCapabilities{}
  1607  	capabilityProvider.On("Capabilities").Return(appCapability)
  1608  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1609  
  1610  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1611  	bf := &blockFactory{
  1612  		channelID: "testchannelid",
  1613  	}
  1614  	block := bf.AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create()
  1615  
  1616  	// Green path - the block and private data are returned, but the requester is eligible
  1617  	// for only a subset of the private data.
  1618  	cs := createcollectionStore(peerSelfSignedData).thatAccepts(CollectionCriteria{
  1619  		Namespace:  "ns1",
  1620  		Collection: "c2",
  1621  		Channel:    "testchannelid",
  1622  	}).withMSPIdentity(identity.GetMSPIdentifier())
  1623  	committer.Mock = mock.Mock{}
  1624  	committer.On("GetPvtDataAndBlockByNum", mock.Anything).Return(&ledger.BlockAndPvtData{
  1625  		Block:   block,
  1626  		PvtData: expectedCommittedPrivateData1,
  1627  	}, nil)
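	// The committer hands back the full private data of the block (expectedCommittedPrivateData1);
	// GetPvtDataAndBlockByNum is then expected to filter it down to what the requester is
	// eligible for - only ns1/c2 here - which is what expectedPrivData below encodes.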
  1628  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1629  	coordinator := NewCoordinator(mspID, Support{
  1630  		ChainID:            "testchannelid",
  1631  		CollectionStore:    cs,
  1632  		Committer:          committer,
  1633  		Fetcher:            fetcher,
  1634  		Validator:          &validatorMock{},
  1635  		CapabilityProvider: capabilityProvider,
  1636  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1637  	expectedPrivData := (&pvtDataFactory{}).addRWSet().addNSRWSet("ns1", "c2").create()
  1638  	block2, returnedPrivateData, err := coordinator.GetPvtDataAndBlockByNum(1, peerSelfSignedData)
  1639  	assert.NoError(t, err)
  1640  	assert.Equal(t, block, block2)
  1641  	assert.Equal(t, expectedPrivData, []*ledger.TxPvtData(returnedPrivateData))
  1642  
  1643  	// Bad path - error occurs when trying to retrieve the block and private data
  1644  	committer.Mock = mock.Mock{}
  1645  	committer.On("GetPvtDataAndBlockByNum", mock.Anything).Return(nil, errors.New("uh oh"))
  1646  	block2, returnedPrivateData, err = coordinator.GetPvtDataAndBlockByNum(1, peerSelfSignedData)
  1647  	assert.Nil(t, block2)
  1648  	assert.Empty(t, returnedPrivateData)
  1649  	assert.Error(t, err)
  1650  }
  1651  
  1652  func TestPurgeBelowHeight(t *testing.T) {
  1653  	conf := testConfig
  1654  	conf.TransientBlockRetention = 5
  1655  	mspID := "Org1MSP"
  1656  	peerSelfSignedData := protoutil.SignedData{}
  1657  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll()
  1658  
  1659  	committer := &mocks.Committer{}
  1660  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Return(nil)
  1661  
  1662  	store := newTransientStore(t)
  1663  	defer store.tearDown()
  1664  
  1665  	// store 9 data sets initially
  1666  	for i := 0; i < 9; i++ {
  1667  		txID := fmt.Sprintf("tx%d", i+1)
  1668  		store.Persist(txID, uint64(i), &tspb.TxPvtReadWriteSetWithConfigInfo{
  1669  			PvtRwset: &rwset.TxPvtReadWriteSet{
  1670  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1671  					{
  1672  						Namespace: "ns1",
  1673  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
  1674  							{
  1675  								CollectionName: "c1",
  1676  								Rwset:          []byte("rws-pre-image"),
  1677  							},
  1678  						},
  1679  					},
  1680  				},
  1681  			},
  1682  			CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1683  		})
  1684  	}
  1685  	assertPurged := func(purged bool) bool {
  1686  		numTx := 9
  1687  		if purged {
  1688  			numTx = 10
  1689  		}
  1690  		for i := 1; i <= numTx; i++ {
  1691  			txID := fmt.Sprintf("tx%d", i)
  1692  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1693  			if err != nil {
  1694  				iterator.Close()
  1695  				t.Fatalf("Failed iterating, got err %s", err)
  1696  			}
  1697  			res, err := iterator.Next()
  1698  			iterator.Close()
  1699  			if err != nil {
  1700  				t.Fatalf("Failed iterating, got err %s", err)
  1701  			}
  1702  			if (i < 6 || i == numTx) && purged {
  1703  				if res != nil {
  1704  					return false
  1705  				}
  1706  				continue
  1707  			}
  1708  			if res == nil {
  1709  				return false
  1710  			}
  1711  		}
  1712  		return true
  1713  	}
  1714  
  1715  	fetcher := &fetcherMock{t: t}
  1716  
  1717  	bf := &blockFactory{
  1718  		channelID: "testchannelid",
  1719  	}
  1720  
  1721  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1722  		return mgmt.GetManagerForChain("testchannelid")
  1723  	})
  1724  
  1725  	pdFactory := &pvtDataFactory{}
  1726  
  1727  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1728  
  1729  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1730  
  1731  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1732  	appCapability := &capabilitymock.AppCapabilities{}
  1733  	capabilityProvider.On("Capabilities").Return(appCapability)
  1734  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1735  	coordinator := NewCoordinator(mspID, Support{
  1736  		ChainID:            "testchannelid",
  1737  		CollectionStore:    cs,
  1738  		Committer:          committer,
  1739  		Fetcher:            fetcher,
  1740  		Validator:          &validatorMock{},
  1741  		CapabilityProvider: capabilityProvider,
  1742  	}, store.store, peerSelfSignedData, metrics, conf, idDeserializerFactory)
  1743  
  1744  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1745  	block := bf.AddTxn("tx10", "ns1", hash, "c1").create()
  1746  	block.Header.Number = 10
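	// With TransientBlockRetention set to 5, committing block number 10 (a multiple of the
	// retention value) is expected to purge transient entries persisted below height 10-5=5,
	// i.e. tx1-tx5; tx10 itself is purged simply because it has just been committed.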
  1747  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1").create()
  1748  	// test that no transactions have been purged yet
  1749  	assertPurgedBlocks := func() bool {
  1750  		return assertPurged(false)
  1751  	}
  1752  	require.Eventually(t, assertPurgedBlocks, 2*time.Second, 100*time.Millisecond)
  1753  	err := coordinator.StoreBlock(block, pvtData)
  1754  	assert.NoError(t, err)
  1755  	// test that tx1-tx5 (below the retention height) and the committed tx10 were purged
  1756  	assertPurgedBlocks = func() bool {
  1757  		return assertPurged(true)
  1758  	}
  1759  	require.Eventually(t, assertPurgedBlocks, 2*time.Second, 100*time.Millisecond)
  1760  }
  1761  
  1762  func TestCoordinatorStorePvtData(t *testing.T) {
  1763  	mspID := "Org1MSP"
  1764  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1765  	cs := createcollectionStore(protoutil.SignedData{}).thatAcceptsAll()
  1766  	committer := &mocks.Committer{}
  1767  
  1768  	store := newTransientStore(t)
  1769  	defer store.tearDown()
  1770  
  1771  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1772  		return mgmt.GetManagerForChain("testchannelid")
  1773  	})
  1774  
  1775  	fetcher := &fetcherMock{t: t}
  1776  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1777  
  1778  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1779  	appCapability := &capabilitymock.AppCapabilities{}
  1780  	capabilityProvider.On("Capabilities").Return(appCapability)
  1781  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1782  	coordinator := NewCoordinator(mspID, Support{
  1783  		ChainID:            "testchannelid",
  1784  		CollectionStore:    cs,
  1785  		Committer:          committer,
  1786  		Fetcher:            fetcher,
  1787  		Validator:          &validatorMock{},
  1788  		CapabilityProvider: capabilityProvider,
  1789  	}, store.store, protoutil.SignedData{}, metrics, testConfig, idDeserializerFactory)
  1790  	pvtData := (&pvtDataFactory{}).addRWSet().addNSRWSet("ns1", "c1").create()
  1791  	// Green path: ledger height can be retrieved from ledger/committer
  1792  	err := coordinator.StorePvtData("tx1", &tspb.TxPvtReadWriteSetWithConfigInfo{
  1793  		PvtRwset:          pvtData[0].WriteSet,
  1794  		CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1795  	}, uint64(5))
  1796  	assert.NoError(t, err)
  1797  }
  1798  
  1799  func TestContainsWrites(t *testing.T) {
  1800  	// Scenario I: Nil HashedRwSet in collection
  1801  	col := &rwsetutil.CollHashedRwSet{
  1802  		CollectionName: "col1",
  1803  	}
  1804  	assert.False(t, containsWrites("tx", "ns", col))
  1805  
  1806  	// Scenario II: No writes in collection
  1807  	col.HashedRwSet = &kvrwset.HashedRWSet{}
  1808  	assert.False(t, containsWrites("tx", "ns", col))
  1809  
  1810  	// Scenario III: Some writes in collection
  1811  	col.HashedRwSet.HashedWrites = append(col.HashedRwSet.HashedWrites, &kvrwset.KVWriteHash{})
  1812  	assert.True(t, containsWrites("tx", "ns", col))
  1813  }
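// containsWrites is what allows the coordinator to skip collections whose hashed read-write set
// contains no writes: such collections carry no private pre-image to commit, so they must not be
// counted as missing private data. TestIgnoreReadOnlyColRWSets below exercises this end to end.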
  1814  
  1815  func TestIgnoreReadOnlyColRWSets(t *testing.T) {
  1816  	// Scenario: The transaction has some ColRWSets that contain only reads and no writes.
  1817  	// These should be ignored and not considered missing private data that needs to be retrieved
  1818  	// from the transient store or other peers.
  1819  	// The gossip and transient store mocks in this test aren't initialized with
  1820  	// actions, so if the coordinator attempts to fetch private data from the
  1821  	// transient store or other peers, the test would fail.
  1822  	// Also, we check that at commit time the coordinator concluded that
  1823  	// no private data was missing.
  1824  	err := msptesttools.LoadMSPSetupForTesting()
  1825  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1826  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1827  	serializedID, err := identity.Serialize()
  1828  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1829  	data := []byte{1, 2, 3}
  1830  	signature, err := identity.Sign(data)
  1831  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1832  	mspID := "Org1MSP"
  1833  	peerSelfSignedData := protoutil.SignedData{
  1834  		Identity:  serializedID,
  1835  		Signature: signature,
  1836  		Data:      data,
  1837  	}
  1838  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1839  	var commitHappened bool
  1840  	assertCommitHappened := func() {
  1841  		assert.True(t, commitHappened)
  1842  		commitHappened = false
  1843  	}
  1844  	committer := &mocks.Committer{}
  1845  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1846  		blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
  1847  		// Ensure there is no private data to commit
  1848  		assert.Empty(t, blockAndPrivateData.PvtData)
  1849  		// Ensure there is no missing private data
  1850  		assert.Empty(t, blockAndPrivateData.MissingPvtData)
  1851  		commitHappened = true
  1852  
  1853  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1854  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1855  		assert.Equal(t, expectedCommitOpts, commitOpts)
  1856  	}).Return(nil)
  1857  
  1858  	store := newTransientStore(t)
  1859  	defer store.tearDown()
  1860  
  1861  	fetcher := &fetcherMock{t: t}
  1862  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1863  	bf := &blockFactory{
  1864  		channelID: "testchannelid",
  1865  	}
  1866  
  1867  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1868  		return mgmt.GetManagerForChain("testchannelid")
  1869  	})
  1870  
  1871  	// The block contains a read-only private data transaction
  1872  	block := bf.AddReadOnlyTxn("tx1", "ns3", hash, "c3", "c2").create()
  1873  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1874  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1875  
  1876  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1877  	appCapability := &capabilitymock.AppCapabilities{}
  1878  	capabilityProvider.On("Capabilities").Return(appCapability)
  1879  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1880  	coordinator := NewCoordinator(mspID, Support{
  1881  		ChainID:            "testchannelid",
  1882  		CollectionStore:    cs,
  1883  		Committer:          committer,
  1884  		Fetcher:            fetcher,
  1885  		Validator:          &validatorMock{},
  1886  		CapabilityProvider: capabilityProvider,
  1887  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1888  	// We pass a nil private data slice to indicate there are no pre-images, even though the block
  1889  	// contains private data reads.
  1890  	err = coordinator.StoreBlock(block, nil)
  1891  	assert.NoError(t, err)
  1892  	assertCommitHappened()
  1893  }
  1894  
  1895  func TestCoordinatorMetrics(t *testing.T) {
  1896  	err := msptesttools.LoadMSPSetupForTesting()
  1897  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1898  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1899  	serializedID, err := identity.Serialize()
  1900  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1901  	data := []byte{1, 2, 3}
  1902  	signature, err := identity.Sign(data)
  1903  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1904  	mspID := "Org1MSP"
  1905  	peerSelfSignedData := protoutil.SignedData{
  1906  		Identity:  serializedID,
  1907  		Signature: signature,
  1908  		Data:      data,
  1909  	}
  1910  
  1911  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1912  
  1913  	committer := &mocks.Committer{}
  1914  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Return(nil)
  1915  
  1916  	store := newTransientStore(t)
  1917  	defer store.tearDown()
  1918  
  1919  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1920  	pdFactory := &pvtDataFactory{}
  1921  	bf := &blockFactory{
  1922  		channelID: "testchannelid",
  1923  	}
  1924  
  1925  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1926  		return mgmt.GetManagerForChain("testchannelid")
  1927  	})
  1928  
  1929  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
  1930  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").
  1931  		AddTxnWithEndorsement("tx3", "ns3", hash, "org3", true, "c1").create()
  1932  
  1933  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1").create()
  1934  	// fetch duration metric only reported when fetching from remote peer
  1935  	fetcher := &fetcherMock{t: t}
  1936  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1937  		{
  1938  			TxId: "tx3", Namespace: "ns3", Collection: "c1", BlockSeq: 1, SeqInBlock: 2,
  1939  		},
  1940  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1941  		AvailableElements: []*proto.PvtDataElement{
  1942  			{
  1943  				Digest: &proto.PvtDataDigest{
  1944  					SeqInBlock: 2,
  1945  					BlockSeq:   1,
  1946  					Collection: "c1",
  1947  					Namespace:  "ns3",
  1948  					TxId:       "tx3",
  1949  				},
  1950  				Payload: [][]byte{[]byte("rws-pre-image")},
  1951  			},
  1952  		},
  1953  	}, nil)
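	// The locally supplied pvtData above covers only tx1 (ns1) and tx2 (ns2); tx3's ns3/c1
	// collection is deliberately left out so it must be pulled from a remote peer via the
	// fetcher expectation above, which is what causes the fetch duration metric to be observed.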
  1954  
  1955  	testMetricProvider := gmetricsmocks.TestUtilConstructMetricProvider()
  1956  	metrics := metrics.NewGossipMetrics(testMetricProvider.FakeProvider).PrivdataMetrics
  1957  
  1958  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1959  
  1960  	capabilityProvider := &capabilitymock.CapabilityProvider{}
  1961  	appCapability := &capabilitymock.AppCapabilities{}
  1962  	capabilityProvider.On("Capabilities").Return(appCapability)
  1963  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1964  	coordinator := NewCoordinator(mspID, Support{
  1965  		ChainID:            "testchannelid",
  1966  		CollectionStore:    cs,
  1967  		Committer:          committer,
  1968  		Fetcher:            fetcher,
  1969  		Validator:          &validatorMock{},
  1970  		CapabilityProvider: capabilityProvider,
  1971  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1972  	err = coordinator.StoreBlock(block, pvtData)
  1973  	assert.NoError(t, err)
  1974  
  1975  	// make sure all coordinator metrics were reported
  1976  
  1977  	assert.Equal(t,
  1978  		[]string{"channel", "testchannelid"},
  1979  		testMetricProvider.FakeValidationDuration.WithArgsForCall(0),
  1980  	)
  1981  	assert.True(t, testMetricProvider.FakeValidationDuration.ObserveArgsForCall(0) > 0)
  1982  	assert.Equal(t,
  1983  		[]string{"channel", "testchannelid"},
  1984  		testMetricProvider.FakeListMissingPrivateDataDuration.WithArgsForCall(0),
  1985  	)
  1986  	assert.True(t, testMetricProvider.FakeListMissingPrivateDataDuration.ObserveArgsForCall(0) > 0)
  1987  	assert.Equal(t,
  1988  		[]string{"channel", "testchannelid"},
  1989  		testMetricProvider.FakeFetchDuration.WithArgsForCall(0),
  1990  	)
  1991  	// fetch duration metric only reported when fetching from remote peer
  1992  	assert.True(t, testMetricProvider.FakeFetchDuration.ObserveArgsForCall(0) > 0)
  1993  	assert.Equal(t,
  1994  		[]string{"channel", "testchannelid"},
  1995  		testMetricProvider.FakeCommitPrivateDataDuration.WithArgsForCall(0),
  1996  	)
  1997  	assert.True(t, testMetricProvider.FakeCommitPrivateDataDuration.ObserveArgsForCall(0) > 0)
  1998  	assert.Equal(t,
  1999  		[]string{"channel", "testchannelid"},
  2000  		testMetricProvider.FakePurgeDuration.WithArgsForCall(0),
  2001  	)
  2002  
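	// Purging of the transient store happens asynchronously after the commit, so the purge
	// duration observation may not have been recorded yet; poll for it instead of asserting
	// on it directly.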
  2003  	purgeDuration := func() bool {
  2004  		return testMetricProvider.FakePurgeDuration.ObserveArgsForCall(0) > 0
  2005  	}
  2006  	assert.Eventually(t, purgeDuration, 2*time.Second, 100*time.Millisecond)
  2007  }