github.com/hechain20/hechain@v0.0.0-20220316014945-b544036ba106/gossip/privdata/coordinator_test.go (about)

     1  /*
     2  Copyright hechain. All Rights Reserved.
     3  
     4  SPDX-License-Identifier: Apache-2.0
     5  */
     6  
     7  package privdata
     8  
     9  import (
    10  	"encoding/asn1"
    11  	"encoding/hex"
    12  	"errors"
    13  	"fmt"
    14  	"io/ioutil"
    15  	"os"
    16  	"reflect"
    17  	"testing"
    18  	"time"
    19  
    20  	pb "github.com/golang/protobuf/proto"
    21  	"github.com/hechain20/hechain/bccsp/factory"
    22  	"github.com/hechain20/hechain/common/metrics/disabled"
    23  	util2 "github.com/hechain20/hechain/common/util"
    24  	"github.com/hechain20/hechain/core/common/privdata"
    25  	"github.com/hechain20/hechain/core/ledger"
    26  	"github.com/hechain20/hechain/core/ledger/kvledger/txmgmt/rwsetutil"
    27  	"github.com/hechain20/hechain/core/transientstore"
    28  	"github.com/hechain20/hechain/gossip/metrics"
    29  	gmetricsmocks "github.com/hechain20/hechain/gossip/metrics/mocks"
    30  	privdatacommon "github.com/hechain20/hechain/gossip/privdata/common"
    31  	privdatamocks "github.com/hechain20/hechain/gossip/privdata/mocks"
    32  	"github.com/hechain20/hechain/gossip/util"
    33  	"github.com/hechain20/hechain/msp"
    34  	mspmgmt "github.com/hechain20/hechain/msp/mgmt"
    35  	msptesttools "github.com/hechain20/hechain/msp/mgmt/testtools"
    36  	"github.com/hechain20/hechain/protoutil"
    37  	"github.com/hyperledger/fabric-protos-go/common"
    38  	proto "github.com/hyperledger/fabric-protos-go/gossip"
    39  	"github.com/hyperledger/fabric-protos-go/ledger/rwset"
    40  	"github.com/hyperledger/fabric-protos-go/ledger/rwset/kvrwset"
    41  	mspproto "github.com/hyperledger/fabric-protos-go/msp"
    42  	"github.com/hyperledger/fabric-protos-go/peer"
    43  	tspb "github.com/hyperledger/fabric-protos-go/transientstore"
    44  	"github.com/stretchr/testify/mock"
    45  	"github.com/stretchr/testify/require"
    46  )
    47  
    48  var testConfig = CoordinatorConfig{
    49  	PullRetryThreshold:             time.Second * 3,
    50  	TransientBlockRetention:        1000,
    51  	SkipPullingInvalidTransactions: false,
    52  }
    53  
    54  // CollectionCriteria aggregates criteria of
    55  // a collection
    56  type CollectionCriteria struct {
    57  	Channel    string
    58  	Collection string
    59  	Namespace  string
    60  }
    61  
    62  func fromCollectionCriteria(criteria privdata.CollectionCriteria) CollectionCriteria {
    63  	return CollectionCriteria{
    64  		Collection: criteria.Collection,
    65  		Namespace:  criteria.Namespace,
    66  		Channel:    criteria.Channel,
    67  	}
    68  }
    69  
    70  type validatorMock struct {
    71  	err error
    72  }
    73  
    74  func (v *validatorMock) Validate(block *common.Block) error {
    75  	if v.err != nil {
    76  		return v.err
    77  	}
    78  	return nil
    79  }
    80  
    81  type digests []privdatacommon.DigKey
    82  
    83  func (d digests) Equal(other digests) bool {
    84  	flatten := func(d digests) map[privdatacommon.DigKey]struct{} {
    85  		m := map[privdatacommon.DigKey]struct{}{}
    86  		for _, dig := range d {
    87  			m[dig] = struct{}{}
    88  		}
    89  		return m
    90  	}
    91  	return reflect.DeepEqual(flatten(d), flatten(other))
    92  }
    93  
    94  type fetchCall struct {
    95  	fetcher *fetcherMock
    96  	*mock.Call
    97  }
    98  
    99  func (fc *fetchCall) expectingEndorsers(orgs ...string) *fetchCall {
   100  	if fc.fetcher.expectedEndorsers == nil {
   101  		fc.fetcher.expectedEndorsers = make(map[string]struct{})
   102  	}
   103  	for _, org := range orgs {
   104  		sID := &mspproto.SerializedIdentity{Mspid: org, IdBytes: []byte(fmt.Sprintf("p0%s", org))}
   105  		b, _ := pb.Marshal(sID)
   106  		fc.fetcher.expectedEndorsers[string(b)] = struct{}{}
   107  	}
   108  
   109  	return fc
   110  }
   111  
   112  func (fc *fetchCall) expectingDigests(digests []privdatacommon.DigKey) *fetchCall {
   113  	fc.fetcher.expectedDigests = digests
   114  	return fc
   115  }
   116  
   117  func (fc *fetchCall) Return(returnArguments ...interface{}) *mock.Call {
   118  	return fc.Call.Return(returnArguments...)
   119  }
   120  
   121  type fetcherMock struct {
   122  	t *testing.T
   123  	mock.Mock
   124  	expectedDigests   []privdatacommon.DigKey
   125  	expectedEndorsers map[string]struct{}
   126  }
   127  
   128  func (f *fetcherMock) On(methodName string, arguments ...interface{}) *fetchCall {
   129  	return &fetchCall{
   130  		fetcher: f,
   131  		Call:    f.Mock.On(methodName, arguments...),
   132  	}
   133  }
   134  
   135  func (f *fetcherMock) fetch(dig2src dig2sources) (*privdatacommon.FetchedPvtDataContainer, error) {
   136  	uniqueEndorsements := make(map[string]interface{})
   137  	for _, endorsements := range dig2src {
   138  		for _, endorsement := range endorsements {
   139  			_, exists := f.expectedEndorsers[string(endorsement.Endorser)]
   140  			if !exists {
   141  				f.t.Fatalf("Encountered a non-expected endorser: %s", string(endorsement.Endorser))
   142  			}
   143  			uniqueEndorsements[string(endorsement.Endorser)] = struct{}{}
   144  		}
   145  	}
   146  	require.True(f.t, digests(f.expectedDigests).Equal(digests(dig2src.keys())))
   147  	require.Equal(f.t, len(f.expectedEndorsers), len(uniqueEndorsements))
   148  	args := f.Called(dig2src)
   149  	if args.Get(1) == nil {
   150  		return args.Get(0).(*privdatacommon.FetchedPvtDataContainer), nil
   151  	}
   152  	return nil, args.Get(1).(error)
   153  }
   154  
   155  type testTransientStore struct {
   156  	storeProvider transientstore.StoreProvider
   157  	store         *transientstore.Store
   158  	tempdir       string
   159  }
   160  
   161  func newTransientStore(t *testing.T) *testTransientStore {
   162  	s := &testTransientStore{}
   163  	var err error
   164  	s.tempdir, err = ioutil.TempDir("", "ts")
   165  	if err != nil {
   166  		t.Fatalf("Failed to create test directory, got err %s", err)
   167  		return s
   168  	}
   169  	s.storeProvider, err = transientstore.NewStoreProvider(s.tempdir)
   170  	if err != nil {
   171  		t.Fatalf("Failed to open store, got err %s", err)
   172  		return s
   173  	}
   174  	s.store, err = s.storeProvider.OpenStore("testchannelid")
   175  	if err != nil {
   176  		t.Fatalf("Failed to open store, got err %s", err)
   177  		return s
   178  	}
   179  	return s
   180  }
   181  
   182  func (s *testTransientStore) tearDown() {
   183  	s.storeProvider.Close()
   184  	os.RemoveAll(s.tempdir)
   185  }
   186  
   187  func (s *testTransientStore) Persist(txid string, blockHeight uint64,
   188  	privateSimulationResultsWithConfig *tspb.TxPvtReadWriteSetWithConfigInfo) error {
   189  	return s.store.Persist(txid, blockHeight, privateSimulationResultsWithConfig)
   190  }
   191  
   192  func (s *testTransientStore) GetTxPvtRWSetByTxid(txid string, filter ledger.PvtNsCollFilter) (RWSetScanner, error) {
   193  	return s.store.GetTxPvtRWSetByTxid(txid, filter)
   194  }
   195  
   196  func createcollectionStore(expectedSignedData protoutil.SignedData) *collectionStore {
   197  	return &collectionStore{
   198  		expectedSignedData: expectedSignedData,
   199  		policies:           make(map[collectionAccessPolicy]CollectionCriteria),
   200  		store:              make(map[CollectionCriteria]collectionAccessPolicy),
   201  	}
   202  }
   203  
   204  type collectionStore struct {
   205  	expectedSignedData protoutil.SignedData
   206  	acceptsAll         bool
   207  	acceptsNone        bool
   208  	lenient            bool
   209  	mspIdentifier      string
   210  	store              map[CollectionCriteria]collectionAccessPolicy
   211  	policies           map[collectionAccessPolicy]CollectionCriteria
   212  }
   213  
   214  func (cs *collectionStore) thatAcceptsAll() *collectionStore {
   215  	cs.acceptsAll = true
   216  	return cs
   217  }
   218  
   219  func (cs *collectionStore) thatAcceptsNone() *collectionStore {
   220  	cs.acceptsNone = true
   221  	return cs
   222  }
   223  
   224  func (cs *collectionStore) thatAccepts(cc CollectionCriteria) *collectionStore {
   225  	sp := collectionAccessPolicy{
   226  		cs: cs,
   227  		n:  util.RandomUInt64(),
   228  	}
   229  	cs.store[cc] = sp
   230  	cs.policies[sp] = cc
   231  	return cs
   232  }
   233  
   234  func (cs *collectionStore) withMSPIdentity(identifier string) *collectionStore {
   235  	cs.mspIdentifier = identifier
   236  	return cs
   237  }
   238  
   239  func (cs *collectionStore) RetrieveCollectionAccessPolicy(cc privdata.CollectionCriteria) (privdata.CollectionAccessPolicy, error) {
   240  	if sp, exists := cs.store[fromCollectionCriteria(cc)]; exists {
   241  		return &sp, nil
   242  	}
   243  	if cs.acceptsAll || cs.acceptsNone || cs.lenient {
   244  		return &collectionAccessPolicy{
   245  			cs: cs,
   246  			n:  util.RandomUInt64(),
   247  		}, nil
   248  	}
   249  	return nil, privdata.NoSuchCollectionError{}
   250  }
   251  
   252  func (cs *collectionStore) RetrieveCollection(privdata.CollectionCriteria) (privdata.Collection, error) {
   253  	panic("implement me")
   254  }
   255  
   256  func (cs *collectionStore) RetrieveCollectionConfig(cc privdata.CollectionCriteria) (*peer.StaticCollectionConfig, error) {
   257  	mspIdentifier := "different-org"
   258  	if _, exists := cs.store[fromCollectionCriteria(cc)]; exists || cs.acceptsAll {
   259  		mspIdentifier = cs.mspIdentifier
   260  	}
   261  	return &peer.StaticCollectionConfig{
   262  		Name:           cc.Collection,
   263  		MemberOnlyRead: true,
   264  		MemberOrgsPolicy: &peer.CollectionPolicyConfig{
   265  			Payload: &peer.CollectionPolicyConfig_SignaturePolicy{
   266  				SignaturePolicy: &common.SignaturePolicyEnvelope{
   267  					Rule: &common.SignaturePolicy{
   268  						Type: &common.SignaturePolicy_SignedBy{
   269  							SignedBy: 0,
   270  						},
   271  					},
   272  					Identities: []*mspproto.MSPPrincipal{
   273  						{
   274  							PrincipalClassification: mspproto.MSPPrincipal_ROLE,
   275  							Principal: protoutil.MarshalOrPanic(&mspproto.MSPRole{
   276  								MspIdentifier: mspIdentifier,
   277  								Role:          mspproto.MSPRole_MEMBER,
   278  							}),
   279  						},
   280  					},
   281  				},
   282  			},
   283  		},
   284  	}, nil
   285  }
   286  
   287  func (cs *collectionStore) RetrieveReadWritePermission(cc privdata.CollectionCriteria, sp *peer.SignedProposal, qe ledger.QueryExecutor) (bool, bool, error) {
   288  	panic("implement me")
   289  }
   290  
   291  func (cs *collectionStore) RetrieveCollectionConfigPackage(cc privdata.CollectionCriteria) (*peer.CollectionConfigPackage, error) {
   292  	return &peer.CollectionConfigPackage{
   293  		Config: []*peer.CollectionConfig{
   294  			{
   295  				Payload: &peer.CollectionConfig_StaticCollectionConfig{
   296  					StaticCollectionConfig: &peer.StaticCollectionConfig{
   297  						Name:              cc.Collection,
   298  						MaximumPeerCount:  1,
   299  						RequiredPeerCount: 1,
   300  					},
   301  				},
   302  			},
   303  		},
   304  	}, nil
   305  }
   306  
   307  func (cs *collectionStore) RetrieveCollectionPersistenceConfigs(cc privdata.CollectionCriteria) (privdata.CollectionPersistenceConfigs, error) {
   308  	panic("implement me")
   309  }
   310  
   311  func (cs *collectionStore) AccessFilter(channelName string, collectionPolicyConfig *peer.CollectionPolicyConfig) (privdata.Filter, error) {
   312  	panic("implement me")
   313  }
   314  
   315  type collectionAccessPolicy struct {
   316  	cs *collectionStore
   317  	n  uint64
   318  }
   319  
   320  func (cap *collectionAccessPolicy) MemberOrgs() map[string]struct{} {
   321  	return map[string]struct{}{
   322  		"org0": {},
   323  		"org1": {},
   324  	}
   325  }
   326  
   327  func (cap *collectionAccessPolicy) RequiredPeerCount() int {
   328  	return 1
   329  }
   330  
   331  func (cap *collectionAccessPolicy) MaximumPeerCount() int {
   332  	return 2
   333  }
   334  
   335  func (cap *collectionAccessPolicy) IsMemberOnlyRead() bool {
   336  	return false
   337  }
   338  
   339  func (cap *collectionAccessPolicy) IsMemberOnlyWrite() bool {
   340  	return false
   341  }
   342  
   343  func (cap *collectionAccessPolicy) AccessFilter() privdata.Filter {
   344  	return func(sd protoutil.SignedData) bool {
   345  		that, _ := asn1.Marshal(sd)
   346  		this, _ := asn1.Marshal(cap.cs.expectedSignedData)
   347  		if hex.EncodeToString(that) != hex.EncodeToString(this) {
   348  			panic(fmt.Errorf("self signed data passed isn't equal to expected:%v, %v", sd, cap.cs.expectedSignedData))
   349  		}
   350  
   351  		if cap.cs.acceptsNone {
   352  			return false
   353  		} else if cap.cs.acceptsAll {
   354  			return true
   355  		}
   356  
   357  		_, exists := cap.cs.policies[*cap]
   358  		return exists
   359  	}
   360  }
   361  
   362  func TestPvtDataCollections_FailOnEmptyPayload(t *testing.T) {
   363  	collection := &util.PvtDataCollections{
   364  		&ledger.TxPvtData{
   365  			SeqInBlock: uint64(1),
   366  			WriteSet: &rwset.TxPvtReadWriteSet{
   367  				DataModel: rwset.TxReadWriteSet_KV,
   368  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   369  					{
   370  						Namespace: "ns1",
   371  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   372  							{
   373  								CollectionName: "secretCollection",
   374  								Rwset:          []byte{1, 2, 3, 4, 5, 6, 7},
   375  							},
   376  						},
   377  					},
   378  				},
   379  			},
   380  		},
   381  
   382  		nil,
   383  	}
   384  
   385  	_, err := collection.Marshal()
   386  	assertion := require.New(t)
   387  	assertion.Error(err, "Expected to fail since second item has nil payload")
   388  	assertion.Equal("Mallformed private data payload, rwset index 1 is nil", fmt.Sprintf("%s", err))
   389  }
   390  
   391  func TestPvtDataCollections_FailMarshalingWriteSet(t *testing.T) {
   392  	collection := &util.PvtDataCollections{
   393  		&ledger.TxPvtData{
   394  			SeqInBlock: uint64(1),
   395  			WriteSet:   nil,
   396  		},
   397  	}
   398  
   399  	_, err := collection.Marshal()
   400  	assertion := require.New(t)
   401  	assertion.Error(err, "Expected to fail since first item has nil writeset")
   402  	assertion.Contains(fmt.Sprintf("%s", err), "Could not marshal private rwset index 0")
   403  }
   404  
   405  func TestPvtDataCollections_Marshal(t *testing.T) {
   406  	collection := &util.PvtDataCollections{
   407  		&ledger.TxPvtData{
   408  			SeqInBlock: uint64(1),
   409  			WriteSet: &rwset.TxPvtReadWriteSet{
   410  				DataModel: rwset.TxReadWriteSet_KV,
   411  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   412  					{
   413  						Namespace: "ns1",
   414  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   415  							{
   416  								CollectionName: "secretCollection",
   417  								Rwset:          []byte{1, 2, 3, 4, 5, 6, 7},
   418  							},
   419  						},
   420  					},
   421  				},
   422  			},
   423  		},
   424  
   425  		&ledger.TxPvtData{
   426  			SeqInBlock: uint64(2),
   427  			WriteSet: &rwset.TxPvtReadWriteSet{
   428  				DataModel: rwset.TxReadWriteSet_KV,
   429  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   430  					{
   431  						Namespace: "ns1",
   432  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   433  							{
   434  								CollectionName: "secretCollection",
   435  								Rwset:          []byte{42, 42, 42, 42, 42, 42, 42},
   436  							},
   437  						},
   438  					},
   439  					{
   440  						Namespace: "ns2",
   441  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   442  							{
   443  								CollectionName: "otherCollection",
   444  								Rwset:          []byte{10, 9, 8, 7, 6, 5, 4, 3, 2, 1},
   445  							},
   446  						},
   447  					},
   448  				},
   449  			},
   450  		},
   451  	}
   452  
   453  	bytes, err := collection.Marshal()
   454  
   455  	assertion := require.New(t)
   456  	assertion.NoError(err)
   457  	assertion.NotNil(bytes)
   458  	assertion.Equal(2, len(bytes))
   459  }
   460  
   461  func TestPvtDataCollections_Unmarshal(t *testing.T) {
   462  	collection := util.PvtDataCollections{
   463  		&ledger.TxPvtData{
   464  			SeqInBlock: uint64(1),
   465  			WriteSet: &rwset.TxPvtReadWriteSet{
   466  				DataModel: rwset.TxReadWriteSet_KV,
   467  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   468  					{
   469  						Namespace: "ns1",
   470  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   471  							{
   472  								CollectionName: "secretCollection",
   473  								Rwset:          []byte{1, 2, 3, 4, 5, 6, 7},
   474  							},
   475  						},
   476  					},
   477  				},
   478  			},
   479  		},
   480  	}
   481  
   482  	bytes, err := collection.Marshal()
   483  
   484  	assertion := require.New(t)
   485  	assertion.NoError(err)
   486  	assertion.NotNil(bytes)
   487  	assertion.Equal(1, len(bytes))
   488  
   489  	var newCol util.PvtDataCollections
   490  
   491  	err = newCol.Unmarshal(bytes)
   492  	assertion.NoError(err)
   493  	assertion.Equal(1, len(newCol))
   494  	assertion.Equal(newCol[0].SeqInBlock, collection[0].SeqInBlock)
   495  	assertion.True(pb.Equal(newCol[0].WriteSet, collection[0].WriteSet))
   496  }
   497  
   498  type rwsTriplet struct {
   499  	namespace  string
   500  	collection string
   501  	rwset      string
   502  }
   503  
   504  func flattenTxPvtDataMap(pd ledger.TxPvtDataMap) map[uint64]map[rwsTriplet]struct{} {
   505  	m := make(map[uint64]map[rwsTriplet]struct{})
   506  	for seqInBlock, namespaces := range pd {
   507  		triplets := make(map[rwsTriplet]struct{})
   508  		for _, namespace := range namespaces.WriteSet.NsPvtRwset {
   509  			for _, col := range namespace.CollectionPvtRwset {
   510  				triplets[rwsTriplet{
   511  					namespace:  namespace.Namespace,
   512  					collection: col.CollectionName,
   513  					rwset:      hex.EncodeToString(col.Rwset),
   514  				}] = struct{}{}
   515  			}
   516  		}
   517  		m[seqInBlock] = triplets
   518  	}
   519  	return m
   520  }
   521  
   522  var expectedCommittedPrivateData1 = map[uint64]*ledger.TxPvtData{
   523  	0: {SeqInBlock: 0, WriteSet: &rwset.TxPvtReadWriteSet{
   524  		DataModel: rwset.TxReadWriteSet_KV,
   525  		NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   526  			{
   527  				Namespace: "ns1",
   528  				CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   529  					{
   530  						CollectionName: "c1",
   531  						Rwset:          []byte("rws-pre-image"),
   532  					},
   533  					{
   534  						CollectionName: "c2",
   535  						Rwset:          []byte("rws-pre-image"),
   536  					},
   537  				},
   538  			},
   539  		},
   540  	}},
   541  	1: {SeqInBlock: 1, WriteSet: &rwset.TxPvtReadWriteSet{
   542  		DataModel: rwset.TxReadWriteSet_KV,
   543  		NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   544  			{
   545  				Namespace: "ns2",
   546  				CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   547  					{
   548  						CollectionName: "c1",
   549  						Rwset:          []byte("rws-pre-image"),
   550  					},
   551  				},
   552  			},
   553  		},
   554  	}},
   555  }
   556  
   557  var expectedCommittedPrivateData2 = map[uint64]*ledger.TxPvtData{
   558  	0: {SeqInBlock: 0, WriteSet: &rwset.TxPvtReadWriteSet{
   559  		DataModel: rwset.TxReadWriteSet_KV,
   560  		NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   561  			{
   562  				Namespace: "ns3",
   563  				CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   564  					{
   565  						CollectionName: "c3",
   566  						Rwset:          []byte("rws-pre-image"),
   567  					},
   568  				},
   569  			},
   570  		},
   571  	}},
   572  }
   573  
   574  var expectedCommittedPrivateData3 = map[uint64]*ledger.TxPvtData{}
   575  
   576  func TestCoordinatorStoreInvalidBlock(t *testing.T) {
   577  	err := msptesttools.LoadMSPSetupForTesting()
   578  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   579  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
   580  	require.NoError(t, err)
   581  	serializedID, err := identity.Serialize()
   582  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   583  	data := []byte{1, 2, 3}
   584  	signature, err := identity.Sign(data)
   585  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   586  	mspID := "Org1MSP"
   587  	peerSelfSignedData := protoutil.SignedData{
   588  		Identity:  serializedID,
   589  		Signature: signature,
   590  		Data:      data,
   591  	}
   592  
   593  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
   594  
   595  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
   596  	committer := &privdatamocks.Committer{}
   597  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   598  		t.Fatal("Shouldn't have committed")
   599  	}).Return(nil)
   600  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
   601  
   602  	store := newTransientStore(t)
   603  	defer store.tearDown()
   604  
   605  	assertPurged := func(txns ...string) {
   606  		for _, txn := range txns {
   607  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
   608  			if err != nil {
   609  				t.Fatalf("Failed iterating, got err %s", err)
   610  				iterator.Close()
   611  				return
   612  			}
   613  			res, err := iterator.Next()
   614  			if err != nil {
   615  				t.Fatalf("Failed iterating, got err %s", err)
   616  				iterator.Close()
   617  				return
   618  			}
   619  			require.Nil(t, res)
   620  			iterator.Close()
   621  		}
   622  	}
   623  	fetcher := &fetcherMock{t: t}
   624  	pdFactory := &pvtDataFactory{}
   625  	bf := &blockFactory{
   626  		channelID: "testchannelid",
   627  	}
   628  
   629  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
   630  		return mspmgmt.GetManagerForChain("testchannelid")
   631  	})
   632  	block := bf.withoutMetadata().create()
   633  	// Scenario I: Block we got doesn't have any metadata with it
   634  	pvtData := pdFactory.create()
   635  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   636  	capabilityProvider := &privdatamocks.CapabilityProvider{}
   637  	appCapability := &privdatamocks.AppCapabilities{}
   638  	capabilityProvider.On("Capabilities").Return(appCapability)
   639  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
   640  	coordinator := NewCoordinator(mspID, Support{
   641  		ChainID:            "testchannelid",
   642  		CollectionStore:    cs,
   643  		Committer:          committer,
   644  		Fetcher:            fetcher,
   645  		Validator:          &validatorMock{},
   646  		CapabilityProvider: capabilityProvider,
   647  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   648  	err = coordinator.StoreBlock(block, pvtData)
   649  	require.Error(t, err)
   650  	require.Contains(t, err.Error(), "Block.Metadata is nil or Block.Metadata lacks a Tx filter bitmap")
   651  
   652  	// Scenario II: Validator has an error while validating the block
   653  	block = bf.create()
   654  	pvtData = pdFactory.create()
   655  	coordinator = NewCoordinator(mspID, Support{
   656  		ChainID:            "testchannelid",
   657  		CollectionStore:    cs,
   658  		Committer:          committer,
   659  		Fetcher:            fetcher,
   660  		Validator:          &validatorMock{fmt.Errorf("failed validating block")},
   661  		CapabilityProvider: capabilityProvider,
   662  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   663  	err = coordinator.StoreBlock(block, pvtData)
   664  	require.Error(t, err)
   665  	require.Contains(t, err.Error(), "failed validating block")
   666  
   667  	// Scenario III: Block we got contains an inadequate length of Tx filter in the metadata
   668  	block = bf.withMetadataSize(100).create()
   669  	pvtData = pdFactory.create()
   670  	coordinator = NewCoordinator(mspID, Support{
   671  		ChainID:            "testchannelid",
   672  		CollectionStore:    cs,
   673  		Committer:          committer,
   674  		Fetcher:            fetcher,
   675  		Validator:          &validatorMock{},
   676  		CapabilityProvider: capabilityProvider,
   677  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   678  	err = coordinator.StoreBlock(block, pvtData)
   679  	require.Error(t, err)
   680  	require.Contains(t, err.Error(), "block data size")
   681  	require.Contains(t, err.Error(), "is different from Tx filter size")
   682  
   683  	// Scenario IV: The second transaction in the block we got is invalid, and we have no private data for that.
   684  	// As the StorePvtDataOfInvalidTx is set of false, if the coordinator would try to fetch private data, the
   685  	// test would fall because we haven't defined the mock operations for the transientstore (or for gossip)
   686  	// in this test.
   687  	var commitHappened bool
   688  	assertCommitHappened := func() {
   689  		require.True(t, commitHappened)
   690  		commitHappened = false
   691  	}
   692  	digKeys := []privdatacommon.DigKey{
   693  		{
   694  			TxId:       "tx2",
   695  			Namespace:  "ns2",
   696  			Collection: "c1",
   697  			BlockSeq:   1,
   698  			SeqInBlock: 1,
   699  		},
   700  	}
   701  	fetcher = &fetcherMock{t: t}
   702  	fetcher.On("fetch", mock.Anything).expectingDigests(digKeys).expectingEndorsers(identity.GetMSPIdentifier()).Return(&privdatacommon.FetchedPvtDataContainer{
   703  		AvailableElements: nil,
   704  	}, nil)
   705  	committer = &privdatamocks.Committer{}
   706  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   707  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
   708  		commitHappened = true
   709  		// Only the first transaction's private data is passed to the ledger
   710  		require.Len(t, privateDataPassed2Ledger, 1)
   711  		require.Equal(t, 0, int(privateDataPassed2Ledger[0].SeqInBlock))
   712  		// The private data passed to the ledger contains "ns1" and has 2 collections in it
   713  		require.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset, 1)
   714  		require.Equal(t, "ns1", privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].Namespace)
   715  		require.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 2)
   716  	}).Return(nil)
   717  	block = bf.withInvalidTxns(1).AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create()
   718  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").create()
   719  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   720  
   721  	capabilityProvider = &privdatamocks.CapabilityProvider{}
   722  	appCapability = &privdatamocks.AppCapabilities{}
   723  	capabilityProvider.On("Capabilities").Return(appCapability)
   724  	appCapability.On("StorePvtDataOfInvalidTx").Return(false)
   725  	coordinator = NewCoordinator(mspID, Support{
   726  		ChainID:            "testchannelid",
   727  		CollectionStore:    cs,
   728  		Committer:          committer,
   729  		Fetcher:            fetcher,
   730  		Validator:          &validatorMock{},
   731  		CapabilityProvider: capabilityProvider,
   732  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   733  	err = coordinator.StoreBlock(block, pvtData)
   734  	require.NoError(t, err)
   735  	assertCommitHappened()
   736  	// Ensure the 2nd transaction which is invalid and wasn't committed - is still purged.
   737  	// This is so that if we get a transaction via dissemination from an endorser, we purge it
   738  	// when its block comes.
   739  	assertPurged("tx1", "tx2")
   740  
   741  	// Scenario V: The second transaction in the block we got is invalid, and we have no private
   742  	// data for that in the transient store. As we have set StorePvtDataOfInvalidTx to true and
   743  	// configured the coordinator to skip pulling pvtData of invalid transactions from other peers,
   744  	// it should not store the pvtData of invalid transaction in the ledger instead a missing entry.
   745  	testConfig.SkipPullingInvalidTransactions = true
   746  	assertCommitHappened = func() {
   747  		require.True(t, commitHappened)
   748  		commitHappened = false
   749  	}
   750  	committer = &privdatamocks.Committer{}
   751  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   752  		blockAndPvtData := args.Get(0).(*ledger.BlockAndPvtData)
   753  		commitHappened = true
   754  		// Only the first transaction's private data is passed to the ledger
   755  		privateDataPassed2Ledger := blockAndPvtData.PvtData
   756  		require.Len(t, privateDataPassed2Ledger, 1)
   757  		require.Equal(t, 0, int(privateDataPassed2Ledger[0].SeqInBlock))
   758  		// The private data passed to the ledger contains "ns1" and has 2 collections in it
   759  		require.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset, 1)
   760  		require.Equal(t, "ns1", privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].Namespace)
   761  		require.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 2)
   762  
   763  		missingPrivateDataPassed2Ledger := blockAndPvtData.MissingPvtData
   764  		require.Len(t, missingPrivateDataPassed2Ledger, 1)
   765  		require.Len(t, missingPrivateDataPassed2Ledger[1], 1)
   766  		require.Equal(t, missingPrivateDataPassed2Ledger[1][0].Namespace, "ns2")
   767  		require.Equal(t, missingPrivateDataPassed2Ledger[1][0].Collection, "c1")
   768  		require.Equal(t, missingPrivateDataPassed2Ledger[1][0].IsEligible, true)
   769  
   770  		commitOpts := args.Get(1).(*ledger.CommitOptions)
   771  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
   772  		require.Equal(t, expectedCommitOpts, commitOpts)
   773  	}).Return(nil)
   774  
   775  	block = bf.withInvalidTxns(1).AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create()
   776  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").create()
   777  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   778  	capabilityProvider = &privdatamocks.CapabilityProvider{}
   779  	appCapability = &privdatamocks.AppCapabilities{}
   780  	capabilityProvider.On("Capabilities").Return(appCapability)
   781  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
   782  	digKeys = []privdatacommon.DigKey{}
   783  	fetcher = &fetcherMock{t: t}
   784  	fetcher.On("fetch", mock.Anything).expectingDigests(digKeys).Return(&privdatacommon.FetchedPvtDataContainer{
   785  		AvailableElements: nil,
   786  	}, nil)
   787  	coordinator = NewCoordinator(mspID, Support{
   788  		ChainID:            "testchannelid",
   789  		CollectionStore:    cs,
   790  		Committer:          committer,
   791  		Fetcher:            fetcher,
   792  		Validator:          &validatorMock{},
   793  		CapabilityProvider: capabilityProvider,
   794  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   795  	err = coordinator.StoreBlock(block, pvtData)
   796  	require.NoError(t, err)
   797  	assertCommitHappened()
   798  	assertPurged("tx1", "tx2")
   799  
   800  	// Scenario VI: The second transaction in the block we got is invalid. As we have set the
   801  	// StorePvtDataOfInvalidTx to true and configured the coordinator to pull pvtData of invalid
   802  	// transactions, it should store the pvtData of invalid transactions in the ledger.
   803  	testConfig.SkipPullingInvalidTransactions = false
   804  	committer = &privdatamocks.Committer{}
   805  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   806  		blockAndPvtData := args.Get(0).(*ledger.BlockAndPvtData)
   807  		commitHappened = true
   808  		// pvtData of both transactions must be present though the second transaction
   809  		// is invalid.
   810  		privateDataPassed2Ledger := blockAndPvtData.PvtData
   811  		require.Len(t, privateDataPassed2Ledger, 2)
   812  		require.Equal(t, 0, int(privateDataPassed2Ledger[0].SeqInBlock))
   813  		require.Equal(t, 1, int(privateDataPassed2Ledger[1].SeqInBlock))
   814  		// The private data passed to the ledger for tx1 contains "ns1" and has 2 collections in it
   815  		require.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset, 1)
   816  		require.Equal(t, "ns1", privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].Namespace)
   817  		require.Len(t, privateDataPassed2Ledger[0].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 2)
   818  		// The private data passed to the ledger for tx2 contains "ns2" and has 1 collection in it
   819  		require.Len(t, privateDataPassed2Ledger[1].WriteSet.NsPvtRwset, 1)
   820  		require.Equal(t, "ns2", privateDataPassed2Ledger[1].WriteSet.NsPvtRwset[0].Namespace)
   821  		require.Len(t, privateDataPassed2Ledger[1].WriteSet.NsPvtRwset[0].CollectionPvtRwset, 1)
   822  
   823  		missingPrivateDataPassed2Ledger := blockAndPvtData.MissingPvtData
   824  		require.Len(t, missingPrivateDataPassed2Ledger, 0)
   825  
   826  		commitOpts := args.Get(1).(*ledger.CommitOptions)
   827  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
   828  		require.Equal(t, expectedCommitOpts, commitOpts)
   829  	}).Return(nil)
   830  
   831  	fetcher = &fetcherMock{t: t}
   832  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
   833  		{
   834  			TxId: "tx2", Namespace: "ns2", Collection: "c1", BlockSeq: 1, SeqInBlock: 1,
   835  		},
   836  	}).Return(&privdatacommon.FetchedPvtDataContainer{
   837  		AvailableElements: []*proto.PvtDataElement{
   838  			{
   839  				Digest: &proto.PvtDataDigest{
   840  					SeqInBlock: 1,
   841  					BlockSeq:   1,
   842  					Collection: "c1",
   843  					Namespace:  "ns2",
   844  					TxId:       "tx2",
   845  				},
   846  				Payload: [][]byte{[]byte("rws-pre-image")},
   847  			},
   848  		},
   849  	}, nil)
   850  
   851  	block = bf.withInvalidTxns(1).AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
   852  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").create()
   853  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").create()
   854  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   855  	coordinator = NewCoordinator(mspID, Support{
   856  		ChainID:            "testchannelid",
   857  		CollectionStore:    cs,
   858  		Committer:          committer,
   859  		Fetcher:            fetcher,
   860  		Validator:          &validatorMock{},
   861  		CapabilityProvider: capabilityProvider,
   862  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   863  	err = coordinator.StoreBlock(block, pvtData)
   864  	require.NoError(t, err)
   865  	assertCommitHappened()
   866  	assertPurged("tx1", "tx2")
   867  
   868  	// Scenario VII: Block doesn't contain a header
   869  	block.Header = nil
   870  	err = coordinator.StoreBlock(block, pvtData)
   871  	require.Error(t, err)
   872  	require.Contains(t, err.Error(), "Block header is nil")
   873  
   874  	// Scenario VIII: Block doesn't contain Data
   875  	block.Data = nil
   876  	err = coordinator.StoreBlock(block, pvtData)
   877  	require.Error(t, err)
   878  	require.Contains(t, err.Error(), "Block data is empty")
   879  }
   880  
   881  func TestCoordinatorToFilterOutPvtRWSetsWithWrongHash(t *testing.T) {
   882  	/*
   883  		Test case, where peer receives new block for commit
   884  		it has ns1:c1 in transient store, while it has wrong
   885  		hash, hence it will fetch ns1:c1 from other peers
   886  	*/
   887  	err := msptesttools.LoadMSPSetupForTesting()
   888  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   889  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
   890  	require.NoError(t, err)
   891  	serializedID, err := identity.Serialize()
   892  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   893  	data := []byte{1, 2, 3}
   894  	signature, err := identity.Sign(data)
   895  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   896  	mspID := "Org1MSP"
   897  	peerSelfSignedData := protoutil.SignedData{
   898  		Identity:  serializedID,
   899  		Signature: signature,
   900  		Data:      data,
   901  	}
   902  
   903  	expectedPvtData := map[uint64]*ledger.TxPvtData{
   904  		0: {SeqInBlock: 0, WriteSet: &rwset.TxPvtReadWriteSet{
   905  			DataModel: rwset.TxReadWriteSet_KV,
   906  			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   907  				{
   908  					Namespace: "ns1",
   909  					CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
   910  						{
   911  							CollectionName: "c1",
   912  							Rwset:          []byte("rws-original"),
   913  						},
   914  					},
   915  				},
   916  			},
   917  		}},
   918  	}
   919  
   920  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
   921  	committer := &privdatamocks.Committer{}
   922  
   923  	store := newTransientStore(t)
   924  	defer store.tearDown()
   925  
   926  	assertPurged := func(txns ...string) {
   927  		for _, txn := range txns {
   928  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
   929  			if err != nil {
   930  				t.Fatalf("Failed iterating, got err %s", err)
   931  				iterator.Close()
   932  				return
   933  			}
   934  			res, err := iterator.Next()
   935  			if err != nil {
   936  				t.Fatalf("Failed iterating, got err %s", err)
   937  				iterator.Close()
   938  				return
   939  			}
   940  			require.Nil(t, res)
   941  			iterator.Close()
   942  		}
   943  	}
   944  
   945  	fetcher := &fetcherMock{t: t}
   946  
   947  	var commitHappened bool
   948  
   949  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
   950  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
   951  		require.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
   952  			flattenTxPvtDataMap(expectedPvtData)))
   953  		commitHappened = true
   954  
   955  		commitOpts := args.Get(1).(*ledger.CommitOptions)
   956  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
   957  		require.Equal(t, expectedCommitOpts, commitOpts)
   958  	}).Return(nil)
   959  
   960  	hash := util2.ComputeSHA256([]byte("rws-original"))
   961  	bf := &blockFactory{
   962  		channelID: "testchannelid",
   963  	}
   964  
   965  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
   966  		return mspmgmt.GetManagerForChain("testchannelid")
   967  	})
   968  
   969  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1").create()
   970  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
   971  
   972  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
   973  
   974  	capabilityProvider := &privdatamocks.CapabilityProvider{}
   975  	appCapability := &privdatamocks.AppCapabilities{}
   976  	capabilityProvider.On("Capabilities").Return(appCapability)
   977  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
   978  	coordinator := NewCoordinator(mspID, Support{
   979  		ChainID:            "testchannelid",
   980  		CollectionStore:    cs,
   981  		Committer:          committer,
   982  		Fetcher:            fetcher,
   983  		Validator:          &validatorMock{},
   984  		CapabilityProvider: capabilityProvider,
   985  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
   986  
   987  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
   988  		{
   989  			TxId: "tx1", Namespace: "ns1", Collection: "c1", BlockSeq: 1,
   990  		},
   991  	}).Return(&privdatacommon.FetchedPvtDataContainer{
   992  		AvailableElements: []*proto.PvtDataElement{
   993  			{
   994  				Digest: &proto.PvtDataDigest{
   995  					BlockSeq:   1,
   996  					Collection: "c1",
   997  					Namespace:  "ns1",
   998  					TxId:       "tx1",
   999  				},
  1000  				Payload: [][]byte{[]byte("rws-original")},
  1001  			},
  1002  		},
  1003  	}, nil)
  1004  
  1005  	coordinator.StoreBlock(block, nil)
  1006  	// Assert blocks was eventually committed
  1007  	require.True(t, commitHappened)
  1008  
  1009  	// Assert transaction has been purged
  1010  	assertPurged("tx1")
  1011  }
  1012  
  1013  func TestCoordinatorStoreBlock(t *testing.T) {
  1014  	err := msptesttools.LoadMSPSetupForTesting()
  1015  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1016  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1017  	require.NoError(t, err)
  1018  	serializedID, err := identity.Serialize()
  1019  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1020  	data := []byte{1, 2, 3}
  1021  	signature, err := identity.Sign(data)
  1022  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1023  	mspID := "Org1MSP"
  1024  	peerSelfSignedData := protoutil.SignedData{
  1025  		Identity:  serializedID,
  1026  		Signature: signature,
  1027  		Data:      data,
  1028  	}
  1029  	// Green path test, all private data should be obtained successfully
  1030  
  1031  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1032  
  1033  	var commitHappened bool
  1034  	assertCommitHappened := func() {
  1035  		require.True(t, commitHappened)
  1036  		commitHappened = false
  1037  	}
  1038  	committer := &privdatamocks.Committer{}
  1039  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1040  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1041  		require.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1042  			flattenTxPvtDataMap(expectedCommittedPrivateData1)))
  1043  		commitHappened = true
  1044  
  1045  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1046  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1047  		require.Equal(t, expectedCommitOpts, commitOpts)
  1048  	}).Return(nil)
  1049  
  1050  	store := newTransientStore(t)
  1051  	defer store.tearDown()
  1052  
  1053  	assertPurged := func(txns ...string) bool {
  1054  		for _, txn := range txns {
  1055  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
  1056  			if err != nil {
  1057  				iterator.Close()
  1058  				t.Fatalf("Failed iterating, got err %s", err)
  1059  			}
  1060  			res, err := iterator.Next()
  1061  			iterator.Close()
  1062  			if err != nil {
  1063  				t.Fatalf("Failed iterating, got err %s", err)
  1064  			}
  1065  			if res != nil {
  1066  				return false
  1067  			}
  1068  		}
  1069  		return true
  1070  	}
  1071  
  1072  	fetcher := &fetcherMock{t: t}
  1073  
  1074  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1075  	pdFactory := &pvtDataFactory{}
  1076  	bf := &blockFactory{
  1077  		channelID: "testchannelid",
  1078  	}
  1079  
  1080  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1081  		return mspmgmt.GetManagerForChain("testchannelid")
  1082  	})
  1083  
  1084  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
  1085  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").create()
  1086  
  1087  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1088  
  1089  	fmt.Println("Scenario I")
  1090  	// Scenario I: Block we got has sufficient private data alongside it.
  1091  	// If the coordinator tries fetching from the transientstore, or peers it would result in panic,
  1092  	// because we didn't define yet the "On(...)" invocation of the transient store or other peers.
  1093  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1").create()
  1094  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1095  
  1096  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1097  	appCapability := &privdatamocks.AppCapabilities{}
  1098  	capabilityProvider.On("Capabilities").Return(appCapability)
  1099  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1100  	coordinator := NewCoordinator(mspID, Support{
  1101  		ChainID:            "testchannelid",
  1102  		CollectionStore:    cs,
  1103  		Committer:          committer,
  1104  		Fetcher:            fetcher,
  1105  		Validator:          &validatorMock{},
  1106  		CapabilityProvider: capabilityProvider,
  1107  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1108  	err = coordinator.StoreBlock(block, pvtData)
  1109  	require.NoError(t, err)
  1110  	assertCommitHappened()
  1111  	assertPurgeTxs := func() bool {
  1112  		return assertPurged("tx1", "tx2")
  1113  	}
  1114  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1115  
  1116  	fmt.Println("Scenario II")
  1117  	// Scenario II: Block we got doesn't have sufficient private data alongside it,
  1118  	// it is missing ns1: c2, but the data exists in the transient store
  1119  	store.Persist("tx1", 1, &tspb.TxPvtReadWriteSetWithConfigInfo{
  1120  		PvtRwset: &rwset.TxPvtReadWriteSet{
  1121  			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1122  				{
  1123  					Namespace: "ns1",
  1124  					CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
  1125  						{
  1126  							CollectionName: "c2",
  1127  							Rwset:          []byte("rws-pre-image"),
  1128  						},
  1129  					},
  1130  				},
  1131  			},
  1132  		},
  1133  		CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1134  	})
  1135  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1").addRWSet().addNSRWSet("ns2", "c1").create()
  1136  	err = coordinator.StoreBlock(block, pvtData)
  1137  	require.NoError(t, err)
  1138  	assertCommitHappened()
  1139  	assertPurgeTxs = func() bool {
  1140  		return assertPurged("tx1", "tx2")
  1141  	}
  1142  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1143  
  1144  	fmt.Println("Scenario III")
  1145  	// Scenario III: Block doesn't have sufficient private data alongside it,
  1146  	// it is missing ns1: c2, and the data exists in the transient store,
  1147  	// but it is also missing ns2: c1, and that data doesn't exist in the transient store - but in a peer.
  1148  	// Additionally, the coordinator should pass an endorser identity of org1, but not of org2, since
  1149  	// the MemberOrgs() call doesn't return org2 but only org0 and org1.
  1150  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1151  		{
  1152  			TxId: "tx1", Namespace: "ns1", Collection: "c2", BlockSeq: 1,
  1153  		},
  1154  		{
  1155  			TxId: "tx2", Namespace: "ns2", Collection: "c1", BlockSeq: 1, SeqInBlock: 1,
  1156  		},
  1157  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1158  		AvailableElements: []*proto.PvtDataElement{
  1159  			{
  1160  				Digest: &proto.PvtDataDigest{
  1161  					BlockSeq:   1,
  1162  					Collection: "c2",
  1163  					Namespace:  "ns1",
  1164  					TxId:       "tx1",
  1165  				},
  1166  				Payload: [][]byte{[]byte("rws-pre-image")},
  1167  			},
  1168  			{
  1169  				Digest: &proto.PvtDataDigest{
  1170  					SeqInBlock: 1,
  1171  					BlockSeq:   1,
  1172  					Collection: "c1",
  1173  					Namespace:  "ns2",
  1174  					TxId:       "tx2",
  1175  				},
  1176  				Payload: [][]byte{[]byte("rws-pre-image")},
  1177  			},
  1178  		},
  1179  	}, nil)
  1180  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1").create()
  1181  	err = coordinator.StoreBlock(block, pvtData)
  1182  	require.NoError(t, err)
  1183  	assertCommitHappened()
  1184  	assertPurgeTxs = func() bool {
  1185  		return assertPurged("tx1", "tx2")
  1186  	}
  1187  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1188  
  1189  	fmt.Println("Scenario IV")
  1190  	// Scenario IV: Block came with more than sufficient private data alongside it, some of it is redundant.
  1191  	pvtData = pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2", "c3").
  1192  		addRWSet().addNSRWSet("ns2", "c1", "c3").addRWSet().addNSRWSet("ns1", "c4").create()
  1193  	err = coordinator.StoreBlock(block, pvtData)
  1194  	require.NoError(t, err)
  1195  	assertCommitHappened()
  1196  	assertPurgeTxs = func() bool {
  1197  		return assertPurged("tx1", "tx2")
  1198  	}
  1199  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1200  
  1201  	fmt.Println("Scenario V")
  1202  	// Scenario V: Block we got has private data alongside it but coordinator cannot retrieve collection access
  1203  	// policy of collections due to databse unavailability error.
  1204  	// we verify that the error propagates properly.
  1205  	mockCs := &privdatamocks.CollectionStore{}
  1206  	mockCs.On("RetrieveCollectionConfig", mock.Anything).Return(nil, errors.New("test error"))
  1207  	coordinator = NewCoordinator(mspID, Support{
  1208  		ChainID:            "testchannelid",
  1209  		CollectionStore:    mockCs,
  1210  		Committer:          committer,
  1211  		Fetcher:            fetcher,
  1212  		Validator:          &validatorMock{},
  1213  		CapabilityProvider: capabilityProvider,
  1214  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1215  	err = coordinator.StoreBlock(block, nil)
  1216  	require.Error(t, err)
  1217  	require.Equal(t, "test error", err.Error())
  1218  
  1219  	fmt.Println("Scenario VI")
  1220  	// Scenario VI: Block didn't get with any private data alongside it, and the transient store
  1221  	// has some problem.
  1222  	// In this case, we should try to fetch data from peers.
  1223  	block = bf.AddTxn("tx3", "ns3", hash, "c3").create()
  1224  	fetcher = &fetcherMock{t: t}
  1225  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1226  		{
  1227  			TxId: "tx3", Namespace: "ns3", Collection: "c3", BlockSeq: 1,
  1228  		},
  1229  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1230  		AvailableElements: []*proto.PvtDataElement{
  1231  			{
  1232  				Digest: &proto.PvtDataDigest{
  1233  					BlockSeq:   1,
  1234  					Collection: "c3",
  1235  					Namespace:  "ns3",
  1236  					TxId:       "tx3",
  1237  				},
  1238  				Payload: [][]byte{[]byte("rws-pre-image")},
  1239  			},
  1240  		},
  1241  	}, nil)
  1242  	committer = &privdatamocks.Committer{}
  1243  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1244  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1245  		require.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1246  			flattenTxPvtDataMap(expectedCommittedPrivateData2)))
  1247  		commitHappened = true
  1248  
  1249  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1250  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1251  		require.Equal(t, expectedCommitOpts, commitOpts)
  1252  	}).Return(nil)
  1253  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1254  	coordinator = NewCoordinator(mspID, Support{
  1255  		ChainID:            "testchannelid",
  1256  		CollectionStore:    cs,
  1257  		Committer:          committer,
  1258  		Fetcher:            fetcher,
  1259  		Validator:          &validatorMock{},
  1260  		CapabilityProvider: capabilityProvider,
  1261  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1262  	err = coordinator.StoreBlock(block, nil)
  1263  	require.NoError(t, err)
  1264  	assertCommitHappened()
  1265  	assertPurgeTxs = func() bool {
  1266  		return assertPurged("tx3")
  1267  	}
  1268  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1269  
  1270  	fmt.Println("Scenario VII")
  1271  	// Scenario VII: Block contains 2 transactions, and the peer is eligible for only tx3-ns3-c3.
  1272  	// Also, the blocks comes with a private data for tx3-ns3-c3 so that the peer won't have to fetch the
  1273  	// private data from the transient store or peers, and in fact- if it attempts to fetch the data it's not eligible
  1274  	// for from the transient store or from peers - the test would fail because the Mock wasn't initialized.
  1275  	block = bf.AddTxn("tx3", "ns3", hash, "c3", "c2", "c1").AddTxn("tx1", "ns1", hash, "c1").create()
  1276  	cs = createcollectionStore(peerSelfSignedData).thatAccepts(CollectionCriteria{
  1277  		Collection: "c3",
  1278  		Namespace:  "ns3",
  1279  		Channel:    "testchannelid",
  1280  	}).withMSPIdentity(identity.GetMSPIdentifier())
  1281  	fetcher = &fetcherMock{t: t}
  1282  	committer = &privdatamocks.Committer{}
  1283  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1284  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1285  		require.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1286  			flattenTxPvtDataMap(expectedCommittedPrivateData2)))
  1287  		commitHappened = true
  1288  
  1289  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1290  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1291  		require.Equal(t, expectedCommitOpts, commitOpts)
  1292  	}).Return(nil)
  1293  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1294  	coordinator = NewCoordinator(mspID, Support{
  1295  		ChainID:            "testchannelid",
  1296  		CollectionStore:    cs,
  1297  		Committer:          committer,
  1298  		Fetcher:            fetcher,
  1299  		Validator:          &validatorMock{},
  1300  		CapabilityProvider: capabilityProvider,
  1301  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1302  
  1303  	pvtData = pdFactory.addRWSet().addNSRWSet("ns3", "c3").create()
  1304  	err = coordinator.StoreBlock(block, pvtData)
  1305  	require.NoError(t, err)
  1306  	assertCommitHappened()
  1307  	// In any case, all transactions in the block are purged from the transient store
  1308  	assertPurgeTxs = func() bool {
  1309  		return assertPurged("tx3", "tx1")
  1310  	}
  1311  	require.Eventually(t, assertPurgeTxs, 2*time.Second, 100*time.Millisecond)
  1312  }
  1313  
  1314  func TestCoordinatorStoreBlockWhenPvtDataExistInLedger(t *testing.T) {
  1315  	err := msptesttools.LoadMSPSetupForTesting()
  1316  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1317  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1318  	require.NoError(t, err)
  1319  	serializedID, err := identity.Serialize()
  1320  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1321  	data := []byte{1, 2, 3}
  1322  	signature, err := identity.Sign(data)
  1323  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1324  	mspID := "Org1MSP"
  1325  	peerSelfSignedData := protoutil.SignedData{
  1326  		Identity:  serializedID,
  1327  		Signature: signature,
  1328  		Data:      data,
  1329  	}
  1330  
  1331  	var commitHappened bool
  1332  	assertCommitHappened := func() {
  1333  		require.True(t, commitHappened)
  1334  		commitHappened = false
  1335  	}
  1336  	committer := &privdatamocks.Committer{}
  1337  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1338  		privateDataPassed2Ledger := args.Get(0).(*ledger.BlockAndPvtData).PvtData
  1339  		require.Equal(t, ledger.TxPvtDataMap{}, privateDataPassed2Ledger)
  1340  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1341  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: true}
  1342  		require.Equal(t, expectedCommitOpts, commitOpts)
  1343  		commitHappened = true
  1344  	}).Return(nil)
  1345  
  1346  	fetcher := &fetcherMock{t: t}
  1347  
  1348  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1349  	pdFactory := &pvtDataFactory{}
  1350  	bf := &blockFactory{
  1351  		channelID: "testchannelid",
  1352  	}
  1353  
  1354  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1355  		return mspmgmt.GetManagerForChain("testchannelid")
  1356  	})
  1357  
  1358  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
  1359  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").create()
  1360  
  1361  	// Scenario: Block we got has been reprocessed and hence the sufficient pvtData is present
  1362  	// in the local pvtdataStore itself. The pvtData would be fetched from the local pvtdataStore.
  1363  	// If the coordinator tries fetching from the transientstore, or peers it would result in panic,
  1364  	// because we didn't define yet the "On(...)" invocation of the transient store or other peers.
  1365  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1").create()
  1366  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(true, nil)
  1367  
  1368  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1369  
  1370  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1371  	appCapability := &privdatamocks.AppCapabilities{}
  1372  	capabilityProvider.On("Capabilities").Return(appCapability)
  1373  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1374  	coordinator := NewCoordinator(mspID, Support{
  1375  		ChainID:            "testchannelid",
  1376  		CollectionStore:    nil,
  1377  		Committer:          committer,
  1378  		Fetcher:            fetcher,
  1379  		Validator:          &validatorMock{},
  1380  		CapabilityProvider: capabilityProvider,
  1381  	}, nil, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1382  	err = coordinator.StoreBlock(block, pvtData)
  1383  	require.NoError(t, err)
  1384  	assertCommitHappened()
  1385  }
  1386  
  1387  func TestProceedWithoutPrivateData(t *testing.T) {
  1388  	// Scenario: we are missing private data (c2 in ns3) and it cannot be obtained from any peer.
  1389  	// Block needs to be committed with missing private data.
  1390  	err := msptesttools.LoadMSPSetupForTesting()
  1391  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1392  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1393  	require.NoError(t, err)
  1394  	serializedID, err := identity.Serialize()
  1395  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1396  	data := []byte{1, 2, 3}
  1397  	signature, err := identity.Sign(data)
  1398  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1399  	mspID := "Org1MSP"
  1400  	peerSelfSignedData := protoutil.SignedData{
  1401  		Identity:  serializedID,
  1402  		Signature: signature,
  1403  		Data:      data,
  1404  	}
  1405  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1406  	var commitHappened bool
  1407  	assertCommitHappened := func() {
  1408  		require.True(t, commitHappened)
  1409  		commitHappened = false
  1410  	}
  1411  	committer := &privdatamocks.Committer{}
  1412  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1413  		blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
  1414  		privateDataPassed2Ledger := blockAndPrivateData.PvtData
  1415  		require.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1416  			flattenTxPvtDataMap(expectedCommittedPrivateData2)))
  1417  		missingPrivateData := blockAndPrivateData.MissingPvtData
  1418  		expectedMissingPvtData := make(ledger.TxMissingPvtData)
  1419  		expectedMissingPvtData.Add(0, "ns3", "c2", true)
  1420  		require.Equal(t, expectedMissingPvtData, missingPrivateData)
  1421  		commitHappened = true
  1422  
  1423  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1424  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1425  		require.Equal(t, expectedCommitOpts, commitOpts)
  1426  	}).Return(nil)
  1427  
  1428  	store := newTransientStore(t)
  1429  	defer store.tearDown()
  1430  
  1431  	assertPurged := func(txns ...string) {
  1432  		for _, txn := range txns {
  1433  			iterator, err := store.GetTxPvtRWSetByTxid(txn, nil)
  1434  			if err != nil {
  1435  				t.Fatalf("Failed iterating, got err %s", err)
  1436  				iterator.Close()
  1437  				return
  1438  			}
  1439  			res, err := iterator.Next()
  1440  			if err != nil {
  1441  				t.Fatalf("Failed iterating, got err %s", err)
  1442  				iterator.Close()
  1443  				return
  1444  			}
  1445  			require.Nil(t, res)
  1446  			iterator.Close()
  1447  		}
  1448  	}
  1449  
  1450  	fetcher := &fetcherMock{t: t}
  1451  	// Have the peer return in response to the pull, a private data with a non matching hash
  1452  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1453  		{
  1454  			TxId: "tx1", Namespace: "ns3", Collection: "c2", BlockSeq: 1,
  1455  		},
  1456  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1457  		AvailableElements: []*proto.PvtDataElement{
  1458  			{
  1459  				Digest: &proto.PvtDataDigest{
  1460  					BlockSeq:   1,
  1461  					Collection: "c2",
  1462  					Namespace:  "ns3",
  1463  					TxId:       "tx1",
  1464  				},
  1465  				Payload: [][]byte{[]byte("wrong pre-image")},
  1466  			},
  1467  		},
  1468  	}, nil)
  1469  
  1470  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1471  	pdFactory := &pvtDataFactory{}
  1472  	bf := &blockFactory{
  1473  		channelID: "testchannelid",
  1474  	}
  1475  
  1476  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1477  		return mspmgmt.GetManagerForChain("testchannelid")
  1478  	})
  1479  
  1480  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1481  
  1482  	block := bf.AddTxn("tx1", "ns3", hash, "c3", "c2").create()
  1483  	pvtData := pdFactory.addRWSet().addNSRWSet("ns3", "c3").create()
  1484  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1485  
  1486  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1487  	appCapability := &privdatamocks.AppCapabilities{}
  1488  	capabilityProvider.On("Capabilities").Return(appCapability)
  1489  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1490  	coordinator := NewCoordinator(mspID, Support{
  1491  		ChainID:            "testchannelid",
  1492  		CollectionStore:    cs,
  1493  		Committer:          committer,
  1494  		Fetcher:            fetcher,
  1495  		Validator:          &validatorMock{},
  1496  		CapabilityProvider: capabilityProvider,
  1497  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1498  	err = coordinator.StoreBlock(block, pvtData)
  1499  	require.NoError(t, err)
  1500  	assertCommitHappened()
  1501  	assertPurged("tx1")
  1502  }
  1503  
  1504  func TestProceedWithInEligiblePrivateData(t *testing.T) {
  1505  	// Scenario: we are missing private data (c2 in ns3) and it cannot be obtained from any peer.
  1506  	// Block needs to be committed with missing private data.
  1507  	err := msptesttools.LoadMSPSetupForTesting()
  1508  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1509  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1510  	require.NoError(t, err)
  1511  	serializedID, err := identity.Serialize()
  1512  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1513  	data := []byte{1, 2, 3}
  1514  	signature, err := identity.Sign(data)
  1515  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1516  	mspID := "Org1MSP"
  1517  	peerSelfSignedData := protoutil.SignedData{
  1518  		Identity:  serializedID,
  1519  		Signature: signature,
  1520  		Data:      data,
  1521  	}
  1522  
  1523  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsNone().withMSPIdentity(identity.GetMSPIdentifier())
  1524  
  1525  	var commitHappened bool
  1526  	assertCommitHappened := func() {
  1527  		require.True(t, commitHappened)
  1528  		commitHappened = false
  1529  	}
  1530  	committer := &privdatamocks.Committer{}
  1531  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1532  		blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
  1533  		privateDataPassed2Ledger := blockAndPrivateData.PvtData
  1534  		require.True(t, reflect.DeepEqual(flattenTxPvtDataMap(privateDataPassed2Ledger),
  1535  			flattenTxPvtDataMap(expectedCommittedPrivateData3)))
  1536  		missingPrivateData := blockAndPrivateData.MissingPvtData
  1537  		expectedMissingPvtData := make(ledger.TxMissingPvtData)
  1538  		expectedMissingPvtData.Add(0, "ns3", "c2", false)
  1539  		require.Equal(t, expectedMissingPvtData, missingPrivateData)
  1540  		commitHappened = true
  1541  
  1542  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1543  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1544  		require.Equal(t, expectedCommitOpts, commitOpts)
  1545  	}).Return(nil)
  1546  
  1547  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1548  	bf := &blockFactory{
  1549  		channelID: "testchannelid",
  1550  	}
  1551  
  1552  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1553  		return mspmgmt.GetManagerForChain("testchannelid")
  1554  	})
  1555  
  1556  	block := bf.AddTxn("tx1", "ns3", hash, "c2").create()
  1557  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1558  
  1559  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1560  
  1561  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1562  	appCapability := &privdatamocks.AppCapabilities{}
  1563  	capabilityProvider.On("Capabilities").Return(appCapability)
  1564  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1565  	coordinator := NewCoordinator(mspID, Support{
  1566  		ChainID:            "testchannelid",
  1567  		CollectionStore:    cs,
  1568  		Committer:          committer,
  1569  		Fetcher:            nil,
  1570  		Validator:          &validatorMock{},
  1571  		CapabilityProvider: capabilityProvider,
  1572  	}, nil, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1573  	err = coordinator.StoreBlock(block, nil)
  1574  	require.NoError(t, err)
  1575  	assertCommitHappened()
  1576  }
  1577  
  1578  func TestCoordinatorGetBlocks(t *testing.T) {
  1579  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1580  	err := msptesttools.LoadMSPSetupForTesting()
  1581  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1582  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1583  	require.NoError(t, err)
  1584  	serializedID, err := identity.Serialize()
  1585  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1586  	data := []byte{1, 2, 3}
  1587  	signature, err := identity.Sign(data)
  1588  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1589  	mspID := "Org1MSP"
  1590  	peerSelfSignedData := protoutil.SignedData{
  1591  		Identity:  serializedID,
  1592  		Signature: signature,
  1593  		Data:      data,
  1594  	}
  1595  
  1596  	store := newTransientStore(t)
  1597  	defer store.tearDown()
  1598  
  1599  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1600  		return mspmgmt.GetManagerForChain("testchannelid")
  1601  	})
  1602  
  1603  	fetcher := &fetcherMock{t: t}
  1604  
  1605  	committer := &privdatamocks.Committer{}
  1606  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1607  
  1608  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1609  	appCapability := &privdatamocks.AppCapabilities{}
  1610  	capabilityProvider.On("Capabilities").Return(appCapability)
  1611  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1612  
  1613  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1614  	bf := &blockFactory{
  1615  		channelID: "testchannelid",
  1616  	}
  1617  	block := bf.AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create()
  1618  
  1619  	// Green path - block and private data is returned, but the requester isn't eligible for all the private data,
  1620  	// but only to a subset of it.
  1621  	cs := createcollectionStore(peerSelfSignedData).thatAccepts(CollectionCriteria{
  1622  		Namespace:  "ns1",
  1623  		Collection: "c2",
  1624  		Channel:    "testchannelid",
  1625  	}).withMSPIdentity(identity.GetMSPIdentifier())
  1626  	committer.Mock = mock.Mock{}
  1627  	committer.On("GetPvtDataAndBlockByNum", mock.Anything).Return(&ledger.BlockAndPvtData{
  1628  		Block:   block,
  1629  		PvtData: expectedCommittedPrivateData1,
  1630  	}, nil)
  1631  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1632  	coordinator := NewCoordinator(mspID, Support{
  1633  		ChainID:            "testchannelid",
  1634  		CollectionStore:    cs,
  1635  		Committer:          committer,
  1636  		Fetcher:            fetcher,
  1637  		Validator:          &validatorMock{},
  1638  		CapabilityProvider: capabilityProvider,
  1639  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1640  	expectedPrivData := (&pvtDataFactory{}).addRWSet().addNSRWSet("ns1", "c2").create()
  1641  	block2, returnedPrivateData, err := coordinator.GetPvtDataAndBlockByNum(1, peerSelfSignedData)
  1642  	require.NoError(t, err)
  1643  	require.Equal(t, block, block2)
  1644  	require.Equal(t, expectedPrivData, []*ledger.TxPvtData(returnedPrivateData))
  1645  
  1646  	// Bad path - error occurs when trying to retrieve the block and private data
  1647  	committer.Mock = mock.Mock{}
  1648  	committer.On("GetPvtDataAndBlockByNum", mock.Anything).Return(nil, errors.New("uh oh"))
  1649  	block2, returnedPrivateData, err = coordinator.GetPvtDataAndBlockByNum(1, peerSelfSignedData)
  1650  	require.Nil(t, block2)
  1651  	require.Empty(t, returnedPrivateData)
  1652  	require.Error(t, err)
  1653  }
  1654  
  1655  func TestPurgeBelowHeight(t *testing.T) {
  1656  	conf := testConfig
  1657  	conf.TransientBlockRetention = 5
  1658  	mspID := "Org1MSP"
  1659  	peerSelfSignedData := protoutil.SignedData{}
  1660  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll()
  1661  
  1662  	committer := &privdatamocks.Committer{}
  1663  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Return(nil)
  1664  
  1665  	store := newTransientStore(t)
  1666  	defer store.tearDown()
  1667  
  1668  	// store 9 data sets initially
  1669  	for i := 0; i < 9; i++ {
  1670  		txID := fmt.Sprintf("tx%d", i+1)
  1671  		store.Persist(txID, uint64(i), &tspb.TxPvtReadWriteSetWithConfigInfo{
  1672  			PvtRwset: &rwset.TxPvtReadWriteSet{
  1673  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1674  					{
  1675  						Namespace: "ns1",
  1676  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
  1677  							{
  1678  								CollectionName: "c1",
  1679  								Rwset:          []byte("rws-pre-image"),
  1680  							},
  1681  						},
  1682  					},
  1683  				},
  1684  			},
  1685  			CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1686  		})
  1687  	}
  1688  	assertPurged := func(purged bool) bool {
  1689  		numTx := 9
  1690  		if purged {
  1691  			numTx = 10
  1692  		}
  1693  		for i := 1; i <= numTx; i++ {
  1694  			txID := fmt.Sprintf("tx%d", i)
  1695  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1696  			if err != nil {
  1697  				iterator.Close()
  1698  				t.Fatalf("Failed iterating, got err %s", err)
  1699  			}
  1700  			res, err := iterator.Next()
  1701  			iterator.Close()
  1702  			if err != nil {
  1703  				t.Fatalf("Failed iterating, got err %s", err)
  1704  			}
  1705  			if (i < 6 || i == numTx) && purged {
  1706  				if res != nil {
  1707  					return false
  1708  				}
  1709  				continue
  1710  			}
  1711  			if res == nil {
  1712  				return false
  1713  			}
  1714  		}
  1715  		return true
  1716  	}
  1717  
  1718  	fetcher := &fetcherMock{t: t}
  1719  
  1720  	bf := &blockFactory{
  1721  		channelID: "testchannelid",
  1722  	}
  1723  
  1724  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1725  		return mspmgmt.GetManagerForChain("testchannelid")
  1726  	})
  1727  
  1728  	pdFactory := &pvtDataFactory{}
  1729  
  1730  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1731  
  1732  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1733  
  1734  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1735  	appCapability := &privdatamocks.AppCapabilities{}
  1736  	capabilityProvider.On("Capabilities").Return(appCapability)
  1737  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1738  	coordinator := NewCoordinator(mspID, Support{
  1739  		ChainID:            "testchannelid",
  1740  		CollectionStore:    cs,
  1741  		Committer:          committer,
  1742  		Fetcher:            fetcher,
  1743  		Validator:          &validatorMock{},
  1744  		CapabilityProvider: capabilityProvider,
  1745  	}, store.store, peerSelfSignedData, metrics, conf, idDeserializerFactory)
  1746  
  1747  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1748  	block := bf.AddTxn("tx10", "ns1", hash, "c1").create()
  1749  	block.Header.Number = 10
  1750  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1").create()
  1751  	// test no blocks purged yet
  1752  	assertPurgedBlocks := func() bool {
  1753  		return assertPurged(false)
  1754  	}
  1755  	require.Eventually(t, assertPurgedBlocks, 2*time.Second, 100*time.Millisecond)
  1756  	err := coordinator.StoreBlock(block, pvtData)
  1757  	require.NoError(t, err)
  1758  	// test first 6 blocks were purged
  1759  	assertPurgedBlocks = func() bool {
  1760  		return assertPurged(true)
  1761  	}
  1762  	require.Eventually(t, assertPurgedBlocks, 2*time.Second, 100*time.Millisecond)
  1763  }
  1764  
  1765  func TestCoordinatorStorePvtData(t *testing.T) {
  1766  	mspID := "Org1MSP"
  1767  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1768  	cs := createcollectionStore(protoutil.SignedData{}).thatAcceptsAll()
  1769  	committer := &privdatamocks.Committer{}
  1770  
  1771  	store := newTransientStore(t)
  1772  	defer store.tearDown()
  1773  
  1774  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1775  		return mspmgmt.GetManagerForChain("testchannelid")
  1776  	})
  1777  
  1778  	fetcher := &fetcherMock{t: t}
  1779  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1780  
  1781  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1782  	appCapability := &privdatamocks.AppCapabilities{}
  1783  	capabilityProvider.On("Capabilities").Return(appCapability)
  1784  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1785  	coordinator := NewCoordinator(mspID, Support{
  1786  		ChainID:            "testchannelid",
  1787  		CollectionStore:    cs,
  1788  		Committer:          committer,
  1789  		Fetcher:            fetcher,
  1790  		Validator:          &validatorMock{},
  1791  		CapabilityProvider: capabilityProvider,
  1792  	}, store.store, protoutil.SignedData{}, metrics, testConfig, idDeserializerFactory)
  1793  	pvtData := (&pvtDataFactory{}).addRWSet().addNSRWSet("ns1", "c1").create()
  1794  	// Green path: ledger height can be retrieved from ledger/committer
  1795  	err := coordinator.StorePvtData("tx1", &tspb.TxPvtReadWriteSetWithConfigInfo{
  1796  		PvtRwset:          pvtData[0].WriteSet,
  1797  		CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1798  	}, uint64(5))
  1799  	require.NoError(t, err)
  1800  }
  1801  
  1802  func TestContainsWrites(t *testing.T) {
  1803  	// Scenario I: Nil HashedRwSet in collection
  1804  	col := &rwsetutil.CollHashedRwSet{
  1805  		CollectionName: "col1",
  1806  	}
  1807  	require.False(t, containsWrites("tx", "ns", col))
  1808  
  1809  	// Scenario II: No writes in collection
  1810  	col.HashedRwSet = &kvrwset.HashedRWSet{}
  1811  	require.False(t, containsWrites("tx", "ns", col))
  1812  
  1813  	// Scenario III: Some writes in collection
  1814  	col.HashedRwSet.HashedWrites = append(col.HashedRwSet.HashedWrites, &kvrwset.KVWriteHash{})
  1815  	require.True(t, containsWrites("tx", "ns", col))
  1816  }
  1817  
  1818  func TestIgnoreReadOnlyColRWSets(t *testing.T) {
  1819  	// Scenario: The transaction has some ColRWSets that have only reads and no writes,
  1820  	// These should be ignored and not considered as missing private data that needs to be retrieved
  1821  	// from the transient store or other peers.
  1822  	// The gossip and transient store mocks in this test aren't initialized with
  1823  	// actions, so if the coordinator attempts to fetch private data from the
  1824  	// transient store or other peers, the test would fail.
  1825  	// Also - we check that at commit time - the coordinator concluded that
  1826  	// no missing private data was found.
  1827  	err := msptesttools.LoadMSPSetupForTesting()
  1828  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1829  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1830  	require.NoError(t, err)
  1831  	serializedID, err := identity.Serialize()
  1832  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1833  	data := []byte{1, 2, 3}
  1834  	signature, err := identity.Sign(data)
  1835  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1836  	mspID := "Org1MSP"
  1837  	peerSelfSignedData := protoutil.SignedData{
  1838  		Identity:  serializedID,
  1839  		Signature: signature,
  1840  		Data:      data,
  1841  	}
  1842  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1843  	var commitHappened bool
  1844  	assertCommitHappened := func() {
  1845  		require.True(t, commitHappened)
  1846  		commitHappened = false
  1847  	}
  1848  	committer := &privdatamocks.Committer{}
  1849  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
  1850  		blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
  1851  		// Ensure there is no private data to commit
  1852  		require.Empty(t, blockAndPrivateData.PvtData)
  1853  		// Ensure there is no missing private data
  1854  		require.Empty(t, blockAndPrivateData.MissingPvtData)
  1855  		commitHappened = true
  1856  
  1857  		commitOpts := args.Get(1).(*ledger.CommitOptions)
  1858  		expectedCommitOpts := &ledger.CommitOptions{FetchPvtDataFromLedger: false}
  1859  		require.Equal(t, expectedCommitOpts, commitOpts)
  1860  	}).Return(nil)
  1861  
  1862  	store := newTransientStore(t)
  1863  	defer store.tearDown()
  1864  
  1865  	fetcher := &fetcherMock{t: t}
  1866  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1867  	bf := &blockFactory{
  1868  		channelID: "testchannelid",
  1869  	}
  1870  
  1871  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1872  		return mspmgmt.GetManagerForChain("testchannelid")
  1873  	})
  1874  
  1875  	// The block contains a read only private data transaction
  1876  	block := bf.AddReadOnlyTxn("tx1", "ns3", hash, "c3", "c2").create()
  1877  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1878  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1879  
  1880  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1881  	appCapability := &privdatamocks.AppCapabilities{}
  1882  	capabilityProvider.On("Capabilities").Return(appCapability)
  1883  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1884  	coordinator := NewCoordinator(mspID, Support{
  1885  		ChainID:            "testchannelid",
  1886  		CollectionStore:    cs,
  1887  		Committer:          committer,
  1888  		Fetcher:            fetcher,
  1889  		Validator:          &validatorMock{},
  1890  		CapabilityProvider: capabilityProvider,
  1891  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1892  	// We pass a nil private data slice to indicate no pre-images though the block contains
  1893  	// private data reads.
  1894  	err = coordinator.StoreBlock(block, nil)
  1895  	require.NoError(t, err)
  1896  	assertCommitHappened()
  1897  }
  1898  
  1899  func TestCoordinatorMetrics(t *testing.T) {
  1900  	err := msptesttools.LoadMSPSetupForTesting()
  1901  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1902  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1903  	require.NoError(t, err)
  1904  	serializedID, err := identity.Serialize()
  1905  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1906  	data := []byte{1, 2, 3}
  1907  	signature, err := identity.Sign(data)
  1908  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1909  	mspID := "Org1MSP"
  1910  	peerSelfSignedData := protoutil.SignedData{
  1911  		Identity:  serializedID,
  1912  		Signature: signature,
  1913  		Data:      data,
  1914  	}
  1915  
  1916  	cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll().withMSPIdentity(identity.GetMSPIdentifier())
  1917  
  1918  	committer := &privdatamocks.Committer{}
  1919  	committer.On("CommitLegacy", mock.Anything, mock.Anything).Return(nil)
  1920  
  1921  	store := newTransientStore(t)
  1922  	defer store.tearDown()
  1923  
  1924  	hash := util2.ComputeSHA256([]byte("rws-pre-image"))
  1925  	pdFactory := &pvtDataFactory{}
  1926  	bf := &blockFactory{
  1927  		channelID: "testchannelid",
  1928  	}
  1929  
  1930  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1931  		return mspmgmt.GetManagerForChain("testchannelid")
  1932  	})
  1933  
  1934  	block := bf.AddTxnWithEndorsement("tx1", "ns1", hash, "org1", true, "c1", "c2").
  1935  		AddTxnWithEndorsement("tx2", "ns2", hash, "org2", true, "c1").
  1936  		AddTxnWithEndorsement("tx3", "ns3", hash, "org3", true, "c1").create()
  1937  
  1938  	pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1").create()
  1939  	// fetch duration metric only reported when fetching from remote peer
  1940  	fetcher := &fetcherMock{t: t}
  1941  	fetcher.On("fetch", mock.Anything).expectingDigests([]privdatacommon.DigKey{
  1942  		{
  1943  			TxId: "tx3", Namespace: "ns3", Collection: "c1", BlockSeq: 1, SeqInBlock: 2,
  1944  		},
  1945  	}).Return(&privdatacommon.FetchedPvtDataContainer{
  1946  		AvailableElements: []*proto.PvtDataElement{
  1947  			{
  1948  				Digest: &proto.PvtDataDigest{
  1949  					SeqInBlock: 2,
  1950  					BlockSeq:   1,
  1951  					Collection: "c1",
  1952  					Namespace:  "ns3",
  1953  					TxId:       "tx3",
  1954  				},
  1955  				Payload: [][]byte{[]byte("rws-pre-image")},
  1956  			},
  1957  		},
  1958  	}, nil)
  1959  
  1960  	testMetricProvider := gmetricsmocks.TestUtilConstructMetricProvider()
  1961  	metrics := metrics.NewGossipMetrics(testMetricProvider.FakeProvider).PrivdataMetrics
  1962  
  1963  	committer.On("DoesPvtDataInfoExistInLedger", mock.Anything).Return(false, nil)
  1964  
  1965  	capabilityProvider := &privdatamocks.CapabilityProvider{}
  1966  	appCapability := &privdatamocks.AppCapabilities{}
  1967  	capabilityProvider.On("Capabilities").Return(appCapability)
  1968  	appCapability.On("StorePvtDataOfInvalidTx").Return(true)
  1969  	coordinator := NewCoordinator(mspID, Support{
  1970  		ChainID:            "testchannelid",
  1971  		CollectionStore:    cs,
  1972  		Committer:          committer,
  1973  		Fetcher:            fetcher,
  1974  		Validator:          &validatorMock{},
  1975  		CapabilityProvider: capabilityProvider,
  1976  	}, store.store, peerSelfSignedData, metrics, testConfig, idDeserializerFactory)
  1977  	err = coordinator.StoreBlock(block, pvtData)
  1978  	require.NoError(t, err)
  1979  
  1980  	// make sure all coordinator metrics were reported
  1981  
  1982  	require.Equal(t,
  1983  		[]string{"channel", "testchannelid"},
  1984  		testMetricProvider.FakeValidationDuration.WithArgsForCall(0),
  1985  	)
  1986  	require.True(t, testMetricProvider.FakeValidationDuration.ObserveArgsForCall(0) > 0)
  1987  	require.Equal(t,
  1988  		[]string{"channel", "testchannelid"},
  1989  		testMetricProvider.FakeListMissingPrivateDataDuration.WithArgsForCall(0),
  1990  	)
  1991  	require.True(t, testMetricProvider.FakeListMissingPrivateDataDuration.ObserveArgsForCall(0) > 0)
  1992  	require.Equal(t,
  1993  		[]string{"channel", "testchannelid"},
  1994  		testMetricProvider.FakeFetchDuration.WithArgsForCall(0),
  1995  	)
  1996  	// fetch duration metric only reported when fetching from remote peer
  1997  	require.True(t, testMetricProvider.FakeFetchDuration.ObserveArgsForCall(0) > 0)
  1998  	require.Equal(t,
  1999  		[]string{"channel", "testchannelid"},
  2000  		testMetricProvider.FakeCommitPrivateDataDuration.WithArgsForCall(0),
  2001  	)
  2002  	require.True(t, testMetricProvider.FakeCommitPrivateDataDuration.ObserveArgsForCall(0) > 0)
  2003  	require.Equal(t,
  2004  		[]string{"channel", "testchannelid"},
  2005  		testMetricProvider.FakePurgeDuration.WithArgsForCall(0),
  2006  	)
  2007  
  2008  	purgeDuration := func() bool {
  2009  		return testMetricProvider.FakePurgeDuration.ObserveArgsForCall(0) > 0
  2010  	}
  2011  	require.Eventually(t, purgeDuration, 2*time.Second, 100*time.Millisecond)
  2012  }