github.com/hechain20/hechain@v0.0.0-20220316014945-b544036ba106/gossip/privdata/pvtdataprovider_test.go

     1  /*
     2  Copyright hechain. All Rights Reserved.
     3  
     4  SPDX-License-Identifier: Apache-2.0
     5  */
     6  
     7  package privdata
     8  
     9  import (
    10  	"fmt"
    11  	"io/ioutil"
    12  	"os"
    13  	"sort"
    14  	"testing"
    15  	"time"
    16  
    17  	"github.com/hechain20/hechain/bccsp/factory"
    18  	"github.com/hechain20/hechain/common/metrics/disabled"
    19  	util2 "github.com/hechain20/hechain/common/util"
    20  	"github.com/hechain20/hechain/core/ledger"
    21  	"github.com/hechain20/hechain/core/transientstore"
    22  	"github.com/hechain20/hechain/gossip/metrics"
    23  	privdatacommon "github.com/hechain20/hechain/gossip/privdata/common"
    24  	"github.com/hechain20/hechain/gossip/privdata/mocks"
    25  	"github.com/hechain20/hechain/gossip/util"
    26  	"github.com/hechain20/hechain/msp"
    27  	mspmgmt "github.com/hechain20/hechain/msp/mgmt"
    28  	msptesttools "github.com/hechain20/hechain/msp/mgmt/testtools"
    29  	"github.com/hechain20/hechain/protoutil"
    30  	"github.com/hyperledger/fabric-protos-go/common"
    31  	proto "github.com/hyperledger/fabric-protos-go/gossip"
    32  	"github.com/hyperledger/fabric-protos-go/ledger/rwset"
    33  	mspproto "github.com/hyperledger/fabric-protos-go/msp"
    34  	"github.com/hyperledger/fabric-protos-go/peer"
    35  	tspb "github.com/hyperledger/fabric-protos-go/transientstore"
    36  	"github.com/stretchr/testify/mock"
    37  	"github.com/stretchr/testify/require"
    38  )
    39  
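         // testSupport carries the identity, channel, and block parameters shared by the
         // test scenarios in this file.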
    40  type testSupport struct {
    41  	preHash, hash      []byte
    42  	channelID          string
    43  	blockNum           uint64
    44  	endorsers          []string
    45  	peerSelfSignedData protoutil.SignedData
    46  }
    47  
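         // rwSet describes a single transaction's private read-write set fixture: the
         // collections it writes, the pre-image and hash of the write set, and its
         // position in the block.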
    48  type rwSet struct {
    49  	txID          string
    50  	namespace     string
    51  	collections   []string
    52  	preHash, hash []byte
    53  	seqInBlock    uint64
    54  }
    55  
    56  func init() {
    57  	util.SetupTestLoggingWithLevel("INFO")
    58  }
    59  
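         // TestRetrievePvtdata exercises table-driven scenarios for assembling block private
         // data from the cache, the transient store, and remote peers, covering ineligible
         // collections, invalid transactions, and unrequested extra data.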
    60  func TestRetrievePvtdata(t *testing.T) {
    61  	err := msptesttools.LoadMSPSetupForTesting()
    62  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
    63  
    64  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
    65  	require.NoError(t, err)
    66  	serializedID, err := identity.Serialize()
    67  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
    68  	data := []byte{1, 2, 3}
    69  	signature, err := identity.Sign(data)
    70  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
    71  	peerSelfSignedData := protoutil.SignedData{
    72  		Identity:  serializedID,
    73  		Signature: signature,
    74  		Data:      data,
    75  	}
    76  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
    77  		Mspid:   identity.GetMSPIdentifier(),
    78  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
    79  	})
    80  
    81  	ts := testSupport{
    82  		preHash:            []byte("rws-pre-image"),
    83  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
    84  		channelID:          "testchannelid",
    85  		blockNum:           uint64(1),
    86  		endorsers:          []string{identity.GetMSPIdentifier()},
    87  		peerSelfSignedData: peerSelfSignedData,
    88  	}
    89  
    90  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
    91  	ns1c2 := collectionPvtdataInfoFromTemplate("ns1", "c2", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
    92  	ineligiblens1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", "different-org", ts.hash, endorser, signature)
    93  
    94  	tests := []struct {
    95  		scenario                                                string
    96  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool
    97  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer     []rwSet
    98  		expectedDigKeys                                         []privdatacommon.DigKey
    99  		pvtdataToRetrieve                                       []*ledger.TxPvtdataInfo
   100  		expectedBlockPvtdata                                    *ledger.BlockPvtdata
   101  	}{
   102  		{
   103  			// Scenario I
   104  			scenario:                       "Scenario I: Only eligible private data in cache, no missing private data",
   105  			storePvtdataOfInvalidTx:        true,
   106  			skipPullingInvalidTransactions: false,
   107  			rwSetsInCache: []rwSet{
   108  				{
   109  					txID:        "tx1",
   110  					namespace:   "ns1",
   111  					collections: []string{"c1", "c2"},
   112  					preHash:     ts.preHash,
   113  					hash:        ts.hash,
   114  					seqInBlock:  1,
   115  				},
   116  			},
   117  			rwSetsInTransientStore: []rwSet{},
   118  			rwSetsInPeer:           []rwSet{},
   119  			expectedDigKeys:        []privdatacommon.DigKey{},
   120  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   121  				{
   122  					TxID:       "tx1",
   123  					Invalid:    false,
   124  					SeqInBlock: 1,
   125  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   126  						ns1c1,
   127  						ns1c2,
   128  					},
   129  				},
   130  			},
   131  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   132  				PvtData: ledger.TxPvtDataMap{
   133  					1: &ledger.TxPvtData{
   134  						SeqInBlock: 1,
   135  						WriteSet: &rwset.TxPvtReadWriteSet{
   136  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   137  								{
   138  									Namespace: "ns1",
   139  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   140  										preHash:     ts.preHash,
   141  										collections: []string{"c1", "c2"},
   142  									}),
   143  								},
   144  							},
   145  						},
   146  					},
   147  				},
   148  				MissingPvtData: ledger.TxMissingPvtData{},
   149  			},
   150  		},
   151  		{
   152  			// Scenario II
   153  			scenario:                       "Scenario II: No eligible private data, skip ineligible private data from all sources even if found in cache",
   154  			storePvtdataOfInvalidTx:        true,
   155  			skipPullingInvalidTransactions: false,
   156  			rwSetsInCache: []rwSet{
   157  				{
   158  					txID:        "tx1",
   159  					namespace:   "ns1",
   160  					collections: []string{"c1"},
   161  					preHash:     ts.preHash,
   162  					hash:        ts.hash,
   163  					seqInBlock:  1,
   164  				},
   165  			},
   166  			rwSetsInTransientStore: []rwSet{
   167  				{
   168  					txID:        "tx2",
   169  					namespace:   "ns1",
   170  					collections: []string{"c1"},
   171  					preHash:     ts.preHash,
   172  					hash:        ts.hash,
   173  					seqInBlock:  2,
   174  				},
   175  			},
   176  			rwSetsInPeer: []rwSet{
   177  				{
   178  					txID:        "tx3",
   179  					namespace:   "ns1",
   180  					collections: []string{"c1"},
   181  					preHash:     ts.preHash,
   182  					hash:        ts.hash,
   183  					seqInBlock:  3,
   184  				},
   185  			},
   186  			expectedDigKeys: []privdatacommon.DigKey{},
   187  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   188  				{
   189  					TxID:       "tx1",
   190  					Invalid:    false,
   191  					SeqInBlock: 1,
   192  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   193  						ineligiblens1c1,
   194  					},
   195  				},
   196  				{
   197  					TxID:       "tx2",
   198  					Invalid:    false,
   199  					SeqInBlock: 2,
   200  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   201  						ineligiblens1c1,
   202  					},
   203  				},
   204  				{
   205  					TxID:       "tx3",
   206  					Invalid:    false,
   207  					SeqInBlock: 3,
   208  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   209  						ineligiblens1c1,
   210  					},
   211  				},
   212  			},
   213  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   214  				PvtData: ledger.TxPvtDataMap{},
   215  				MissingPvtData: ledger.TxMissingPvtData{
   216  					1: []*ledger.MissingPvtData{
   217  						{
   218  							Namespace:  "ns1",
   219  							Collection: "c1",
   220  							IsEligible: false,
   221  						},
   222  					},
   223  					2: []*ledger.MissingPvtData{
   224  						{
   225  							Namespace:  "ns1",
   226  							Collection: "c1",
   227  							IsEligible: false,
   228  						},
   229  					},
   230  					3: []*ledger.MissingPvtData{
   231  						{
   232  							Namespace:  "ns1",
   233  							Collection: "c1",
   234  							IsEligible: false,
   235  						},
   236  					},
   237  				},
   238  			},
   239  		},
   240  		{
   241  			// Scenario III
   242  			scenario:                       "Scenario III: Missing private data in cache, found in transient store",
   243  			storePvtdataOfInvalidTx:        true,
   244  			skipPullingInvalidTransactions: false,
   245  			rwSetsInCache: []rwSet{
   246  				{
   247  					txID:        "tx1",
   248  					namespace:   "ns1",
   249  					collections: []string{"c1", "c2"},
   250  					preHash:     ts.preHash,
   251  					hash:        ts.hash,
   252  					seqInBlock:  1,
   253  				},
   254  			},
   255  			rwSetsInTransientStore: []rwSet{
   256  				{
   257  					txID:        "tx2",
   258  					namespace:   "ns1",
   259  					collections: []string{"c2"},
   260  					preHash:     ts.preHash,
   261  					hash:        ts.hash,
   262  					seqInBlock:  2,
   263  				},
   264  			},
   265  			rwSetsInPeer:    []rwSet{},
   266  			expectedDigKeys: []privdatacommon.DigKey{},
   267  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   268  				{
   269  					TxID:       "tx1",
   270  					Invalid:    false,
   271  					SeqInBlock: 1,
   272  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   273  						ns1c1,
   274  						ns1c2,
   275  					},
   276  				},
   277  				{
   278  					TxID:       "tx2",
   279  					Invalid:    false,
   280  					SeqInBlock: 2,
   281  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   282  						ns1c2,
   283  					},
   284  				},
   285  			},
   286  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   287  				PvtData: ledger.TxPvtDataMap{
   288  					1: &ledger.TxPvtData{
   289  						SeqInBlock: 1,
   290  						WriteSet: &rwset.TxPvtReadWriteSet{
   291  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   292  								{
   293  									Namespace: "ns1",
   294  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   295  										preHash:     ts.preHash,
   296  										collections: []string{"c1", "c2"},
   297  									}),
   298  								},
   299  							},
   300  						},
   301  					},
   302  					2: &ledger.TxPvtData{
   303  						SeqInBlock: 2,
   304  						WriteSet: &rwset.TxPvtReadWriteSet{
   305  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   306  								{
   307  									Namespace: "ns1",
   308  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   309  										preHash:     ts.preHash,
   310  										collections: []string{"c2"},
   311  									}),
   312  								},
   313  							},
   314  						},
   315  					},
   316  				},
   317  				MissingPvtData: ledger.TxMissingPvtData{},
   318  			},
   319  		},
   320  		{
   321  			// Scenario IV
   322  			scenario:                       "Scenario IV: Missing private data in cache, found some in transient store and some in peer",
   323  			storePvtdataOfInvalidTx:        true,
   324  			skipPullingInvalidTransactions: false,
   325  			rwSetsInCache: []rwSet{
   326  				{
   327  					txID:        "tx1",
   328  					namespace:   "ns1",
   329  					collections: []string{"c1", "c2"},
   330  					preHash:     ts.preHash,
   331  					hash:        ts.hash,
   332  					seqInBlock:  1,
   333  				},
   334  			},
   335  			rwSetsInTransientStore: []rwSet{
   336  				{
   337  					txID:        "tx2",
   338  					namespace:   "ns1",
   339  					collections: []string{"c1", "c2"},
   340  					preHash:     ts.preHash,
   341  					hash:        ts.hash,
   342  					seqInBlock:  2,
   343  				},
   344  			},
   345  			rwSetsInPeer: []rwSet{
   346  				{
   347  					txID:        "tx3",
   348  					namespace:   "ns1",
   349  					collections: []string{"c1", "c2"},
   350  					preHash:     ts.preHash,
   351  					hash:        ts.hash,
   352  					seqInBlock:  3,
   353  				},
   354  			},
   355  			expectedDigKeys: []privdatacommon.DigKey{
   356  				{
   357  					TxId:       "tx3",
   358  					Namespace:  "ns1",
   359  					Collection: "c1",
   360  					BlockSeq:   ts.blockNum,
   361  					SeqInBlock: 3,
   362  				},
   363  				{
   364  					TxId:       "tx3",
   365  					Namespace:  "ns1",
   366  					Collection: "c2",
   367  					BlockSeq:   ts.blockNum,
   368  					SeqInBlock: 3,
   369  				},
   370  			},
   371  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   372  				{
   373  					TxID:       "tx1",
   374  					Invalid:    false,
   375  					SeqInBlock: 1,
   376  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   377  						ns1c1,
   378  						ns1c2,
   379  					},
   380  				},
   381  				{
   382  					TxID:       "tx2",
   383  					Invalid:    false,
   384  					SeqInBlock: 2,
   385  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   386  						ns1c1,
   387  						ns1c2,
   388  					},
   389  				},
   390  				{
   391  					TxID:       "tx3",
   392  					Invalid:    false,
   393  					SeqInBlock: 3,
   394  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   395  						ns1c1,
   396  						ns1c2,
   397  					},
   398  				},
   399  			},
   400  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   401  				PvtData: ledger.TxPvtDataMap{
   402  					1: &ledger.TxPvtData{
   403  						SeqInBlock: 1,
   404  						WriteSet: &rwset.TxPvtReadWriteSet{
   405  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   406  								{
   407  									Namespace: "ns1",
   408  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   409  										preHash:     ts.preHash,
   410  										collections: []string{"c1", "c2"},
   411  									}),
   412  								},
   413  							},
   414  						},
   415  					},
   416  					2: &ledger.TxPvtData{
   417  						SeqInBlock: 2,
   418  						WriteSet: &rwset.TxPvtReadWriteSet{
   419  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   420  								{
   421  									Namespace: "ns1",
   422  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   423  										preHash:     ts.preHash,
   424  										collections: []string{"c1", "c2"},
   425  									}),
   426  								},
   427  							},
   428  						},
   429  					},
   430  					3: &ledger.TxPvtData{
   431  						SeqInBlock: 3,
   432  						WriteSet: &rwset.TxPvtReadWriteSet{
   433  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   434  								{
   435  									Namespace: "ns1",
   436  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   437  										preHash:     ts.preHash,
   438  										collections: []string{"c1", "c2"},
   439  									}),
   440  								},
   441  							},
   442  						},
   443  					},
   444  				},
   445  				MissingPvtData: ledger.TxMissingPvtData{},
   446  			},
   447  		},
   448  		{
   449  			// Scenario V
   450  			scenario:                       "Scenario V: Skip invalid txs when storePvtdataOfInvalidTx is false",
   451  			storePvtdataOfInvalidTx:        false,
   452  			skipPullingInvalidTransactions: false,
   453  			rwSetsInCache: []rwSet{
   454  				{
   455  					txID:        "tx1",
   456  					namespace:   "ns1",
   457  					collections: []string{"c1"},
   458  					preHash:     ts.preHash,
   459  					hash:        ts.hash,
   460  					seqInBlock:  1,
   461  				},
   462  				{
   463  					txID:        "tx2",
   464  					namespace:   "ns1",
   465  					collections: []string{"c1"},
   466  					preHash:     ts.preHash,
   467  					hash:        ts.hash,
   468  					seqInBlock:  2,
   469  				},
   470  			},
   471  			rwSetsInTransientStore: []rwSet{},
   472  			rwSetsInPeer:           []rwSet{},
   473  			expectedDigKeys:        []privdatacommon.DigKey{},
   474  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   475  				{
   476  					TxID:       "tx1",
   477  					Invalid:    true,
   478  					SeqInBlock: 1,
   479  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   480  						ns1c1,
   481  					},
   482  				},
   483  				{
   484  					TxID:       "tx2",
   485  					Invalid:    false,
   486  					SeqInBlock: 2,
   487  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   488  						ns1c1,
   489  					},
   490  				},
   491  			},
   492  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   493  				PvtData: ledger.TxPvtDataMap{
   494  					2: &ledger.TxPvtData{
   495  						SeqInBlock: 2,
   496  						WriteSet: &rwset.TxPvtReadWriteSet{
   497  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   498  								{
   499  									Namespace: "ns1",
   500  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   501  										preHash:     ts.preHash,
   502  										collections: []string{"c1"},
   503  									}),
   504  								},
   505  							},
   506  						},
   507  					},
   508  				},
   509  				MissingPvtData: ledger.TxMissingPvtData{},
   510  			},
   511  		},
   512  		{
   513  			// Scenario VI
   514  			scenario:                       "Scenario VI: Don't skip invalid txs when storePvtdataOfInvalidTx is true",
   515  			storePvtdataOfInvalidTx:        true,
   516  			skipPullingInvalidTransactions: false,
   517  			rwSetsInCache: []rwSet{
   518  				{
   519  					txID:        "tx1",
   520  					namespace:   "ns1",
   521  					collections: []string{"c1"},
   522  					preHash:     ts.preHash,
   523  					hash:        ts.hash,
   524  					seqInBlock:  1,
   525  				},
   526  				{
   527  					txID:        "tx2",
   528  					namespace:   "ns1",
   529  					collections: []string{"c1"},
   530  					preHash:     ts.preHash,
   531  					hash:        ts.hash,
   532  					seqInBlock:  2,
   533  				},
   534  			},
   535  			rwSetsInTransientStore: []rwSet{},
   536  			rwSetsInPeer:           []rwSet{},
   537  			expectedDigKeys:        []privdatacommon.DigKey{},
   538  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   539  				{
   540  					TxID:       "tx1",
   541  					Invalid:    true,
   542  					SeqInBlock: 1,
   543  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   544  						ns1c1,
   545  					},
   546  				},
   547  				{
   548  					TxID:       "tx2",
   549  					Invalid:    false,
   550  					SeqInBlock: 2,
   551  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   552  						ns1c1,
   553  					},
   554  				},
   555  			},
   556  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   557  				PvtData: ledger.TxPvtDataMap{
   558  					1: &ledger.TxPvtData{
   559  						SeqInBlock: 1,
   560  						WriteSet: &rwset.TxPvtReadWriteSet{
   561  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   562  								{
   563  									Namespace: "ns1",
   564  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   565  										preHash:     ts.preHash,
   566  										collections: []string{"c1"},
   567  									}),
   568  								},
   569  							},
   570  						},
   571  					},
   572  					2: &ledger.TxPvtData{
   573  						SeqInBlock: 2,
   574  						WriteSet: &rwset.TxPvtReadWriteSet{
   575  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   576  								{
   577  									Namespace: "ns1",
   578  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   579  										preHash:     ts.preHash,
   580  										collections: []string{"c1"},
   581  									}),
   582  								},
   583  							},
   584  						},
   585  					},
   586  				},
   587  				MissingPvtData: ledger.TxMissingPvtData{},
   588  			},
   589  		},
   590  		{
   591  			// Scenario VII
   592  			scenario:                "Scenario VII: Can't find eligible tx from any source",
   593  			storePvtdataOfInvalidTx: true,
   594  			rwSetsInCache:           []rwSet{},
   595  			rwSetsInTransientStore:  []rwSet{},
   596  			rwSetsInPeer:            []rwSet{},
   597  			expectedDigKeys: []privdatacommon.DigKey{
   598  				{
   599  					TxId:       "tx1",
   600  					Namespace:  "ns1",
   601  					Collection: "c1",
   602  					BlockSeq:   ts.blockNum,
   603  					SeqInBlock: 1,
   604  				},
   605  				{
   606  					TxId:       "tx1",
   607  					Namespace:  "ns1",
   608  					Collection: "c2",
   609  					BlockSeq:   ts.blockNum,
   610  					SeqInBlock: 1,
   611  				},
   612  			},
   613  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   614  				{
   615  					TxID:       "tx1",
   616  					Invalid:    false,
   617  					SeqInBlock: 1,
   618  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   619  						ns1c1,
   620  						ns1c2,
   621  					},
   622  				},
   623  			},
   624  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   625  				PvtData: ledger.TxPvtDataMap{},
   626  				MissingPvtData: ledger.TxMissingPvtData{
   627  					1: []*ledger.MissingPvtData{
   628  						{
   629  							Namespace:  "ns1",
   630  							Collection: "c1",
   631  							IsEligible: true,
   632  						},
   633  						{
   634  							Namespace:  "ns1",
   635  							Collection: "c2",
   636  							IsEligible: true,
   637  						},
   638  					},
   639  				},
   640  			},
   641  		},
   642  		{
   643  			// Scenario VIII
   644  			scenario:                       "Scenario VIII: Extra data not requested",
   645  			storePvtdataOfInvalidTx:        true,
   646  			skipPullingInvalidTransactions: false,
   647  			rwSetsInCache: []rwSet{
   648  				{
   649  					txID:        "tx1",
   650  					namespace:   "ns1",
   651  					collections: []string{"c1", "c2"},
   652  					preHash:     ts.preHash,
   653  					hash:        ts.hash,
   654  					seqInBlock:  1,
   655  				},
   656  			},
   657  			rwSetsInTransientStore: []rwSet{
   658  				{
   659  					txID:        "tx2",
   660  					namespace:   "ns1",
   661  					collections: []string{"c1", "c2"},
   662  					preHash:     ts.preHash,
   663  					hash:        ts.hash,
   664  					seqInBlock:  2,
   665  				},
   666  			},
   667  			rwSetsInPeer: []rwSet{
   668  				{
   669  					txID:        "tx3",
   670  					namespace:   "ns1",
   671  					collections: []string{"c1", "c2"},
   672  					preHash:     ts.preHash,
   673  					hash:        ts.hash,
   674  					seqInBlock:  3,
   675  				},
   676  			},
   677  			expectedDigKeys: []privdatacommon.DigKey{
   678  				{
   679  					TxId:       "tx3",
   680  					Namespace:  "ns1",
   681  					Collection: "c1",
   682  					BlockSeq:   ts.blockNum,
   683  					SeqInBlock: 3,
   684  				},
   685  			},
    686  			// Only tx3 ns1/c1 is requested, so any extra data found in the other sources should be skipped
   687  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   688  				{
   689  					TxID:       "tx3",
   690  					Invalid:    false,
   691  					SeqInBlock: 3,
   692  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   693  						ns1c1,
   694  					},
   695  				},
   696  			},
   697  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   698  				PvtData: ledger.TxPvtDataMap{
   699  					3: &ledger.TxPvtData{
   700  						SeqInBlock: 3,
   701  						WriteSet: &rwset.TxPvtReadWriteSet{
   702  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   703  								{
   704  									Namespace: "ns1",
   705  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   706  										preHash:     ts.preHash,
   707  										collections: []string{"c1"},
   708  									}),
   709  								},
   710  							},
   711  						},
   712  					},
   713  				},
   714  				MissingPvtData: ledger.TxMissingPvtData{},
   715  			},
   716  		},
   717  		{
   718  			// Scenario IX
   719  			scenario:                       "Scenario IX: Skip pulling invalid txs when skipPullingInvalidTransactions is true",
   720  			storePvtdataOfInvalidTx:        true,
   721  			skipPullingInvalidTransactions: true,
   722  			rwSetsInCache: []rwSet{
   723  				{
   724  					txID:        "tx1",
   725  					namespace:   "ns1",
   726  					collections: []string{"c1"},
   727  					preHash:     ts.preHash,
   728  					hash:        ts.hash,
   729  					seqInBlock:  1,
   730  				},
   731  			},
   732  			rwSetsInTransientStore: []rwSet{
   733  				{
   734  					txID:        "tx2",
   735  					namespace:   "ns1",
   736  					collections: []string{"c1"},
   737  					preHash:     ts.preHash,
   738  					hash:        ts.hash,
   739  					seqInBlock:  2,
   740  				},
   741  			},
   742  			rwSetsInPeer: []rwSet{
   743  				{
   744  					txID:        "tx3",
   745  					namespace:   "ns1",
   746  					collections: []string{"c1"},
   747  					preHash:     ts.preHash,
   748  					hash:        ts.hash,
    749  					seqInBlock:  3,
   750  				},
   751  			},
   752  			expectedDigKeys: []privdatacommon.DigKey{},
   753  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   754  				{
   755  					TxID:       "tx1",
   756  					Invalid:    true,
   757  					SeqInBlock: 1,
   758  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   759  						ns1c1,
   760  					},
   761  				},
   762  				{
   763  					TxID:       "tx2",
   764  					Invalid:    true,
   765  					SeqInBlock: 2,
   766  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   767  						ns1c1,
   768  					},
   769  				},
   770  				{
   771  					TxID:       "tx3",
   772  					Invalid:    true,
   773  					SeqInBlock: 3,
   774  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   775  						ns1c1,
   776  					},
   777  				},
   778  			},
    779  			// tx1 and tx2 private data is still returned (from the cache and transient store) despite the transactions being invalid
   780  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   781  				PvtData: ledger.TxPvtDataMap{
   782  					1: &ledger.TxPvtData{
   783  						SeqInBlock: 1,
   784  						WriteSet: &rwset.TxPvtReadWriteSet{
   785  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   786  								{
   787  									Namespace: "ns1",
   788  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   789  										preHash:     ts.preHash,
   790  										collections: []string{"c1"},
   791  									}),
   792  								},
   793  							},
   794  						},
   795  					},
   796  					2: &ledger.TxPvtData{
   797  						SeqInBlock: 2,
   798  						WriteSet: &rwset.TxPvtReadWriteSet{
   799  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   800  								{
   801  									Namespace: "ns1",
   802  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   803  										preHash:     ts.preHash,
   804  										collections: []string{"c1"},
   805  									}),
   806  								},
   807  							},
   808  						},
   809  					},
   810  				},
   811  				// Only tx3 is missing since we skip pulling invalid tx from peers
   812  				MissingPvtData: ledger.TxMissingPvtData{
   813  					3: []*ledger.MissingPvtData{
   814  						{
   815  							Namespace:  "ns1",
   816  							Collection: "c1",
   817  							IsEligible: true,
   818  						},
   819  					},
   820  				},
   821  			},
   822  		},
   823  	}
   824  
   825  	for _, test := range tests {
   826  		t.Run(test.scenario, func(t *testing.T) {
   827  			testRetrievePvtdataSuccess(t, test.scenario, ts, test.storePvtdataOfInvalidTx, test.skipPullingInvalidTransactions,
   828  				test.rwSetsInCache, test.rwSetsInTransientStore, test.rwSetsInPeer, test.expectedDigKeys, test.pvtdataToRetrieve, test.expectedBlockPvtdata)
   829  		})
   830  	}
   831  }
   832  
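         // TestRetrievePvtdataFailure verifies that RetrievePvtdata returns an error when a
         // collection's config carries a nil member orgs policy.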
   833  func TestRetrievePvtdataFailure(t *testing.T) {
   834  	err := msptesttools.LoadMSPSetupForTesting()
   835  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   836  
   837  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
   838  	require.NoError(t, err)
   839  	serializedID, err := identity.Serialize()
   840  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   841  	data := []byte{1, 2, 3}
   842  	signature, err := identity.Sign(data)
   843  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   844  	peerSelfSignedData := protoutil.SignedData{
   845  		Identity:  serializedID,
   846  		Signature: signature,
   847  		Data:      data,
   848  	}
   849  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
   850  		Mspid:   identity.GetMSPIdentifier(),
   851  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
   852  	})
   853  
   854  	ts := testSupport{
   855  		preHash:            []byte("rws-pre-image"),
   856  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
   857  		channelID:          "testchannelid",
   858  		blockNum:           uint64(1),
   859  		endorsers:          []string{identity.GetMSPIdentifier()},
   860  		peerSelfSignedData: peerSelfSignedData,
   861  	}
   862  
   863  	invalidns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
   864  	invalidns1c1.CollectionConfig.MemberOrgsPolicy = nil
   865  
   866  	scenario := "Scenario I: Invalid collection config policy"
   867  	storePvtdataOfInvalidTx := true
   868  	skipPullingInvalidTransactions := false
   869  	rwSetsInCache := []rwSet{}
   870  	rwSetsInTransientStore := []rwSet{}
   871  	rwSetsInPeer := []rwSet{}
   872  	expectedDigKeys := []privdatacommon.DigKey{}
   873  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
   874  		{
   875  			TxID:       "tx1",
   876  			Invalid:    false,
   877  			SeqInBlock: 1,
   878  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   879  				invalidns1c1,
   880  			},
   881  		},
   882  	}
   883  
   884  	expectedErr := "Collection config policy is nil"
   885  
   886  	testRetrievePvtdataFailure(t, scenario, ts,
   887  		peerSelfSignedData, storePvtdataOfInvalidTx, skipPullingInvalidTransactions,
   888  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
   889  		expectedDigKeys, pvtdataToRetrieve,
   890  		expectedErr)
   891  }
   892  
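         // TestRetryFetchFromPeer verifies that, when private data is missing from every local
         // source, the provider repeatedly retries fetching from peers, sleeping
         // pullRetrySleepInterval between attempts, until the PullRetryThreshold elapses.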
   893  func TestRetryFetchFromPeer(t *testing.T) {
   894  	err := msptesttools.LoadMSPSetupForTesting()
   895  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   896  
   897  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
   898  	require.NoError(t, err)
   899  	serializedID, err := identity.Serialize()
   900  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   901  	data := []byte{1, 2, 3}
   902  	signature, err := identity.Sign(data)
   903  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   904  	peerSelfSignedData := protoutil.SignedData{
   905  		Identity:  serializedID,
   906  		Signature: signature,
   907  		Data:      data,
   908  	}
   909  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
   910  		Mspid:   identity.GetMSPIdentifier(),
   911  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
   912  	})
   913  
   914  	ts := testSupport{
   915  		preHash:            []byte("rws-pre-image"),
   916  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
   917  		channelID:          "testchannelid",
   918  		blockNum:           uint64(1),
   919  		endorsers:          []string{identity.GetMSPIdentifier()},
   920  		peerSelfSignedData: peerSelfSignedData,
   921  	}
   922  
   923  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
   924  	ns1c2 := collectionPvtdataInfoFromTemplate("ns1", "c2", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
   925  
   926  	tempdir, err := ioutil.TempDir("", "ts")
   927  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
   928  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
   929  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
   930  	store, err := storeProvider.OpenStore(ts.channelID)
   931  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
   932  
   933  	defer storeProvider.Close()
   934  	defer os.RemoveAll(tempdir)
   935  
   936  	storePvtdataOfInvalidTx := true
   937  	skipPullingInvalidTransactions := false
   938  	rwSetsInCache := []rwSet{}
   939  	rwSetsInTransientStore := []rwSet{}
   940  	rwSetsInPeer := []rwSet{}
   941  	expectedDigKeys := []privdatacommon.DigKey{
   942  		{
   943  			TxId:       "tx1",
   944  			Namespace:  "ns1",
   945  			Collection: "c1",
   946  			BlockSeq:   ts.blockNum,
   947  			SeqInBlock: 1,
   948  		},
   949  		{
   950  			TxId:       "tx1",
   951  			Namespace:  "ns1",
   952  			Collection: "c2",
   953  			BlockSeq:   ts.blockNum,
   954  			SeqInBlock: 1,
   955  		},
   956  	}
   957  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
   958  		{
   959  			TxID:       "tx1",
   960  			Invalid:    false,
   961  			SeqInBlock: 1,
   962  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   963  				ns1c1,
   964  				ns1c2,
   965  			},
   966  		},
   967  	}
   968  	pdp := setupPrivateDataProvider(t, ts, testConfig,
   969  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
   970  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
   971  		expectedDigKeys)
   972  	require.NotNil(t, pdp)
   973  
   974  	fakeSleeper := &mocks.Sleeper{}
   975  	SetSleeper(pdp, fakeSleeper)
   976  	fakeSleeper.SleepStub = func(sleepDur time.Duration) {
   977  		time.Sleep(sleepDur)
   978  	}
   979  
   980  	_, err = pdp.RetrievePvtdata(pvtdataToRetrieve)
   981  	require.NoError(t, err)
   982  
   983  	maxRetries := int(testConfig.PullRetryThreshold / pullRetrySleepInterval)
    984  	require.LessOrEqual(t, fakeSleeper.SleepCallCount(), maxRetries)
    985  	require.Equal(t, pullRetrySleepInterval, fakeSleeper.SleepArgsForCall(0))
   986  }
   987  
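         // TestSkipPullingAllInvalidTransactions verifies that when skipPullingInvalidTransactions
         // is set and every requested transaction is invalid, the provider neither sleeps for
         // retries nor fetches from remote peers, and reports the data as missing.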
   988  func TestSkipPullingAllInvalidTransactions(t *testing.T) {
   989  	err := msptesttools.LoadMSPSetupForTesting()
   990  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   991  
   992  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
   993  	require.NoError(t, err)
   994  	serializedID, err := identity.Serialize()
   995  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   996  	data := []byte{1, 2, 3}
   997  	signature, err := identity.Sign(data)
   998  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   999  	peerSelfSignedData := protoutil.SignedData{
  1000  		Identity:  serializedID,
  1001  		Signature: signature,
  1002  		Data:      data,
  1003  	}
  1004  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
  1005  		Mspid:   identity.GetMSPIdentifier(),
  1006  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
  1007  	})
  1008  
  1009  	ts := testSupport{
  1010  		preHash:            []byte("rws-pre-image"),
  1011  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
  1012  		channelID:          "testchannelid",
  1013  		blockNum:           uint64(1),
  1014  		endorsers:          []string{identity.GetMSPIdentifier()},
  1015  		peerSelfSignedData: peerSelfSignedData,
  1016  	}
  1017  
  1018  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
  1019  	ns1c2 := collectionPvtdataInfoFromTemplate("ns1", "c2", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
  1020  
  1021  	tempdir, err := ioutil.TempDir("", "ts")
  1022  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1023  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1024  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1025  	store, err := storeProvider.OpenStore(ts.channelID)
  1026  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1027  
  1028  	defer storeProvider.Close()
  1029  	defer os.RemoveAll(tempdir)
  1030  
  1031  	storePvtdataOfInvalidTx := true
  1032  	skipPullingInvalidTransactions := true
  1033  	rwSetsInCache := []rwSet{}
  1034  	rwSetsInTransientStore := []rwSet{}
  1035  	rwSetsInPeer := []rwSet{}
  1036  	expectedDigKeys := []privdatacommon.DigKey{}
  1037  	expectedBlockPvtdata := &ledger.BlockPvtdata{
  1038  		PvtData: ledger.TxPvtDataMap{},
  1039  		MissingPvtData: ledger.TxMissingPvtData{
  1040  			1: []*ledger.MissingPvtData{
  1041  				{
  1042  					Namespace:  "ns1",
  1043  					Collection: "c1",
  1044  					IsEligible: true,
  1045  				},
  1046  				{
  1047  					Namespace:  "ns1",
  1048  					Collection: "c2",
  1049  					IsEligible: true,
  1050  				},
  1051  			},
  1052  		},
  1053  	}
  1054  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
  1055  		{
  1056  			TxID:       "tx1",
  1057  			Invalid:    true,
  1058  			SeqInBlock: 1,
  1059  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
  1060  				ns1c1,
  1061  				ns1c2,
  1062  			},
  1063  		},
  1064  	}
  1065  	pdp := setupPrivateDataProvider(t, ts, testConfig,
  1066  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1067  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
  1068  		expectedDigKeys)
  1069  	require.NotNil(t, pdp)
  1070  
  1071  	fakeSleeper := &mocks.Sleeper{}
  1072  	SetSleeper(pdp, fakeSleeper)
  1073  	newFetcher := &fetcherMock{t: t}
  1074  	pdp.fetcher = newFetcher
  1075  
  1076  	retrievedPvtdata, err := pdp.RetrievePvtdata(pvtdataToRetrieve)
  1077  	require.NoError(t, err)
  1078  
  1079  	blockPvtdata := sortBlockPvtdata(retrievedPvtdata.GetBlockPvtdata())
  1080  	require.Equal(t, expectedBlockPvtdata, blockPvtdata)
  1081  
  1082  	// Check sleep and fetch were never called
   1083  	require.Equal(t, 0, fakeSleeper.SleepCallCount())
  1084  	require.Len(t, newFetcher.Calls, 0)
  1085  }
  1086  
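         // TestRetrievedPvtdataPurgeBelowHeight verifies that Purge removes the retrieved write
         // sets from the transient store and, when the block number is a multiple of
         // TransientBlockRetention, also drops older entries below the retention window.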
  1087  func TestRetrievedPvtdataPurgeBelowHeight(t *testing.T) {
  1088  	conf := testConfig
  1089  	conf.TransientBlockRetention = 5
  1090  
  1091  	err := msptesttools.LoadMSPSetupForTesting()
  1092  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1093  
  1094  	identity, err := mspmgmt.GetLocalMSP(factory.GetDefault()).GetDefaultSigningIdentity()
  1095  	require.NoError(t, err)
  1096  	serializedID, err := identity.Serialize()
  1097  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1098  	data := []byte{1, 2, 3}
  1099  	signature, err := identity.Sign(data)
  1100  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1101  	peerSelfSignedData := protoutil.SignedData{
  1102  		Identity:  serializedID,
  1103  		Signature: signature,
  1104  		Data:      data,
  1105  	}
  1106  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
  1107  		Mspid:   identity.GetMSPIdentifier(),
  1108  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
  1109  	})
  1110  
  1111  	ts := testSupport{
  1112  		preHash:            []byte("rws-pre-image"),
  1113  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
  1114  		channelID:          "testchannelid",
  1115  		blockNum:           uint64(9),
  1116  		endorsers:          []string{identity.GetMSPIdentifier()},
  1117  		peerSelfSignedData: peerSelfSignedData,
  1118  	}
  1119  
  1120  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
  1121  
  1122  	tempdir, err := ioutil.TempDir("", "ts")
  1123  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1124  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1125  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1126  	store, err := storeProvider.OpenStore(ts.channelID)
  1127  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1128  
  1129  	defer storeProvider.Close()
  1130  	defer os.RemoveAll(tempdir)
  1131  
  1132  	// set up store with 9 existing private data write sets
  1133  	for i := 0; i < 9; i++ {
  1134  		txID := fmt.Sprintf("tx%d", i+1)
   1135  		err := store.Persist(txID, uint64(i), &tspb.TxPvtReadWriteSetWithConfigInfo{
  1136  			PvtRwset: &rwset.TxPvtReadWriteSet{
  1137  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1138  					{
  1139  						Namespace: "ns1",
  1140  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
  1141  							{
  1142  								CollectionName: "c1",
  1143  								Rwset:          []byte("rws-pre-image"),
  1144  							},
  1145  						},
  1146  					},
  1147  				},
  1148  			},
  1149  			CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
   1150  		})
         		require.NoError(t, err, fmt.Sprintf("Failed to persist private data, got err %s", err))
  1151  	}
  1152  
  1153  	// test that the initial data shows up in the store
   1154  	for i := 1; i <= 9; i++ {
  1155  		func() {
  1156  			txID := fmt.Sprintf("tx%d", i)
  1157  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1158  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1159  			defer iterator.Close()
  1160  			res, err := iterator.Next()
  1161  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1162  			require.NotNil(t, res)
  1163  		}()
  1164  	}
  1165  
  1166  	storePvtdataOfInvalidTx := true
  1167  	skipPullingInvalidTransactions := false
  1168  	rwSetsInCache := []rwSet{
  1169  		{
  1170  			txID:        "tx9",
  1171  			namespace:   "ns1",
  1172  			collections: []string{"c1"},
  1173  			preHash:     ts.preHash,
  1174  			hash:        ts.hash,
  1175  			seqInBlock:  1,
  1176  		},
  1177  	}
  1178  	rwSetsInTransientStore := []rwSet{}
  1179  	rwSetsInPeer := []rwSet{}
  1180  	expectedDigKeys := []privdatacommon.DigKey{}
  1181  	// request tx9 which is found in both the cache and transient store
  1182  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
  1183  		{
  1184  			TxID:       "tx9",
  1185  			Invalid:    false,
  1186  			SeqInBlock: 1,
  1187  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
  1188  				ns1c1,
  1189  			},
  1190  		},
  1191  	}
  1192  	pdp := setupPrivateDataProvider(t, ts, conf,
  1193  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1194  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer, expectedDigKeys)
  1195  	require.NotNil(t, pdp)
  1196  
  1197  	retrievedPvtdata, err := pdp.RetrievePvtdata(pvtdataToRetrieve)
  1198  	require.NoError(t, err)
  1199  
  1200  	retrievedPvtdata.Purge()
  1201  
  1202  	for i := 1; i <= 9; i++ {
  1203  		func() {
  1204  			txID := fmt.Sprintf("tx%d", i)
  1205  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1206  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1207  			defer iterator.Close()
  1208  			res, err := iterator.Next()
  1209  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1210  			// Check that only the fetched private write set was purged because we haven't reached a blockNum that's a multiple of 5 yet
  1211  			if i == 9 {
  1212  				require.Nil(t, res)
  1213  			} else {
  1214  				require.NotNil(t, res)
  1215  			}
  1216  		}()
  1217  	}
  1218  
  1219  	// increment blockNum to a multiple of transientBlockRetention
  1220  	pdp.blockNum = 10
  1221  	retrievedPvtdata, err = pdp.RetrievePvtdata(pvtdataToRetrieve)
  1222  	require.NoError(t, err)
  1223  
  1224  	retrievedPvtdata.Purge()
  1225  
  1226  	for i := 1; i <= 9; i++ {
  1227  		func() {
  1228  			txID := fmt.Sprintf("tx%d", i)
  1229  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1230  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1231  			defer iterator.Close()
  1232  			res, err := iterator.Next()
  1233  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1234  			// Check that the first 5 sets have been purged alongside the 9th set purged earlier
  1235  			if i < 6 || i == 9 {
  1236  				require.Nil(t, res)
  1237  			} else {
  1238  				require.NotNil(t, res)
  1239  			}
  1240  		}()
  1241  	}
  1242  }
  1243  
  1244  func TestFetchStats(t *testing.T) {
  1245  	fetchStats := fetchStats{
  1246  		fromLocalCache:     1,
  1247  		fromTransientStore: 2,
  1248  		fromRemotePeer:     3,
  1249  	}
  1250  	require.Equal(t, "(1 from local cache, 2 from transient store, 3 from other peers)", fetchStats.String())
  1251  }
  1252  
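         // testRetrievePvtdataSuccess seeds the cache, transient store, and peer mock with the
         // given write sets, runs RetrievePvtdata, compares the (sorted) result with the expected
         // block private data, and then verifies the transient store is purged.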
  1253  func testRetrievePvtdataSuccess(t *testing.T,
  1254  	scenario string,
  1255  	ts testSupport,
  1256  	storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool,
  1257  	rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer []rwSet,
  1258  	expectedDigKeys []privdatacommon.DigKey,
  1259  	pvtdataToRetrieve []*ledger.TxPvtdataInfo,
  1260  	expectedBlockPvtdata *ledger.BlockPvtdata) {
  1261  	fmt.Println("\n" + scenario)
  1262  
  1263  	tempdir, err := ioutil.TempDir("", "ts")
  1264  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1265  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1266  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1267  	store, err := storeProvider.OpenStore(ts.channelID)
  1268  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1269  	defer storeProvider.Close()
  1270  	defer os.RemoveAll(tempdir)
  1271  
  1272  	pdp := setupPrivateDataProvider(t, ts, testConfig,
  1273  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1274  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
  1275  		expectedDigKeys)
  1276  	require.NotNil(t, pdp, scenario)
  1277  
  1278  	retrievedPvtdata, err := pdp.RetrievePvtdata(pvtdataToRetrieve)
  1279  	require.NoError(t, err, scenario)
  1280  
   1281  	// the collection private write sets are sometimes added out of order,
   1282  	// so sort them before comparing against the expected result
  1283  	blockPvtdata := sortBlockPvtdata(retrievedPvtdata.GetBlockPvtdata())
  1284  	require.Equal(t, expectedBlockPvtdata, blockPvtdata, scenario)
  1285  
   1286  	// Test pvtdata is purged from the store on Purge() call
  1287  	testPurged(t, scenario, retrievedPvtdata, store, pvtdataToRetrieve)
  1288  }
  1289  
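         // testRetrievePvtdataFailure runs RetrievePvtdata with the given inputs and asserts
         // that it fails with the expected error message.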
  1290  func testRetrievePvtdataFailure(t *testing.T,
  1291  	scenario string,
  1292  	ts testSupport,
  1293  	peerSelfSignedData protoutil.SignedData,
  1294  	storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool,
  1295  	rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer []rwSet,
  1296  	expectedDigKeys []privdatacommon.DigKey,
  1297  	pvtdataToRetrieve []*ledger.TxPvtdataInfo,
  1298  	expectedErr string) {
  1299  	fmt.Println("\n" + scenario)
  1300  
  1301  	tempdir, err := ioutil.TempDir("", "ts")
  1302  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1303  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1304  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1305  	store, err := storeProvider.OpenStore(ts.channelID)
  1306  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1307  	defer storeProvider.Close()
  1308  	defer os.RemoveAll(tempdir)
  1309  
  1310  	pdp := setupPrivateDataProvider(t, ts, testConfig,
  1311  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1312  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
  1313  		expectedDigKeys)
  1314  	require.NotNil(t, pdp, scenario)
  1315  
  1316  	_, err = pdp.RetrievePvtdata(pvtdataToRetrieve)
  1317  	require.EqualError(t, err, expectedErr, scenario)
  1318  }
  1319  
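         // setupPrivateDataProvider constructs a PvtdataProvider whose cache, transient store,
         // and fetcher mock have been seeded with the supplied read-write sets and expected
         // digests.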
  1320  func setupPrivateDataProvider(t *testing.T,
  1321  	ts testSupport,
  1322  	config CoordinatorConfig,
  1323  	storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool, store *transientstore.Store,
  1324  	rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer []rwSet,
  1325  	expectedDigKeys []privdatacommon.DigKey) *PvtdataProvider {
  1326  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1327  
  1328  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1329  		return mspmgmt.GetManagerForChain(ts.channelID)
  1330  	})
  1331  
  1332  	// set up data in cache
  1333  	prefetchedPvtdata := storePvtdataInCache(rwSetsInCache)
  1334  	// set up data in transient store
  1335  	err := storePvtdataInTransientStore(rwSetsInTransientStore, store)
  1336  	require.NoError(t, err, fmt.Sprintf("Failed to store private data in transient store: got err %s", err))
  1337  
  1338  	// set up data in peer
  1339  	fetcher := &fetcherMock{t: t}
  1340  	storePvtdataInPeer(rwSetsInPeer, expectedDigKeys, fetcher, ts, skipPullingInvalidTransactions)
  1341  
  1342  	pdp := &PvtdataProvider{
  1343  		mspID:                                   "Org1MSP",
  1344  		selfSignedData:                          ts.peerSelfSignedData,
  1345  		logger:                                  logger,
  1346  		listMissingPrivateDataDurationHistogram: metrics.ListMissingPrivateDataDuration.With("channel", ts.channelID),
  1347  		fetchDurationHistogram:                  metrics.FetchDuration.With("channel", ts.channelID),
  1348  		purgeDurationHistogram:                  metrics.PurgeDuration.With("channel", ts.channelID),
  1349  		transientStore:                          store,
  1350  		pullRetryThreshold:                      config.PullRetryThreshold,
  1351  		prefetchedPvtdata:                       prefetchedPvtdata,
  1352  		transientBlockRetention:                 config.TransientBlockRetention,
  1353  		channelID:                               ts.channelID,
  1354  		blockNum:                                ts.blockNum,
  1355  		storePvtdataOfInvalidTx:                 storePvtdataOfInvalidTx,
  1356  		skipPullingInvalidTransactions:          skipPullingInvalidTransactions,
  1357  		fetcher:                                 fetcher,
  1358  		idDeserializerFactory:                   idDeserializerFactory,
  1359  	}
  1360  
  1361  	return pdp
  1362  }
  1363  
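         // testPurged purges the retrieved private data and asserts that the corresponding
         // transactions are no longer present in the transient store.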
  1364  func testPurged(t *testing.T,
  1365  	scenario string,
  1366  	retrievedPvtdata ledger.RetrievedPvtdata,
  1367  	store *transientstore.Store,
  1368  	txPvtdataInfo []*ledger.TxPvtdataInfo) {
  1369  	retrievedPvtdata.Purge()
  1370  	for _, pvtdata := range retrievedPvtdata.GetBlockPvtdata().PvtData {
  1371  		func() {
  1372  			txID := getTxIDBySeqInBlock(pvtdata.SeqInBlock, txPvtdataInfo)
  1373  			require.NotEqual(t, txID, "", fmt.Sprintf("Could not find txID for SeqInBlock %d", pvtdata.SeqInBlock), scenario)
  1374  
  1375  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1376  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1377  			defer iterator.Close()
  1378  
  1379  			res, err := iterator.Next()
  1380  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1381  
  1382  			require.Nil(t, res, scenario)
  1383  		}()
  1384  	}
  1385  }
  1386  
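         // storePvtdataInCache converts the test rwSets into the prefetched private data
         // collections that seed the provider's cache.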
  1387  func storePvtdataInCache(rwsets []rwSet) util.PvtDataCollections {
  1388  	res := []*ledger.TxPvtData{}
  1389  	for _, rws := range rwsets {
  1390  		set := &rwset.TxPvtReadWriteSet{
  1391  			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1392  				{
  1393  					Namespace:          rws.namespace,
  1394  					CollectionPvtRwset: getCollectionPvtReadWriteSet(rws),
  1395  				},
  1396  			},
  1397  		}
  1398  
  1399  		res = append(res, &ledger.TxPvtData{
  1400  			SeqInBlock: rws.seqInBlock,
  1401  			WriteSet:   set,
  1402  		})
  1403  	}
  1404  
  1405  	return res
  1406  }
  1407  
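         // storePvtdataInTransientStore persists the test rwSets into the transient store
         // under their transaction IDs.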
  1408  func storePvtdataInTransientStore(rwsets []rwSet, store *transientstore.Store) error {
  1409  	for _, rws := range rwsets {
  1410  		set := &tspb.TxPvtReadWriteSetWithConfigInfo{
  1411  			PvtRwset: &rwset.TxPvtReadWriteSet{
  1412  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1413  					{
  1414  						Namespace:          rws.namespace,
  1415  						CollectionPvtRwset: getCollectionPvtReadWriteSet(rws),
  1416  					},
  1417  				},
  1418  			},
  1419  			CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1420  		}
  1421  
  1422  		err := store.Persist(rws.txID, 1, set)
  1423  		if err != nil {
  1424  			return err
  1425  		}
  1426  	}
  1427  	return nil
  1428  }
  1429  
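         // storePvtdataInPeer programs the fetcher mock to return the given rwSets as
         // available elements when called with exactly the expected digests and endorsers.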
  1430  func storePvtdataInPeer(rwSets []rwSet, expectedDigKeys []privdatacommon.DigKey, fetcher *fetcherMock, ts testSupport, skipPullingInvalidTransactions bool) {
  1431  	availableElements := []*proto.PvtDataElement{}
  1432  	for _, rws := range rwSets {
  1433  		for _, c := range rws.collections {
  1434  			availableElements = append(availableElements, &proto.PvtDataElement{
  1435  				Digest: &proto.PvtDataDigest{
  1436  					TxId:       rws.txID,
  1437  					Namespace:  rws.namespace,
  1438  					Collection: c,
  1439  					BlockSeq:   ts.blockNum,
  1440  					SeqInBlock: rws.seqInBlock,
  1441  				},
  1442  				Payload: [][]byte{ts.preHash},
  1443  			})
  1444  		}
  1445  	}
  1446  
  1447  	endorsers := []string{}
  1448  	if len(expectedDigKeys) > 0 {
  1449  		endorsers = ts.endorsers
  1450  	}
  1451  	fetcher.On("fetch", mock.Anything).expectingDigests(expectedDigKeys).expectingEndorsers(endorsers...).Return(&privdatacommon.FetchedPvtDataContainer{
  1452  		AvailableElements: availableElements,
  1453  	}, nil)
  1454  }
  1455  
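         // getCollectionPvtReadWriteSet builds the per-collection private write sets for a
         // test rwSet, sorted by collection name.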
  1456  func getCollectionPvtReadWriteSet(rws rwSet) []*rwset.CollectionPvtReadWriteSet {
  1457  	colPvtRwSet := []*rwset.CollectionPvtReadWriteSet{}
  1458  	for _, c := range rws.collections {
  1459  		colPvtRwSet = append(colPvtRwSet, &rwset.CollectionPvtReadWriteSet{
  1460  			CollectionName: c,
  1461  			Rwset:          rws.preHash,
  1462  		})
  1463  	}
  1464  
  1465  	sort.Slice(colPvtRwSet, func(i, j int) bool {
  1466  		return colPvtRwSet[i].CollectionName < colPvtRwSet[j].CollectionName
  1467  	})
  1468  
  1469  	return colPvtRwSet
  1470  }
  1471  
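         // sortBlockPvtdata sorts collection write sets and missing-data entries by collection
         // name so that results can be compared deterministically.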
  1472  func sortBlockPvtdata(blockPvtdata *ledger.BlockPvtdata) *ledger.BlockPvtdata {
  1473  	for _, pvtdata := range blockPvtdata.PvtData {
  1474  		for _, ws := range pvtdata.WriteSet.NsPvtRwset {
  1475  			sort.Slice(ws.CollectionPvtRwset, func(i, j int) bool {
  1476  				return ws.CollectionPvtRwset[i].CollectionName < ws.CollectionPvtRwset[j].CollectionName
  1477  			})
  1478  		}
  1479  	}
  1480  	for _, missingPvtdata := range blockPvtdata.MissingPvtData {
  1481  		sort.Slice(missingPvtdata, func(i, j int) bool {
  1482  			return missingPvtdata[i].Collection < missingPvtdata[j].Collection
  1483  		})
  1484  	}
  1485  	return blockPvtdata
  1486  }
  1487  
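         // collectionPvtdataInfoFromTemplate builds a CollectionPvtdataInfo for the given
         // namespace and collection, endorsed by the given endorser and restricted to members
         // of mspIdentifier.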
  1488  func collectionPvtdataInfoFromTemplate(namespace, collection, mspIdentifier string, hash, endorser, signature []byte) *ledger.CollectionPvtdataInfo {
  1489  	return &ledger.CollectionPvtdataInfo{
  1490  		Collection:   collection,
  1491  		Namespace:    namespace,
  1492  		ExpectedHash: hash,
  1493  		Endorsers: []*peer.Endorsement{
  1494  			{
  1495  				Endorser:  endorser,
  1496  				Signature: signature,
  1497  			},
  1498  		},
  1499  		CollectionConfig: &peer.StaticCollectionConfig{
  1500  			Name:           collection,
  1501  			MemberOnlyRead: true,
  1502  			MemberOrgsPolicy: &peer.CollectionPolicyConfig{
  1503  				Payload: &peer.CollectionPolicyConfig_SignaturePolicy{
  1504  					SignaturePolicy: &common.SignaturePolicyEnvelope{
  1505  						Rule: &common.SignaturePolicy{
  1506  							Type: &common.SignaturePolicy_SignedBy{
  1507  								SignedBy: 0,
  1508  							},
  1509  						},
  1510  						Identities: []*mspproto.MSPPrincipal{
  1511  							{
  1512  								PrincipalClassification: mspproto.MSPPrincipal_ROLE,
  1513  								Principal: protoutil.MarshalOrPanic(&mspproto.MSPRole{
  1514  									MspIdentifier: mspIdentifier,
  1515  									Role:          mspproto.MSPRole_MEMBER,
  1516  								}),
  1517  							},
  1518  						},
  1519  					},
  1520  				},
  1521  			},
  1522  		},
  1523  	}
  1524  }