github.com/osdi23p228/fabric@v0.0.0-20221218062954-77808885f5db/gossip/privdata/pvtdataprovider_test.go

     1  /*
     2  Copyright IBM Corp. All Rights Reserved.
     3  
     4  SPDX-License-Identifier: Apache-2.0
     5  */
     6  
     7  package privdata
     8  
     9  import (
    10  	"fmt"
    11  	"io/ioutil"
    12  	"os"
    13  	"sort"
    14  	"testing"
    15  	"time"
    16  
    17  	"github.com/hyperledger/fabric-protos-go/common"
    18  	proto "github.com/hyperledger/fabric-protos-go/gossip"
    19  	"github.com/hyperledger/fabric-protos-go/ledger/rwset"
    20  	mspproto "github.com/hyperledger/fabric-protos-go/msp"
    21  	"github.com/hyperledger/fabric-protos-go/peer"
    22  	tspb "github.com/hyperledger/fabric-protos-go/transientstore"
    23  	"github.com/osdi23p228/fabric/bccsp/factory"
    24  	"github.com/osdi23p228/fabric/common/metrics/disabled"
    25  	util2 "github.com/osdi23p228/fabric/common/util"
    26  	"github.com/osdi23p228/fabric/core/ledger"
    27  	"github.com/osdi23p228/fabric/core/transientstore"
    28  	"github.com/osdi23p228/fabric/gossip/metrics"
    29  	privdatacommon "github.com/osdi23p228/fabric/gossip/privdata/common"
    30  	"github.com/osdi23p228/fabric/gossip/privdata/mocks"
    31  	"github.com/osdi23p228/fabric/gossip/util"
    32  	"github.com/osdi23p228/fabric/msp"
    33  	mspmgmt "github.com/osdi23p228/fabric/msp/mgmt"
    34  	msptesttools "github.com/osdi23p228/fabric/msp/mgmt/testtools"
    35  	"github.com/osdi23p228/fabric/protoutil"
    36  	"github.com/stretchr/testify/assert"
    37  	"github.com/stretchr/testify/mock"
    38  	"github.com/stretchr/testify/require"
    39  )
    40  
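        // testSupport bundles the fixtures shared across the tests below: the private
        // write set pre-image and its hash, the channel and block being assembled, the
        // expected endorsers, and the peer's own signed data.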
    41  type testSupport struct {
    42  	preHash, hash      []byte
    43  	channelID          string
    44  	blockNum           uint64
    45  	endorsers          []string
    46  	peerSelfSignedData protoutil.SignedData
    47  }
    48  
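        // rwSet describes a single transaction's private read-write set for one
        // namespace: the collections written, the write set pre-image and hash, and
        // the transaction's sequence within the block.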
    49  type rwSet struct {
    50  	txID          string
    51  	namespace     string
    52  	collections   []string
    53  	preHash, hash []byte
    54  	seqInBlock    uint64
    55  }
    56  
    57  func init() {
    58  	util.SetupTestLoggingWithLevel("INFO")
    59  }
    60  
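        // TestRetrievePvtdata drives RetrievePvtdata through a table of scenarios that
        // vary where private data is available (cache, transient store, remote peers)
        // and how invalid transactions are treated, then checks both the assembled
        // block private data and the digests expected to be pulled from peers.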
    61  func TestRetrievePvtdata(t *testing.T) {
    62  	err := msptesttools.LoadMSPSetupForTesting()
    63  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
    64  
    65  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
    66  	serializedID, err := identity.Serialize()
    67  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
    68  	data := []byte{1, 2, 3}
    69  	signature, err := identity.Sign(data)
    70  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
    71  	peerSelfSignedData := protoutil.SignedData{
    72  		Identity:  serializedID,
    73  		Signature: signature,
    74  		Data:      data,
    75  	}
    76  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
    77  		Mspid:   identity.GetMSPIdentifier(),
    78  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
    79  	})
    80  
    81  	ts := testSupport{
    82  		preHash:            []byte("rws-pre-image"),
    83  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
    84  		channelID:          "testchannelid",
    85  		blockNum:           uint64(1),
    86  		endorsers:          []string{identity.GetMSPIdentifier()},
    87  		peerSelfSignedData: peerSelfSignedData,
    88  	}
    89  
    90  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
    91  	ns1c2 := collectionPvtdataInfoFromTemplate("ns1", "c2", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
    92  	ineligiblens1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", "different-org", ts.hash, endorser, signature)
    93  
    94  	tests := []struct {
    95  		scenario                                                string
    96  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool
    97  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer     []rwSet
    98  		expectedDigKeys                                         []privdatacommon.DigKey
    99  		pvtdataToRetrieve                                       []*ledger.TxPvtdataInfo
   100  		expectedBlockPvtdata                                    *ledger.BlockPvtdata
   101  	}{
   102  		{
   103  			// Scenario I
   104  			scenario:                       "Scenario I: Only eligible private data in cache, no missing private data",
   105  			storePvtdataOfInvalidTx:        true,
   106  			skipPullingInvalidTransactions: false,
   107  			rwSetsInCache: []rwSet{
   108  				{
   109  					txID:        "tx1",
   110  					namespace:   "ns1",
   111  					collections: []string{"c1", "c2"},
   112  					preHash:     ts.preHash,
   113  					hash:        ts.hash,
   114  					seqInBlock:  1,
   115  				},
   116  			},
   117  			rwSetsInTransientStore: []rwSet{},
   118  			rwSetsInPeer:           []rwSet{},
   119  			expectedDigKeys:        []privdatacommon.DigKey{},
   120  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   121  				{
   122  					TxID:       "tx1",
   123  					Invalid:    false,
   124  					SeqInBlock: 1,
   125  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   126  						ns1c1,
   127  						ns1c2,
   128  					},
   129  				},
   130  			},
   131  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   132  				PvtData: ledger.TxPvtDataMap{
   133  					1: &ledger.TxPvtData{
   134  						SeqInBlock: 1,
   135  						WriteSet: &rwset.TxPvtReadWriteSet{
   136  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   137  								{
   138  									Namespace: "ns1",
   139  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   140  										preHash:     ts.preHash,
   141  										collections: []string{"c1", "c2"},
   142  									}),
   143  								},
   144  							},
   145  						},
   146  					},
   147  				},
   148  				MissingPvtData: ledger.TxMissingPvtDataMap{},
   149  			},
   150  		},
   151  		{
   152  			// Scenario II
   153  			scenario:                       "Scenario II: No eligible private data, skip ineligible private data from all sources even if found in cache",
   154  			storePvtdataOfInvalidTx:        true,
   155  			skipPullingInvalidTransactions: false,
   156  			rwSetsInCache: []rwSet{
   157  				{
   158  					txID:        "tx1",
   159  					namespace:   "ns1",
   160  					collections: []string{"c1"},
   161  					preHash:     ts.preHash,
   162  					hash:        ts.hash,
   163  					seqInBlock:  1,
   164  				},
   165  			},
   166  			rwSetsInTransientStore: []rwSet{
   167  				{
   168  					txID:        "tx2",
   169  					namespace:   "ns1",
   170  					collections: []string{"c1"},
   171  					preHash:     ts.preHash,
   172  					hash:        ts.hash,
   173  					seqInBlock:  2,
   174  				},
   175  			},
   176  			rwSetsInPeer: []rwSet{
   177  				{
   178  					txID:        "tx3",
   179  					namespace:   "ns1",
   180  					collections: []string{"c1"},
   181  					preHash:     ts.preHash,
   182  					hash:        ts.hash,
   183  					seqInBlock:  3,
   184  				},
   185  			},
   186  			expectedDigKeys: []privdatacommon.DigKey{},
   187  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   188  				{
   189  					TxID:       "tx1",
   190  					Invalid:    false,
   191  					SeqInBlock: 1,
   192  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   193  						ineligiblens1c1,
   194  					},
   195  				},
   196  				{
   197  					TxID:       "tx2",
   198  					Invalid:    false,
   199  					SeqInBlock: 2,
   200  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   201  						ineligiblens1c1,
   202  					},
   203  				},
   204  				{
   205  					TxID:       "tx3",
   206  					Invalid:    false,
   207  					SeqInBlock: 3,
   208  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   209  						ineligiblens1c1,
   210  					},
   211  				},
   212  			},
   213  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   214  				PvtData: ledger.TxPvtDataMap{},
   215  				MissingPvtData: ledger.TxMissingPvtDataMap{
   216  					1: []*ledger.MissingPvtData{
   217  						{
   218  							Namespace:  "ns1",
   219  							Collection: "c1",
   220  							IsEligible: false,
   221  						},
   222  					},
   223  					2: []*ledger.MissingPvtData{
   224  						{
   225  							Namespace:  "ns1",
   226  							Collection: "c1",
   227  							IsEligible: false,
   228  						},
   229  					},
   230  					3: []*ledger.MissingPvtData{
   231  						{
   232  							Namespace:  "ns1",
   233  							Collection: "c1",
   234  							IsEligible: false,
   235  						},
   236  					},
   237  				},
   238  			},
   239  		},
   240  		{
   241  			// Scenario III
   242  			scenario:                       "Scenario III: Missing private data in cache, found in transient store",
   243  			storePvtdataOfInvalidTx:        true,
   244  			skipPullingInvalidTransactions: false,
   245  			rwSetsInCache: []rwSet{
   246  				{
   247  					txID:        "tx1",
   248  					namespace:   "ns1",
   249  					collections: []string{"c1", "c2"},
   250  					preHash:     ts.preHash,
   251  					hash:        ts.hash,
   252  					seqInBlock:  1,
   253  				},
   254  			},
   255  			rwSetsInTransientStore: []rwSet{
   256  				{
   257  					txID:        "tx2",
   258  					namespace:   "ns1",
   259  					collections: []string{"c2"},
   260  					preHash:     ts.preHash,
   261  					hash:        ts.hash,
   262  					seqInBlock:  2,
   263  				},
   264  			},
   265  			rwSetsInPeer:    []rwSet{},
   266  			expectedDigKeys: []privdatacommon.DigKey{},
   267  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   268  				{
   269  					TxID:       "tx1",
   270  					Invalid:    false,
   271  					SeqInBlock: 1,
   272  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   273  						ns1c1,
   274  						ns1c2,
   275  					},
   276  				},
   277  				{
   278  					TxID:       "tx2",
   279  					Invalid:    false,
   280  					SeqInBlock: 2,
   281  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   282  						ns1c2,
   283  					},
   284  				},
   285  			},
   286  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   287  				PvtData: ledger.TxPvtDataMap{
   288  					1: &ledger.TxPvtData{
   289  						SeqInBlock: 1,
   290  						WriteSet: &rwset.TxPvtReadWriteSet{
   291  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   292  								{
   293  									Namespace: "ns1",
   294  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   295  										preHash:     ts.preHash,
   296  										collections: []string{"c1", "c2"},
   297  									}),
   298  								},
   299  							},
   300  						},
   301  					},
   302  					2: &ledger.TxPvtData{
   303  						SeqInBlock: 2,
   304  						WriteSet: &rwset.TxPvtReadWriteSet{
   305  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   306  								{
   307  									Namespace: "ns1",
   308  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   309  										preHash:     ts.preHash,
   310  										collections: []string{"c2"},
   311  									}),
   312  								},
   313  							},
   314  						},
   315  					},
   316  				},
   317  				MissingPvtData: ledger.TxMissingPvtDataMap{},
   318  			},
   319  		},
   320  		{
   321  			// Scenario IV
   322  			scenario:                       "Scenario IV: Missing private data in cache, found some in transient store and some in peer",
   323  			storePvtdataOfInvalidTx:        true,
   324  			skipPullingInvalidTransactions: false,
   325  			rwSetsInCache: []rwSet{
   326  				{
   327  					txID:        "tx1",
   328  					namespace:   "ns1",
   329  					collections: []string{"c1", "c2"},
   330  					preHash:     ts.preHash,
   331  					hash:        ts.hash,
   332  					seqInBlock:  1,
   333  				},
   334  			},
   335  			rwSetsInTransientStore: []rwSet{
   336  				{
   337  					txID:        "tx2",
   338  					namespace:   "ns1",
   339  					collections: []string{"c1", "c2"},
   340  					preHash:     ts.preHash,
   341  					hash:        ts.hash,
   342  					seqInBlock:  2,
   343  				},
   344  			},
   345  			rwSetsInPeer: []rwSet{
   346  				{
   347  					txID:        "tx3",
   348  					namespace:   "ns1",
   349  					collections: []string{"c1", "c2"},
   350  					preHash:     ts.preHash,
   351  					hash:        ts.hash,
   352  					seqInBlock:  3,
   353  				},
   354  			},
   355  			expectedDigKeys: []privdatacommon.DigKey{
   356  				{
   357  					TxId:       "tx3",
   358  					Namespace:  "ns1",
   359  					Collection: "c1",
   360  					BlockSeq:   ts.blockNum,
   361  					SeqInBlock: 3,
   362  				},
   363  				{
   364  					TxId:       "tx3",
   365  					Namespace:  "ns1",
   366  					Collection: "c2",
   367  					BlockSeq:   ts.blockNum,
   368  					SeqInBlock: 3,
   369  				},
   370  			},
   371  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   372  				{
   373  					TxID:       "tx1",
   374  					Invalid:    false,
   375  					SeqInBlock: 1,
   376  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   377  						ns1c1,
   378  						ns1c2,
   379  					},
   380  				},
   381  				{
   382  					TxID:       "tx2",
   383  					Invalid:    false,
   384  					SeqInBlock: 2,
   385  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   386  						ns1c1,
   387  						ns1c2,
   388  					},
   389  				},
   390  				{
   391  					TxID:       "tx3",
   392  					Invalid:    false,
   393  					SeqInBlock: 3,
   394  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   395  						ns1c1,
   396  						ns1c2,
   397  					},
   398  				},
   399  			},
   400  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   401  				PvtData: ledger.TxPvtDataMap{
   402  					1: &ledger.TxPvtData{
   403  						SeqInBlock: 1,
   404  						WriteSet: &rwset.TxPvtReadWriteSet{
   405  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   406  								{
   407  									Namespace: "ns1",
   408  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   409  										preHash:     ts.preHash,
   410  										collections: []string{"c1", "c2"},
   411  									}),
   412  								},
   413  							},
   414  						},
   415  					},
   416  					2: &ledger.TxPvtData{
   417  						SeqInBlock: 2,
   418  						WriteSet: &rwset.TxPvtReadWriteSet{
   419  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   420  								{
   421  									Namespace: "ns1",
   422  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   423  										preHash:     ts.preHash,
   424  										collections: []string{"c1", "c2"},
   425  									}),
   426  								},
   427  							},
   428  						},
   429  					},
   430  					3: &ledger.TxPvtData{
   431  						SeqInBlock: 3,
   432  						WriteSet: &rwset.TxPvtReadWriteSet{
   433  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   434  								{
   435  									Namespace: "ns1",
   436  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   437  										preHash:     ts.preHash,
   438  										collections: []string{"c1", "c2"},
   439  									}),
   440  								},
   441  							},
   442  						},
   443  					},
   444  				},
   445  				MissingPvtData: ledger.TxMissingPvtDataMap{},
   446  			},
   447  		},
   448  		{
   449  			// Scenario V
   450  			scenario:                       "Scenario V: Skip invalid txs when storePvtdataOfInvalidTx is false",
   451  			storePvtdataOfInvalidTx:        false,
   452  			skipPullingInvalidTransactions: false,
   453  			rwSetsInCache: []rwSet{
   454  				{
   455  					txID:        "tx1",
   456  					namespace:   "ns1",
   457  					collections: []string{"c1"},
   458  					preHash:     ts.preHash,
   459  					hash:        ts.hash,
   460  					seqInBlock:  1,
   461  				},
   462  				{
   463  					txID:        "tx2",
   464  					namespace:   "ns1",
   465  					collections: []string{"c1"},
   466  					preHash:     ts.preHash,
   467  					hash:        ts.hash,
   468  					seqInBlock:  2,
   469  				},
   470  			},
   471  			rwSetsInTransientStore: []rwSet{},
   472  			rwSetsInPeer:           []rwSet{},
   473  			expectedDigKeys:        []privdatacommon.DigKey{},
   474  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   475  				{
   476  					TxID:       "tx1",
   477  					Invalid:    true,
   478  					SeqInBlock: 1,
   479  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   480  						ns1c1,
   481  					},
   482  				},
   483  				{
   484  					TxID:       "tx2",
   485  					Invalid:    false,
   486  					SeqInBlock: 2,
   487  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   488  						ns1c1,
   489  					},
   490  				},
   491  			},
   492  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   493  				PvtData: ledger.TxPvtDataMap{
   494  					2: &ledger.TxPvtData{
   495  						SeqInBlock: 2,
   496  						WriteSet: &rwset.TxPvtReadWriteSet{
   497  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   498  								{
   499  									Namespace: "ns1",
   500  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   501  										preHash:     ts.preHash,
   502  										collections: []string{"c1"},
   503  									}),
   504  								},
   505  							},
   506  						},
   507  					},
   508  				},
   509  				MissingPvtData: ledger.TxMissingPvtDataMap{},
   510  			},
   511  		},
   512  		{
   513  			// Scenario VI
   514  			scenario:                       "Scenario VI: Don't skip invalid txs when storePvtdataOfInvalidTx is true",
   515  			storePvtdataOfInvalidTx:        true,
   516  			skipPullingInvalidTransactions: false,
   517  			rwSetsInCache: []rwSet{
   518  				{
   519  					txID:        "tx1",
   520  					namespace:   "ns1",
   521  					collections: []string{"c1"},
   522  					preHash:     ts.preHash,
   523  					hash:        ts.hash,
   524  					seqInBlock:  1,
   525  				},
   526  				{
   527  					txID:        "tx2",
   528  					namespace:   "ns1",
   529  					collections: []string{"c1"},
   530  					preHash:     ts.preHash,
   531  					hash:        ts.hash,
   532  					seqInBlock:  2,
   533  				},
   534  			},
   535  			rwSetsInTransientStore: []rwSet{},
   536  			rwSetsInPeer:           []rwSet{},
   537  			expectedDigKeys:        []privdatacommon.DigKey{},
   538  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   539  				{
   540  					TxID:       "tx1",
   541  					Invalid:    true,
   542  					SeqInBlock: 1,
   543  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   544  						ns1c1,
   545  					},
   546  				},
   547  				{
   548  					TxID:       "tx2",
   549  					Invalid:    false,
   550  					SeqInBlock: 2,
   551  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   552  						ns1c1,
   553  					},
   554  				},
   555  			},
   556  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   557  				PvtData: ledger.TxPvtDataMap{
   558  					1: &ledger.TxPvtData{
   559  						SeqInBlock: 1,
   560  						WriteSet: &rwset.TxPvtReadWriteSet{
   561  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   562  								{
   563  									Namespace: "ns1",
   564  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   565  										preHash:     ts.preHash,
   566  										collections: []string{"c1"},
   567  									}),
   568  								},
   569  							},
   570  						},
   571  					},
   572  					2: &ledger.TxPvtData{
   573  						SeqInBlock: 2,
   574  						WriteSet: &rwset.TxPvtReadWriteSet{
   575  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   576  								{
   577  									Namespace: "ns1",
   578  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   579  										preHash:     ts.preHash,
   580  										collections: []string{"c1"},
   581  									}),
   582  								},
   583  							},
   584  						},
   585  					},
   586  				},
   587  				MissingPvtData: ledger.TxMissingPvtDataMap{},
   588  			},
   589  		},
   590  		{
   591  			// Scenario VII
   592  			scenario:                "Scenario VII: Can't find eligible tx from any source",
   593  			storePvtdataOfInvalidTx: true,
   594  			rwSetsInCache:           []rwSet{},
   595  			rwSetsInTransientStore:  []rwSet{},
   596  			rwSetsInPeer:            []rwSet{},
   597  			expectedDigKeys: []privdatacommon.DigKey{
   598  				{
   599  					TxId:       "tx1",
   600  					Namespace:  "ns1",
   601  					Collection: "c1",
   602  					BlockSeq:   ts.blockNum,
   603  					SeqInBlock: 1,
   604  				},
   605  				{
   606  					TxId:       "tx1",
   607  					Namespace:  "ns1",
   608  					Collection: "c2",
   609  					BlockSeq:   ts.blockNum,
   610  					SeqInBlock: 1,
   611  				},
   612  			},
   613  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   614  				{
   615  					TxID:       "tx1",
   616  					Invalid:    false,
   617  					SeqInBlock: 1,
   618  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   619  						ns1c1,
   620  						ns1c2,
   621  					},
   622  				},
   623  			},
   624  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   625  				PvtData: ledger.TxPvtDataMap{},
   626  				MissingPvtData: ledger.TxMissingPvtDataMap{
   627  					1: []*ledger.MissingPvtData{
   628  						{
   629  							Namespace:  "ns1",
   630  							Collection: "c1",
   631  							IsEligible: true,
   632  						},
   633  						{
   634  							Namespace:  "ns1",
   635  							Collection: "c2",
   636  							IsEligible: true,
   637  						},
   638  					},
   639  				},
   640  			},
   641  		},
   642  		{
   643  			// Scenario VIII
   644  			scenario:                       "Scenario VIII: Extra data not requested",
   645  			storePvtdataOfInvalidTx:        true,
   646  			skipPullingInvalidTransactions: false,
   647  			rwSetsInCache: []rwSet{
   648  				{
   649  					txID:        "tx1",
   650  					namespace:   "ns1",
   651  					collections: []string{"c1", "c2"},
   652  					preHash:     ts.preHash,
   653  					hash:        ts.hash,
   654  					seqInBlock:  1,
   655  				},
   656  			},
   657  			rwSetsInTransientStore: []rwSet{
   658  				{
   659  					txID:        "tx2",
   660  					namespace:   "ns1",
   661  					collections: []string{"c1", "c2"},
   662  					preHash:     ts.preHash,
   663  					hash:        ts.hash,
   664  					seqInBlock:  2,
   665  				},
   666  			},
   667  			rwSetsInPeer: []rwSet{
   668  				{
   669  					txID:        "tx3",
   670  					namespace:   "ns1",
   671  					collections: []string{"c1", "c2"},
   672  					preHash:     ts.preHash,
   673  					hash:        ts.hash,
   674  					seqInBlock:  3,
   675  				},
   676  			},
   677  			expectedDigKeys: []privdatacommon.DigKey{
   678  				{
   679  					TxId:       "tx3",
   680  					Namespace:  "ns1",
   681  					Collection: "c1",
   682  					BlockSeq:   ts.blockNum,
   683  					SeqInBlock: 3,
   684  				},
   685  			},
   686  			// Only tx3/ns1/c1 is requested, so extra data found in any source should be skipped
   687  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   688  				{
   689  					TxID:       "tx3",
   690  					Invalid:    false,
   691  					SeqInBlock: 3,
   692  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   693  						ns1c1,
   694  					},
   695  				},
   696  			},
   697  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   698  				PvtData: ledger.TxPvtDataMap{
   699  					3: &ledger.TxPvtData{
   700  						SeqInBlock: 3,
   701  						WriteSet: &rwset.TxPvtReadWriteSet{
   702  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   703  								{
   704  									Namespace: "ns1",
   705  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   706  										preHash:     ts.preHash,
   707  										collections: []string{"c1"},
   708  									}),
   709  								},
   710  							},
   711  						},
   712  					},
   713  				},
   714  				MissingPvtData: ledger.TxMissingPvtDataMap{},
   715  			},
   716  		},
   717  		{
   718  			// Scenario IX
   719  			scenario:                       "Scenario IX: Skip pulling invalid txs when skipPullingInvalidTransactions is true",
   720  			storePvtdataOfInvalidTx:        true,
   721  			skipPullingInvalidTransactions: true,
   722  			rwSetsInCache: []rwSet{
   723  				{
   724  					txID:        "tx1",
   725  					namespace:   "ns1",
   726  					collections: []string{"c1"},
   727  					preHash:     ts.preHash,
   728  					hash:        ts.hash,
   729  					seqInBlock:  1,
   730  				},
   731  			},
   732  			rwSetsInTransientStore: []rwSet{
   733  				{
   734  					txID:        "tx2",
   735  					namespace:   "ns1",
   736  					collections: []string{"c1"},
   737  					preHash:     ts.preHash,
   738  					hash:        ts.hash,
   739  					seqInBlock:  2,
   740  				},
   741  			},
   742  			rwSetsInPeer: []rwSet{
   743  				{
   744  					txID:        "tx3",
   745  					namespace:   "ns1",
   746  					collections: []string{"c1"},
   747  					preHash:     ts.preHash,
   748  					hash:        ts.hash,
   749  					seqInBlock:  3,
   750  				},
   751  			},
   752  			expectedDigKeys: []privdatacommon.DigKey{},
   753  			pvtdataToRetrieve: []*ledger.TxPvtdataInfo{
   754  				{
   755  					TxID:       "tx1",
   756  					Invalid:    true,
   757  					SeqInBlock: 1,
   758  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   759  						ns1c1,
   760  					},
   761  				},
   762  				{
   763  					TxID:       "tx2",
   764  					Invalid:    true,
   765  					SeqInBlock: 2,
   766  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   767  						ns1c1,
   768  					},
   769  				},
   770  				{
   771  					TxID:       "tx3",
   772  					Invalid:    true,
   773  					SeqInBlock: 3,
   774  					CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   775  						ns1c1,
   776  					},
   777  				},
   778  			},
   779  			// tx1 and tx2 are still fetched from the cache and transient store despite being invalid
   780  			expectedBlockPvtdata: &ledger.BlockPvtdata{
   781  				PvtData: ledger.TxPvtDataMap{
   782  					1: &ledger.TxPvtData{
   783  						SeqInBlock: 1,
   784  						WriteSet: &rwset.TxPvtReadWriteSet{
   785  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   786  								{
   787  									Namespace: "ns1",
   788  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   789  										preHash:     ts.preHash,
   790  										collections: []string{"c1"},
   791  									}),
   792  								},
   793  							},
   794  						},
   795  					},
   796  					2: &ledger.TxPvtData{
   797  						SeqInBlock: 2,
   798  						WriteSet: &rwset.TxPvtReadWriteSet{
   799  							NsPvtRwset: []*rwset.NsPvtReadWriteSet{
   800  								{
   801  									Namespace: "ns1",
   802  									CollectionPvtRwset: getCollectionPvtReadWriteSet(rwSet{
   803  										preHash:     ts.preHash,
   804  										collections: []string{"c1"},
   805  									}),
   806  								},
   807  							},
   808  						},
   809  					},
   810  				},
   811  				// Only tx3 is missing since we skip pulling invalid tx from peers
   812  				MissingPvtData: ledger.TxMissingPvtDataMap{
   813  					3: []*ledger.MissingPvtData{
   814  						{
   815  							Namespace:  "ns1",
   816  							Collection: "c1",
   817  							IsEligible: true,
   818  						},
   819  					},
   820  				},
   821  			},
   822  		},
   823  	}
   824  
   825  	for _, test := range tests {
   826  		t.Run(test.scenario, func(t *testing.T) {
   827  			testRetrievePvtdataSuccess(t, test.scenario, ts, test.storePvtdataOfInvalidTx, test.skipPullingInvalidTransactions,
   828  				test.rwSetsInCache, test.rwSetsInTransientStore, test.rwSetsInPeer, test.expectedDigKeys, test.pvtdataToRetrieve, test.expectedBlockPvtdata)
   829  		})
   830  	}
   831  }
   832  
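        // TestRetrievePvtdataFailure checks that RetrievePvtdata fails when a
        // collection's config carries a nil member-orgs policy.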
   833  func TestRetrievePvtdataFailure(t *testing.T) {
   834  	err := msptesttools.LoadMSPSetupForTesting()
   835  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   836  
   837  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
   838  	serializedID, err := identity.Serialize()
   839  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   840  	data := []byte{1, 2, 3}
   841  	signature, err := identity.Sign(data)
   842  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   843  	peerSelfSignedData := protoutil.SignedData{
   844  		Identity:  serializedID,
   845  		Signature: signature,
   846  		Data:      data,
   847  	}
   848  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
   849  		Mspid:   identity.GetMSPIdentifier(),
   850  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
   851  	})
   852  
   853  	ts := testSupport{
   854  		preHash:            []byte("rws-pre-image"),
   855  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
   856  		channelID:          "testchannelid",
   857  		blockNum:           uint64(1),
   858  		endorsers:          []string{identity.GetMSPIdentifier()},
   859  		peerSelfSignedData: peerSelfSignedData,
   860  	}
   861  
   862  	invalidns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
   863  	invalidns1c1.CollectionConfig.MemberOrgsPolicy = nil
   864  
   865  	scenario := "Scenario I: Invalid collection config policy"
   866  	storePvtdataOfInvalidTx := true
   867  	skipPullingInvalidTransactions := false
   868  	rwSetsInCache := []rwSet{}
   869  	rwSetsInTransientStore := []rwSet{}
   870  	rwSetsInPeer := []rwSet{}
   871  	expectedDigKeys := []privdatacommon.DigKey{}
   872  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
   873  		{
   874  			TxID:       "tx1",
   875  			Invalid:    false,
   876  			SeqInBlock: 1,
   877  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   878  				invalidns1c1,
   879  			},
   880  		},
   881  	}
   882  
   883  	expectedErr := "Collection config policy is nil"
   884  
   885  	testRetrievePvtdataFailure(t, scenario, ts,
   886  		peerSelfSignedData, storePvtdataOfInvalidTx, skipPullingInvalidTransactions,
   887  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
   888  		expectedDigKeys, pvtdataToRetrieve,
   889  		expectedErr)
   890  }
   891  
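        // TestRetryFetchFromPeer verifies that when private data cannot be found in any
        // source, the provider keeps retrying the peer fetch, sleeping
        // pullRetrySleepInterval between attempts and staying within PullRetryThreshold.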
   892  func TestRetryFetchFromPeer(t *testing.T) {
   893  	err := msptesttools.LoadMSPSetupForTesting()
   894  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   895  
   896  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
   897  	serializedID, err := identity.Serialize()
   898  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   899  	data := []byte{1, 2, 3}
   900  	signature, err := identity.Sign(data)
   901  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   902  	peerSelfSignedData := protoutil.SignedData{
   903  		Identity:  serializedID,
   904  		Signature: signature,
   905  		Data:      data,
   906  	}
   907  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
   908  		Mspid:   identity.GetMSPIdentifier(),
   909  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
   910  	})
   911  
   912  	ts := testSupport{
   913  		preHash:            []byte("rws-pre-image"),
   914  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
   915  		channelID:          "testchannelid",
   916  		blockNum:           uint64(1),
   917  		endorsers:          []string{identity.GetMSPIdentifier()},
   918  		peerSelfSignedData: peerSelfSignedData,
   919  	}
   920  
   921  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
   922  	ns1c2 := collectionPvtdataInfoFromTemplate("ns1", "c2", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
   923  
   924  	tempdir, err := ioutil.TempDir("", "ts")
   925  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
   926  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
   927  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
   928  	store, err := storeProvider.OpenStore(ts.channelID)
   929  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
   930  
   931  	defer storeProvider.Close()
   932  	defer os.RemoveAll(tempdir)
   933  
   934  	storePvtdataOfInvalidTx := true
   935  	skipPullingInvalidTransactions := false
   936  	rwSetsInCache := []rwSet{}
   937  	rwSetsInTransientStore := []rwSet{}
   938  	rwSetsInPeer := []rwSet{}
   939  	expectedDigKeys := []privdatacommon.DigKey{
   940  		{
   941  			TxId:       "tx1",
   942  			Namespace:  "ns1",
   943  			Collection: "c1",
   944  			BlockSeq:   ts.blockNum,
   945  			SeqInBlock: 1,
   946  		},
   947  		{
   948  			TxId:       "tx1",
   949  			Namespace:  "ns1",
   950  			Collection: "c2",
   951  			BlockSeq:   ts.blockNum,
   952  			SeqInBlock: 1,
   953  		},
   954  	}
   955  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
   956  		{
   957  			TxID:       "tx1",
   958  			Invalid:    false,
   959  			SeqInBlock: 1,
   960  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
   961  				ns1c1,
   962  				ns1c2,
   963  			},
   964  		},
   965  	}
   966  	pdp := setupPrivateDataProvider(t, ts, testConfig,
   967  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
   968  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
   969  		expectedDigKeys)
   970  	require.NotNil(t, pdp)
   971  
   972  	fakeSleeper := &mocks.Sleeper{}
   973  	SetSleeper(pdp, fakeSleeper)
   974  	fakeSleeper.SleepStub = func(sleepDur time.Duration) {
   975  		time.Sleep(sleepDur)
   976  	}
   977  
   978  	_, err = pdp.RetrievePvtdata(pvtdataToRetrieve)
   979  	assert.NoError(t, err)
   980  
   981  	maxRetries := int(testConfig.PullRetryThreshold / pullRetrySleepInterval)
   982  	assert.True(t, fakeSleeper.SleepCallCount() <= maxRetries)
   983  	assert.Equal(t, fakeSleeper.SleepArgsForCall(0), pullRetrySleepInterval)
   984  }
   985  
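        // TestSkipPullingAllInvalidTransactions verifies that when every requested
        // transaction is invalid and skipPullingInvalidTransactions is enabled, the
        // provider neither sleeps nor calls the fetcher, and reports the data as
        // eligible but missing.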
   986  func TestSkipPullingAllInvalidTransactions(t *testing.T) {
   987  	err := msptesttools.LoadMSPSetupForTesting()
   988  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
   989  
   990  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
   991  	serializedID, err := identity.Serialize()
   992  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
   993  	data := []byte{1, 2, 3}
   994  	signature, err := identity.Sign(data)
   995  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
   996  	peerSelfSignedData := protoutil.SignedData{
   997  		Identity:  serializedID,
   998  		Signature: signature,
   999  		Data:      data,
  1000  	}
  1001  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
  1002  		Mspid:   identity.GetMSPIdentifier(),
  1003  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
  1004  	})
  1005  
  1006  	ts := testSupport{
  1007  		preHash:            []byte("rws-pre-image"),
  1008  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
  1009  		channelID:          "testchannelid",
  1010  		blockNum:           uint64(1),
  1011  		endorsers:          []string{identity.GetMSPIdentifier()},
  1012  		peerSelfSignedData: peerSelfSignedData,
  1013  	}
  1014  
  1015  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
  1016  	ns1c2 := collectionPvtdataInfoFromTemplate("ns1", "c2", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
  1017  
  1018  	tempdir, err := ioutil.TempDir("", "ts")
  1019  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1020  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1021  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1022  	store, err := storeProvider.OpenStore(ts.channelID)
  1023  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1024  
  1025  	defer storeProvider.Close()
  1026  	defer os.RemoveAll(tempdir)
  1027  
  1028  	storePvtdataOfInvalidTx := true
  1029  	skipPullingInvalidTransactions := true
  1030  	rwSetsInCache := []rwSet{}
  1031  	rwSetsInTransientStore := []rwSet{}
  1032  	rwSetsInPeer := []rwSet{}
  1033  	expectedDigKeys := []privdatacommon.DigKey{}
  1034  	expectedBlockPvtdata := &ledger.BlockPvtdata{
  1035  		PvtData: ledger.TxPvtDataMap{},
  1036  		MissingPvtData: ledger.TxMissingPvtDataMap{
  1037  			1: []*ledger.MissingPvtData{
  1038  				{
  1039  					Namespace:  "ns1",
  1040  					Collection: "c1",
  1041  					IsEligible: true,
  1042  				},
  1043  				{
  1044  					Namespace:  "ns1",
  1045  					Collection: "c2",
  1046  					IsEligible: true,
  1047  				},
  1048  			},
  1049  		},
  1050  	}
  1051  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
  1052  		{
  1053  			TxID:       "tx1",
  1054  			Invalid:    true,
  1055  			SeqInBlock: 1,
  1056  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
  1057  				ns1c1,
  1058  				ns1c2,
  1059  			},
  1060  		},
  1061  	}
  1062  	pdp := setupPrivateDataProvider(t, ts, testConfig,
  1063  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1064  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
  1065  		expectedDigKeys)
  1066  	require.NotNil(t, pdp)
  1067  
  1068  	fakeSleeper := &mocks.Sleeper{}
  1069  	SetSleeper(pdp, fakeSleeper)
  1070  	newFetcher := &fetcherMock{t: t}
  1071  	pdp.fetcher = newFetcher
  1072  
  1073  	retrievedPvtdata, err := pdp.RetrievePvtdata(pvtdataToRetrieve)
  1074  	assert.NoError(t, err)
  1075  
  1076  	blockPvtdata := sortBlockPvtdata(retrievedPvtdata.GetBlockPvtdata())
  1077  	assert.Equal(t, expectedBlockPvtdata, blockPvtdata)
  1078  
  1079  	// Check sleep and fetch were never called
  1080  	assert.Equal(t, fakeSleeper.SleepCallCount(), 0)
  1081  	assert.Len(t, newFetcher.Calls, 0)
  1082  }
  1083  
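        // TestRetrievedPvtdataPurgeBelowHeight verifies Purge against the transient
        // store: at block 9 only the retrieved transaction (tx9) is purged, and once
        // the block number reaches a multiple of TransientBlockRetention (5) the older
        // write sets below the retention window are purged as well.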
  1084  func TestRetrievedPvtdataPurgeBelowHeight(t *testing.T) {
  1085  	conf := testConfig
  1086  	conf.TransientBlockRetention = 5
  1087  
  1088  	err := msptesttools.LoadMSPSetupForTesting()
  1089  	require.NoError(t, err, fmt.Sprintf("Failed to setup local msp for testing, got err %s", err))
  1090  
  1091  	identity := mspmgmt.GetLocalSigningIdentityOrPanic(factory.GetDefault())
  1092  	serializedID, err := identity.Serialize()
  1093  	require.NoError(t, err, fmt.Sprintf("Serialize should have succeeded, got err %s", err))
  1094  	data := []byte{1, 2, 3}
  1095  	signature, err := identity.Sign(data)
  1096  	require.NoError(t, err, fmt.Sprintf("Could not sign identity, got err %s", err))
  1097  	peerSelfSignedData := protoutil.SignedData{
  1098  		Identity:  serializedID,
  1099  		Signature: signature,
  1100  		Data:      data,
  1101  	}
  1102  	endorser := protoutil.MarshalOrPanic(&mspproto.SerializedIdentity{
  1103  		Mspid:   identity.GetMSPIdentifier(),
  1104  		IdBytes: []byte(fmt.Sprintf("p0%s", identity.GetMSPIdentifier())),
  1105  	})
  1106  
  1107  	ts := testSupport{
  1108  		preHash:            []byte("rws-pre-image"),
  1109  		hash:               util2.ComputeSHA256([]byte("rws-pre-image")),
  1110  		channelID:          "testchannelid",
  1111  		blockNum:           uint64(9),
  1112  		endorsers:          []string{identity.GetMSPIdentifier()},
  1113  		peerSelfSignedData: peerSelfSignedData,
  1114  	}
  1115  
  1116  	ns1c1 := collectionPvtdataInfoFromTemplate("ns1", "c1", identity.GetMSPIdentifier(), ts.hash, endorser, signature)
  1117  
  1118  	tempdir, err := ioutil.TempDir("", "ts")
  1119  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1120  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1121  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1122  	store, err := storeProvider.OpenStore(ts.channelID)
  1123  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1124  
  1125  	defer storeProvider.Close()
  1126  	defer os.RemoveAll(tempdir)
  1127  
  1128  	// set up store with 9 existing private data write sets
  1129  	for i := 0; i < 9; i++ {
  1130  		txID := fmt.Sprintf("tx%d", i+1)
  1131  		store.Persist(txID, uint64(i), &tspb.TxPvtReadWriteSetWithConfigInfo{
  1132  			PvtRwset: &rwset.TxPvtReadWriteSet{
  1133  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1134  					{
  1135  						Namespace: "ns1",
  1136  						CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{
  1137  							{
  1138  								CollectionName: "c1",
  1139  								Rwset:          []byte("rws-pre-image"),
  1140  							},
  1141  						},
  1142  					},
  1143  				},
  1144  			},
  1145  			CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1146  		})
  1147  	}
  1148  
  1149  	// test that the initial data shows up in the store
  1150  	for i := 1; i <= 9; i++ {
  1151  		func() {
  1152  			txID := fmt.Sprintf("tx%d", i)
  1153  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1154  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1155  			defer iterator.Close()
  1156  			res, err := iterator.Next()
  1157  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1158  			assert.NotNil(t, res)
  1159  		}()
  1160  	}
  1161  
  1162  	storePvtdataOfInvalidTx := true
  1163  	skipPullingInvalidTransactions := false
  1164  	rwSetsInCache := []rwSet{
  1165  		{
  1166  			txID:        "tx9",
  1167  			namespace:   "ns1",
  1168  			collections: []string{"c1"},
  1169  			preHash:     ts.preHash,
  1170  			hash:        ts.hash,
  1171  			seqInBlock:  1,
  1172  		},
  1173  	}
  1174  	rwSetsInTransientStore := []rwSet{}
  1175  	rwSetsInPeer := []rwSet{}
  1176  	expectedDigKeys := []privdatacommon.DigKey{}
  1177  	// request tx9 which is found in both the cache and transient store
  1178  	pvtdataToRetrieve := []*ledger.TxPvtdataInfo{
  1179  		{
  1180  			TxID:       "tx9",
  1181  			Invalid:    false,
  1182  			SeqInBlock: 1,
  1183  			CollectionPvtdataInfo: []*ledger.CollectionPvtdataInfo{
  1184  				ns1c1,
  1185  			},
  1186  		},
  1187  	}
  1188  	pdp := setupPrivateDataProvider(t, ts, conf,
  1189  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1190  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer, expectedDigKeys)
  1191  	require.NotNil(t, pdp)
  1192  
  1193  	retrievedPvtdata, err := pdp.RetrievePvtdata(pvtdataToRetrieve)
  1194  	require.NoError(t, err)
  1195  
  1196  	retrievedPvtdata.Purge()
  1197  
  1198  	for i := 1; i <= 9; i++ {
  1199  		func() {
  1200  			txID := fmt.Sprintf("tx%d", i)
  1201  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1202  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1203  			defer iterator.Close()
  1204  			res, err := iterator.Next()
  1205  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1206  			// Check that only the fetched private write set was purged because we haven't reached a blockNum that's a multiple of 5 yet
  1207  			if i == 9 {
  1208  				assert.Nil(t, res)
  1209  			} else {
  1210  				assert.NotNil(t, res)
  1211  			}
  1212  		}()
  1213  	}
  1214  
  1215  	// increment blockNum to a multiple of transientBlockRetention
  1216  	pdp.blockNum = 10
  1217  	retrievedPvtdata, err = pdp.RetrievePvtdata(pvtdataToRetrieve)
  1218  	require.NoError(t, err)
  1219  
  1220  	retrievedPvtdata.Purge()
  1221  
  1222  	for i := 1; i <= 9; i++ {
  1223  		func() {
  1224  			txID := fmt.Sprintf("tx%d", i)
  1225  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1226  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1227  			defer iterator.Close()
  1228  			res, err := iterator.Next()
  1229  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1230  			// Check that the first 5 sets have been purged alongside the 9th set purged earlier
  1231  			if i < 6 || i == 9 {
  1232  				assert.Nil(t, res)
  1233  			} else {
  1234  				assert.NotNil(t, res)
  1235  			}
  1236  		}()
  1237  	}
  1238  }
  1239  
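        // TestFetchStats checks the human-readable summary produced by fetchStats.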
  1240  func TestFetchStats(t *testing.T) {
  1241  	fetchStats := fetchStats{
  1242  		fromLocalCache:     1,
  1243  		fromTransientStore: 2,
  1244  		fromRemotePeer:     3,
  1245  	}
  1246  	assert.Equal(t, "(1 from local cache, 2 from transient store, 3 from other peers)", fetchStats.String())
  1247  }
  1248  
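        // testRetrievePvtdataSuccess runs one success scenario: it seeds the three data
        // sources, calls RetrievePvtdata, compares the (sorted) result with the
        // expected block private data, and verifies the transient store is purged.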
  1249  func testRetrievePvtdataSuccess(t *testing.T,
  1250  	scenario string,
  1251  	ts testSupport,
  1252  	storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool,
  1253  	rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer []rwSet,
  1254  	expectedDigKeys []privdatacommon.DigKey,
  1255  	pvtdataToRetrieve []*ledger.TxPvtdataInfo,
  1256  	expectedBlockPvtdata *ledger.BlockPvtdata) {
  1257  
  1258  	fmt.Println("\n" + scenario)
  1259  
  1260  	tempdir, err := ioutil.TempDir("", "ts")
  1261  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1262  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1263  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1264  	store, err := storeProvider.OpenStore(ts.channelID)
  1265  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1266  	defer storeProvider.Close()
  1267  	defer os.RemoveAll(tempdir)
  1268  
  1269  	pdp := setupPrivateDataProvider(t, ts, testConfig,
  1270  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1271  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
  1272  		expectedDigKeys)
  1273  	require.NotNil(t, pdp, scenario)
  1274  
  1275  	retrievedPvtdata, err := pdp.RetrievePvtdata(pvtdataToRetrieve)
  1276  	assert.NoError(t, err, scenario)
  1277  
  1278  	// sometimes the collection private write sets are added out of order,
  1279  	// so we sort them before checking equality with the expected data
  1280  	blockPvtdata := sortBlockPvtdata(retrievedPvtdata.GetBlockPvtdata())
  1281  	assert.Equal(t, expectedBlockPvtdata, blockPvtdata, scenario)
  1282  
  1283  	// Test pvtdata is purged from store on Done() call
  1284  	testPurged(t, scenario, retrievedPvtdata, store, pvtdataToRetrieve)
  1285  }
  1286  
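        // testRetrievePvtdataFailure runs one scenario that is expected to fail and
        // asserts that RetrievePvtdata returns the expected error.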
  1287  func testRetrievePvtdataFailure(t *testing.T,
  1288  	scenario string,
  1289  	ts testSupport,
  1290  	peerSelfSignedData protoutil.SignedData,
  1291  	storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool,
  1292  	rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer []rwSet,
  1293  	expectedDigKeys []privdatacommon.DigKey,
  1294  	pvtdataToRetrieve []*ledger.TxPvtdataInfo,
  1295  	expectedErr string) {
  1296  
  1297  	fmt.Println("\n" + scenario)
  1298  
  1299  	tempdir, err := ioutil.TempDir("", "ts")
  1300  	require.NoError(t, err, fmt.Sprintf("Failed to create test directory, got err %s", err))
  1301  	storeProvider, err := transientstore.NewStoreProvider(tempdir)
  1302  	require.NoError(t, err, fmt.Sprintf("Failed to create store provider, got err %s", err))
  1303  	store, err := storeProvider.OpenStore(ts.channelID)
  1304  	require.NoError(t, err, fmt.Sprintf("Failed to open store, got err %s", err))
  1305  	defer storeProvider.Close()
  1306  	defer os.RemoveAll(tempdir)
  1307  
  1308  	pdp := setupPrivateDataProvider(t, ts, testConfig,
  1309  		storePvtdataOfInvalidTx, skipPullingInvalidTransactions, store,
  1310  		rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer,
  1311  		expectedDigKeys)
  1312  	require.NotNil(t, pdp, scenario)
  1313  
  1314  	_, err = pdp.RetrievePvtdata(pvtdataToRetrieve)
  1315  	assert.EqualError(t, err, expectedErr, scenario)
  1316  }
  1317  
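        // setupPrivateDataProvider builds a PvtdataProvider wired to the test fixtures:
        // rwSetsInCache become the prefetched data, rwSetsInTransientStore are persisted
        // to the transient store, and a mock fetcher serves rwSetsInPeer while asserting
        // that exactly expectedDigKeys are requested.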
  1318  func setupPrivateDataProvider(t *testing.T,
  1319  	ts testSupport,
  1320  	config CoordinatorConfig,
  1321  	storePvtdataOfInvalidTx, skipPullingInvalidTransactions bool, store *transientstore.Store,
  1322  	rwSetsInCache, rwSetsInTransientStore, rwSetsInPeer []rwSet,
  1323  	expectedDigKeys []privdatacommon.DigKey) *PvtdataProvider {
  1324  
  1325  	metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
  1326  
  1327  	idDeserializerFactory := IdentityDeserializerFactoryFunc(func(chainID string) msp.IdentityDeserializer {
  1328  		return mspmgmt.GetManagerForChain(ts.channelID)
  1329  	})
  1330  
  1331  	// set up data in cache
  1332  	prefetchedPvtdata := storePvtdataInCache(rwSetsInCache)
  1333  	// set up data in transient store
  1334  	err := storePvtdataInTransientStore(rwSetsInTransientStore, store)
  1335  	require.NoError(t, err, fmt.Sprintf("Failed to store private data in transient store: got err %s", err))
  1336  
  1337  	// set up data in peer
  1338  	fetcher := &fetcherMock{t: t}
  1339  	storePvtdataInPeer(rwSetsInPeer, expectedDigKeys, fetcher, ts, skipPullingInvalidTransactions)
  1340  
  1341  	pdp := &PvtdataProvider{
  1342  		mspID:                                   "Org1MSP",
  1343  		selfSignedData:                          ts.peerSelfSignedData,
  1344  		logger:                                  logger,
  1345  		listMissingPrivateDataDurationHistogram: metrics.ListMissingPrivateDataDuration.With("channel", ts.channelID),
  1346  		fetchDurationHistogram:                  metrics.FetchDuration.With("channel", ts.channelID),
  1347  		purgeDurationHistogram:                  metrics.PurgeDuration.With("channel", ts.channelID),
  1348  		transientStore:                          store,
  1349  		pullRetryThreshold:                      config.PullRetryThreshold,
  1350  		prefetchedPvtdata:                       prefetchedPvtdata,
  1351  		transientBlockRetention:                 config.TransientBlockRetention,
  1352  		channelID:                               ts.channelID,
  1353  		blockNum:                                ts.blockNum,
  1354  		storePvtdataOfInvalidTx:                 storePvtdataOfInvalidTx,
  1355  		skipPullingInvalidTransactions:          skipPullingInvalidTransactions,
  1356  		fetcher:                                 fetcher,
  1357  		idDeserializerFactory:                   idDeserializerFactory,
  1358  	}
  1359  
  1360  	return pdp
  1361  }
  1362  
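        // testPurged calls Purge on the retrieved private data and verifies that every
        // returned transaction's write set has been removed from the transient store.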
  1363  func testPurged(t *testing.T,
  1364  	scenario string,
  1365  	retrievedPvtdata ledger.RetrievedPvtdata,
  1366  	store *transientstore.Store,
  1367  	txPvtdataInfo []*ledger.TxPvtdataInfo) {
  1368  
  1369  	retrievedPvtdata.Purge()
  1370  	for _, pvtdata := range retrievedPvtdata.GetBlockPvtdata().PvtData {
  1371  		func() {
  1372  			txID := getTxIDBySeqInBlock(pvtdata.SeqInBlock, txPvtdataInfo)
  1373  			require.NotEqual(t, txID, "", fmt.Sprintf("Could not find txID for SeqInBlock %d", pvtdata.SeqInBlock), scenario)
  1374  
  1375  			iterator, err := store.GetTxPvtRWSetByTxid(txID, nil)
  1376  			require.NoError(t, err, fmt.Sprintf("Failed obtaining iterator from transient store, got err %s", err))
  1377  			defer iterator.Close()
  1378  
  1379  			res, err := iterator.Next()
  1380  			require.NoError(t, err, fmt.Sprintf("Failed iterating, got err %s", err))
  1381  
  1382  			assert.Nil(t, res, scenario)
  1383  		}()
  1384  	}
  1385  }
  1386  
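        // storePvtdataInCache converts the given rwSets into the prefetched
        // ledger.TxPvtData handed to the provider as its cache.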
  1387  func storePvtdataInCache(rwsets []rwSet) util.PvtDataCollections {
  1388  	res := []*ledger.TxPvtData{}
  1389  	for _, rws := range rwsets {
  1390  		set := &rwset.TxPvtReadWriteSet{
  1391  			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1392  				{
  1393  					Namespace:          rws.namespace,
  1394  					CollectionPvtRwset: getCollectionPvtReadWriteSet(rws),
  1395  				},
  1396  			},
  1397  		}
  1398  
  1399  		res = append(res, &ledger.TxPvtData{
  1400  			SeqInBlock: rws.seqInBlock,
  1401  			WriteSet:   set,
  1402  		})
  1403  	}
  1404  
  1405  	return res
  1406  }
  1407  
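        // storePvtdataInTransientStore persists the given rwSets into the transient
        // store so the provider can find them there.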
  1408  func storePvtdataInTransientStore(rwsets []rwSet, store *transientstore.Store) error {
  1409  	for _, rws := range rwsets {
  1410  		set := &tspb.TxPvtReadWriteSetWithConfigInfo{
  1411  			PvtRwset: &rwset.TxPvtReadWriteSet{
  1412  				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
  1413  					{
  1414  						Namespace:          rws.namespace,
  1415  						CollectionPvtRwset: getCollectionPvtReadWriteSet(rws),
  1416  					},
  1417  				},
  1418  			},
  1419  			CollectionConfigs: make(map[string]*peer.CollectionConfigPackage),
  1420  		}
  1421  
  1422  		err := store.Persist(rws.txID, 1, set)
  1423  		if err != nil {
  1424  			return err
  1425  		}
  1426  	}
  1427  	return nil
  1428  }
  1429  
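        // storePvtdataInPeer configures the mock fetcher to return the given rwSets as
        // if pulled from remote peers, asserting that the expected digests (and, when
        // applicable, endorsers) are requested.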
  1430  func storePvtdataInPeer(rwSets []rwSet, expectedDigKeys []privdatacommon.DigKey, fetcher *fetcherMock, ts testSupport, skipPullingInvalidTransactions bool) {
  1431  	availableElements := []*proto.PvtDataElement{}
  1432  	for _, rws := range rwSets {
  1433  		for _, c := range rws.collections {
  1434  			availableElements = append(availableElements, &proto.PvtDataElement{
  1435  				Digest: &proto.PvtDataDigest{
  1436  					TxId:       rws.txID,
  1437  					Namespace:  rws.namespace,
  1438  					Collection: c,
  1439  					BlockSeq:   ts.blockNum,
  1440  					SeqInBlock: rws.seqInBlock,
  1441  				},
  1442  				Payload: [][]byte{ts.preHash},
  1443  			})
  1444  		}
  1445  	}
  1446  
  1447  	endorsers := []string{}
  1448  	if len(expectedDigKeys) > 0 {
  1449  		endorsers = ts.endorsers
  1450  	}
  1451  	fetcher.On("fetch", mock.Anything).expectingDigests(expectedDigKeys).expectingEndorsers(endorsers...).Return(&privdatacommon.FetchedPvtDataContainer{
  1452  		AvailableElements: availableElements,
  1453  	}, nil)
  1454  }
  1455  
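        // getCollectionPvtReadWriteSet builds one CollectionPvtReadWriteSet per
        // collection in the rwSet, sorted by collection name for deterministic
        // comparison.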
  1456  func getCollectionPvtReadWriteSet(rws rwSet) []*rwset.CollectionPvtReadWriteSet {
  1457  	colPvtRwSet := []*rwset.CollectionPvtReadWriteSet{}
  1458  	for _, c := range rws.collections {
  1459  		colPvtRwSet = append(colPvtRwSet, &rwset.CollectionPvtReadWriteSet{
  1460  			CollectionName: c,
  1461  			Rwset:          rws.preHash,
  1462  		})
  1463  	}
  1464  
  1465  	sort.Slice(colPvtRwSet, func(i, j int) bool {
  1466  		return colPvtRwSet[i].CollectionName < colPvtRwSet[j].CollectionName
  1467  	})
  1468  
  1469  	return colPvtRwSet
  1470  }
  1471  
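        // sortBlockPvtdata sorts collection write sets and missing-data entries by
        // collection name so results can be compared against expected values.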
  1472  func sortBlockPvtdata(blockPvtdata *ledger.BlockPvtdata) *ledger.BlockPvtdata {
  1473  	for _, pvtdata := range blockPvtdata.PvtData {
  1474  		for _, ws := range pvtdata.WriteSet.NsPvtRwset {
  1475  			sort.Slice(ws.CollectionPvtRwset, func(i, j int) bool {
  1476  				return ws.CollectionPvtRwset[i].CollectionName < ws.CollectionPvtRwset[j].CollectionName
  1477  			})
  1478  		}
  1479  	}
  1480  	for _, missingPvtdata := range blockPvtdata.MissingPvtData {
  1481  		sort.Slice(missingPvtdata, func(i, j int) bool {
  1482  			return missingPvtdata[i].Collection < missingPvtdata[j].Collection
  1483  		})
  1484  	}
  1485  	return blockPvtdata
  1486  }
  1487  
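        // collectionPvtdataInfoFromTemplate returns a CollectionPvtdataInfo for the
        // given namespace and collection, with a single-endorser signature policy
        // restricted to members of mspIdentifier.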
  1488  func collectionPvtdataInfoFromTemplate(namespace, collection, mspIdentifier string, hash, endorser, signature []byte) *ledger.CollectionPvtdataInfo {
  1489  	return &ledger.CollectionPvtdataInfo{
  1490  		Collection:   collection,
  1491  		Namespace:    namespace,
  1492  		ExpectedHash: hash,
  1493  		Endorsers: []*peer.Endorsement{
  1494  			{
  1495  				Endorser:  endorser,
  1496  				Signature: signature,
  1497  			},
  1498  		},
  1499  		CollectionConfig: &peer.StaticCollectionConfig{
  1500  			Name:           collection,
  1501  			MemberOnlyRead: true,
  1502  			MemberOrgsPolicy: &peer.CollectionPolicyConfig{
  1503  				Payload: &peer.CollectionPolicyConfig_SignaturePolicy{
  1504  					SignaturePolicy: &common.SignaturePolicyEnvelope{
  1505  						Rule: &common.SignaturePolicy{
  1506  							Type: &common.SignaturePolicy_SignedBy{
  1507  								SignedBy: 0,
  1508  							},
  1509  						},
  1510  						Identities: []*mspproto.MSPPrincipal{
  1511  							{
  1512  								PrincipalClassification: mspproto.MSPPrincipal_ROLE,
  1513  								Principal: protoutil.MarshalOrPanic(&mspproto.MSPRole{
  1514  									MspIdentifier: mspIdentifier,
  1515  									Role:          mspproto.MSPRole_MEMBER,
  1516  								}),
  1517  							},
  1518  						},
  1519  					},
  1520  				},
  1521  			},
  1522  		},
  1523  	}
  1524  }