github.com/hasnat/dolt/go@v0.0.0-20210628190320-9eb5d843fbb7/store/datas/pull_test.go

// Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file incorporates work covered by the following copyright and
// permission notice:
//
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0

package datas

import (
	"bytes"
	"context"
	"errors"
	"io"
	"io/ioutil"
	"reflect"
	"sync"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/dolthub/dolt/go/store/chunks"
	"github.com/dolthub/dolt/go/store/d"
	"github.com/dolthub/dolt/go/store/hash"
	"github.com/dolthub/dolt/go/store/nbs"
	"github.com/dolthub/dolt/go/store/types"
)

const datasetID = "ds1"

func TestLocalToLocalPulls(t *testing.T) {
	suite.Run(t, &LocalToLocalSuite{})
}

func TestRemoteToLocalPulls(t *testing.T) {
	suite.Run(t, &RemoteToLocalSuite{})
}

func TestLocalToRemotePulls(t *testing.T) {
	suite.Run(t, &LocalToRemoteSuite{})
}

func TestRemoteToRemotePulls(t *testing.T) {
	suite.Run(t, &RemoteToRemoteSuite{})
}

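// PullSuite exercises Pull between a source and a sink Database. The four
// concrete suites below vary which side is treated as "remote", so the same
// scenarios run against every combination of chunk store placement.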
type PullSuite struct {
	suite.Suite
	sinkCS      *chunks.TestStoreView
	sourceCS    *chunks.TestStoreView
	sink        Database
	source      Database
	commitReads int // The number of reads triggered by commit differs across chunk store impls
}

func makeTestStoreViews() (ts1, ts2 *chunks.TestStoreView) {
	st1, st2 := &chunks.TestStorage{}, &chunks.TestStorage{}
	return st1.NewView(), st2.NewView()
}

type LocalToLocalSuite struct {
	PullSuite
}

func (suite *LocalToLocalSuite) SetupTest() {
	suite.sinkCS, suite.sourceCS = makeTestStoreViews()
	suite.sink = NewDatabase(suite.sinkCS)
	suite.source = NewDatabase(suite.sourceCS)
}

type RemoteToLocalSuite struct {
	PullSuite
}

func (suite *RemoteToLocalSuite) SetupTest() {
	suite.sinkCS, suite.sourceCS = makeTestStoreViews()
	suite.sink = NewDatabase(suite.sinkCS)
	suite.source = makeRemoteDb(suite.sourceCS)
}

type LocalToRemoteSuite struct {
	PullSuite
}

func (suite *LocalToRemoteSuite) SetupTest() {
	suite.sinkCS, suite.sourceCS = makeTestStoreViews()
	suite.sink = makeRemoteDb(suite.sinkCS)
	suite.source = NewDatabase(suite.sourceCS)
	suite.commitReads = 1
}

type RemoteToRemoteSuite struct {
	PullSuite
}

func (suite *RemoteToRemoteSuite) SetupTest() {
	suite.sinkCS, suite.sourceCS = makeTestStoreViews()
	suite.sink = makeRemoteDb(suite.sinkCS)
	suite.source = makeRemoteDb(suite.sourceCS)
	suite.commitReads = 1
}

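// makeRemoteDb stands in for constructing a Database over a remote chunk
// store. In these tests it is backed by the same in-memory TestStoreView as
// the local case; suites whose sink is "remote" also set commitReads to
// absorb the extra read their commit path performs.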
func makeRemoteDb(cs chunks.ChunkStore) Database {
	return NewDatabase(cs)
}

func (suite *PullSuite) TearDownTest() {
	suite.sink.Close()
	suite.source.Close()
	suite.sinkCS.Close()
	suite.sourceCS.Close()
}

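// progressTracker collects PullProgress events on a background goroutine.
// Pull reports onto Ch; closing Ch (done by Validate) makes the goroutine
// deliver everything it gathered on doneCh.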
type progressTracker struct {
	Ch     chan PullProgress
	doneCh chan []PullProgress
}

func startProgressTracker() *progressTracker {
	pt := &progressTracker{make(chan PullProgress), make(chan []PullProgress)}
	go func() {
		progress := []PullProgress{}
		for info := range pt.Ch {
			progress = append(progress, info)
		}
		pt.doneCh <- progress
	}()
	return pt
}

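// Validate closes the progress channel and checks the collected stream: it
// must be non-empty, start with zero work done, grow monotonically, and end
// with DoneCount equal to KnownCount.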
func (pt *progressTracker) Validate(suite *PullSuite) {
	close(pt.Ch)
	progress := <-pt.doneCh

	// Expecting exact progress would be unreliable and not necessarily meaningful. Instead, just validate that it's useful and consistent.
	suite.NotEmpty(progress)

	first := progress[0]
	suite.Zero(first.DoneCount)
	suite.True(first.KnownCount > 0)
	suite.Zero(first.ApproxWrittenBytes)

	last := progress[len(progress)-1]
	suite.True(last.DoneCount > 0)
	suite.Equal(last.DoneCount, last.KnownCount)

	for i, prog := range progress {
		suite.True(prog.KnownCount >= prog.DoneCount)
		if i > 0 {
			prev := progress[i-1]
			suite.True(prog.DoneCount >= prev.DoneCount)
			suite.True(prog.ApproxWrittenBytes >= prev.ApproxWrittenBytes)
		}
	}
}

// Source: -3-> C(L2) -1-> N
//                 \  -2-> L1 -1-> N
//                          \ -1-> L0
//
// Sink: Nada
func (suite *PullSuite) TestPullEverything() {
	expectedReads := suite.sinkCS.Reads()

	l := buildListOfHeight(2, suite.source)
	sourceRef := suite.commitToSource(l, mustList(types.NewList(context.Background(), suite.source)))
	pt := startProgressTracker()

	err := Pull(context.Background(), suite.source, suite.sink, sourceRef, pt.Ch)
	suite.NoError(err)
	// The pull should trigger at most commitReads additional reads on the sink.
	suite.True(suite.sinkCS.Reads()-expectedReads <= suite.commitReads)
	pt.Validate(suite)

	v := mustValue(suite.sink.ReadValue(context.Background(), sourceRef.TargetHash())).(types.Struct)
	suite.NotNil(v)
	suite.True(l.Equals(mustGetValue(v.MaybeGet(ValueField))))
}

// Source: -6-> C3(L5) -1-> N
//               .  \  -5-> L4 -1-> N
//                .          \ -4-> L3 -1-> N
//                 .                 \  -3-> L2 -1-> N
//                  5                         \ -2-> L1 -1-> N
//                   .                                \ -1-> L0
//                  C2(L4) -1-> N
//                   .  \  -4-> L3 -1-> N
//                    .          \ -3-> L2 -1-> N
//                     .                 \ -2-> L1 -1-> N
//                      3                        \ -1-> L0
//                       .
//                     C1(L2) -1-> N
//                         \  -2-> L1 -1-> N
//                                  \ -1-> L0
//
// Sink: -3-> C1(L2) -1-> N
//                \  -2-> L1 -1-> N
//                         \ -1-> L0
func (suite *PullSuite) TestPullMultiGeneration() {
	sinkL := buildListOfHeight(2, suite.sink)
	suite.commitToSink(sinkL, mustList(types.NewList(context.Background(), suite.sink)))
	expectedReads := suite.sinkCS.Reads()

	srcL := buildListOfHeight(2, suite.source)
	sourceRef := suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source)))
	srcL = buildListOfHeight(4, suite.source)
	sourceRef = suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source, sourceRef)))
	srcL = buildListOfHeight(5, suite.source)
	sourceRef = suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source, sourceRef)))

	pt := startProgressTracker()

	err := Pull(context.Background(), suite.source, suite.sink, sourceRef, pt.Ch)
	suite.NoError(err)

	suite.True(suite.sinkCS.Reads()-expectedReads <= suite.commitReads)
	pt.Validate(suite)

	v, err := suite.sink.ReadValue(context.Background(), sourceRef.TargetHash())
	suite.NoError(err)
	suite.NotNil(v)
	suite.True(srcL.Equals(mustGetValue(v.(types.Struct).MaybeGet(ValueField))))
}

// Source: -6-> C2(L5) -1-> N
//               .  \  -5-> L4 -1-> N
//                .          \ -4-> L3 -1-> N
//                 .                 \  -3-> L2 -1-> N
//                  4                         \ -2-> L1 -1-> N
//                   .                                \ -1-> L0
//                  C1(L3) -1-> N
//                      \  -3-> L2 -1-> N
//                               \ -2-> L1 -1-> N
//                                       \ -1-> L0
//
// Sink: -5-> C3(L3') -1-> N
//             .   \ -3-> L2 -1-> N
//              .   \      \ -2-> L1 -1-> N
//               .   \             \ -1-> L0
//                .   \  - "oy!"
//                 4
//                  .
//                C1(L3) -1-> N
//                    \  -3-> L2 -1-> N
//                             \ -2-> L1 -1-> N
//                                     \ -1-> L0
func (suite *PullSuite) TestPullDivergentHistory() {
	sinkL := buildListOfHeight(3, suite.sink)
	sinkRef := suite.commitToSink(sinkL, mustList(types.NewList(context.Background(), suite.sink)))
	srcL := buildListOfHeight(3, suite.source)
	sourceRef := suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source)))

	var err error
	sinkL, err = sinkL.Edit().Append(types.String("oy!")).List(context.Background())
	suite.NoError(err)
	sinkRef = suite.commitToSink(sinkL, mustList(types.NewList(context.Background(), suite.sink, sinkRef)))
	srcL, err = srcL.Edit().Set(1, buildListOfHeight(5, suite.source)).List(context.Background())
	suite.NoError(err)
	sourceRef = suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source, sourceRef)))
	preReads := suite.sinkCS.Reads()

	pt := startProgressTracker()

	err = Pull(context.Background(), suite.source, suite.sink, sourceRef, pt.Ch)
	suite.NoError(err)

	suite.True(suite.sinkCS.Reads()-preReads <= suite.commitReads)
	pt.Validate(suite)

	v, err := suite.sink.ReadValue(context.Background(), sourceRef.TargetHash())
	suite.NoError(err)
	suite.NotNil(v)
	suite.True(srcL.Equals(mustGetValue(v.(types.Struct).MaybeGet(ValueField))))
}

// Source: -6-> C2(L4) -1-> N
//               .  \  -4-> L3 -1-> N
//                 .         \ -3-> L2 -1-> N
//                  .                \ - "oy!"
//                   5                \ -2-> L1 -1-> N
//                    .                       \ -1-> L0
//                   C1(L4) -1-> N
//                       \  -4-> L3 -1-> N
//                                \ -3-> L2 -1-> N
//                                        \ -2-> L1 -1-> N
//                                                \ -1-> L0
// Sink: -5-> C1(L4) -1-> N
//                \  -4-> L3 -1-> N
//                         \ -3-> L2 -1-> N
//                                 \ -2-> L1 -1-> N
//                                         \ -1-> L0
func (suite *PullSuite) TestPullUpdates() {
	sinkL := buildListOfHeight(4, suite.sink)
	suite.commitToSink(sinkL, mustList(types.NewList(context.Background(), suite.sink)))
	expectedReads := suite.sinkCS.Reads()

	srcL := buildListOfHeight(4, suite.source)
	sourceRef := suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source)))
	L3 := mustValue(mustValue(srcL.Get(context.Background(), 1)).(types.Ref).TargetValue(context.Background(), suite.source)).(types.List)
	L2 := mustValue(mustValue(L3.Get(context.Background(), 1)).(types.Ref).TargetValue(context.Background(), suite.source)).(types.List)
	L2Ed := L2.Edit().Append(mustRef(suite.source.WriteValue(context.Background(), types.String("oy!"))))
	L2, err := L2Ed.List(context.Background())
	suite.NoError(err)
	L3Ed := L3.Edit().Set(1, mustRef(suite.source.WriteValue(context.Background(), L2)))
	L3, err = L3Ed.List(context.Background())
	suite.NoError(err)
	srcLEd := srcL.Edit().Set(1, mustRef(suite.source.WriteValue(context.Background(), L3)))
	srcL, err = srcLEd.List(context.Background())
	suite.NoError(err)
	sourceRef = suite.commitToSource(srcL, mustList(types.NewList(context.Background(), suite.source, sourceRef)))

	pt := startProgressTracker()

	err = Pull(context.Background(), suite.source, suite.sink, sourceRef, pt.Ch)
	suite.NoError(err)

	suite.True(suite.sinkCS.Reads()-expectedReads <= suite.commitReads)
	pt.Validate(suite)

	v, err := suite.sink.ReadValue(context.Background(), sourceRef.TargetHash())
	suite.NoError(err)
	suite.NotNil(v)
	suite.True(srcL.Equals(mustGetValue(v.(types.Struct).MaybeGet(ValueField))))
}

func (suite *PullSuite) commitToSource(v types.Value, p types.List) types.Ref {
	ds, err := suite.source.GetDataset(context.Background(), datasetID)
	suite.NoError(err)
	ds, err = suite.source.Commit(context.Background(), ds, v, CommitOptions{ParentsList: p})
	suite.NoError(err)
	return mustHeadRef(ds)
}

func (suite *PullSuite) commitToSink(v types.Value, p types.List) types.Ref {
	ds, err := suite.sink.GetDataset(context.Background(), datasetID)
	suite.NoError(err)
	ds, err = suite.sink.Commit(context.Background(), ds, v, CommitOptions{ParentsList: p})
	suite.NoError(err)
	return mustHeadRef(ds)
}

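// buildListOfHeight builds a list nested `height` ref-levels deep. Starting
// from a two-Float list, each level writes a fresh Float and the previous
// list to vrw, then wraps refs to both in a new two-element list.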
func buildListOfHeight(height int, vrw types.ValueReadWriter) types.List {
	unique := 0
	l, err := types.NewList(context.Background(), vrw, types.Float(unique), types.Float(unique+1))
	d.PanicIfError(err)
	unique += 2

	for i := 0; i < height; i++ {
		r1, err := vrw.WriteValue(context.Background(), types.Float(unique))
		d.PanicIfError(err)
		r2, err := vrw.WriteValue(context.Background(), l)
		d.PanicIfError(err)
		unique++
		l, err = types.NewList(context.Background(), vrw, r1, r2)
		d.PanicIfError(err)
	}
	return l
}

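// TestFailingTableFile is a table file whose Open always returns an error,
// used to exercise clone's retry path.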
type TestFailingTableFile struct {
	fileID    string
	numChunks int
}

func (ttf *TestFailingTableFile) FileID() string {
	return ttf.fileID
}

func (ttf *TestFailingTableFile) NumChunks() int {
	return ttf.numChunks
}

func (ttf *TestFailingTableFile) Open(ctx context.Context) (io.ReadCloser, error) {
	return ioutil.NopCloser(bytes.NewReader([]byte{0x00})), errors.New("this is a test error")
}

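// TestTableFile is an in-memory table file backed by a byte slice.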
type TestTableFile struct {
	fileID    string
	numChunks int
	data      []byte
}

func (ttf *TestTableFile) FileID() string {
	return ttf.fileID
}

func (ttf *TestTableFile) NumChunks() int {
	return ttf.numChunks
}

func (ttf *TestTableFile) Open(ctx context.Context) (io.ReadCloser, error) {
	return ioutil.NopCloser(bytes.NewReader(ttf.data)), nil
}

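// TestTableFileWriter buffers the bytes written for a single table file and,
// on Close, publishes them into its parent TestTableFileStore.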
type TestTableFileWriter struct {
	fileID    string
	numChunks int
	writer    *bytes.Buffer
	ttfs      *TestTableFileStore
}

func (ttfWr *TestTableFileWriter) Write(data []byte) (int, error) {
	return ttfWr.writer.Write(data)
}

func (ttfWr *TestTableFileWriter) Close(ctx context.Context) error {
	data := ttfWr.writer.Bytes()
	ttfWr.writer = nil

	ttfWr.ttfs.mu.Lock()
	defer ttfWr.ttfs.mu.Unlock()
	ttfWr.ttfs.tableFiles[ttfWr.fileID] = &TestTableFile{ttfWr.fileID, ttfWr.numChunks, data}
	return nil
}

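// TestTableFileStore is an in-memory implementation of nbs.TableFileStore,
// sufficient for exercising clone.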
type TestTableFileStore struct {
	root       hash.Hash
	tableFiles map[string]*TestTableFile
	mu         sync.Mutex
}

var _ nbs.TableFileStore = &TestTableFileStore{}

func (ttfs *TestTableFileStore) Sources(ctx context.Context) (hash.Hash, []nbs.TableFile, []nbs.TableFile, error) {
	ttfs.mu.Lock()
	defer ttfs.mu.Unlock()
	var tblFiles []nbs.TableFile
	for _, tblFile := range ttfs.tableFiles {
		tblFiles = append(tblFiles, tblFile)
	}

	return ttfs.root, tblFiles, []nbs.TableFile{}, nil
}

func (ttfs *TestTableFileStore) Size(ctx context.Context) (uint64, error) {
	ttfs.mu.Lock()
	defer ttfs.mu.Unlock()
	sz := uint64(0)
	for _, tblFile := range ttfs.tableFiles {
		sz += uint64(len(tblFile.data))
	}
	return sz, nil
}

func (ttfs *TestTableFileStore) WriteTableFile(ctx context.Context, fileId string, numChunks int, rd io.Reader, contentLength uint64, contentHash []byte) error {
	tblFile := &TestTableFileWriter{fileId, numChunks, bytes.NewBuffer(nil), ttfs}
	_, err := io.Copy(tblFile, rd)

	if err != nil {
		return err
	}

	return tblFile.Close(ctx)
}

func (ttfs *TestTableFileStore) SetRootChunk(ctx context.Context, root, previous hash.Hash) error {
	ttfs.root = root
	return nil
}

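// FlakeyTestTableFileStore serves failing table files from its first Sources
// call and behaves normally afterward, simulating a transient fetch failure.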
type FlakeyTestTableFileStore struct {
	*TestTableFileStore
	GoodNow bool
}

func (f *FlakeyTestTableFileStore) Sources(ctx context.Context) (hash.Hash, []nbs.TableFile, []nbs.TableFile, error) {
	if !f.GoodNow {
		f.GoodNow = true
		r, files, appendixFiles, _ := f.TestTableFileStore.Sources(ctx)
		for i := range files {
			files[i] = &TestFailingTableFile{files[i].FileID(), files[i].NumChunks()}
		}
		return r, files, appendixFiles, nil
	}
	return f.TestTableFileStore.Sources(ctx)
}

func (ttfs *TestTableFileStore) SupportedOperations() nbs.TableFileStoreOps {
	return nbs.TableFileStoreOps{
		CanRead:  true,
		CanWrite: true,
	}
}

func (ttfs *TestTableFileStore) PruneTableFiles(ctx context.Context) error {
	return chunks.ErrUnsupportedOperation
}

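// TestClone copies every table file from a populated source store into an
// empty destination, verifies the stores end up identical, then repeats the
// clone against a flakey source to confirm that failed table files are
// retried.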
func TestClone(t *testing.T) {
	hashBytes := [hash.ByteLen]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, 0x11, 0x12, 0x13}
	src := &TestTableFileStore{
		root: hash.Of(hashBytes[:]),
		tableFiles: map[string]*TestTableFile{
			"file1": {
				fileID:    "file1",
				numChunks: 1,
				data:      []byte("Call me Ishmael. Some years ago—never mind how long precisely—having little or no money in my purse, "),
			},
			"file2": {
				fileID:    "file2",
				numChunks: 2,
				data:      []byte("and nothing particular to interest me on shore, I thought I would sail about a little and see the watery "),
			},
			"file3": {
				fileID:    "file3",
				numChunks: 3,
				data:      []byte("part of the world. It is a way I have of driving off the spleen and regulating the "),
			},
			"file4": {
				fileID:    "file4",
				numChunks: 4,
				data:      []byte("circulation. Whenever I find myself growing grim about the mouth; whenever it is a damp, drizzly "),
			},
			"file5": {
				fileID:    "file5",
				numChunks: 5,
				data:      []byte("November in my soul; whenever I find myself involuntarily pausing before coffin warehouses, and bringing "),
			},
		},
	}

	dest := &TestTableFileStore{
		root:       hash.Hash{},
		tableFiles: map[string]*TestTableFile{},
	}

	ctx := context.Background()
	err := clone(ctx, src, dest, nil)
	require.NoError(t, err)

	err = dest.SetRootChunk(ctx, src.root, hash.Hash{})
	require.NoError(t, err)

	assert.True(t, reflect.DeepEqual(src, dest))

	t.Run("WithFlakeyTableFileStore", func(t *testing.T) {
		// After a Clone()'s TableFile.Open() or a Read from the TableFile
		// fails, we retry with newly fetched Sources().
		flakeySrc := &FlakeyTestTableFileStore{
			TestTableFileStore: src,
		}

		dest = &TestTableFileStore{
			root:       hash.Hash{},
			tableFiles: map[string]*TestTableFile{},
		}

		err := clone(ctx, flakeySrc, dest, nil)
		require.NoError(t, err)

		err = dest.SetRootChunk(ctx, flakeySrc.root, hash.Hash{})
		require.NoError(t, err)

		assert.True(t, reflect.DeepEqual(flakeySrc.TestTableFileStore, dest))
	})
}