github.com/hasnat/dolt/go@v0.0.0-20210628190320-9eb5d843fbb7/store/nbs/table_persister_test.go

// Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file incorporates work covered by the following copyright and
// permission notice:
//
// Copyright 2017 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0

package nbs

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestPlanCompaction(t *testing.T) {
	assert := assert.New(t)
	tableContents := [][][]byte{
		{[]byte("hello2"), []byte("goodbye2"), []byte("badbye2")},
		{[]byte("red"), []byte("blue")},
		{[]byte("solo")},
	}

	// Build a table file for each content set and wrap it in a chunk source,
	// tracking each table's data length (excluding index and footer) and the
	// total uncompressed size of all chunks.
	var sources chunkSources
	var dataLens []uint64
	var totalUnc uint64
	for _, content := range tableContents {
		for _, chnk := range content {
			totalUnc += uint64(len(chnk))
		}
		data, name, err := buildTable(content)
		require.NoError(t, err)
		ti, err := parseTableIndex(data)
		require.NoError(t, err)
		src := chunkSourceAdapter{newTableReader(ti, tableReaderAtFromBytes(data), fileBlockSize), name}
		dataLens = append(dataLens, uint64(len(data))-indexSize(mustUint32(src.count()))-footerSize)
		sources = append(sources, src)
	}

	// Plan a conjoin of all the sources and verify the plan records the
	// expected data length for each one.
	plan, err := planConjoin(sources, &Stats{})
	require.NoError(t, err)

	var totalChunks uint32
	for i, src := range sources {
		assert.Equal(dataLens[i], plan.sources.sws[i].dataLen)
		totalChunks += mustUint32(src.count())
	}

	// The merged index should account for every chunk and the full
	// uncompressed data size.
	idx, err := parseTableIndex(plan.mergedIndex)
	require.NoError(t, err)

	assert.Equal(totalChunks, idx.chunkCount)
	assert.Equal(totalUnc, idx.totalUncompressedData)

	// Every original chunk should be addressable through the merged index.
	tr := newTableReader(idx, tableReaderAtFromBytes(nil), fileBlockSize)
	for _, content := range tableContents {
		assertChunksInReader(content, tr, assert)
	}
}