github.com/hasnat/dolt/go@v0.0.0-20210628190320-9eb5d843fbb7/store/datas/serialize_hashes.go

// Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file incorporates work covered by the following copyright and
// permission notice:
//
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0

package datas

import (
	"encoding/binary"
	"errors"
	"io"

	"github.com/dolthub/dolt/go/store/chunks"
	"github.com/dolthub/dolt/go/store/hash"
)

// serializeHashes writes the hashes in batch to w as a big-endian uint32
// count followed by the raw bytes of each hash. The batch is a map, so the
// hashes are not written in any particular order.
func serializeHashes(w io.Writer, batch chunks.ReadBatch) error {
	err := binary.Write(w, binary.BigEndian, uint32(len(batch))) // 4 billion hashes is probably absurd. Maybe this should be smaller?

	if err != nil {
		return err
	}

	for h := range batch {
		err = serializeHash(w, h)

		if err != nil {
			return err
		}
	}

	return nil
}

// serializeHash writes the raw hash.ByteLen bytes of h to w.
func serializeHash(w io.Writer, h hash.Hash) error {
	_, err := w.Write(h[:])

	return err
}

// deserializeHashes reads a big-endian uint32 count followed by that many
// raw hashes from reader, returning them in wire order.
func deserializeHashes(reader io.Reader) (hash.HashSlice, error) {
	count := uint32(0)
	err := binary.Read(reader, binary.BigEndian, &count)

	if err != nil {
		return hash.HashSlice{}, err
	}

	hashes := make(hash.HashSlice, count)
	for i := range hashes {
		hashes[i], err = deserializeHash(reader)

		if err != nil {
			return hash.HashSlice{}, err
		}
	}
	return hashes, nil
}

// deserializeHash reads exactly hash.ByteLen bytes from reader into a hash.Hash.
func deserializeHash(reader io.Reader) (hash.Hash, error) {
	h := hash.Hash{}
	n, err := io.ReadFull(reader, h[:])

	if err != nil {
		return hash.Hash{}, err
	}

	// io.ReadFull already errors on a short read; this is a defensive double-check.
	if int(hash.ByteLen) != n {
		return hash.Hash{}, errors.New("failed to read all data")
	}

	return h, nil
}
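
For reference, a minimal round-trip sketch of the wire format these helpers agree on: a big-endian uint32 count followed by that many raw hash.ByteLen-byte hashes. The test file and name below are hypothetical, the frame is built by hand in a fixed order rather than from a chunks.ReadBatch (whose construction is out of scope here), and hash.Of from the imported hash package is assumed for producing sample hashes.

// serialize_hashes_roundtrip_test.go (hypothetical sketch) — exercises the
// frame layout written by serializeHashes/serializeHash and parsed by
// deserializeHashes: a big-endian uint32 count, then count raw hashes.
package datas

import (
	"bytes"
	"encoding/binary"
	"testing"

	"github.com/dolthub/dolt/go/store/hash"
)

func TestHashFrameRoundTrip(t *testing.T) {
	// hash.Of is assumed here purely to make sample hashes.
	want := hash.HashSlice{
		hash.Of([]byte("first chunk")),
		hash.Of([]byte("second chunk")),
	}

	// Write the same frame serializeHashes would produce for a two-entry
	// batch, but in a fixed order so the result is deterministic.
	var buf bytes.Buffer
	if err := binary.Write(&buf, binary.BigEndian, uint32(len(want))); err != nil {
		t.Fatal(err)
	}
	for _, h := range want {
		if err := serializeHash(&buf, h); err != nil {
			t.Fatal(err)
		}
	}

	// deserializeHashes returns the hashes in wire order.
	got, err := deserializeHashes(&buf)
	if err != nil {
		t.Fatal(err)
	}
	if len(got) != len(want) {
		t.Fatalf("expected %d hashes, got %d", len(want), len(got))
	}
	for i := range want {
		if got[i] != want[i] {
			t.Fatalf("hash %d did not survive the round trip", i)
		}
	}
}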