github.com/onflow/atree@v0.6.0/hash.go

/*
 * Copyright 2021 Dapper Labs, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package atree

import (
	"encoding/binary"
	"sync"

	"github.com/fxamacker/circlehash"
	"github.com/zeebo/blake3"
)

// HashInputProvider returns the hash input (message) for value,
// optionally using buffer as scratch space.
type HashInputProvider func(value Value, buffer []byte) ([]byte, error)

// Digest is a 64-bit hash digest.
type Digest uint64

// DigesterBuilder builds Digesters from a configured hash seed.
type DigesterBuilder interface {
	// SetSeed sets the hash seeds k0 and k1.
	SetSeed(k0 uint64, k1 uint64)

	// Digest returns a Digester for the given value,
	// using the HashInputProvider to produce the hash input.
	Digest(HashInputProvider, Value) (Digester, error)
}

// Digester produces digests for a value at multiple levels.
type Digester interface {
	// DigestPrefix returns digests before the specified level.
	// If level is 0, DigestPrefix returns nil.
	DigestPrefix(level uint) ([]Digest, error)

	// Digest returns the digest at the specified level.
	Digest(level uint) (Digest, error)

	// Reset resets data for reuse.
	Reset()

	// Levels returns the number of digest levels.
	Levels() uint
}
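
// Illustrative usage sketch (not from the original file; `hip`, `v`, `k0`,
// and `k1` stand for a caller-supplied HashInputProvider, Value, and seeds):
//
//	builder := NewDefaultDigesterBuilder()
//	builder.SetSeed(k0, k1) // k0 must be non-zero before Digest is called
//
//	digester, err := builder.Digest(hip, v)
//	if err != nil {
//		return err
//	}
//	defer putDigester(digester) // return pooled digester when done (package-internal)
//
//	for level := uint(0); level < digester.Levels(); level++ {
//		d, err := digester.Digest(level)
//		// use d, handle err
//	}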

type basicDigesterBuilder struct {
	k0 uint64
	k1 uint64
}

var _ DigesterBuilder = &basicDigesterBuilder{}

type basicDigester struct {
	circleHash64 uint64    // level-0 digest (CircleHash64 of msg)
	blake3Hash   [4]uint64 // cached BLAKE3-256 of msg as 4 uint64 words (levels 1-3)
	scratch      [32]byte  // scratch buffer passed to HashInputProvider
	msg          []byte    // hash input returned by HashInputProvider
}

// basicDigesterPool caches unused basicDigester objects for later reuse.
var basicDigesterPool = sync.Pool{
	New: func() interface{} {
		return &basicDigester{}
	},
}

// getBasicDigester returns a basicDigester from the pool,
// allocating a new one if the pool is empty.
func getBasicDigester() *basicDigester {
	return basicDigesterPool.Get().(*basicDigester)
}

// putDigester resets e and returns it to the pool.
// Digesters of other types are ignored.
func putDigester(e Digester) {
	if _, ok := e.(*basicDigester); !ok {
		return
	}
	e.Reset()
	basicDigesterPool.Put(e)
}

// emptyBlake3Hash is the zero value used to detect whether a basicDigester's
// BLAKE3 digests have been computed yet.
var (
	emptyBlake3Hash [4]uint64
)

// NewDefaultDigesterBuilder returns the default DigesterBuilder,
// backed by CircleHash64 (level 0) and BLAKE3 (levels 1-3).
func NewDefaultDigesterBuilder() DigesterBuilder {
	return newBasicDigesterBuilder()
}

func newBasicDigesterBuilder() *basicDigesterBuilder {
	return &basicDigesterBuilder{}
}

func (bdb *basicDigesterBuilder) SetSeed(k0 uint64, k1 uint64) {
	bdb.k0 = k0
	bdb.k1 = k1
}

func (bdb *basicDigesterBuilder) Digest(hip HashInputProvider, value Value) (Digester, error) {
	if bdb.k0 == 0 {
		return nil, NewHashSeedUninitializedError()
	}

	digester := getBasicDigester()

	msg, err := hip(value, digester.scratch[:])
	if err != nil {
		putDigester(digester)
		// Wrap err as external error (if needed) because err is returned by HashInputProvider callback.
		return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to generate hash input")
	}

	digester.msg = msg
	digester.circleHash64 = circlehash.Hash64(msg, bdb.k0)

	return digester, nil
}
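
// A minimal sketch of a HashInputProvider (an illustration, not part of this
// package; `encodeValue` is a hypothetical encoder). It shows one plausible
// way to honor the buffer parameter: write into the provided buffer when the
// encoding fits, otherwise return a separately allocated slice.
//
//	func exampleHashInput(value Value, buffer []byte) ([]byte, error) {
//		b, err := encodeValue(value) // hypothetical
//		if err != nil {
//			return nil, err
//		}
//		if len(b) <= len(buffer) {
//			return buffer[:copy(buffer, b)], nil
//		}
//		return b, nil
//	}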

func (bd *basicDigester) Reset() {
	bd.circleHash64 = 0
	bd.blake3Hash = emptyBlake3Hash
	bd.msg = nil
}

func (bd *basicDigester) DigestPrefix(level uint) ([]Digest, error) {
	if level > bd.Levels() {
		// level must be [0, bd.Levels()] (inclusive) for prefix
		return nil, NewHashLevelErrorf("cannot get digest < level %d: level must be [0, %d]", level, bd.Levels())
	}
	var prefix []Digest
	for i := uint(0); i < level; i++ {
		d, err := bd.Digest(i)
		if err != nil {
			// Don't need to wrap error as external error because err is already categorized by basicDigester.Digest().
			return nil, err
		}
		prefix = append(prefix, d)
	}
	return prefix, nil
}
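
// For example (illustrative, assuming a digester d obtained from a
// DigesterBuilder): DigestPrefix(2) returns the digests at levels 0 and 1,
// in order, and DigestPrefix(0) returns nil.
//
//	prefix, err := d.DigestPrefix(2)
//	// prefix[0] == result of d.Digest(0)
//	// prefix[1] == result of d.Digest(1)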

func (bd *basicDigester) Digest(level uint) (Digest, error) {
	if level >= bd.Levels() {
		// level must be [0, bd.Levels()) (not inclusive) for digest
		return 0, NewHashLevelErrorf("cannot get digest at level %d: level must be [0, %d)", level, bd.Levels())
	}

	switch level {
	case 0:
		return Digest(bd.circleHash64), nil

	case 1, 2, 3:
		if bd.blake3Hash == emptyBlake3Hash {
			sum := blake3.Sum256(bd.msg)
			bd.blake3Hash[0] = binary.BigEndian.Uint64(sum[:])
			bd.blake3Hash[1] = binary.BigEndian.Uint64(sum[8:])
			bd.blake3Hash[2] = binary.BigEndian.Uint64(sum[16:])
			bd.blake3Hash[3] = binary.BigEndian.Uint64(sum[24:])
		}
		return Digest(bd.blake3Hash[level-1]), nil

	default: // list mode
		return 0, nil
	}
}
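
// To illustrate the level scheme above (a reading of the code, not added
// behavior): level 0 is served by CircleHash64, while levels 1-3 share one
// lazily computed BLAKE3-256 sum split into big-endian uint64 words.
//
//	sum := blake3.Sum256(msg) // 32 bytes, computed once on first use
//	// level 1 -> sum[0:8], level 2 -> sum[8:16], level 3 -> sum[16:24];
//	// sum[24:32] is cached in blake3Hash[3] but not returned at any level,
//	// since Levels() is 4 and level 0 uses CircleHash64.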

func (bd *basicDigester) Levels() uint {
	return 4
}