github.com/SmartMeshFoundation/Spectrum@v0.0.0-20220621030607-452a266fee1e/consensus/ethash/algorithm_go1.8.go

// Copyright 2017 The Spectrum Authors
// This file is part of the Spectrum library.
//
// The Spectrum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The Spectrum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the Spectrum library. If not, see <http://www.gnu.org/licenses/>.

// +build go1.8

package ethash

import "math/big"

// cacheSize calculates and returns the size of the ethash verification cache that
// belongs to a certain block number. The cache size grows linearly; however, we
// always take the highest prime below the linearly growing threshold in order to
// reduce the risk of accidental regularities leading to cyclic behavior.
func cacheSize(block uint64) uint64 {
	// If we have a pre-generated value, use that
	epoch := int(block / epochLength)
	if epoch < len(cacheSizes) {
		return cacheSizes[epoch]
	}
	// No known cache size, calculate manually (sanity branch only)
	size := cacheInitBytes + cacheGrowthBytes*uint64(epoch) - hashBytes
	for !new(big.Int).SetUint64(size / hashBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
		size -= 2 * hashBytes
	}
	return size
}

// datasetSize calculates and returns the size of the ethash mining dataset that
// belongs to a certain block number. The dataset size grows linearly; however, we
// always take the highest prime below the linearly growing threshold in order to
// reduce the risk of accidental regularities leading to cyclic behavior.
func datasetSize(block uint64) uint64 {
	// If we have a pre-generated value, use that
	epoch := int(block / epochLength)
	if epoch < len(datasetSizes) {
		return datasetSizes[epoch]
	}
	// No known dataset size, calculate manually (sanity branch only)
	size := datasetInitBytes + datasetGrowthBytes*uint64(epoch) - mixBytes
	for !new(big.Int).SetUint64(size / mixBytes).ProbablyPrime(1) { // Always accurate for n < 2^64
		size -= 2 * mixBytes
	}
	return size
}
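Both functions fall back to the same prime-hunting computation when the requested epoch lies beyond the pre-generated lookup tables. The sketch below reproduces that fallback for the verification cache as a self-contained program; the constants (epochLength, hashBytes, cacheInitBytes, cacheGrowthBytes) are defined elsewhere in this package, so the values used here are assumptions based on the standard ethash parameters rather than taken from this file. The `+build go1.8` tag exists because, from Go 1.8 onward, big.Int.ProbablyPrime additionally runs a Baillie-PSW test, which the Go documentation states is 100% accurate for inputs below 2^64.

// Self-contained sketch of the fallback branch of cacheSize. The constant
// values below are assumptions taken from the standard ethash parameters,
// not from this file (which gets them from elsewhere in the package).
package main

import (
	"fmt"
	"math/big"
)

const (
	epochLength      = 30000   // blocks per epoch (assumed)
	hashBytes        = 64      // hash length in bytes (assumed)
	cacheInitBytes   = 1 << 24 // cache size at epoch 0 (assumed)
	cacheGrowthBytes = 1 << 17 // cache growth per epoch (assumed)
)

// calcCacheSize mirrors the manual branch of cacheSize for a given epoch:
// start just below the linear threshold and step downwards until the number
// of hash-sized items is prime. Stepping by two hash widths keeps
// size/hashBytes odd, so only odd candidates are ever tested.
func calcCacheSize(epoch uint64) uint64 {
	size := cacheInitBytes + cacheGrowthBytes*epoch - hashBytes
	for !new(big.Int).SetUint64(size / hashBytes).ProbablyPrime(1) {
		size -= 2 * hashBytes
	}
	return size
}

func main() {
	// Epoch 0 should reproduce the first entry of the pre-generated
	// cacheSizes table (16776896 bytes = 262139 * 64, with 262139 prime).
	fmt.Println(calcCacheSize(0))

	// A large epoch, likely beyond the pre-generated table, is computed
	// the same way.
	fmt.Println(calcCacheSize(2048))
}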