github.com/deroproject/derosuite@v2.1.6-1.0.20200307070847-0f2e589c7a2b+incompatible/blockchain/difficulty.go

// Copyright 2017-2018 DERO Project. All rights reserved.
// Use of this source code in any form is governed by RESEARCH license.
// license can be found in the LICENSE file.
// GPG: 0F39 E425 8C65 3947 702A 8234 08B2 0360 A03A 9DE8
//
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package blockchain

//import "fmt"
import "math/big"

import "github.com/romana/rlog"

import "github.com/deroproject/derosuite/block"
import "github.com/deroproject/derosuite/config"
import "github.com/deroproject/derosuite/crypto"
import "github.com/deroproject/derosuite/globals"
import "github.com/deroproject/derosuite/storage"

var (
    // bigZero is 0 represented as a big.Int. It is defined here to avoid
    // the overhead of creating it multiple times.
    bigZero = big.NewInt(0)

    // bigOne is 1 represented as a big.Int. It is defined here to avoid
    // the overhead of creating it multiple times.
    bigOne = big.NewInt(1)

    // oneLsh256 is 1 shifted left 256 bits. It is defined here to avoid
    // the overhead of creating it multiple times.
    oneLsh256 = new(big.Int).Lsh(bigOne, 256)

    // enabling this switches to simulation mode with the difficulty hard coded to 1
    // the variable is knowingly not exported, so no one can tinker with it
    //simulation = false // simulation mode is disabled
)

// HashToBig converts a PoW hash into a big.Int that can be used to
// perform math comparisons.
func HashToBig(buf crypto.Hash) *big.Int {
    // A Hash is in little-endian, but the big package wants the bytes in
    // big-endian, so reverse them.
    blen := len(buf) // it's hardcoded to 32 bytes, so len is redundant, but use it anyway
    for i := 0; i < blen/2; i++ {
        buf[i], buf[blen-1-i] = buf[blen-1-i], buf[i]
    }

    return new(big.Int).SetBytes(buf[:])
}

// this function converts a difficulty into the corresponding target (2^256 / difficulty) in big.Int form
func ConvertDifficultyToBig(difficultyi uint64) *big.Int {
    if difficultyi == 0 {
        panic("difficulty can never be zero")
    }
    // (1 << 256) / difficulty
    difficulty := new(big.Int).SetUint64(difficultyi)
    return new(big.Int).Div(oneLsh256, difficulty)
}

// same as above, but takes the difficulty as a big.Int
func ConvertIntegerDifficultyToBig(difficultyi *big.Int) *big.Int {

    if difficultyi.Cmp(bigZero) == 0 { // a difficulty of zero is invalid
        panic("difficulty can never be zero")
    }

    return new(big.Int).Div(oneLsh256, difficultyi)
}

// this function checks whether the pow hash meets the difficulty criteria
func CheckPowHash(pow_hash crypto.Hash, difficulty uint64) bool {
    big_difficulty := ConvertDifficultyToBig(difficulty)
    big_pow_hash := HashToBig(pow_hash)

    if big_pow_hash.Cmp(big_difficulty) <= 0 { // the hash meets the criteria when it is <= the target
        return true
    }
    return false
}

// this function checks whether the pow hash meets the difficulty criteria
// however, it takes the difficulty in big.Int format
func CheckPowHashBig(pow_hash crypto.Hash, big_difficulty_integer *big.Int) bool {
    big_pow_hash := HashToBig(pow_hash)

    big_difficulty := ConvertIntegerDifficultyToBig(big_difficulty_integer)
    if big_pow_hash.Cmp(big_difficulty) <= 0 { // the hash meets the criteria when it is <= the target
        return true
    }
    return false
}

// this function finds the best tip (the one with the highest cumulative difficulty) among the given tips
func (chain *Blockchain) find_best_tip_cumulative_difficulty(dbtx storage.DBTX, tips []crypto.Hash) (best crypto.Hash) {

    tips_scores := make([]BlockScore, len(tips), len(tips))

    for i := range tips {
        tips_scores[i].BLID = tips[i] // score each tip by its cumulative difficulty
        tips_scores[i].Cumulative_Difficulty = chain.Load_Block_Cumulative_Difficulty(dbtx, tips[i])
    }

    sort_descending_by_cumulative_difficulty(tips_scores)

    best = tips_scores[0].BLID
    // base_height = scores[0].Weight

    return best

}

// confirms whether the actual tip difficulty is within 9% deviation of the reference
// the actual tip cannot have less than 91% of the main tip's difficulty
// if yes, the tip is okay, else the tip should be declared stale
// both of the tips should be in the store
func (chain *Blockchain) validate_tips(dbtx storage.DBTX, reference, actual crypto.Hash) (result bool) {

    reference_diff := chain.Load_Block_Difficulty(dbtx, reference)
    actual_diff := chain.Load_Block_Difficulty(dbtx, actual)

    // multiply by 91
    reference91 := new(big.Int).Mul(reference_diff, new(big.Int).SetUint64(91))
    // divide by 100
    reference91.Div(reference91, new(big.Int).SetUint64(100))

    if reference91.Cmp(actual_diff) < 0 {
        return true
    } else {
        return false
    }

}
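// Illustrative note (not part of the consensus code above): the helpers encode the
// usual PoW rule target = 2^256 / difficulty, and a hash is acceptable when
// HashToBig(hash) <= target. Below is a minimal usage sketch relying only on the
// helpers defined in this file; the function name is made up for the example.
func examplePowCheck() bool {
    var zero_pow crypto.Hash // the all-zero hash maps to 0 and therefore meets any difficulty
    return CheckPowHash(zero_pow, 1000000)
}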
// when creating a new block, current_time in UTC + chain_block_time must be added
// while verifying the block, the expected timestamp should be taken from what is in the block's header
// in DERO atlantis difficulty is based on the previous tips
// get difficulty at specific tips
// the algorithm is as follows: choose the biggest difficulty tip (division is integer, not floating point)
// diff = parent_diff + parent_diff/100 * max(1 - (parent_timestamp - parent_parent_timestamp) // (chain_block_time*2/3), -1)
// (note that the code below clamps the drop term at -2 rather than -1)
// this should be more thoroughly evaluated

// NOTE: we need to evaluate whether a mining adversary gains something if they set the time diff to 1
// we need to do more simulations and evaluations
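// Worked example of the rule above (illustrative only; the difficulty figures are
// made up, and the 6-second window comes from the 0-5 / 6-11 / 12+ ranges described
// in the comments inside the function below):
//
//    parent_diff = 1,000,000, window = (chain_block_time*2)/3 = 6
//    gap of  4s: 1 - 4//6  =  1              -> diff = 1,000,000 + 1,000,000/100*1 = 1,010,000 (+1%)
//    gap of  9s: 1 - 9//6  =  0              -> diff = 1,000,000 (unchanged)
//    gap of 25s: 1 - 25//6 = -3, clamped -2  -> diff = 1,000,000 - 20,000 = 980,000 (-2%)
//
// the result is finally floored at MinimumDifficulty.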
func (chain *Blockchain) Get_Difficulty_At_Tips(dbtx storage.DBTX, tips []crypto.Hash) *big.Int {

    var MinimumDifficulty *big.Int

    if globals.IsMainnet() {
        MinimumDifficulty = new(big.Int).SetUint64(config.MAINNET_MINIMUM_DIFFICULTY) // this must be a controllable parameter
    } else {
        MinimumDifficulty = new(big.Int).SetUint64(config.TESTNET_MINIMUM_DIFFICULTY) // this must be a controllable parameter
    }
    //MinimumDifficulty := new(big.Int).SetUint64(131072) // TODO this must be a controllable parameter
    GenesisDifficulty := new(big.Int).SetUint64(1)

    if chain.simulator == true {
        return GenesisDifficulty
    }

    if len(tips) == 0 { // genesis block difficulty is 1
        return GenesisDifficulty // it should be configurable via params
    }

    height := chain.Calculate_Height_At_Tips(dbtx, tips)

    // hard fork version 1 has difficulty set to 1
    if 1 == chain.Get_Current_Version_at_Height(height) {
        return new(big.Int).SetUint64(1)
    }

    // if we are hardforking from 1 to 2
    // we can start from a high difficulty to find the right point
    if height >= 1 && chain.Get_Current_Version_at_Height(height-1) == 1 && chain.Get_Current_Version_at_Height(height) == 2 {
        if globals.IsMainnet() {
            bootstrap_difficulty := new(big.Int).SetUint64(config.MAINNET_BOOTSTRAP_DIFFICULTY) // return bootstrap mainnet difficulty
            rlog.Infof("Returning bootstrap difficulty %s at height %d", bootstrap_difficulty.String(), height)
            return bootstrap_difficulty
        } else {
            bootstrap_difficulty := new(big.Int).SetUint64(config.TESTNET_BOOTSTRAP_DIFFICULTY)
            rlog.Infof("Returning bootstrap difficulty %s at height %d", bootstrap_difficulty.String(), height)
            return bootstrap_difficulty // return bootstrap difficulty for testnet
        }
    }

    // if we are hardforking from 3 to 4
    // we can start from a high difficulty to find the right point
    if height >= 1 && chain.Get_Current_Version_at_Height(height-1) <= 3 && chain.Get_Current_Version_at_Height(height) == 4 {
        if globals.IsMainnet() {
            bootstrap_difficulty := new(big.Int).SetUint64(config.MAINNET_BOOTSTRAP_DIFFICULTY_hf4) // return bootstrap mainnet difficulty
            rlog.Infof("Returning bootstrap difficulty %s at height %d", bootstrap_difficulty.String(), height)
            return bootstrap_difficulty
        } else {
            bootstrap_difficulty := new(big.Int).SetUint64(config.TESTNET_BOOTSTRAP_DIFFICULTY)
            rlog.Infof("Returning bootstrap difficulty %s at height %d", bootstrap_difficulty.String(), height)
            return bootstrap_difficulty // return bootstrap difficulty for testnet
        }
    }

    // for testing purposes, not possible on mainchain
    if height < 3 && chain.Get_Current_Version_at_Height(height) <= 3 {
        return MinimumDifficulty
    }

    /*
        // build all blocks which are reachable
        // process only those which are close to the chain
        reachable_blocks := chain.BuildReachableBlocks(dbtx, tips)
        var difficulty_sum big.Int // used to calculate average difficulty
        var average_difficulty big.Int
        var lowest_average_difficulty big.Int
        var block_count int64

        for k, _ := range reachable_blocks {
            height_of_k := chain.Load_Height_for_BL_ID(dbtx, k)
            if (height - height_of_k) <= ((config.STABLE_LIMIT * 3) / 4) {
                block_count++
                difficulty_of_k := chain.Load_Block_Difficulty(dbtx, k)
                difficulty_sum.Add(&difficulty_sum, difficulty_of_k)
            }
        }

        // used to rate limit the maximum drop over a certain number of blocks
        average_difficulty.Div(&difficulty_sum, new(big.Int).SetInt64(block_count))
        average_difficulty.Mul(&average_difficulty, new(big.Int).SetUint64(92)) // max 10% drop
        average_difficulty.Div(&average_difficulty, new(big.Int).SetUint64(100))

        lowest_average_difficulty.Set(&average_difficulty) // difficulty can never drop less than this

    */

    biggest_tip := chain.find_best_tip_cumulative_difficulty(dbtx, tips)
    biggest_difficulty := chain.Load_Block_Difficulty(dbtx, biggest_tip)

    // take the time from the heaviest block
    parent_highest_time := chain.Load_Block_Timestamp(dbtx, biggest_tip)

    // find, among the parent's past blocks, the tip with the highest cumulative difficulty
    parent_past := chain.Get_Block_Past(dbtx, biggest_tip)

    past_biggest_tip := chain.find_best_tip_cumulative_difficulty(dbtx, parent_past)
    parent_parent_highest_time := chain.Load_Block_Timestamp(dbtx, past_biggest_tip)

    if biggest_difficulty.Cmp(MinimumDifficulty) < 0 {
        biggest_difficulty.Set(MinimumDifficulty)
    }

    // create 3 ranges, used for physical verification
    /*
        switch {
        case (parent_highest_time - parent_parent_highest_time) <= 6: // increase diff
            logger.Infof(" increase diff")
        case (parent_highest_time - parent_parent_highest_time) >= 12: // decrease diff
            logger.Infof(" decrease diff")

        default: // between 6 and 12, i.e. 7,8,9,10,11, do nothing, return previous difficulty
            logger.Infof("stable diff")
        }*/

    bigTime := new(big.Int).SetInt64(parent_highest_time)
    bigParentTime := new(big.Int).SetInt64(parent_parent_highest_time)

    // holds intermediate values to make the algo easier to read & audit
    x := new(big.Int)
    y := new(big.Int)

    // 1 - (block_timestamp - parent_timestamp) // ((config.BLOCK_TIME*2)/3)
    // the above creates the following ranges: 0-5 increase diff, 6-11 keep it constant, 12 and above decrease
    big1 := new(big.Int).SetUint64(1)

    block_time := config.BLOCK_TIME
    if chain.Get_Current_Version_at_Height(height) >= 4 {
        block_time = config.BLOCK_TIME_hf4
    }
    big_block_chain_time_range := new(big.Int).SetUint64((block_time * 2) / 3)

    DifficultyBoundDivisor := new(big.Int).SetUint64(100) // granularity of 100 steps to increase or decrease difficulty
    bigmaxdifficulydrop := new(big.Int).SetInt64(-2)      // this should ideally be a smaller fraction of the divisor; with the clamp at -2 and a divisor of 100, the maximum per-block drop is 2%

    x.Sub(bigTime, bigParentTime)

    x.Div(x, big_block_chain_time_range)
    //logger.Infof(" block time - parent time %d %s / 6", parent_highest_time-parent_parent_highest_time, x.String())

    x.Sub(big1, x)

    //logger.Infof("x %s biggest %s lowest average %s ", x.String(), biggest_difficulty, lowest_average_difficulty.String())

    // max(1 - (block_timestamp - parent_timestamp) // chain_block_time_range, -2)
    if x.Cmp(bigmaxdifficulydrop) < 0 {
        x.Set(bigmaxdifficulydrop)
    }
    // logger.Infof("x %s biggest %s ", x.String(), biggest_difficulty)

    // parent_diff + parent_diff/100 * max(1 - (block_timestamp - parent_timestamp) // chain_block_time_range, -2)
    y.Div(biggest_difficulty, DifficultyBoundDivisor)

    // decreases are 1/2 of increases
    // this will cause the network to adjust slower to big difficulty drops
    // but has more benefits
    /*if x.Sign() < 0 {
        logger.Infof("decrease will be 1//2 ")

        y.Div(y, new(big.Int).SetUint64(2))
    }*/

    //logger.Infof("max increase/decrease %s x %s", y.String(), x.String())

    x.Mul(y, x)
    x.Add(biggest_difficulty, x)

    /*
        // if the difficulty drop is more than X% of the average, limit it here
        if x.Cmp(&lowest_average_difficulty) < 0 {
            x.Set(&lowest_average_difficulty)
        }

    */
    //
    // minimum the difficulty can ever be
    if x.Cmp(MinimumDifficulty) < 0 {
        x.Set(MinimumDifficulty)
    }
    // logger.Infof("Final diff %s biggest %s lowest average %s ", x.String(), biggest_difficulty, lowest_average_difficulty.String())

    return x
}

func (chain *Blockchain) VerifyPoW(dbtx storage.DBTX, bl *block.Block) (verified bool) {

    verified = false
    //block_work := bl.GetBlockWork()

    //PoW := crypto.Scrypt_1024_1_1_256(block_work)
    //PoW := crypto.Keccak256(block_work)
    PoW := bl.GetPoWHash()

    block_difficulty := chain.Get_Difficulty_At_Tips(dbtx, bl.Tips)

    // test whether the new difficulty checks are equivalent to the integer math
    /*if CheckPowHash(PoW, block_difficulty.Uint64()) != CheckPowHashBig(PoW, block_difficulty) {
        logger.Panicf("Difficulty mismatch between big and uint64 diff ")
    }*/

    if CheckPowHashBig(PoW, block_difficulty) == true {
        return true
    }
    /* *
    if CheckPowHash(PoW, block_difficulty.Uint64()) == true {
        return true
    }*/

    return false
}

// this function calculates difficulty on the basis of the previous difficulty and the number of blocks
// THIS is the ideal algorithm for us as it will be optimal based on the number of orphan blocks
// we may deploy it when the block reward becomes insignificant in comparison to fees
// basically when tail emission kicks in, or when we need to optimally increase the number of blocks
// the algorithm does NOT work if the network has a single miner !!!
// this algorithm will work without the concept of time
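// The note above only sketches the idea; no concrete block-count based algorithm is
// implemented in this file. A minimal, purely illustrative guess at such a rule (the
// function name and parameters are made up for this sketch, and expected is assumed
// to be non-zero) scales the previous difficulty by the ratio of blocks actually
// observed in a window to the number expected for it, so extra side blocks push the
// difficulty up without ever consulting timestamps:
func exampleCountBasedDifficulty(previous *big.Int, observed, expected uint64) *big.Int {
    if observed == 0 {
        observed = 1 // avoid collapsing the difficulty to zero in the sketch
    }
    next := new(big.Int).Mul(previous, new(big.Int).SetUint64(observed))
    return next.Div(next, new(big.Int).SetUint64(expected))
}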