github.com/NebulousLabs/Sia@v1.3.7/modules/consensus/difficulty_test.go

package consensus

import (
	"bytes"
	"math/big"
	"testing"

	"github.com/NebulousLabs/Sia/types"

	"github.com/coreos/bbolt"
)

// TestChildTargetOak checks the childTargetOak function, especially for edge
// cases like overflows and underflows.
func TestChildTargetOak(t *testing.T) {
	// NOTE: Test must not be run in parallel.
	if testing.Short() {
		t.SkipNow()
	}
	cst, err := createConsensusSetTester(t.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer cst.Close()
	cs := cst.cs
	// NOTE: Test must not be run in parallel.
	//
	// Set the constants to match the real-network constants, and then make sure
	// they are reset at the end of the test.
	oldFreq := types.BlockFrequency
	oldMaxRise := types.OakMaxRise
	oldMaxDrop := types.OakMaxDrop
	oldRootTarget := types.RootTarget
	types.BlockFrequency = 600
	types.OakMaxRise = big.NewRat(1004, 1e3)
	types.OakMaxDrop = big.NewRat(1e3, 1004)
	types.RootTarget = types.Target{0, 0, 0, 1}
	defer func() {
		types.BlockFrequency = oldFreq
		types.OakMaxRise = oldMaxRise
		types.OakMaxDrop = oldMaxDrop
		types.RootTarget = oldRootTarget
	}()
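	// With these values, OakMaxRise and OakMaxDrop clamp the per-block
	// difficulty adjustment to roughly 0.4% in either direction, and
	// BlockFrequency targets one block every ten minutes.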

	// Start with some values that are normal, resulting in no change in target.
	parentHeight := types.BlockHeight(100)
	// The total time and total target will be set to 100 uncompressed blocks.
	// Though the actual algorithm is compressing the times to achieve an
	// exponential weighting, this test only requires that the visible hashrate
	// be measured as equal to the root target per block.
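	// Concretely: 100 blocks at 600 seconds each gives a total time of 60,000
	// seconds and a total target worth 100 times the root difficulty.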
	parentTotalTime := int64(types.BlockFrequency * parentHeight)
	parentTotalTarget := types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	parentTimestamp := types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight))
	parentTarget := types.RootTarget
	// newTarget should match the root target, as the hashrate and blocktime all
	// match the existing target - there should be no reason for adjustment.
	newTarget := cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// New target should be barely moving. Some imprecision may cause slight
	// adjustments, but the total difference should be less than 0.01%.
	maxNewTarget := parentTarget.MulDifficulty(big.NewRat(10e3, 10001))
	minNewTarget := parentTarget.MulDifficulty(big.NewRat(10001, 10e3))
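	// The 10e3/10001 and 10001/10e3 ratios bound the child difficulty to
	// within 0.01% of the parent difficulty on either side.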
	if newTarget.Cmp(maxNewTarget) > 0 {
		t.Error("The target shifted too much for a constant hashrate")
	}
	if newTarget.Cmp(minNewTarget) < 0 {
		t.Error("The target shifted too much for a constant hashrate")
	}

	// Set the total time such that the difficulty needs to be adjusted down.
	// Shifter clamps and adjustment clamps will both be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp far in the future, triggering the shifter to increase
	// the block time to the point that the shifter clamps activate.
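	// 500e6 seconds is roughly sixteen years of extra time, far more than the
	// shifter clamp allows, so the clamped block time drives the adjustment.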
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) + 500e6
	// Set the target to types.RootTarget, causing the max difficulty adjustment
	// clamp to be in effect.
	parentTarget = types.RootTarget
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	if parentTarget.Difficulty().Cmp(newTarget.Difficulty()) <= 0 {
		t.Error("Difficulty did not decrease in response to increased total time")
	}
	// Check that the difficulty decreased by the maximum amount.
	minNewTarget = parentTarget.MulDifficulty(types.OakMaxDrop)
	if minNewTarget.Difficulty().Cmp(newTarget.Difficulty()) != 0 {
		t.Error("Difficulty did not decrease by the maximum amount")
	}

	// Set the total time such that the difficulty needs to be adjusted up.
	// Shifter clamps and adjustment clamps will both be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp far in the past, triggering the shifter to decrease the
	// block time to the point that the shifter clamps activate.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) - 500e6
	// Set the target to types.RootTarget, causing the max difficulty adjustment
	// clamp to be in effect.
	parentTarget = types.RootTarget
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	if parentTarget.Difficulty().Cmp(newTarget.Difficulty()) >= 0 {
		t.Error("Difficulty did not increase in response to decreased total time")
	}
	// Check that the difficulty increased by the maximum amount.
	maxNewTarget = parentTarget.MulDifficulty(types.OakMaxRise)
	if maxNewTarget.Difficulty().Cmp(newTarget.Difficulty()) != 0 {
		t.Error("Difficulty did not increase by the maximum amount")
	}

	// Set the total time such that the difficulty needs to be adjusted down.
	// Neither the shifter clamps nor the adjustor clamps will be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp in the future, but little enough in the future that
	// neither the shifter clamp nor the adjustment clamp will trigger.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) + 5e3
	// Set the target to types.RootTarget.
	parentTarget = types.RootTarget
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// Check that the difficulty decreased, but not by the max amount.
	minNewTarget = parentTarget.MulDifficulty(types.OakMaxDrop)
	if parentTarget.Difficulty().Cmp(newTarget.Difficulty()) <= 0 {
		t.Error("Difficulty did not decrease")
	}
	if minNewTarget.Difficulty().Cmp(newTarget.Difficulty()) >= 0 {
		t.Error("Difficulty decreased by the clamped amount")
	}

	// Set the total time such that the difficulty needs to be adjusted up.
	// Neither the shifter clamps nor the adjustor clamps will be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp in the past, but little enough in the past that
	// neither the shifter clamp nor the adjustment clamp will trigger.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) - 5e3
	// Set the target to types.RootTarget.
	parentTarget = types.RootTarget
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// Check that the difficulty increased, but not by the max amount.
	maxNewTarget = parentTarget.MulDifficulty(types.OakMaxRise)
	if parentTarget.Difficulty().Cmp(newTarget.Difficulty()) >= 0 {
		t.Error("Difficulty did not increase")
	}
	if maxNewTarget.Difficulty().Cmp(newTarget.Difficulty()) <= 0 {
		t.Error("Difficulty increased by the clamped amount")
	}

	// Set the total time such that the difficulty needs to be adjusted down.
	// Adjustor clamps will be in effect, shifter clamps will not be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp far enough in the future that the adjustment clamp
	// will trigger, but not so far that the shifter clamp triggers.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) + 10e3
	// Set the target to types.RootTarget.
	parentTarget = types.RootTarget
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// Check that the difficulty decreased by the maximum amount.
	minNewTarget = parentTarget.MulDifficulty(types.OakMaxDrop)
	if parentTarget.Difficulty().Cmp(newTarget.Difficulty()) <= 0 {
		t.Error("Difficulty did not decrease")
	}
	if minNewTarget.Difficulty().Cmp(newTarget.Difficulty()) != 0 {
		t.Error("Difficulty did not decrease by the clamped amount")
	}

	// Set the total time such that the difficulty needs to be adjusted up.
	// Adjustor clamps will be in effect, shifter clamps will not be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp far enough in the past that the adjustment clamp will
	// trigger, but not so far that the shifter clamp triggers.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) - 10e3
	// Set the target to types.RootTarget.
	parentTarget = types.RootTarget
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// Check that the difficulty increased by the maximum amount.
	maxNewTarget = parentTarget.MulDifficulty(types.OakMaxRise)
	if parentTarget.Difficulty().Cmp(newTarget.Difficulty()) >= 0 {
		t.Error("Difficulty did not increase")
	}
	if maxNewTarget.Difficulty().Cmp(newTarget.Difficulty()) != 0 {
		t.Error("Difficulty did not increase by the clamped amount")
	}

	// Set the total time such that the difficulty needs to be adjusted down.
	// Shifter clamps will be in effect, adjustor clamps will not be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp far in the future, so that the shifter clamp will be
	// in effect.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) + 500e6
	// Lower the parent difficulty by a factor of OakMaxBlockShift so that the
	// adjustment clamp will not be in effect.
	parentTarget = types.RootTarget.MulDifficulty(big.NewRat(1, types.OakMaxBlockShift))
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// New target should be barely moving. Some imprecision may cause slight
	// adjustments, but the total difference should be less than 0.01%.
	maxNewTarget = parentTarget.MulDifficulty(big.NewRat(10e3, 10001))
	minNewTarget = parentTarget.MulDifficulty(big.NewRat(10001, 10e3))
	if newTarget.Cmp(maxNewTarget) > 0 {
		t.Error("The target shifted too much for a constant hashrate")
	}
	if newTarget.Cmp(minNewTarget) < 0 {
		t.Error("The target shifted too much for a constant hashrate")
	}

	// Set the total time such that the difficulty needs to be adjusted up.
	// Shifter clamps will be in effect, adjustor clamps will not be in effect.
	parentHeight = types.BlockHeight(100)
	// Set the visible hashrate to types.RootTarget per block.
	parentTotalTime = int64(types.BlockFrequency * parentHeight)
	parentTotalTarget = types.RootTarget.MulDifficulty(big.NewRat(int64(parentHeight), 1))
	// Set the timestamp far in the past, so that the shifter clamp will be in
	// effect.
	parentTimestamp = types.GenesisTimestamp + types.Timestamp((types.BlockFrequency * parentHeight)) - 500e6
	// Raise the parent difficulty by a factor of OakMaxBlockShift so that the
	// adjustment clamp will not be in effect.
	parentTarget = types.RootTarget.MulDifficulty(big.NewRat(types.OakMaxBlockShift, 1))
	newTarget = cs.childTargetOak(parentTotalTime, parentTotalTarget, parentTarget, parentHeight, parentTimestamp)
	// New target should be barely moving. Some imprecision may cause slight
	// adjustments, but the total difference should be less than 0.01%.
	maxNewTarget = parentTarget.MulDifficulty(big.NewRat(10e3, 10001))
	minNewTarget = parentTarget.MulDifficulty(big.NewRat(10001, 10e3))
	if newTarget.Cmp(maxNewTarget) > 0 {
		t.Error("The target shifted too much for a constant hashrate")
	}
	if newTarget.Cmp(minNewTarget) < 0 {
		t.Error("The target shifted too much for a constant hashrate")
	}
}

// TestStoreBlockTotals checks features of the storeBlockTotals and
// getBlockTotals code.
func TestStoreBlockTotals(t *testing.T) {
	// NOTE: Test must not be run in parallel.
	if testing.Short() {
		t.SkipNow()
	}
	cst, err := createConsensusSetTester(t.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer cst.Close()
	cs := cst.cs
	// NOTE: Test must not be run in parallel.
	//
	// Set the constants to match the real-network constants, and then make sure
	// they are reset at the end of the test.
	oldFreq := types.BlockFrequency
	oldDecayNum := types.OakDecayNum
	oldDecayDenom := types.OakDecayDenom
	oldMaxRise := types.OakMaxRise
	oldMaxDrop := types.OakMaxDrop
	oldRootTarget := types.RootTarget
	types.BlockFrequency = 600
	types.OakDecayNum = 995
	types.OakDecayDenom = 1e3
	types.OakMaxRise = big.NewRat(1004, 1e3)
	types.OakMaxDrop = big.NewRat(1e3, 1004)
	types.RootTarget = types.Target{0, 0, 0, 1}
	defer func() {
		types.BlockFrequency = oldFreq
		types.OakDecayNum = oldDecayNum
		types.OakDecayDenom = oldDecayDenom
		types.OakMaxRise = oldMaxRise
		types.OakMaxDrop = oldMaxDrop
		types.RootTarget = oldRootTarget
	}()
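	// With OakDecayNum/OakDecayDenom set to 995/1000, each stored total is
	// decayed to 99.5% of its previous value every block, so the running
	// totals should converge toward roughly 1/(1-0.995) = 200 blocks' worth
	// of time and target. The bounds of 199 and 205 checked below bracket
	// that expected limit.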

	// Check that as totals get stored over and over, the values getting
	// returned follow a decay. While storing repeatedly, check that the
	// getBlockTotals values match the values that were stored.
	err = cs.db.Update(func(tx *bolt.Tx) error {
		var totalTime int64
		var id types.BlockID
		var parentTimestamp, currentTimestamp types.Timestamp
		currentTarget := types.RootTarget
		totalTarget := types.RootDepth
		for i := types.BlockHeight(0); i < 8000; i++ {
			id[i/256] = byte(i % 256)
			parentTimestamp = currentTimestamp
			currentTimestamp += types.Timestamp(types.BlockFrequency)
			totalTime, totalTarget, err = cs.storeBlockTotals(tx, i, id, totalTime, parentTimestamp, currentTimestamp, totalTarget, currentTarget)
			if err != nil {
				return err
			}

			// Check that the fetched values match the stored values.
			getTime, getTarg := cs.getBlockTotals(tx, id)
			if getTime != totalTime || getTarg != totalTarget {
				t.Error("fetch failed - retrieving time and target did not work")
			}
		}
		// Do a final iteration, but keep the old totals. After 8000 iterations,
		// the totals should no longer be changing, yet they should be hundreds
		// of times larger than the original values.
		id[8001/256] = byte(8001 % 256)
		parentTimestamp = currentTimestamp
		currentTimestamp += types.Timestamp(types.BlockFrequency)
		newTotalTime, newTotalTarget, err := cs.storeBlockTotals(tx, 8001, id, totalTime, parentTimestamp, currentTimestamp, totalTarget, currentTarget)
		if err != nil {
			return err
		}
		if newTotalTime != totalTime || newTotalTarget.Difficulty().Cmp(totalTarget.Difficulty()) != 0 {
			t.Log(newTotalTime)
			t.Log(totalTime)
			t.Log(newTotalTarget)
			t.Log(totalTarget)
			t.Error("Total time and target did not seem to converge to a result")
		}
		if newTotalTime < int64(types.BlockFrequency)*199 {
			t.Error("decay seems to be happening too rapidly")
		}
		if newTotalTime > int64(types.BlockFrequency)*205 {
			t.Error("decay seems to be happening too slowly")
		}
		if newTotalTarget.Difficulty().Cmp(types.RootTarget.Difficulty().Mul64(199)) < 0 {
			t.Error("decay seems to be happening too rapidly")
		}
		if newTotalTarget.Difficulty().Cmp(types.RootTarget.Difficulty().Mul64(205)) > 0 {
			t.Error("decay seems to be happening too slowly")
		}
		return nil
	})
	if err != nil {
		t.Fatal(err)
	}
}

// TestHardforkMechanic mines blocks until the oak hardfork kicks in, verifying
// that nothing unusual happens, and that the difficulty adjustments begin
// happening every block.
func TestHardforkMechanic(t *testing.T) {
	if testing.Short() {
		t.SkipNow()
	}
	cst, err := createConsensusSetTester(t.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer cst.Close()

	// Mine blocks until well past the oak hardfork height, checking the child
	// target at each height.
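	// Test blocks are mined far faster than the target block time, so once the
	// per-block Oak adjustment activates after the hardfork, the difficulty
	// should rise and the target should shrink on every block, which is what
	// the comparison below verifies.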
	var prevTarg types.Target
	for i := types.BlockHeight(0); i < types.OakHardforkBlock*2; i++ {
		b, err := cst.miner.AddBlock()
		if err != nil {
			t.Fatal(err)
		}
		targ, _ := cst.cs.ChildTarget(b.ID())
		if i > types.OakHardforkBlock && bytes.Compare(targ[:], prevTarg[:]) >= 0 {
			t.Error("target is not adjusting down during mining every block")
		}
		prevTarg = targ
	}
}