github.com/NebulousLabs/Sia@v1.3.7/modules/consensus/processedblock_test.go

package consensus

import (
	"path/filepath"
	"testing"

	"github.com/NebulousLabs/Sia/build"
	"github.com/NebulousLabs/Sia/crypto"
	"github.com/NebulousLabs/Sia/modules"
	"github.com/NebulousLabs/Sia/modules/gateway"
	"github.com/NebulousLabs/Sia/modules/miner"
	"github.com/NebulousLabs/Sia/modules/transactionpool"
	"github.com/NebulousLabs/Sia/modules/wallet"
	"github.com/NebulousLabs/Sia/types"
)

// TestIntegrationMinimumValidChildTimestamp probes the
// MinimumValidChildTimestamp method of the consensus type.
func TestIntegrationMinimumValidChildTimestamp(t *testing.T) {
	if testing.Short() {
		t.SkipNow()
	}
	t.Parallel()

	// Create a custom consensus set to control the blocks.
	testdir := build.TempDir(modules.ConsensusDir, t.Name())
	g, err := gateway.New("localhost:0", false, filepath.Join(testdir, modules.GatewayDir))
	if err != nil {
		t.Fatal(err)
	}
	cs, err := New(g, false, filepath.Join(testdir, modules.ConsensusDir))
	if err != nil {
		t.Fatal(err)
	}
	tp, err := transactionpool.New(cs, g, filepath.Join(testdir, modules.TransactionPoolDir))
	if err != nil {
		t.Fatal(err)
	}
	w, err := wallet.New(cs, tp, filepath.Join(testdir, modules.WalletDir))
	if err != nil {
		t.Fatal(err)
	}
	key := crypto.GenerateTwofishKey()
	_, err = w.Encrypt(key)
	if err != nil {
		t.Fatal(err)
	}
	err = w.Unlock(key)
	if err != nil {
		t.Fatal(err)
	}
	m, err := miner.New(cs, tp, w, filepath.Join(testdir, modules.MinerDir))
	if err != nil {
		t.Fatal(err)
	}
	defer g.Close()

	// The earliest child timestamp of the genesis block should be the
	// timestamp of the genesis block.
	genesisTime := cs.blockRoot.Block.Timestamp
	earliest, ok := cs.MinimumValidChildTimestamp(cs.blockRoot.Block.ID())
	if !ok || genesisTime != earliest {
		t.Error("genesis block earliest timestamp producing unexpected results")
	}

	timestampOffsets := []types.Timestamp{1, 3, 2, 5, 4, 6, 7, 8, 9, 10}
	blockIDs := []types.BlockID{cs.blockRoot.Block.ID()}
	for _, offset := range timestampOffsets {
		bfw, target, err := m.BlockForWork()
		if err != nil {
			t.Fatal(err)
		}
		bfw.Timestamp = genesisTime + offset
		solvedBlock, _ := m.SolveBlock(bfw, target)
		err = cs.AcceptBlock(solvedBlock)
		if err != nil {
			t.Fatal(err)
		}
		blockIDs = append(blockIDs, solvedBlock.ID())
	}

	// Median should be genesisTime for the 6th block.
	earliest, ok = cs.MinimumValidChildTimestamp(blockIDs[5])
	if !ok || earliest != genesisTime {
		t.Error("incorrect child timestamp")
	}
	// Median should be genesisTime+1 for the 7th block.
	earliest, ok = cs.MinimumValidChildTimestamp(blockIDs[6])
	if !ok || earliest != genesisTime+1 {
		t.Error("incorrect child timestamp")
	}
	// Median should be genesisTime+5 for the 11th block (blockIDs[10]).
	earliest, ok = cs.MinimumValidChildTimestamp(blockIDs[10])
	if !ok || earliest != genesisTime+5 {
		t.Error("incorrect child timestamp")
	}
}

// TestUnitHeavierThan probes the heavierThan method of the processedBlock type.
func TestUnitHeavierThan(t *testing.T) {
	// Create a light node.
	pbLight := new(processedBlock)
	pbLight.Depth[0] = 64
	pbLight.ChildTarget[0] = 200

	// Create a node that's heavier, but not enough to beat the surpass
	// threshold.
	pbMiddle := new(processedBlock)
	pbMiddle.Depth[0] = 60
	pbMiddle.ChildTarget[0] = 200

	// Create a node that's heavy enough to break the surpass threshold.
	pbHeavy := new(processedBlock)
	pbHeavy.Depth[0] = 16
	pbHeavy.ChildTarget[0] = 200

	// pbLight should not be heavier than pbHeavy.
	if pbLight.heavierThan(pbHeavy) {
		t.Error("light heavier than heavy")
	}
	// pbLight should not be heavier than pbMiddle.
	if pbLight.heavierThan(pbMiddle) {
		t.Error("light heavier than middle")
	}
	// pbLight should not be heavier than itself.
	if pbLight.heavierThan(pbLight) {
		t.Error("light heavier than itself")
	}

	// pbMiddle should not be heavier than pbLight.
	if pbMiddle.heavierThan(pbLight) {
		t.Error("middle heavier than light - surpass threshold should not have been broken")
	}
	// pbHeavy should be heavier than pbLight.
	if !pbHeavy.heavierThan(pbLight) {
		t.Error("heavy is not heavier than light")
	}
	// pbHeavy should be heavier than pbMiddle.
	if !pbHeavy.heavierThan(pbMiddle) {
		t.Error("heavy is not heavier than middle")
	}
}

// TestChildDepth probes the childDepth method of the processedBlock type.
func TestChildDepth(t *testing.T) {
	// Try adding two equal weight nodes; the result should be half.
	pb := new(processedBlock)
	pb.Depth[0] = 64
	pb.ChildTarget[0] = 64
	childDepth := pb.childDepth()
	if childDepth[0] != 32 {
		t.Error("unexpected child depth")
	}

	// Try adding nodes of different weights.
	pb.Depth[0] = 24
	pb.ChildTarget[0] = 48
	childDepth = pb.childDepth()
	if childDepth[0] != 16 {
		t.Error("unexpected child depth")
	}
}

/*
// TestTargetAdjustmentBase probes the targetAdjustmentBase method of the block
// node type.
func TestTargetAdjustmentBase(t *testing.T) {
	cst, err := createConsensusSetTester(t.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer cst.closeCst()

	// Create a genesis node at timestamp 10,000
	genesisNode := &processedBlock{
		Block: types.Block{Timestamp: 10000},
	}
	cst.cs.db.addBlockMap(genesisNode)
	exactTimeNode := &processedBlock{
		Block: types.Block{
			Nonce:     types.BlockNonce{1, 0, 0, 0, 0, 0, 0, 0},
			Timestamp: types.Timestamp(10000 + types.BlockFrequency),
		},
	}
	exactTimeNode.Parent = genesisNode.Block.ID()
	cst.cs.db.addBlockMap(exactTimeNode)

	// Base adjustment for the exactTimeNode should be 1.
	adjustment, exact := cst.cs.targetAdjustmentBase(exactTimeNode).Float64()
	if !exact {
		t.Fatal("did not get an exact target adjustment")
	}
	if adjustment != 1 {
		t.Error("block did not adjust itself to the same target")
	}

	// Create a double-speed node and get the base adjustment.
	doubleSpeedNode := &processedBlock{
		Block: types.Block{Timestamp: types.Timestamp(10000 + types.BlockFrequency)},
	}
	doubleSpeedNode.Parent = exactTimeNode.Block.ID()
	cst.cs.db.addBlockMap(doubleSpeedNode)
	adjustment, exact = cst.cs.targetAdjustmentBase(doubleSpeedNode).Float64()
	if !exact {
		t.Fatal("did not get an exact adjustment")
	}
	if adjustment != 0.5 {
		t.Error("double speed node did not get a base to halve the target")
	}

	// Create a half-speed node and get the base adjustment.
	halfSpeedNode := &processedBlock{
		Block: types.Block{Timestamp: types.Timestamp(10000 + types.BlockFrequency*6)},
	}
	halfSpeedNode.Parent = doubleSpeedNode.Block.ID()
	cst.cs.db.addBlockMap(halfSpeedNode)
	adjustment, exact = cst.cs.targetAdjustmentBase(halfSpeedNode).Float64()
	if !exact {
		t.Fatal("did not get an exact adjustment")
	}
	if adjustment != 2 {
		t.Error("half speed node did not get a base to double the target")
	}

	if testing.Short() {
		t.SkipNow()
	}
	// Create a chain of nodes so that the genesis node is no longer the point
	// of comparison.
	comparisonNode := &processedBlock{
		Block: types.Block{Timestamp: 125000},
	}
	comparisonNode.Parent = halfSpeedNode.Block.ID()
	cst.cs.db.addBlockMap(comparisonNode)
	startingNode := comparisonNode
	for i := types.BlockHeight(0); i < types.TargetWindow; i++ {
		newNode := new(processedBlock)
		newNode.Parent = startingNode.Block.ID()
		newNode.Block.Nonce = types.BlockNonce{byte(i), byte(i / 256), 0, 0, 0, 0, 0, 0}
		cst.cs.db.addBlockMap(newNode)
		startingNode = newNode
	}
	startingNode.Block.Timestamp = types.Timestamp(125000 + types.BlockFrequency*types.TargetWindow)
	adjustment, exact = cst.cs.targetAdjustmentBase(startingNode).Float64()
	if !exact {
		t.Error("failed to get exact result")
	}
	if adjustment != 1 {
		t.Error("got wrong long-range adjustment")
	}
	startingNode.Block.Timestamp = types.Timestamp(125000 + 2*types.BlockFrequency*types.TargetWindow)
	adjustment, exact = cst.cs.targetAdjustmentBase(startingNode).Float64()
	if !exact {
		t.Error("failed to get exact result")
	}
	if adjustment != 2 {
		t.Error("got wrong long-range adjustment")
	}
}

// TestClampTargetAdjustment probes the clampTargetAdjustment function.
func TestClampTargetAdjustment(t *testing.T) {
	// Check that the MaxAdjustmentUp and MaxAdjustmentDown constants match the
	// test's expectations.
	if types.MaxAdjustmentUp.Cmp(big.NewRat(10001, 10000)) != 0 {
		t.Fatal("MaxAdjustmentUp changed - test now invalid")
	}
	if types.MaxAdjustmentDown.Cmp(big.NewRat(9999, 10000)) != 0 {
		t.Fatal("MaxAdjustmentDown changed - test now invalid")
	}

	// Check high and low clamping.
	initial := big.NewRat(2, 1)
	clamped := clampTargetAdjustment(initial)
	if clamped.Cmp(big.NewRat(10001, 10000)) != 0 {
		t.Error("clamp not applied to large target adjustment")
	}
	initial = big.NewRat(1, 2)
	clamped = clampTargetAdjustment(initial)
	if clamped.Cmp(big.NewRat(9999, 10000)) != 0 {
		t.Error("clamp not applied to small target adjustment")
	}

	// Check middle clamping (or lack thereof).
	initial = big.NewRat(10002, 10001)
	clamped = clampTargetAdjustment(initial)
	if clamped.Cmp(initial) != 0 {
		t.Error("clamp applied to safe target adjustment")
	}
	initial = big.NewRat(99999, 100000)
	clamped = clampTargetAdjustment(initial)
	if clamped.Cmp(initial) != 0 {
		t.Error("clamp applied to safe target adjustment")
	}
}

// TestSetChildTarget probes the setChildTarget method of the block node type.
func TestSetChildTarget(t *testing.T) {
	cst, err := createConsensusSetTester(t.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer cst.closeCst()

	// Create a genesis node and a child that took 2x as long as expected.
	genesisNode := &processedBlock{
		Block: types.Block{Timestamp: 10000},
	}
	genesisNode.ChildTarget[0] = 64
	cst.cs.db.addBlockMap(genesisNode)
	doubleTimeNode := &processedBlock{
		Block: types.Block{Timestamp: types.Timestamp(10000 + types.BlockFrequency*2)},
	}
	doubleTimeNode.Parent = genesisNode.Block.ID()
	cst.cs.db.addBlockMap(doubleTimeNode)

	// Check the resulting childTarget of the new node and see that the clamp
	// was applied.
	cst.cs.setChildTarget(doubleTimeNode)
	if doubleTimeNode.ChildTarget.Cmp(genesisNode.ChildTarget) <= 0 {
		t.Error("double time node target did not increase")
	}
	fullAdjustment := genesisNode.ChildTarget.MulDifficulty(big.NewRat(1, 2))
	if doubleTimeNode.ChildTarget.Cmp(fullAdjustment) >= 0 {
		t.Error("clamp was not applied when adjusting target")
	}
}

// TestNewChild probes the newChild method of the block node type.
func TestNewChild(t *testing.T) {
	cst, err := createConsensusSetTester(t.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer cst.closeCst()

	parent := &processedBlock{
		Height: 12,
	}
	parent.Depth[0] = 45
	parent.Block.Timestamp = 100
	parent.ChildTarget[0] = 90

	cst.cs.db.addBlockMap(parent)

	child := cst.cs.newChild(parent, types.Block{Timestamp: types.Timestamp(100 + types.BlockFrequency)})
	if child.Parent != parent.Block.ID() {
		t.Error("parent-child relationship incorrect")
	}
	if child.Height != 13 {
		t.Error("child height set incorrectly")
	}
	var expectedDepth types.Target
	expectedDepth[0] = 30
	if child.Depth.Cmp(expectedDepth) != 0 {
		t.Error("child depth did not adjust correctly")
	}
	if child.ChildTarget.Cmp(parent.ChildTarget) != 0 {
		t.Error("child childTarget not adjusted correctly")
	}
}
*/
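The expected values in these tests come from two pieces of arithmetic. TestIntegrationMinimumValidChildTimestamp relies on the minimum valid child timestamp being the median of the previous types.MedianTimestampWindow (11) block timestamps, with the genesis timestamp repeated when the chain is shorter than the window; TestChildDepth and TestNewChild rely on depths combining as if the difficulty of a target (proportional to the inverse of its value) were summed with the difficulty of the child target. The standalone sketch below is not code from the consensus package; it is a minimal reproduction of that arithmetic, under the padding and inverse-difficulty assumptions just stated, using plain ints and float64s in place of types.Timestamp and the 256-bit types.Target.

package main

import (
	"fmt"
	"sort"
)

// medianOfWindow mirrors the rule exercised by
// TestIntegrationMinimumValidChildTimestamp: take the median of the last
// `window` timestamps, repeating the oldest (genesis) timestamp once the
// chain runs out. Plain-int stand-in, not the consensus implementation.
func medianOfWindow(timestamps []int, window int) int {
	w := make([]int, 0, window)
	for i := 0; i < window; i++ {
		idx := len(timestamps) - 1 - i
		if idx < 0 {
			idx = 0 // pad with the genesis timestamp
		}
		w = append(w, timestamps[idx])
	}
	sort.Ints(w)
	return w[len(w)/2]
}

// combinedDepth mirrors the arithmetic behind TestChildDepth and TestNewChild:
// treating a target's difficulty as proportional to 1/target, the child depth
// is the target whose difficulty is the sum of the parent depth's difficulty
// and the child target's difficulty (float64 stand-in for 256-bit targets).
func combinedDepth(parentDepth, childTarget float64) float64 {
	return 1 / (1/parentDepth + 1/childTarget)
}

func main() {
	// Timestamps of the test chain: genesis (0) plus offsets 1,3,2,5,4,6,7,8,9,10.
	chain := []int{0, 1, 3, 2, 5, 4, 6, 7, 8, 9, 10}

	fmt.Println(medianOfWindow(chain[:6], 11))  // 0 -> genesisTime   (blockIDs[5])
	fmt.Println(medianOfWindow(chain[:7], 11))  // 1 -> genesisTime+1 (blockIDs[6])
	fmt.Println(medianOfWindow(chain[:11], 11)) // 5 -> genesisTime+5 (blockIDs[10])

	fmt.Println(combinedDepth(64, 64)) // 32, as in TestChildDepth
	fmt.Println(combinedDepth(24, 48)) // 16, as in TestChildDepth
	fmt.Println(combinedDepth(45, 90)) // 30, as in TestNewChild
}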