github.com/muhammedhassanm/blockchain@v0.0.0-20200120143007-697261defd4d/sawtooth-core-master/validator/tests/test_journal/tests.py

     1  # Copyright 2016 Intel Corporation
     2  #
     3  # Licensed under the Apache License, Version 2.0 (the "License");
     4  # you may not use this file except in compliance with the License.
     5  # You may obtain a copy of the License at
     6  #
     7  #     http://www.apache.org/licenses/LICENSE-2.0
     8  #
     9  # Unless required by applicable law or agreed to in writing, software
    10  # distributed under the License is distributed on an "AS IS" BASIS,
    11  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    12  # See the License for the specific language governing permissions and
    13  # limitations under the License.
    14  # ------------------------------------------------------------------------------
    15  
    16  # pylint: disable=too-many-lines
    17  # pylint: disable=pointless-statement
    18  # pylint: disable=protected-access
    19  # pylint: disable=unbalanced-tuple-unpacking
    20  # pylint: disable=arguments-differ
    21  
    22  import logging
    23  import os
    24  import shutil
    25  import tempfile
    26  from threading import RLock
    27  import unittest
    28  from unittest.mock import patch
    29  
    30  from sawtooth_validator.database.dict_database import DictDatabase
    31  from sawtooth_validator.database.native_lmdb import NativeLmdbDatabase
    32  
    33  from sawtooth_validator.journal.block_cache import BlockCache
    34  from sawtooth_validator.journal.block_wrapper import BlockStatus
    35  from sawtooth_validator.journal.block_wrapper import BlockWrapper
    36  from sawtooth_validator.journal.block_wrapper import NULL_BLOCK_IDENTIFIER
    37  
    38  from sawtooth_validator.journal.block_store import BlockStore
    39  from sawtooth_validator.journal.block_validator import BlockValidator
    40  from sawtooth_validator.journal.block_validator import BlockValidationFailure
    41  from sawtooth_validator.journal.chain import ChainController
    42  from sawtooth_validator.journal.chain_id_manager import ChainIdManager
    43  from sawtooth_validator.journal.chain_commit_state import ChainCommitState
    44  from sawtooth_validator.journal.chain_commit_state import DuplicateTransaction
    45  from sawtooth_validator.journal.chain_commit_state import DuplicateBatch
    46  from sawtooth_validator.journal.chain_commit_state import MissingDependency
    47  from sawtooth_validator.journal.publisher import BlockPublisher
    48  from sawtooth_validator.journal.timed_cache import TimedCache
    49  from sawtooth_validator.journal.event_extractors \
    50      import BlockEventExtractor
    51  from sawtooth_validator.journal.event_extractors \
    52      import ReceiptEventExtractor
    53  from sawtooth_validator.journal.batch_injector import \
    54      DefaultBatchInjectorFactory
    55  
    56  from sawtooth_validator.server.events.subscription import EventSubscription
    57  from sawtooth_validator.server.events.subscription import EventFilterFactory
    58  
    59  from sawtooth_validator.protobuf.transaction_pb2 import Transaction
    60  from sawtooth_validator.protobuf.transaction_pb2 import TransactionHeader
    61  from sawtooth_validator.protobuf.batch_pb2 import Batch
    62  from sawtooth_validator.protobuf.block_pb2 import Block
    63  from sawtooth_validator.protobuf.block_pb2 import BlockHeader
    64  from sawtooth_validator.protobuf.transaction_receipt_pb2 import \
    65      TransactionReceipt
    66  from sawtooth_validator.protobuf.transaction_receipt_pb2 import StateChange
    67  from sawtooth_validator.protobuf.transaction_receipt_pb2 import StateChangeList
    68  from sawtooth_validator.protobuf.events_pb2 import Event
    69  from sawtooth_validator.protobuf.events_pb2 import EventFilter
    70  
    71  from sawtooth_validator.state.merkle import MerkleDatabase
    72  from sawtooth_validator.state.settings_view import SettingsViewFactory
    73  from sawtooth_validator.state.settings_cache import SettingsCache
    74  
    75  from test_journal.block_tree_manager import BlockTreeManager
    76  
    77  from test_journal.mock import MockBlockSender
    78  from test_journal.mock import MockBatchSender
    79  from test_journal.mock import MockNetwork
    80  from test_journal.mock import MockStateViewFactory, CreateSetting
    81  from test_journal.mock import MockTransactionExecutor
    82  from test_journal.mock import MockPermissionVerifier
    83  from test_journal.mock import SynchronousExecutor
    84  from test_journal.mock import MockBatchInjectorFactory
    85  from test_journal.utils import wait_until
    86  
    87  from test_journal import mock_consensus
    88  
    89  
    90  LOGGER = logging.getLogger(__name__)
    91  
    92  
    93  class TestBlockCache(unittest.TestCase):
    94      def test_load_from_block_store(self):
    95          """ Test that misses will load from the block store.
    96          """
    97          bs = {}
    98          block1 = Block(
    99              header=BlockHeader(previous_block_id="000").SerializeToString(),
   100              header_signature="test")
   101          bs["test"] = BlockWrapper(block1)
   102          block2 = Block(
   103              header=BlockHeader(previous_block_id="000").SerializeToString(),
   104              header_signature="test2")
   105          blkw2 = BlockWrapper(block2)
   106          bs["test2"] = blkw2
   107          bc = BlockCache(bs)
   108  
   109          self.assertTrue("test" in bc)
   110          self.assertTrue(bc["test2"] == blkw2)
   111  
   112          with self.assertRaises(KeyError):
   113              bc["test-missing"]
   114  
   115  
   116  class TestBlockPublisher(unittest.TestCase):
   117      '''
   118      The block publisher has three main functions, and in these tests
   119      those functions are given the following wrappers for convenience:
   120          * on_batch_received -> receive_batches
   121          * on_chain_updated -> update_chain_head
   122          * on_check_publish_block -> publish_block
   123  
    124      After a block is published, the publish_block wrapper retrieves the
    125      block from the mock block sender and stores it as result_block. This
    126      block is what is checked by the test assertions.
   127  
   128      The basic pattern for the publisher tests (with variations) is:
   129          0) make a list of batches (usually in setUp);
   130          1) receive the batches;
   131          2) publish a block;
    132          3) verify the block (e.g., checking that it contains the correct
    133             batches, or that no block was published at all).
   134  
   135      The publisher chain head might be updated several times in a test.
   136      '''
   137  
   138      @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
   139      def setUp(self, mock_batch_injector_factory):
   140  
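                 # the patched injector factory is configured to create no
                 # injectors, so published blocks contain only the batches the
                 # tests themselves create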
   141          mock_batch_injector_factory.create_injectors.return_value = []
   142  
   143          self.block_tree_manager = BlockTreeManager()
   144          self.block_sender = MockBlockSender()
   145          self.batch_sender = MockBatchSender()
   146          self.state_view_factory = MockStateViewFactory({})
   147          self.permission_verifier = MockPermissionVerifier()
   148  
   149          self.publisher = BlockPublisher(
   150              transaction_executor=MockTransactionExecutor(),
   151              block_cache=self.block_tree_manager.block_cache,
   152              state_view_factory=self.state_view_factory,
   153              settings_cache=SettingsCache(
   154                  SettingsViewFactory(
   155                      self.block_tree_manager.state_view_factory),
   156              ),
   157              block_sender=self.block_sender,
   158              batch_sender=self.batch_sender,
   159              chain_head=self.block_tree_manager.chain_head,
   160              identity_signer=self.block_tree_manager.identity_signer,
   161              data_dir=None,
   162              config_dir=None,
   163              check_publish_block_frequency=0.1,
   164              batch_observers=[],
   165              permission_verifier=self.permission_verifier,
   166              batch_injector_factory=mock_batch_injector_factory)
   167  
   168          self.init_chain_head = self.block_tree_manager.chain_head
   169  
   170          self.result_block = None
   171  
   172          # A list of batches is created at the beginning of each test.
   173          # The test assertions and the publisher function wrappers
   174          # take these batches as a default argument.
   175          self.batch_count = 8
   176          self.batches = self.make_batches()
   177  
   178      def test_publish(self):
   179          '''
   180          Publish a block with several batches
   181          '''
   182          self.receive_batches()
   183  
   184          self.publish_block()
   185  
   186          self.verify_block()
   187  
   188      def test_reject_duplicate_batches_from_receive(self):
   189          '''
   190          Test that duplicate batches from on_batch_received are rejected
   191          '''
   192          for _ in range(5):
   193              self.receive_batches()
   194  
   195          self.publish_block()
   196  
   197          self.verify_block()
   198  
   199      def test_reject_duplicate_batches_from_store(self):
   200          '''
   201          Test that duplicate batches from block store are rejected
   202          '''
   203          self.update_chain_head(None)
   204  
   205          self.update_chain_head(
   206              head=self.init_chain_head,
   207              uncommitted=self.batches)
   208  
   209          self.receive_batches()
   210  
   211          self.publish_block()
   212  
   213          self.verify_block()
   214  
   215      def test_no_chain_head(self):
   216          '''
   217          Test that nothing gets published with a null chain head,
   218          then test that publishing resumes after updating
   219          '''
   220          self.update_chain_head(None)
   221  
   222          self.receive_batches()
   223  
   224          # try to publish block (failing)
   225          self.publish_block()
   226  
   227          self.assert_no_block_published()
   228  
   229          # reset chain head several times,
   230          # making sure batches remain queued
   231          for _ in range(3):
   232              self.update_chain_head(None)
   233              self.update_chain_head(self.init_chain_head)
   234  
   235          # try to publish block (succeeding)
   236          self.publish_block()
   237  
   238          self.verify_block()
   239  
   240      def test_committed_batches(self):
   241          '''
   242          Test that batches committed upon updating the chain head
   243          are not included in the next block.
   244          '''
   245          self.update_chain_head(None)
   246  
   247          self.update_chain_head(
   248              head=self.init_chain_head,
   249              committed=self.batches)
   250  
   251          new_batches = self.make_batches(batch_count=12)
   252  
   253          self.receive_batches(new_batches)
   254  
   255          self.publish_block()
   256  
   257          self.verify_block(new_batches)
   258  
   259      def test_uncommitted_batches(self):
   260          '''
   261          Test that batches uncommitted upon updating the chain head
   262          are included in the next block.
   263          '''
   264          self.update_chain_head(None)
   265  
   266          self.update_chain_head(
   267              head=self.init_chain_head,
   268              uncommitted=self.batches)
   269  
   270          self.publish_block()
   271  
   272          self.verify_block()
   273  
   274      def test_empty_pending_queue(self):
   275          '''
   276          Test that no block is published if the pending queue is empty
   277          '''
   278          # try to publish with no pending queue (failing)
   279          self.publish_block()
   280  
   281          self.assert_no_block_published()
   282  
   283          # receive batches, then try again (succeeding)
   284          self.receive_batches()
   285  
   286          self.publish_block()
   287  
   288          self.verify_block()
   289  
   290      def test_missing_dependencies(self):
   291          '''
   292          Test that no block is published with missing dependencies
   293          '''
   294          self.batches = self.make_batches(
   295              missing_deps=True)
   296  
   297          self.receive_batches()
   298  
   299          self.publish_block()
   300  
   301          self.assert_no_block_published()
   302  
   303      @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
   304      def test_batches_rejected_by_scheduler(self, mock_batch_injector_factory):
   305          '''
   306          Test that no block is published with
   307          batches rejected by the scheduler
   308          '''
   309  
   310          mock_batch_injector_factory.create_injectors.return_value = []
   311          self.publisher = BlockPublisher(
   312              transaction_executor=MockTransactionExecutor(
   313                  batch_execution_result=False),
   314              block_cache=self.block_tree_manager.block_cache,
   315              state_view_factory=self.state_view_factory,
   316              settings_cache=SettingsCache(
   317                  SettingsViewFactory(
   318                      self.block_tree_manager.state_view_factory),
   319              ),
   320              block_sender=self.block_sender,
   321              batch_sender=self.batch_sender,
   322              chain_head=self.block_tree_manager.chain_head,
   323              identity_signer=self.block_tree_manager.identity_signer,
   324              data_dir=None,
   325              config_dir=None,
   326              check_publish_block_frequency=0.1,
   327              batch_observers=[],
   328              permission_verifier=self.permission_verifier,
   329              batch_injector_factory=mock_batch_injector_factory)
   330  
   331          self.receive_batches()
   332  
   333          self.publish_block()
   334  
   335          self.assert_no_block_published()
   336  
   337      @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
   338      def test_max_block_size(self, mock_batch_injector_factory):
   339          '''
   340          Test block publisher obeys the block size limits
   341          '''
   342  
   343          mock_batch_injector_factory.create_injectors.return_value = []
   344  
   345          # Create a publisher that has a state view
   346          # with a batch limit
   347          addr, value = CreateSetting(
   348              'sawtooth.publisher.max_batches_per_block', 1)
   349          self.state_view_factory = MockStateViewFactory(
   350              {addr: value})
   351  
   352          self.publisher = BlockPublisher(
   353              transaction_executor=MockTransactionExecutor(),
   354              block_cache=self.block_tree_manager.block_cache,
   355              state_view_factory=self.state_view_factory,
   356              settings_cache=SettingsCache(
   357                  SettingsViewFactory(
   358                      self.state_view_factory),
   359              ),
   360              block_sender=self.block_sender,
   361              batch_sender=self.batch_sender,
   362              chain_head=self.block_tree_manager.chain_head,
   363              identity_signer=self.block_tree_manager.identity_signer,
   364              data_dir=None,
   365              config_dir=None,
   366              check_publish_block_frequency=0.1,
   367              batch_observers=[],
   368              permission_verifier=self.permission_verifier,
   369              batch_injector_factory=mock_batch_injector_factory)
   370  
   371          self.assert_no_block_published()
   372  
    373          # queue up all of the batches
   374          self.receive_batches()
   375  
    376          # with the one-batch limit, each publish should yield a block
    377          # containing exactly one batch
   377          for i in range(self.batch_count):
   378              self.publish_block()
   379              self.assert_block_published()
   380              self.update_chain_head(BlockWrapper(self.result_block))
   381              self.verify_block([self.batches[i]])
   382  
   383      def test_duplicate_transactions(self):
   384          '''
    385          Test that batches containing duplicate transactions are discarded.
   386          '''
    387          # publish a block containing a single batch
   388          self.batches = self.batches[1:2]
   389          self.receive_batches()
   390          self.publish_block()
   391          self.assert_block_published()
   392          self.update_chain_head(BlockWrapper(self.result_block))
   393          self.verify_block()
   394  
   395          # build a new set of batches with the same transactions in them
   396          self.batches = self.make_batches_with_duplicate_txn()
   397          self.receive_batches()
   398          self.publish_block()
    399          # the batch with the duplicate transaction is dropped, so no
    400          # block is published
              self.assert_no_block_published()
   401  
   402      def test_batch_injection_start_block(self):
   403          '''
   404          Test that the batch is injected at the beginning of the block.
   405          '''
   406  
   407          injected_batch = self.make_batch()
   408  
   409          self.publisher = BlockPublisher(
   410              transaction_executor=MockTransactionExecutor(),
   411              block_cache=self.block_tree_manager.block_cache,
   412              state_view_factory=self.state_view_factory,
   413              settings_cache=SettingsCache(
   414                  SettingsViewFactory(
   415                      self.block_tree_manager.state_view_factory),
   416              ),
   417              block_sender=self.block_sender,
   418              batch_sender=self.batch_sender,
   419              chain_head=self.block_tree_manager.chain_head,
   420              identity_signer=self.block_tree_manager.identity_signer,
   421              data_dir=None,
   422              config_dir=None,
   423              permission_verifier=self.permission_verifier,
   424              check_publish_block_frequency=0.1,
   425              batch_observers=[],
   426              batch_injector_factory=MockBatchInjectorFactory(injected_batch))
   427  
   428          self.receive_batches()
   429  
   430          self.publish_block()
   431  
   432          self.assert_batch_in_block(injected_batch)
   433  
   434      @unittest.mock.patch('test_journal.mock.MockBatchInjectorFactory')
   435      def test_validation_rules_reject_batches(self,
   436                                               mock_batch_injector_factory):
   437          """Test that a batch is not added to the block if it will violate the
   438          block validation rules.
   439  
   440          It does the following:
   441  
   442          - Sets the block_validation_rules to limit the number of 'test'
   443            transactions to 1
   444          - creates two batches, limited to 1 transaction each, and receives
   445            them
   446          - verifies that only the first batch was committed to the block
   447          """
   448          addr, value = CreateSetting(
   449              'sawtooth.validator.block_validation_rules', 'NofX:1,test')
   450          self.state_view_factory = MockStateViewFactory(
   451              {addr: value})
   452  
   453          mock_batch_injector_factory.create_injectors.return_value = []
   454  
   455          batch1 = self.make_batch(txn_count=1)
   456          batch2 = self.make_batch(txn_count=1)
   457  
   458          self.publisher = BlockPublisher(
   459              transaction_executor=MockTransactionExecutor(),
   460              block_cache=self.block_tree_manager.block_cache,
   461              state_view_factory=self.state_view_factory,
   462              settings_cache=SettingsCache(
   463                  SettingsViewFactory(
   464                      self.state_view_factory),
   465              ),
   466              block_sender=self.block_sender,
   467              batch_sender=self.batch_sender,
   468              chain_head=self.block_tree_manager.chain_head,
   469              identity_signer=self.block_tree_manager.identity_signer,
   470              data_dir=None,
   471              config_dir=None,
   472              check_publish_block_frequency=0.1,
   473              batch_observers=[],
   474              permission_verifier=self.permission_verifier,
   475              batch_injector_factory=mock_batch_injector_factory)
   476  
   477          self.receive_batches(batches=[batch1, batch2])
   478  
   479          self.publish_block()
   480  
   481          self.assert_block_batch_count(1)
   482          self.assert_batch_in_block(batch1)
   483  
   484      # assertions
   485      def assert_block_published(self):
   486          self.assertIsNotNone(
   487              self.result_block,
   488              'Block should have been published')
   489  
   490      def assert_no_block_published(self):
   491          self.assertIsNone(
   492              self.result_block,
   493              'Block should not have been published')
   494  
   495      def assert_batch_in_block(self, batch):
   496          self.assertIn(
   497              batch,
   498              tuple(self.result_block.batches),
   499              'Batch not in block')
   500  
   501      def assert_batches_in_block(self, batches=None):
   502          if batches is None:
   503              batches = self.batches
   504  
   505          for batch in batches:
   506              self.assert_batch_in_block(batch)
   507  
   508      def assert_block_batch_count(self, batch_count=None):
   509          if batch_count is None:
   510              batch_count = self.batch_count
   511  
   512          self.assertEqual(
   513              len(self.result_block.batches),
   514              batch_count,
   515              'Wrong batch count in block')
   516  
   517      def verify_block(self, batches=None):
   518          if batches is None:
   519              batches = self.batches
   520  
   521          batch_count = None if batches is None else len(batches)
   522  
   523          self.assert_block_published()
   524          self.assert_batches_in_block(batches)
   525          self.assert_block_batch_count(batch_count)
   526  
   527          self.result_block = None
   528  
   529      # publisher functions
   530  
   531      def receive_batch(self, batch):
   532          self.publisher.on_batch_received(batch)
   533  
   534      def receive_batches(self, batches=None):
   535          if batches is None:
   536              batches = self.batches
   537  
   538          for batch in batches:
   539              self.receive_batch(batch)
   540  
   541      def publish_block(self):
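                 # trigger a publish attempt, then capture whatever the mock
                 # block sender received (None if no block was built) and clear
                 # it for the next attempt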
   542          self.publisher.on_check_publish_block()
   543          self.result_block = self.block_sender.new_block
   544          self.block_sender.new_block = None
   545  
   546      def update_chain_head(self, head, committed=None, uncommitted=None):
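                 # a real head is recorded in the block store before the
                 # publisher is notified; a None head simulates having no chain
                 # head, which stops publishing (see test_no_chain_head)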
   547          if head:
   548              self.block_tree_manager.block_store.update_chain([head])
   549          self.publisher.on_chain_updated(
   550              chain_head=head,
   551              committed_batches=committed,
   552              uncommitted_batches=uncommitted)
   553  
   554      # batches
   555      def make_batch(self, missing_deps=False, txn_count=2):
   556          return self.block_tree_manager.generate_batch(
   557              txn_count=txn_count,
   558              missing_deps=missing_deps)
   559  
   560      def make_batches(self, batch_count=None, missing_deps=False):
   561          if batch_count is None:
   562              batch_count = self.batch_count
   563  
   564          return [self.make_batch(missing_deps=missing_deps)
   565                  for _ in range(batch_count)]
   566  
   567      def make_batches_with_duplicate_txn(self):
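                 # reuse a transaction from the existing batches so the new
                 # batch contains a duplicate of an already-seen transaction,
                 # plus one fresh transaction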
   568          txns = [self.batches[0].transactions[0],
   569                  self.block_tree_manager.generate_transaction("nonce")]
   570          return [self.block_tree_manager.generate_batch(txns=txns)]
   571  
   572  
   573  class TestBlockValidator(unittest.TestCase):
   574      def setUp(self):
   575          self.state_view_factory = MockStateViewFactory()
   576  
   577          self.block_tree_manager = BlockTreeManager()
   578          self.root = self.block_tree_manager.chain_head
   579  
   580          self.block_validation_handler = self.BlockValidationHandler()
   581          self.permission_verifier = MockPermissionVerifier()
   582  
   583      # fork based tests
   584      def test_fork_simple(self):
   585          """
   586          Test a simple case of a new block extending the current root.
   587          """
   588  
   589          new_block = self.block_tree_manager.generate_block(
   590              previous_block=self.root,
   591              add_to_cache=True)
   592  
   593          self.validate_block(new_block)
   594  
   595          self.assert_valid_block(new_block)
   596          self.assert_new_block_committed()
   597  
   598      def test_good_fork_lower(self):
   599          """
    600          Test case of a new block extending a valid chain that is not as
    601          long as the current chain.
   602          """
   603          # create a new valid chain 5 long from the current root
   604          _, head = self.generate_chain_with_head(
   605              self.root, 5, {'add_to_store': True})
   606  
   607          self.block_tree_manager.set_chain_head(head)
   608  
   609          # generate candidate chain 3 long from the same root
   610          _, new_head = self.generate_chain_with_head(
   611              self.root, 3, {'add_to_cache': True})
   612  
   613          self.validate_block(new_head)
   614  
   615          self.assert_valid_block(new_head)
   616          self.assert_new_block_not_committed()
   617  
   618      def test_good_fork_higher(self):
   619          """
    620          Test case of a new block extending a valid chain that is longer
    621          than the current chain (similar to test_good_fork_lower, but it
    622          exercises a different code path when finding the common root).
   623          """
   624          # create a new valid chain 5 long from the current root
   625          _, head = self.generate_chain_with_head(
   626              self.root, 5, {'add_to_store': True})
   627  
   628          self.block_tree_manager.set_chain_head(head)
   629  
   630          # generate candidate chain 8 long from the same root
   631          _, new_head = self.generate_chain_with_head(
   632              head, 8, {'add_to_cache': True})
   633  
   634          self.validate_block(new_head)
   635  
   636          self.assert_valid_block(new_head)
   637          self.assert_new_block_committed()
   638  
   639      def test_fork_different_genesis(self):
   640          """"
   641          Test the case where new block is from a different genesis
   642          """
   643          # create a new valid chain 5 long from the current root
   644          _, head = self.generate_chain_with_head(
   645              self.root, 5, {'add_to_store': True})
   646  
   647          self.block_tree_manager.set_chain_head(head)
   648  
   649          # generate candidate chain 5 long from its own genesis
   650          _, new_head = self.generate_chain_with_head(
   651              None, 5, {'add_to_cache': True})
   652  
   653          self.validate_block(new_head)
   654  
   655          self.assert_invalid_block(new_head)
   656          self.assert_new_block_not_committed()
   657  
   658      def test_fork_missing_predecessor(self):
   659          """"
   660          Test the case where new block is missing the a predecessor
   661          """
   662          # generate candidate chain 5 long off the current head.
   663          chain, head = self.generate_chain_with_head(
   664              self.root, 5, {'add_to_cache': True})
   665  
   666          # remove one of the new blocks
   667          del self.block_tree_manager.block_cache[chain[1].identifier]
   668  
   669          self.validate_block(head)
   670  
   671          self.assert_unknown_block(head)
   672          self.assert_new_block_not_committed()
   673  
   674      def test_fork_invalid_predecessor(self):
   675          """"
   676          Test the case where new block has an invalid predecessor
   677          """
   678          # generate candidate chain 5 long off the current head.
   679          chain, head = self.generate_chain_with_head(
   680              self.root, 5, {'add_to_cache': True})
   681  
   682          # Mark a predecessor as invalid
   683          chain[1].status = BlockStatus.Invalid
   684  
   685          self.validate_block(head)
   686  
   687          self.assert_invalid_block(head)
   688          self.assert_new_block_not_committed()
   689  
   690      def test_block_bad_consensus(self):
   691          """
    692          Test the case where the new block has bad consensus
   693          """
   694          _, head = self.generate_chain_with_head(
   695              self.root, 5, {'add_to_store': True}, False)
   696  
   697          new_block = self.block_tree_manager.generate_block(
   698              previous_block=head,
   699              add_to_cache=True,
   700              invalid_consensus=True)
   701  
   702          self.validate_block(new_block)
   703  
   704          self.assert_invalid_block(new_block)
   705          self.assert_new_block_not_committed()
   706  
   707      def test_block_bad_batch(self):
   708          """
   709          Test the case where the new block has a bad batch
   710          """
   711          _, head = self.generate_chain_with_head(
   712              self.root, 5, {'add_to_store': True}, False)
   713  
   714          new_block = self.block_tree_manager.generate_block(
   715              previous_block=head,
   716              add_to_cache=True,
   717              invalid_batch=True)
   718  
   719          self.validate_block(new_block)
   720  
   721          self.assert_invalid_block(new_block)
   722          self.assert_new_block_not_committed()
   723  
   724      def test_block_missing_batch_dependency(self):
   725          """
   726          Test the case where the new block has a batch that is missing a
   727          dependency.
   728          """
   729          _, head = self.generate_chain_with_head(
   730              self.root, 5, {'add_to_store': True}, False)
   731  
   732          txn = self.block_tree_manager.generate_transaction(deps=["missing"])
   733          batch = self.block_tree_manager.generate_batch(txns=[txn])
   734          new_block = self.block_tree_manager.generate_block(
   735              previous_block=head,
   736              add_to_cache=True,
   737              invalid_batch=True,
   738              batches=[batch])
   739  
   740          self.validate_block(new_block)
   741  
   742          self.assert_invalid_block(new_block)
   743          self.assert_new_block_not_committed()
   744  
   745      def test_block_duplicate_batch(self):
   746          """
    747          Test the case where the new block has a batch that was already
    748          committed to the chain.
   749          """
   750          _, head = self.generate_chain_with_head(
   751              self.root, 5, {'add_to_store': True}, False)
   752  
   753          batch = self.block_tree_manager.generate_batch()
   754          new_block = self.block_tree_manager.generate_block(
   755              previous_block=head,
   756              add_to_cache=True,
   757              invalid_batch=True,
   758              batches=[batch])
   759          self.validate_block(new_block)
   760  
   761          new_block = self.block_tree_manager.generate_block(
   762              previous_block=head,
   763              add_to_cache=True,
   764              invalid_batch=True,
   765              batches=[batch])
   766          self.validate_block(new_block)
   767  
   768          self.assert_invalid_block(new_block)
   769          self.assert_new_block_not_committed()
   770  
   771      def test_block_duplicate_batch_in_block(self):
   772          """
    773          Test the case where the new block contains duplicate batches.
   774          """
   775          _, head = self.generate_chain_with_head(
   776              self.root, 5, {'add_to_store': True}, False)
   777  
   778          batch = self.block_tree_manager.generate_batch()
   779  
   780          new_block = self.block_tree_manager.generate_block(
   781              previous_block=head,
   782              add_to_cache=True,
   783              invalid_batch=True,
   784              batches=[batch, batch])
   785          self.validate_block(new_block)
   786  
   787          self.assert_invalid_block(new_block)
   788          self.assert_new_block_not_committed()
   789  
   790      def test_block_duplicate_transaction(self):
   791          """
   792          Test the case where the new block has a transaction that is already
   793          committed.
   794          """
   795          _, head = self.generate_chain_with_head(
   796              self.root, 5, {'add_to_store': True}, False)
   797  
   798          txn = self.block_tree_manager.generate_transaction()
   799          batch = self.block_tree_manager.generate_batch(txns=[txn])
   800          new_block = self.block_tree_manager.generate_block(
   801              previous_block=head,
   802              add_to_cache=True,
   803              invalid_batch=True,
   804              batches=[batch])
   805          self.validate_block(new_block)
   806  
   807          txn2 = self.block_tree_manager.generate_transaction()
   808          batch = self.block_tree_manager.generate_batch(txns=[txn, txn2])
   809          new_block = self.block_tree_manager.generate_block(
   810              previous_block=new_block,
   811              add_to_cache=True,
   812              invalid_batch=True,
   813              batches=[batch])
   814          self.validate_block(new_block)
   815  
   816          self.assert_invalid_block(new_block)
   817          self.assert_new_block_not_committed()
   818  
   819      def test_block_duplicate_transaction_in_batch(self):
   820          """
   821          Test the case where the new block has a batch that contains duplicate
   822          transactions.
   823          """
   824          _, head = self.generate_chain_with_head(
   825              self.root, 5, {'add_to_store': True}, False)
   826  
   827          txn = self.block_tree_manager.generate_transaction()
   828          batch = self.block_tree_manager.generate_batch(txns=[txn, txn])
   829          new_block = self.block_tree_manager.generate_block(
   830              previous_block=head,
   831              add_to_cache=True,
   832              invalid_batch=True,
   833              batches=[batch])
   834          self.validate_block(new_block)
   835  
   836          self.assert_invalid_block(new_block)
   837          self.assert_new_block_not_committed()
   838  
   839      # assertions
   840  
   841      def assert_valid_block(self, block):
   842          self.assertEqual(
   843              block.status, BlockStatus.Valid,
   844              "Block should be valid")
   845  
   846      def assert_invalid_block(self, block):
   847          self.assertEqual(
   848              block.status, BlockStatus.Invalid,
   849              "Block should be invalid")
   850  
   851      def assert_unknown_block(self, block):
   852          self.assertEqual(
   853              block.status, BlockStatus.Unknown,
   854              "Block should be unknown")
   855  
   856      def assert_new_block_committed(self):
   857          self.assert_handler_has_result()
   858          self.assertTrue(
   859              self.block_validation_handler.commit_new_block,
   860              "New block not committed, should be")
   861  
   862      def assert_new_block_not_committed(self):
   863          self.assert_handler_has_result()
   864          self.assertFalse(
   865              self.block_validation_handler.commit_new_block,
   866              "New block committed, shouldn't be")
   867  
   868      def assert_handler_has_result(self):
   869          msg = "Validation handler doesn't have result"
   870          self.assertTrue(self.block_validation_handler.has_result(), msg)
   871  
   872      # block validation
   873  
   874      def validate_block(self, block):
   875          validator = self.create_block_validator()
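                 # bypass the consensus module lookup so validation always uses
                 # the in-test mock_consensus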
   876          validator._load_consensus = lambda block: mock_consensus
   877          validator.process_block_verification(
   878              block,
   879              self.block_validation_handler.on_block_validated)
   880  
   881      def create_block_validator(self):
   882          return BlockValidator(
   883              state_view_factory=self.state_view_factory,
   884              block_cache=self.block_tree_manager.block_cache,
   885              transaction_executor=MockTransactionExecutor(
   886                  batch_execution_result=None),
   887              identity_signer=self.block_tree_manager.identity_signer,
   888              data_dir=None,
   889              config_dir=None,
   890              permission_verifier=self.permission_verifier)
   891  
   892      class BlockValidationHandler(object):
   893          def __init__(self):
   894              self.commit_new_block = None
   895              self.result = None
   896  
   897          def on_block_validated(self, commit_new_block, result):
   898              self.commit_new_block = commit_new_block
   899              self.result = result
   900  
   901          def has_result(self):
   902              return not (self.result is None or self.commit_new_block is None)
   903  
   904      # block tree manager interface
   905  
   906      def generate_chain_with_head(self, root_block, num_blocks, params=None,
   907                                   exclude_head=True):
   908          chain = self.block_tree_manager.generate_chain(
   909              root_block, num_blocks, params, exclude_head)
   910  
   911          head = chain[-1]
   912  
   913          return chain, head
   914  
   915  
   916  @unittest.skip(
   917      'These tests no longer take into account underlying FFI threads')
   918  class TestChainController(unittest.TestCase):
   919      def setUp(self):
   920          self.dir = tempfile.mkdtemp()
   921  
   922          self.state_database = NativeLmdbDatabase(
   923              os.path.join(self.dir, 'merkle.lmdb'),
   924              indexes=MerkleDatabase.create_index_configuration(),
   925              _size=120 * 1024 * 1024)
   926  
   927          self.block_tree_manager = BlockTreeManager()
   928          self.gossip = MockNetwork()
   929          self.txn_executor = MockTransactionExecutor()
   930          self._chain_head_lock = RLock()
   931          self.permission_verifier = MockPermissionVerifier()
   932          self.state_view_factory = MockStateViewFactory(
   933              self.block_tree_manager.state_db)
   934          self.transaction_executor = MockTransactionExecutor(
   935              batch_execution_result=None)
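                 # work submitted to the SynchronousExecutor runs only when the
                 # tests call process_all(), so block validation happens at a
                 # deterministic point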
   936          self.executor = SynchronousExecutor()
   937  
   938          self.block_validator = BlockValidator(
   939              state_view_factory=self.state_view_factory,
   940              block_cache=self.block_tree_manager.block_cache,
   941              transaction_executor=self.transaction_executor,
   942              identity_signer=self.block_tree_manager.identity_signer,
   943              data_dir=self.dir,
   944              config_dir=None,
   945              permission_verifier=self.permission_verifier,
   946              thread_pool=self.executor)
   947  
   948          def chain_updated(head, committed_batches=None,
   949                            uncommitted_batches=None):
   950              pass
   951  
   952          self.chain_ctrl = ChainController(
   953              self.block_tree_manager.block_store,
   954              self.block_tree_manager.block_cache,
   955              self.block_validator,
   956              self.state_database,
   957              self._chain_head_lock,
   958              chain_updated,
   959              data_dir=self.dir,
   960              observers=[])
   961  
   962          init_root = self.chain_ctrl.chain_head
   963          self.assert_is_chain_head(init_root)
   964  
   965          # create a chain of length 5 extending the root
   966          _, head = self.generate_chain(init_root, 5)
   967          self.receive_and_process_blocks(head)
   968          self.assert_is_chain_head(head)
   969  
   970          self.init_head = head
   971  
   972      def tearDown(self):
   973          shutil.rmtree(self.dir)
   974  
   975      def test_simple_case(self):
   976          new_block = self.generate_block(self.init_head)
   977          self.receive_and_process_blocks(new_block)
   978          self.assert_is_chain_head(new_block)
   979  
   980      def test_alternate_genesis(self):
   981          '''Tests a fork extending an alternate genesis block
   982          '''
   983          chain, _ = self.generate_chain(None, 5)
   984  
   985          for block in chain:
   986              self.receive_and_process_blocks(block)
   987  
   988          # make sure initial head is still chain head
   989          self.assert_is_chain_head(self.init_head)
   990  
   991      def test_bad_blocks(self):
   992          '''Tests bad blocks extending current chain
   993          '''
   994          # Bad due to consensus
   995          bad_consen = self.generate_block(
   996              previous_block=self.init_head,
   997              invalid_consensus=True)
   998  
   999          # chain head should be the same
  1000          self.receive_and_process_blocks(bad_consen)
  1001          self.assert_is_chain_head(self.init_head)
  1002  
  1003          # Bad due to transaction
  1004          bad_batch = self.generate_block(
  1005              previous_block=self.init_head,
  1006              invalid_batch=True)
  1007  
  1008          # chain head should be the same
  1009          self.receive_and_process_blocks(bad_batch)
  1010          self.assert_is_chain_head(self.init_head)
  1011  
   1012          # Ensure a good block works
  1013          good_block = self.generate_block(
  1014              previous_block=self.init_head)
  1015  
  1016          # chain head should be good_block
  1017          self.receive_and_process_blocks(good_block)
  1018          self.assert_is_chain_head(good_block)
  1019  
  1020      def test_fork_weights(self):
  1021          '''Tests extending blocks of different weights
  1022          '''
  1023          weight_4 = self.generate_block(
  1024              previous_block=self.init_head,
  1025              weight=4)
  1026  
  1027          weight_7 = self.generate_block(
  1028              previous_block=self.init_head,
  1029              weight=7)
  1030  
  1031          weight_8 = self.generate_block(
  1032              previous_block=self.init_head,
  1033              weight=8)
  1034  
  1035          self.receive_and_process_blocks(
  1036              weight_7,
  1037              weight_4,
  1038              weight_8)
  1039  
  1040          self.assert_is_chain_head(weight_8)
  1041  
  1042      def test_fork_lengths(self):
  1043          '''Tests competing forks of different lengths
  1044          '''
  1045          _, head_2 = self.generate_chain(self.init_head, 2)
  1046          _, head_7 = self.generate_chain(self.init_head, 7)
  1047          _, head_5 = self.generate_chain(self.init_head, 5)
  1048  
  1049          self.receive_and_process_blocks(
  1050              head_2,
  1051              head_7,
  1052              head_5)
  1053  
  1054          self.assert_is_chain_head(head_7)
  1055  
  1056      def test_advancing_chain(self):
  1057          '''Tests the chain being advanced between a fork's
  1058          creation and validation
  1059          '''
  1060          _, fork_5 = self.generate_chain(self.init_head, 5)
  1061          _, fork_3 = self.generate_chain(self.init_head, 3)
  1062  
  1063          self.receive_and_process_blocks(fork_3)
  1064          self.assert_is_chain_head(fork_3)
  1065  
  1066          # fork_5 is longer than fork_3, so it should be accepted
  1067          self.receive_and_process_blocks(fork_5)
  1068          self.assert_is_chain_head(fork_5)
  1069  
  1070      def test_fork_missing_block(self):
  1071          '''Tests a fork with a missing block
  1072          '''
  1073          # make new chain
  1074          new_chain, new_head = self.generate_chain(self.init_head, 5)
  1075  
  1076          self.chain_ctrl.on_block_received(new_head)
  1077  
  1078          # delete a block from the new chain
  1079          del self.block_tree_manager.block_cache[new_chain[3].identifier]
  1080  
  1081          self.executor.process_all()
  1082  
  1083          # chain shouldn't advance
  1084          self.assert_is_chain_head(self.init_head)
  1085  
  1086          # try again, chain still shouldn't advance
  1087          self.receive_and_process_blocks(new_head)
  1088  
  1089          self.assert_is_chain_head(self.init_head)
  1090  
  1091      def test_fork_bad_block(self):
  1092          '''Tests a fork with a bad block in the middle
  1093          '''
  1094          # make two chains extending chain
  1095          _, good_head = self.generate_chain(self.init_head, 5)
  1096          bad_chain, bad_head = self.generate_chain(self.init_head, 5)
  1097  
  1098          self.chain_ctrl.on_block_received(bad_head)
  1099          self.chain_ctrl.on_block_received(good_head)
  1100  
  1101          # invalidate block in the middle of bad_chain
  1102          bad_chain[3].status = BlockStatus.Invalid
  1103  
  1104          self.executor.process_all()
  1105  
  1106          # good_chain should be accepted
  1107          self.assert_is_chain_head(good_head)
  1108  
  1109      def test_advancing_fork(self):
  1110          '''Tests a fork advancing before getting validated
  1111          '''
  1112          _, fork_head = self.generate_chain(self.init_head, 5)
  1113  
  1114          self.chain_ctrl.on_block_received(fork_head)
  1115  
  1116          # advance fork before it gets accepted
  1117          _, ext_head = self.generate_chain(fork_head, 3)
  1118  
  1119          self.executor.process_all()
  1120  
  1121          self.assert_is_chain_head(fork_head)
  1122  
  1123          self.receive_and_process_blocks(ext_head)
  1124  
  1125          self.assert_is_chain_head(ext_head)
  1126  
  1127      def test_block_extends_in_validation(self):
  1128          '''Tests a block getting extended while being validated
  1129          '''
  1130          # create candidate block
  1131          candidate = self.block_tree_manager.generate_block(
  1132              previous_block=self.init_head)
  1133  
  1134          self.assert_is_chain_head(self.init_head)
  1135  
  1136          # queue up the candidate block, but don't process
  1137          self.chain_ctrl.on_block_received(candidate)
  1138  
  1139          # create a new block extending the candidate block
  1140          extending_block = self.block_tree_manager.generate_block(
  1141              previous_block=candidate)
  1142  
  1143          self.assert_is_chain_head(self.init_head)
  1144  
  1145          # queue and process the extending block,
  1146          # which should be the new head
  1147          self.receive_and_process_blocks(extending_block)
  1148          self.assert_is_chain_head(extending_block)
  1149  
  1150      def test_multiple_extended_forks(self):
  1151          '''A more involved example of competing forks
  1152  
  1153          Three forks of varying lengths a_0, b_0, and c_0
  1154          are created extending the existing chain, with c_0
  1155          being the longest initially. The chains are extended
  1156          in the following sequence:
  1157  
  1158          1. Extend all forks by 2. The c fork should remain the head.
   1159          2. Extend the forks by lengths such that the b fork is the
  1160             longest. It should be the new head.
  1161          3. Extend all forks by 8. The b fork should remain the head.
  1162          4. Create a new fork of the initial chain longer than
  1163             any of the other forks. It should be the new head.
  1164          '''
  1165  
  1166          # create forks of various lengths
  1167          _, a_0 = self.generate_chain(self.init_head, 3)
  1168          _, b_0 = self.generate_chain(self.init_head, 5)
  1169          _, c_0 = self.generate_chain(self.init_head, 7)
  1170  
  1171          self.receive_and_process_blocks(a_0, b_0, c_0)
  1172          self.assert_is_chain_head(c_0)
  1173  
  1174          # extend every fork by 2
  1175          _, a_1 = self.generate_chain(a_0, 2)
  1176          _, b_1 = self.generate_chain(b_0, 2)
  1177          _, c_1 = self.generate_chain(c_0, 2)
  1178  
  1179          self.receive_and_process_blocks(a_1, b_1, c_1)
  1180          self.assert_is_chain_head(c_1)
  1181  
  1182          # extend the forks by different lengths
  1183          _, a_2 = self.generate_chain(a_1, 1)
  1184          _, b_2 = self.generate_chain(b_1, 6)
  1185          _, c_2 = self.generate_chain(c_1, 3)
  1186  
  1187          self.receive_and_process_blocks(a_2, b_2, c_2)
  1188          self.assert_is_chain_head(b_2)
  1189  
   1190          # extend every fork by 8
  1191          _, a_3 = self.generate_chain(a_2, 8)
  1192          _, b_3 = self.generate_chain(b_2, 8)
  1193          _, c_3 = self.generate_chain(c_2, 8)
  1194  
  1195          self.receive_and_process_blocks(a_3, b_3, c_3)
  1196          self.assert_is_chain_head(b_3)
  1197  
  1198          # create a new longest chain
  1199          _, wow = self.generate_chain(self.init_head, 30)
  1200          self.receive_and_process_blocks(wow)
  1201          self.assert_is_chain_head(wow)
  1202  
  1203      # next multi threaded
  1204      # next add block publisher
  1205      # next batch lists
  1206      # integrate with LMDB
  1207      # early vs late binding ( class member of consensus BlockPublisher)
  1208  
  1209      # helpers
  1210  
  1211      def assert_is_chain_head(self, block):
  1212          chain_head_sig = self.chain_ctrl.chain_head.header_signature
  1213          block_sig = block.header_signature
  1214  
  1215          self.assertEqual(
  1216              chain_head_sig,
  1217              block_sig,
  1218              'Not chain head')
  1219  
  1220      def generate_chain(self, root_block, num_blocks, params=None):
  1221          '''Returns (chain, chain_head).
  1222          Usually only the head is needed,
  1223          but occasionally the chain itself is used.
  1224          '''
  1225          if params is None:
  1226              params = {'add_to_cache': True}
  1227  
  1228          chain = self.block_tree_manager.generate_chain(
  1229              root_block, num_blocks, params)
  1230  
  1231          head = chain[-1]
  1232  
  1233          return chain, head
  1234  
  1235      def generate_block(self, *args, **kwargs):
  1236          return self.block_tree_manager.generate_block(
  1237              *args, **kwargs)
  1238  
  1239      def receive_and_process_blocks(self, *blocks):
  1240          for block in blocks:
  1241              self.chain_ctrl.on_block_received(block)
  1242          self.executor.process_all()
  1243  
  1244  
  1245  class TestChainControllerGenesisPeer(unittest.TestCase):
  1246      def setUp(self):
  1247          self.dir = tempfile.mkdtemp()
  1248          self.block_tree_manager = BlockTreeManager(with_genesis=False)
  1249          self.gossip = MockNetwork()
  1250          self.txn_executor = MockTransactionExecutor()
  1251          self.chain_id_manager = ChainIdManager(self.dir)
  1252          self.permission_verifier = MockPermissionVerifier()
  1253          self.state_view_factory = MockStateViewFactory(
  1254              self.block_tree_manager.state_db)
  1255          self.transaction_executor = MockTransactionExecutor(
  1256              batch_execution_result=None)
  1257          self.executor = SynchronousExecutor()
  1258  
  1259          self.state_database = NativeLmdbDatabase(
  1260              os.path.join(self.dir, 'merkle.lmdb'),
  1261              indexes=MerkleDatabase.create_index_configuration(),
  1262              _size=120 * 1024 * 1024)
  1263          self.block_sender = MockBlockSender()
  1264          self.batch_sender = MockBatchSender()
  1265  
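                 # this publisher is wired with the validator's real
                 # DefaultBatchInjectorFactory rather than the mock injector
                 # factory used in the publisher tests above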
  1266          self.publisher = BlockPublisher(
  1267              transaction_executor=self.txn_executor,
  1268              block_cache=self.block_tree_manager.block_cache,
  1269              state_view_factory=MockStateViewFactory(
  1270                  self.block_tree_manager.state_db),
  1271              settings_cache=SettingsCache(
  1272                  SettingsViewFactory(
  1273                      self.block_tree_manager.state_view_factory),
  1274              ),
  1275              block_sender=self.block_sender,
  1276              batch_sender=self.batch_sender,
  1277              chain_head=self.block_tree_manager.block_store.chain_head,
  1278              identity_signer=self.block_tree_manager.identity_signer,
  1279              data_dir=None,
  1280              config_dir=None,
  1281              permission_verifier=self.permission_verifier,
  1282              check_publish_block_frequency=0.1,
  1283              batch_observers=[],
  1284              batch_injector_factory=DefaultBatchInjectorFactory(
  1285                  block_cache=self.block_tree_manager.block_cache,
  1286                  state_view_factory=MockStateViewFactory(
  1287                      self.block_tree_manager.state_db),
  1288                  signer=self.block_tree_manager.identity_signer))
  1289  
  1290          self.block_validator = None
  1291          self.chain_ctrl = None
  1292  
  1293      def setup_chain_controller(self):
  1294  
  1295          self.block_validator = BlockValidator(
  1296              state_view_factory=self.state_view_factory,
  1297              block_cache=self.block_tree_manager.block_cache,
  1298              transaction_executor=self.transaction_executor,
  1299              identity_signer=self.block_tree_manager.identity_signer,
  1300              data_dir=self.dir,
  1301              config_dir=None,
  1302              permission_verifier=self.permission_verifier,
  1303              thread_pool=self.executor)
  1304  
  1305          self.chain_ctrl = ChainController(
  1306              self.block_tree_manager.block_store,
  1307              self.block_tree_manager.block_cache,
  1308              self.block_validator,
  1309              self.state_database,
  1310              self.publisher.chain_head_lock,
  1311              data_dir=self.dir,
  1312              observers=[])
  1313  
  1314          self.assertIsNone(self.chain_ctrl.chain_head)
  1315  
  1316      def tearDown(self):
  1317          shutil.rmtree(self.dir)
  1318  
  1319      def test_genesis_block_mismatch(self):
   1320          '''Test that a genesis block is dropped when its chain id does not
   1321          match the block-chain-id stored on disk, given a ChainController
   1322          with an empty chain.
  1323          '''
  1324          self.setup_chain_controller()
  1325          self.chain_id_manager.save_block_chain_id('my_chain_id')
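                 # the generated genesis block's chain id (its header signature)
                 # cannot match the saved 'my_chain_id', so the controller is
                 # expected to drop it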
  1326          some_other_genesis_block = \
  1327              self.block_tree_manager.generate_genesis_block()
  1328          self.chain_ctrl.on_block_received(some_other_genesis_block)
  1329  
  1330          self.assertIsNone(self.chain_ctrl.chain_head)
  1331  
  1332      def test_genesis_block_matches_block_chain_id(self):
  1333          '''Test that a validator with no chain will accept a valid genesis
  1334          block that matches the block-chain-id stored on disk.
  1335          '''
  1336          self.setup_chain_controller()
  1337  
  1338          my_genesis_block = self.block_tree_manager.generate_genesis_block()
  1339          chain_id = my_genesis_block.header_signature
  1340          self.chain_id_manager.save_block_chain_id(chain_id)
  1341  
  1342          with patch.object(BlockValidator,
  1343                            'validate_block',
  1344                            return_value=True):
  1345              self.chain_ctrl.on_block_received(my_genesis_block)
  1346  
  1347          self.assertIsNotNone(self.chain_ctrl.chain_head)
  1348          chain_head_sig = self.chain_ctrl.chain_head.header_signature
  1349  
  1350          self.assertEqual(
  1351              chain_head_sig[:8],
  1352              chain_id[:8],
  1353              'Chain id does not match')
  1354  
  1355          self.assertEqual(chain_id,
  1356                           self.chain_id_manager.get_block_chain_id())
  1357  
  1358      def test_invalid_genesis_block_matches_block_chain_id(self):
  1359          '''Test that a validator with no chain will drop an invalid genesis
  1360          block that matches the block-chain-id stored on disk.
  1361          '''
  1362          self.setup_chain_controller()
  1363          my_genesis_block = self.block_tree_manager.generate_genesis_block()
  1364          chain_id = my_genesis_block.header_signature
  1365          self.chain_id_manager.save_block_chain_id(chain_id)
  1366  
  1367          with patch.object(BlockValidator,
  1368                            'validate_block',
  1369                            side_effect=BlockValidationFailure):
  1370              self.chain_ctrl.on_block_received(my_genesis_block)
  1371  
  1372          self.assertIsNone(self.chain_ctrl.chain_head)
  1373  
  1374  
  1375  class TestJournal(unittest.TestCase):
  1376      def setUp(self):
  1377          self.dir = tempfile.mkdtemp()
  1378          self.gossip = MockNetwork()
  1379          self.txn_executor = MockTransactionExecutor()
  1380          self.block_sender = MockBlockSender()
  1381          self.batch_sender = MockBatchSender()
  1382          self.permission_verifier = MockPermissionVerifier()
  1383  
  1384      def tearDown(self):
  1385          shutil.rmtree(self.dir)
  1386  
  1387      def test_publish_block(self):
  1388          """
  1389          Test that the Journal will produce blocks and consume those blocks
  1390          to extend the chain.
  1391          :return:
  1392          """
  1393          # construct the journal and wire it to the
  1394          # gossip layer.
  1395  
  1396          btm = BlockTreeManager()
  1397          block_publisher = None
  1398          chain_controller = None
                block_validator = None
  1399          try:
  1400              block_publisher = BlockPublisher(
  1401                  transaction_executor=self.txn_executor,
  1402                  block_cache=btm.block_cache,
  1403                  state_view_factory=MockStateViewFactory(btm.state_db),
  1404                  settings_cache=SettingsCache(
  1405                      SettingsViewFactory(
  1406                          btm.state_view_factory),
  1407                  ),
  1408                  block_sender=self.block_sender,
  1409                  batch_sender=self.batch_sender,
  1410                  chain_head=btm.block_store.chain_head,
  1411                  identity_signer=btm.identity_signer,
  1412                  data_dir=None,
  1413                  config_dir=None,
  1414                  permission_verifier=self.permission_verifier,
  1415                  check_publish_block_frequency=0.1,
  1416                  batch_observers=[],
  1417                  batch_injector_factory=DefaultBatchInjectorFactory(
  1418                      block_cache=btm.block_cache,
  1419                      state_view_factory=MockStateViewFactory(btm.state_db),
  1420                      signer=btm.identity_signer))
  1421  
  1422              block_validator = BlockValidator(
  1423                  state_view_factory=MockStateViewFactory(btm.state_db),
  1424                  block_cache=btm.block_cache,
  1425                  transaction_executor=self.txn_executor,
  1426                  identity_signer=btm.identity_signer,
  1427                  data_dir=None,
  1428                  config_dir=None,
  1429                  permission_verifier=self.permission_verifier)
  1430  
  1431              state_database = NativeLmdbDatabase(
  1432                  os.path.join(self.dir, 'merkle.lmdb'),
  1433                  indexes=MerkleDatabase.create_index_configuration(),
  1434                  _size=120 * 1024 * 1024)
  1435  
  1436              chain_controller = ChainController(
  1437                  btm.block_store,
  1438                  btm.block_cache,
  1439                  block_validator,
  1440                  state_database,
  1441                  block_publisher.chain_head_lock,
  1442                  data_dir=None,
  1443                  observers=[])
  1444  
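                    # Wire the mock gossip callbacks: received batches go to
                    # the publisher's batch sender and received blocks are
                    # queued on the chain controller.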
  1445              self.gossip.on_batch_received = block_publisher.batch_sender().send
  1446              self.gossip.on_block_received = chain_controller.queue_block
  1447  
  1448              block_publisher.start()
  1449              chain_controller.start()
  1450  
  1451              # feed it a batch
  1452              batch = Batch()
  1453              block_publisher.batch_sender().send(batch)
  1454  
  1455              wait_until(lambda: self.block_sender.new_block is not None, 2)
  1456              self.assertIsNotNone(self.block_sender.new_block)
  1457  
  1458              block = BlockWrapper.wrap(self.block_sender.new_block)
  1459              chain_controller.queue_block(block)
  1460  
  1461              # wait for the chain_head to be updated.
  1462              wait_until(
  1463                  lambda: btm.chain_head.identifier == block.identifier, 2)
  1464              self.assertEqual(btm.chain_head.identifier, block.identifier)
  1465          finally:
  1466              if block_publisher is not None:
  1467                  block_publisher.stop()
  1468              if chain_controller is not None:
  1469                  chain_controller.stop()
  1470              if block_validator is not None:
  1471                  block_validator.stop()
  1472  
  1473  
  1474  class TestTimedCache(unittest.TestCase):
  1475      def test_cache(self):
  1476          bc = TimedCache(keep_time=1, purge_frequency=0)
  1477  
  1478          with self.assertRaises(KeyError):
  1479              bc["test"]
  1480  
  1481          bc["test"] = "value"
  1482  
  1483          self.assertEqual(len(bc), 1)
  1484  
  1485          del bc["test"]
  1486          self.assertFalse("test" in bc)
  1487  
  1488      def test_evict_expired(self):
  1489          """ Test that values will be evicted from the
  1490          cache as they time out.
  1491          """
  1492  
  1493          # use an invasive technique so that we don't have to sleep for
  1494          # the item to expire
  1495  
  1496          bc = TimedCache(keep_time=1, purge_frequency=0)
  1497  
  1498          bc["test"] = "value"
  1499          bc["test2"] = "value2"
  1500          self.assertEqual(len(bc), 2)
  1501  
  1502          # back-date the entry so it appears expired
  1503          bc.cache["test"].timestamp = bc.cache["test"].timestamp - 2
  1504          bc["test2"] = "value2"  # set a value to trigger the purge
  1505          self.assertEqual(len(bc), 1)
  1506          self.assertFalse("test" in bc)
  1507          self.assertTrue("test2" in bc)
  1508  
  1509      def test_access_update(self):
  1510  
  1511          bc = TimedCache(keep_time=1, purge_frequency=0)
  1512  
  1513          bc["test"] = "value"
  1514          bc["test2"] = "value2"
  1515          self.assertEqual(len(bc), 2)
  1516  
  1517          bc["test"] = "value"
  1518          bc.cache["test"].timestamp = bc.cache["test"].timestamp - 2
  1519          bc["test"]  # access to update timestamp
  1520          bc["test2"] = "value2"  # set value to activate purge
  1521          self.assertEqual(len(bc), 2)
  1522          self.assertTrue("test" in bc)
  1523          self.assertTrue("test2" in bc)
  1524  
  1525  
  1526  class TestChainCommitState(unittest.TestCase):
  1527      """Test for:
  1528      - No duplicates found for batches
  1529      - No duplicates found for transactions
  1530      - Duplicate batch found in current chain
  1531      - Duplicate batch found in fork
  1532      - Duplicate transaction found in current chain
  1533      - Duplicate transaction found in fork
  1534      - Missing dependencies caught
  1535      - Dependencies found for transactions in current chain
  1536      - Dependencies found for transactions in fork
  1537      """
  1538  
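            # Helpers that build minimal protobuf objects; short string ids
            # such as 'B1', 'b1' and 't1' stand in for real header signatures
            # to keep the fixtures readable.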
  1539      def gen_block(self, block_id, prev_id, num, batches):
  1540          return BlockWrapper(
  1541              Block(
  1542                  header_signature=block_id,
  1543                  batches=batches,
  1544                  header=BlockHeader(
  1545                      block_num=num,
  1546                      previous_block_id=prev_id).SerializeToString()))
  1547  
  1548      def gen_batch(self, batch_id, transactions):
  1549          return Batch(header_signature=batch_id, transactions=transactions)
  1550  
  1551      def gen_txn(self, txn_id, deps=None):
  1552          return Transaction(
  1553              header_signature=txn_id,
  1554              header=TransactionHeader(dependencies=deps).SerializeToString())
  1555  
  1556      # Batches
  1557      def test_no_duplicate_batch_found(self):
  1558          """Verify that DuplicateBatch is not raised for a completely new
  1559          batch.
  1560          """
  1561          _, _, committed_blocks, uncommitted_blocks =\
  1562              self.create_new_chain()
  1563  
  1564          commit_state = self.create_chain_commit_state(
  1565              committed_blocks, uncommitted_blocks, 'B6')
  1566  
  1567          commit_state.check_for_duplicate_batches([self.gen_batch('b10', [])])
  1568  
  1569      def test_duplicate_batch_in_both_chains(self):
  1570          """Verify that DuplicateBatch is raised for a batch in both the current
  1571          chain and the fork.
  1572          """
  1573          _, batches, committed_blocks, uncommitted_blocks =\
  1574              self.create_new_chain()
  1575  
  1576          commit_state = self.create_chain_commit_state(
  1577              committed_blocks, uncommitted_blocks, 'B6')
  1578  
  1579          with self.assertRaises(DuplicateBatch) as cm:
  1580              commit_state.check_for_duplicate_batches(
  1581                  [batches[2]])
  1582  
  1583          self.assertEqual(cm.exception.batch_id, 'b2')
  1584  
  1585      def test_duplicate_batch_in_current_chain(self):
  1586          """Verify that DuplicateBatch is raised for a batch in the current
  1587          chain.
  1588          """
  1589          _, batches, committed_blocks, uncommitted_blocks =\
  1590              self.create_new_chain()
  1591  
  1592          commit_state = self.create_chain_commit_state(
  1593              committed_blocks, uncommitted_blocks, 'B6')
  1594  
  1595          with self.assertRaises(DuplicateBatch) as cm:
  1596              commit_state.check_for_duplicate_batches(
  1597                  [batches[5]])
  1598  
  1599          self.assertEqual(cm.exception.batch_id, 'b5')
  1600  
  1601      def test_duplicate_batch_in_fork(self):
  1602          """Verify that DuplicateBatch is raised for a batch in the fork.
  1603          """
  1604          _, batches, committed_blocks, uncommitted_blocks =\
  1605              self.create_new_chain()
  1606  
  1607          commit_state = self.create_chain_commit_state(
  1608              committed_blocks, uncommitted_blocks, 'B9')
  1609  
  1610          with self.assertRaises(DuplicateBatch) as cm:
  1611              commit_state.check_for_duplicate_batches(
  1612                  [batches[8]])
  1613  
  1614          self.assertEqual(cm.exception.batch_id, 'b8')
  1615  
  1616      def test_no_duplicate_batch_in_current_chain(self):
  1617          """Verify that DuplicateBatch is not raised for a batch that is in the
  1618          current chain but not the fork when head is on the fork.
  1619          """
  1620          _, batches, committed_blocks, uncommitted_blocks =\
  1621              self.create_new_chain()
  1622  
  1623          commit_state = self.create_chain_commit_state(
  1624              committed_blocks, uncommitted_blocks, 'B9')
  1625  
  1626          commit_state.check_for_duplicate_batches(
  1627              [batches[5]])
  1628  
  1629      def test_no_duplicate_batch_in_fork(self):
  1630          """Verify that DuplicateBatch is not raised for a batch that is in the
  1631          fork but not the current chain when head is on the current chain.
  1632          """
  1633          _, batches, committed_blocks, uncommitted_blocks =\
  1634              self.create_new_chain()
  1635  
  1636          commit_state = self.create_chain_commit_state(
  1637              committed_blocks, uncommitted_blocks, 'B6')
  1638  
  1639          commit_state.check_for_duplicate_batches(
  1640              [batches[8]])
  1641  
  1642      # Transactions
  1643      def test_no_duplicate_txn_found(self):
  1644          """Verify that DuplicateTransaction is not raised for a completely new
  1645          transaction.
  1646          """
  1647          _, _, committed_blocks, uncommitted_blocks =\
  1648              self.create_new_chain()
  1649  
  1650          commit_state = self.create_chain_commit_state(
  1651              committed_blocks, uncommitted_blocks, 'B6')
  1652  
  1653          commit_state.check_for_duplicate_transactions([self.gen_txn('t10')])
  1654  
  1655      def test_duplicate_txn_in_both_chains(self):
  1656          """Verify that DuplicateTransaction is raised for a transaction in both
  1657          the current chain and the fork.
  1658          """
  1659          transactions, _, committed_blocks, uncommitted_blocks =\
  1660              self.create_new_chain()
  1661  
  1662          commit_state = self.create_chain_commit_state(
  1663              committed_blocks, uncommitted_blocks, 'B6')
  1664  
  1665          with self.assertRaises(DuplicateTransaction) as cm:
  1666              commit_state.check_for_duplicate_transactions(
  1667                  [transactions[2]])
  1668  
  1669          self.assertEqual(cm.exception.transaction_id, 't2')
  1670  
  1671      def test_duplicate_txn_in_current_chain(self):
  1672          """Verify that DuplicateTransaction is raised for a transaction in the
  1673          current chain.
  1674          """
  1675          transactions, _, committed_blocks, uncommitted_blocks =\
  1676              self.create_new_chain()
  1677  
  1678          commit_state = self.create_chain_commit_state(
  1679              committed_blocks, uncommitted_blocks, 'B6')
  1680  
  1681          with self.assertRaises(DuplicateTransaction) as cm:
  1682              commit_state.check_for_duplicate_transactions(
  1683                  [transactions[5]])
  1684  
  1685          self.assertEqual(cm.exception.transaction_id, 't5')
  1686  
  1687      def test_duplicate_txn_in_fork(self):
  1688          """Verify that DuplicateTransaction is raised for a transaction in the
  1689          fork.
  1690          """
  1691          transactions, _, committed_blocks, uncommitted_blocks =\
  1692              self.create_new_chain()
  1693  
  1694          commit_state = self.create_chain_commit_state(
  1695              committed_blocks, uncommitted_blocks, 'B9')
  1696  
  1697          with self.assertRaises(DuplicateTransaction) as cm:
  1698              commit_state.check_for_duplicate_transactions(
  1699                  [transactions[8]])
  1700  
  1701          self.assertEqual(cm.exception.transaction_id, 't8')
  1702  
  1703      def test_no_duplicate_txn_in_current_chain(self):
  1704          """Verify that DuplicateTransaction is not raised for a transaction
  1705          that is in the current chain but not the fork when head is on the fork.
  1706          """
  1707          transactions, _, committed_blocks, uncommitted_blocks =\
  1708              self.create_new_chain()
  1709  
  1710          commit_state = self.create_chain_commit_state(
  1711              committed_blocks, uncommitted_blocks, 'B9')
  1712  
  1713          commit_state.check_for_duplicate_transactions(
  1714              [transactions[5]])
  1715  
  1716      def test_no_duplicate_txn_in_fork(self):
  1717          """Verify that DuplicateTransaction is not raised for a transaction
  1718          that is in the fork but not the current chain when head is on the
  1719          current chain.
  1720          """
  1721          transactions, _, committed_blocks, uncommitted_blocks =\
  1722              self.create_new_chain()
  1723  
  1724          commit_state = self.create_chain_commit_state(
  1725              committed_blocks, uncommitted_blocks, 'B6')
  1726  
  1727          commit_state.check_for_duplicate_transactions(
  1728              [transactions[8]])
  1729  
  1730      # Dependencies
  1731      def test_present_dependency(self):
  1732          """Verify that a present dependency is found."""
  1733          transactions, _, committed_blocks, uncommitted_blocks =\
  1734              self.create_new_chain()
  1735  
  1736          commit_state = self.create_chain_commit_state(
  1737              committed_blocks, uncommitted_blocks, 'B6')
  1738  
  1739          commit_state.check_for_transaction_dependencies([
  1740              self.gen_txn('t10', deps=[transactions[2].header_signature])
  1741          ])
  1742  
  1743      def test_missing_dependency_in_both_chains(self):
  1744          """Verifies that MissingDependency is raised when a dependency is not
  1745          committed anywhere.
  1746          """
  1747          _, _, committed_blocks, uncommitted_blocks =\
  1748              self.create_new_chain()
  1749  
  1750          commit_state = self.create_chain_commit_state(
  1751              committed_blocks, uncommitted_blocks, 'B6')
  1752  
  1753          with self.assertRaises(MissingDependency) as cm:
  1754              commit_state.check_for_transaction_dependencies([
  1755                  self.gen_txn('t10', deps=['t11'])
  1756              ])
  1757  
  1758          self.assertEqual(cm.exception.transaction_id, 't11')
  1759  
  1760      def test_present_dependency_in_current_chain(self):
  1761          """Verify that a dependency present in the current chain is found.
  1762          """
  1763          transactions, _, committed_blocks, uncommitted_blocks =\
  1764              self.create_new_chain()
  1765  
  1766          commit_state = self.create_chain_commit_state(
  1767              committed_blocks, uncommitted_blocks, 'B6')
  1768  
  1769          commit_state.check_for_transaction_dependencies([
  1770              self.gen_txn('t10', deps=[transactions[5].header_signature])
  1771          ])
  1772  
  1773      def test_present_dependency_in_fork(self):
  1774          """Verify that a dependency present in the fork is found.
  1775          """
  1776          transactions, _, committed_blocks, uncommitted_blocks =\
  1777              self.create_new_chain()
  1778  
  1779          commit_state = self.create_chain_commit_state(
  1780              committed_blocks, uncommitted_blocks, 'B9')
  1781  
  1782          commit_state.check_for_transaction_dependencies([
  1783              self.gen_txn('t10', deps=[transactions[8].header_signature])
  1784          ])
  1785  
  1786      def test_missing_dependency_in_current_chain(self):
  1787          """Verify that MissingDependency is raised for a dependency that is
  1788          committed to the current chain but not the fork when head is on the
  1789          fork.
  1790          """
  1791          transactions, _, committed_blocks, uncommitted_blocks =\
  1792              self.create_new_chain()
  1793  
  1794          commit_state = self.create_chain_commit_state(
  1795              committed_blocks, uncommitted_blocks, 'B9')
  1796  
  1797          with self.assertRaises(MissingDependency) as cm:
  1798              commit_state.check_for_transaction_dependencies([
                        self.gen_txn('t10', deps=[transactions[5].header_signature])
                    ])

                self.assertEqual(cm.exception.transaction_id, 't5')
  1799  
  1800      def test_missing_dependency_in_fork(self):
  1801          """Verify that MissingDependency is raised for a dependency that is
  1802          committed to the fork but not the current chain when head is on the
  1803          current chain.
  1804          """
  1805          transactions, _, committed_blocks, uncommitted_blocks =\
  1806              self.create_new_chain()
  1807  
  1808          commit_state = self.create_chain_commit_state(
  1809              committed_blocks, uncommitted_blocks, 'B6')
  1810  
  1811          with self.assertRaises(MissingDependency) as cm:
  1812              commit_state.check_for_transaction_dependencies([
                        self.gen_txn('t10', deps=[transactions[8].header_signature])
                    ])

                self.assertEqual(cm.exception.transaction_id, 't8')
  1813  
  1814      def create_new_chain(self):
  1815          """
  1816          NUM     0  1  2  3  4  5  6
  1817          CURRENT B0-B1-B2-B3-B4-B5-B6
  1818                           |
  1819          FORK             +--B7-B8-B9
  1820          """
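                # Each block Bi carries batch bi, which wraps transaction ti;
                # blocks B7-B9 form a fork branching off B3 (block nums 4-6).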
  1821          txns = [
  1822              self.gen_txn('t' + format(i, 'x'))
  1823              for i in range(10)
  1824          ]
  1825          batches = [
  1826              self.gen_batch('b' + format(i, 'x'), [txns[i]])
  1827              for i in range(10)
  1828          ]
  1829          committed_blocks = [
  1830              self.gen_block(
  1831                  block_id='B0',
  1832                  prev_id=NULL_BLOCK_IDENTIFIER,
  1833                  num=0,
  1834                  batches=[batches[0]])
  1835          ]
  1836          committed_blocks.extend([
  1837              self.gen_block(
  1838                  block_id='B' + format(i, 'x'),
  1839                  prev_id='B' + format(i - 1, 'x'),
  1840                  num=i,
  1841                  batches=[batches[i]])
  1842              for i in range(1, 7)
  1843          ])
  1844          uncommitted_blocks = [
  1845              self.gen_block(
  1846                  block_id='B7',
  1847                  prev_id='B3',
  1848                  num=4,
  1849                  batches=[batches[7]])
  1850          ]
  1851          uncommitted_blocks.extend([
  1852              self.gen_block(
  1853                  block_id='B' + format(i, 'x'),
  1854                  prev_id='B' + format(i - 1, 'x'),
  1855                  num=5 + (i - 8),
  1856                  batches=[batches[i]])
  1857              for i in range(8, 10)
  1858          ])
  1859  
  1860          return txns, batches, committed_blocks, uncommitted_blocks
  1861  
  1862      def create_chain_commit_state(
  1863          self,
  1864          committed_blocks,
  1865          uncommitted_blocks,
  1866          head_id,
  1867      ):
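                # Committed blocks go into the block store, while the fork's
                # uncommitted blocks are only added to the block cache;
                # head_id selects the block the commit state treats as the
                # current chain head, so tests can view either branch.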
  1868          block_store = BlockStore(DictDatabase(
  1869              indexes=BlockStore.create_index_configuration()))
  1870          block_store.update_chain(committed_blocks)
  1871  
  1872          block_cache = BlockCache(
  1873              block_store=block_store)
  1874  
  1875          for block in uncommitted_blocks:
  1876              block_cache[block.header_signature] = block
  1877  
  1878          return ChainCommitState(head_id, block_cache, block_store)
  1879  
  1880  
  1881  class TestBlockEventExtractor(unittest.TestCase):
  1882      def test_block_event_extractor(self):
  1883          """Test that a sawtooth/block-commit event is generated correctly."""
  1884          block_header = BlockHeader(
  1885              block_num=85,
  1886              state_root_hash="0987654321fedcba",
  1887              previous_block_id="0000000000000000")
  1888          block = BlockWrapper(Block(
  1889              header_signature="abcdef1234567890",
  1890              header=block_header.SerializeToString()))
  1891          extractor = BlockEventExtractor(block)
  1892          events = extractor.extract([EventSubscription(
  1893              event_type="sawtooth/block-commit")])
  1894          self.assertEqual(events, [
  1895              Event(
  1896                  event_type="sawtooth/block-commit",
  1897                  attributes=[
  1898                      Event.Attribute(key="block_id", value="abcdef1234567890"),
  1899                      Event.Attribute(key="block_num", value="85"),
  1900                      Event.Attribute(
  1901                          key="state_root_hash", value="0987654321fedcba"),
  1902                      Event.Attribute(
  1903                          key="previous_block_id",
  1904                          value="0000000000000000")])])
  1905  
  1906  
  1907  class TestReceiptEventExtractor(unittest.TestCase):
  1908      def test_tf_events(self):
  1909          """Test that tf events are generated correctly."""
  1910          gen_data = [
  1911              ["test1", "test2"],
  1912              ["test3"],
  1913              ["test4", "test5", "test6"],
  1914          ]
  1915          event_sets = [
  1916              [
  1917                  Event(event_type=event_type)
  1918                  for event_type in events
  1919              ] for events in gen_data
  1920          ]
  1921          receipts = [
  1922              TransactionReceipt(events=events)
  1923              for events in event_sets
  1924          ]
  1925          extractor = ReceiptEventExtractor(receipts)
  1926  
  1927          events = extractor.extract([])
  1928          self.assertEqual([], events)
  1929  
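                # Only events whose type matches a subscription are expected
                # back, in receipt order: 'test1' from the first receipt and
                # 'test5' from the third.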
  1930          events = extractor.extract([
  1931              EventSubscription(event_type="test1"),
  1932              EventSubscription(event_type="test5"),
  1933          ])
  1934          self.assertEqual(events, [event_sets[0][0], event_sets[2][1]])
  1935  
  1936      def test_state_delta_events(self):
  1937          """Test that sawtooth/state-delta events are generated correctly."""
  1938          gen_data = [
  1939              [("a", b"a", StateChange.SET), ("b", b"b", StateChange.DELETE)],
  1940              [("a", b"a", StateChange.DELETE), ("d", b"d", StateChange.SET)],
  1941              [("e", b"e", StateChange.SET)],
  1942          ]
  1943          change_sets = [
  1944              [
  1945                  StateChange(address=address, value=value, type=change_type)
  1946                  for address, value, change_type in state_changes
  1947              ] for state_changes in gen_data
  1948          ]
  1949          receipts = [
  1950              TransactionReceipt(state_changes=state_changes)
  1951              for state_changes in change_sets
  1952          ]
  1953          extractor = ReceiptEventExtractor(receipts)
  1954  
  1955          factory = EventFilterFactory()
  1956          events = extractor.extract([
  1957              EventSubscription(
  1958                  event_type="sawtooth/state-delta",
  1959                  filters=[factory.create("address", "a")]),
  1960              EventSubscription(
  1961                  event_type="sawtooth/state-delta",
  1962                  filters=[factory.create(
  1963                      "address", "[ce]", EventFilter.REGEX_ANY)],
  1964              )
  1965          ])
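                # The expected event aggregates the deltas from all receipts:
                # de-duplicated by address (the later change to 'a' wins) and
                # ordered newest-first; the address filters here appear to
                # gate extraction rather than trim the change list.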
  1966          self.assertEqual(events, [Event(
  1967              event_type="sawtooth/state-delta",
  1968              attributes=[
  1969                  Event.Attribute(key="address", value=address)
  1970                  for address in ["e", "d", "a", "b"]
  1971              ],
  1972              data=StateChangeList(state_changes=[
  1973                  change_sets[2][0], change_sets[1][1],
  1974                  change_sets[1][0], change_sets[0][1],
  1975              ]).SerializeToString(),
  1976          )])