github.com/muhammedhassanm/blockchain@v0.0.0-20200120143007-697261defd4d/sawtooth-core-master/cli/sawtooth_cli/network_command/compare.py (about)

     1  # Copyright 2017 Intel Corporation
     2  #
     3  # Licensed under the Apache License, Version 2.0 (the "License");
     4  # you may not use this file except in compliance with the License.
     5  # You may obtain a copy of the License at
     6  #
     7  #     http://www.apache.org/licenses/LICENSE-2.0
     8  #
     9  # Unless required by applicable law or agreed to in writing, software
    10  # distributed under the License is distributed on an "AS IS" BASIS,
    11  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    12  # See the License for the specific language governing permissions and
    13  # limitations under the License.
    14  # ------------------------------------------------------------------------------
    15  import argparse
    16  from math import floor, log
    17  
    18  from sawtooth_cli.network_command.parent_parsers import base_multinode_parser
    19  from sawtooth_cli.network_command.parent_parsers import split_comma_append_args
    20  from sawtooth_cli.network_command.parent_parsers import make_rest_apis
    21  from sawtooth_cli.network_command.fork_graph import ForkGraph
    22  from sawtooth_cli.network_command.fork_graph import SimpleBlock
    23  
    24  from sawtooth_cli.exceptions import CliException
    25  
    26  
def add_compare_chains_parser(subparsers, parent_parser):
    """Creates the arg parsers needed for the compare command.

    Registers the 'compare-chains' subcommand, which compares the chains
    held by several nodes and reports how they differ.

    Args:
        subparsers: The argparse subparsers object to register the new
            subcommand with.
        parent_parser: A parent parser whose arguments (e.g. verbosity)
            are inherited by the new subparser.
    """
    # RawDescriptionHelpFormatter preserves the hand-formatted epilog below.
    parser = subparsers.add_parser(
        'compare-chains',
        help='Compare chains from different nodes.',
        description=(
            'Compute and display information about how the chains at '
            'different nodes differ.'
        ),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='''
By default, prints a table of summary data and a table of per-node data with
the following fields. Pass --tree for a fork graph.

COMMON ANCESTOR
    The most recent block that all chains have in common.

COMMON HEIGHT
    Let min_height := the minimum height of any chain across all nodes passed
    in. COMMON HEIGHT = min_height.

HEAD
    The block id of the most recent block on a given chain.

HEIGHT
    The block number of the most recent block on a given chain.

LAG
    Let max_height := the maximum height of any chain across all nodes passed
    in. LAG = max_height - HEIGHT for a given chain.

DIVERG
    Let common_ancestor_height := the height of the COMMON ANCESTOR.
    DIVERG = HEIGHT - common_ancestor_height

''',
        parents=[parent_parser, base_multinode_parser()])

    # Page size used when requesting blocks from each node's REST API.
    parser.add_argument(
        '-l',
        '--limit',
        default=25,
        type=int,
        help='the number of blocks to request at a time',
    )

    # Output-mode flags; with neither flag a summary is printed.
    parser.add_argument(
        '--table',
        action='store_true',
        help='Print out a fork table for all nodes since the common ancestor.')

    parser.add_argument(
        '--tree',
        action='store_true',
        help='Print out a fork tree for all nodes since the common ancestor.')
    83  
    84  
    85  def do_compare_chains(args):
    86      """Calculates and outputs comparison between all nodes on the network."""
    87      urls = split_comma_append_args(args.urls)
    88      users = split_comma_append_args(args.users)
    89      clients = make_rest_apis(urls, users)
    90  
    91      broken = []
    92  
    93      chains, errors = get_chain_generators(clients, args.limit)
    94      broken.extend(errors)
    95      for node in errors:
    96          print("Error connecting to node %d: %s" % (node, urls[node]))
    97      if not chains:
    98          print("No nodes reporting")
    99          return
   100  
   101      tails, errors = get_tails(chains)
   102      broken.extend(errors)
   103      for node in errors:
   104          del chains[node]
   105      for node in errors:
   106          print("Failed to reach common height with node %d: %s" % (
   107              node, urls[node]))
   108      if not chains:
   109          print("Failed to get common height")
   110          return
   111  
   112      graph, errors = build_fork_graph(chains, tails)
   113      broken.extend(errors)
   114      for node in errors:
   115          print("Failed to reach common ancestor with node %d: %s" % (
   116              node, urls[node]))
   117      if not graph:
   118          print("Failed to build fork graph")
   119          return
   120  
   121      # Transform tails and errors into the format expected by the print
   122      # functions. Because errors can occur while building the graph, we need to
   123      # remove the tails for those clients.
   124      broken.sort()
   125      node_id_map = get_node_id_map(broken, len(clients))
   126      tails = list(map(
   127          lambda item: item[1],
   128          filter(
   129              lambda item: item[0] not in broken,
   130              sorted(tails.items()))))
   131  
   132      if args.table:
   133          print_table(graph, tails, node_id_map)
   134  
   135      elif args.tree:
   136          print_tree(graph, tails, node_id_map)
   137  
   138      else:
   139          print_summary(graph, tails, node_id_map)
   140  
   141  
   142  def get_chain_generators(clients, limit):
   143      # Send one request to each client to determine if it is responsive or not.
   144      # Use the heights of all the responding clients' heads to set the paging
   145      # size for future requests, so that the number of requests is minimized.
   146      heads = []
   147      good_clients = []
   148      bad_clients = []
   149      for i, client in enumerate(clients):
   150          try:
   151              block = next(client.list_blocks(limit=1))
   152              heads.append(SimpleBlock.from_block_dict(block))
   153              good_clients.append(client)
   154          except CliException:
   155              bad_clients.append(i)
   156  
   157      if not heads:
   158          return {}, bad_clients
   159  
   160      # Convert the block dictionaries to simpler python data structures to
   161      # conserve memory and simplify interactions.
   162      return {
   163          i: map(SimpleBlock.from_block_dict, c.list_blocks(limit=limit))
   164          for i, c in enumerate(good_clients)
   165      }, bad_clients
   166  
   167  
   168  def prune_unreporting_peers(graph, unreporting):
   169      for _, _, siblings in graph.walk():
   170          for _, peers in siblings.items():
   171              for bad_peer in unreporting:
   172                  if bad_peer in peers:
   173                      peers.remove(bad_peer)
   174  
   175  
   176  def get_node_id_map(unreporting, total):
   177      node_id_map = {}
   178      offset = 0
   179      for i in range(total):
   180          if i not in unreporting:
   181              node_id_map[i - offset] = i
   182          else:
   183              offset += 1
   184      return node_id_map
   185  
   186  
def print_summary(graph, tails, node_id_map):
    """Print out summary and per-node comparison data.

    Args:
        graph: A fork graph whose root is the common ancestor block.
        tails: List of per-node block lists, oldest block first.
        node_id_map: Maps indexes into `tails` back to original node ids.
    """
    # Get comparison data
    heads = get_heads(tails)
    heights = get_heights(tails)
    max_height = max(heights)
    common_height, block_ids_at_common_height = get_common_height(tails)
    lags = get_lags(heights, max_height)
    common_ancestor = graph.root
    divergences = get_divergences(heights, graph.root)

    # Print summary info
    col_1 = 8
    col_n = 8
    format_str = '{:<' + str(col_1) + '} ' + ('{:<' + str(col_n) + '} ') * 2
    header = format_str.format("COMMON", "HEIGHT", "BLOCKS")
    print(header)
    print("-" * len(header))
    # Block ids are truncated to the column width for readability.
    print(format_str.format(
        "ANCESTOR", common_ancestor.num, common_ancestor.ident[:col_n]))
    print(format_str.format(
        "HEIGHT", common_height, str(block_ids_at_common_height)))
    print()

    # Print per-node data; each column is sized to fit its largest value.
    node_col_width = get_col_width_for_num(len(tails), len("NODE"))
    num_col_width = get_col_width_for_num(max_height, len("HEIGHT"))
    lag_col_width = get_col_width_for_num(max(lags), len("LAG"))
    diverg_col_width = get_col_width_for_num(max(divergences), len("DIVERG"))

    format_str = (
        '{:<' + str(node_col_width) + '} '
        '{:<8} '
        '{:<' + str(num_col_width) + '} '
        '{:<' + str(lag_col_width) + '} '
        '{:<' + str(diverg_col_width) + '}'
    )

    header = format_str.format("NODE", "HEAD", "HEIGHT", "LAG", "DIVERG")
    print(header)
    print('-' * len(header))

    # One row per node, in compacted index order.
    for i, _ in enumerate(tails):
        print(format_str.format(
            node_id_map[i],
            heads[i],
            heights[i],
            lags[i],
            divergences[i],
        ))
    print()
   238  
   239  
   240  def get_col_width_for_num(num, min_width):
   241      assert num >= 0
   242      if num == 0:
   243          num = 1
   244      return max(floor(log(num)) + 1, min_width)
   245  
   246  
def print_table(graph, tails, node_id_map):
    """Print out a table of nodes and the blocks they have at each block height
    starting with the common ancestor.

    Args:
        graph: A fork graph to walk, starting at the common ancestor.
        tails: List of per-node block lists (used for node count / heights).
        node_id_map: Maps indexes into `tails` back to original node ids.
    """
    node_count = len(tails)

    # Get the width of the table columns
    num_col_width = max(
        floor(log(max(get_heights(tails)), 10)) + 1,
        len("NUM"))
    node_col_width = max(
        floor(log(node_count, 10)) + 1,
        8)

    # Construct the output format string
    format_str = ''
    format_str += '{:<' + str(num_col_width) + '} '
    for _ in range(node_count):
        format_str += '{:<' + str(node_col_width) + '} '

    nodes_header = ["NODE " + str(node_id_map[i]) for i in range(node_count)]
    header = format_str.format("NUM", *nodes_header)
    print(header)
    print('-' * len(header))

    # Accumulate one row of truncated block ids per block height; a row is
    # flushed when the walk moves on to a new height.
    prev_block_num = -1
    node_list = [''] * node_count
    for block_num, _, siblings in graph.walk():
        if block_num != prev_block_num:
            # Need to skip the first one
            if prev_block_num != -1:
                print(format_str.format(prev_block_num, *node_list))

            node_list.clear()
            node_list.extend([''] * node_count)
            prev_block_num = block_num

        # Record which (truncated) block each node has at this height.
        for block_id, node_ids in siblings.items():
            for node_id in node_ids:
                node_list[node_id] = block_id[:8]

    # Print the last one
    print(format_str.format(prev_block_num, *node_list))
   289  
   290  
def print_tree(graph, tails, node_id_map):
    """Print out a tree of blocks starting from the common ancestor.

    Args:
        graph: A fork graph to walk, starting at the common ancestor.
        tails: Unused here; kept for signature parity with the other
            print functions.
        node_id_map: Maps compacted node indexes back to original node ids.
    """
    # Example:
    # |
    # | 5
    # *  a {0, 1, 2, 3, 4}
    # |
    # | 6
    # |\
    # * |  b {0, 1, 2, 3}
    # | *  n {4}
    # | |
    # | | 7
    # * |  c {0, 1, 2, 3}
    # | *  o {4}
    # | |
    # | | 8
    # |\ \
    # * | |  i {2, 3}
    # | * |  d {0, 1}
    # | | *  p {4}
    # | | |
    # | | | 9
    # * | |  j {2, 3}
    # | * |  e {0, 1}
    # | | *  q {4}
    # | | |
    # | | | 10
    # * | |  k {2, 3}
    # | * |  f {0, 1}
    # | | *  r {4}
    # | | |
    # | | | 11
    # |\ \ \
    # | | |\ \
    # * | | | |    g {0}
    # | * | | |    h {1}
    # |   * | |    l {2}
    # |   | * |    m {3}
    # |   |   *    s {4}
    # |  /   /
    # | |  /
    # | | | 12
    # * | |   t {0}
    # | * |   u {2}
    # | | *   v {4}
    # | |
    # | | 13
    # * |   w {0}
    # | *   x {2}
    # |
    # | 14
    # *   y {0}
    # | 15
    # *   z {0}

    walker = graph.walk()
    next_block_num, next_parent, next_siblings = next(walker)
    prev_cliques = []

    done = False
    while not done:
        cliques = {}
        block_num = next_block_num

        # Read all the cliques for this block number
        try:
            while block_num == next_block_num:
                cliques[next_parent] = next_siblings
                next_block_num, next_parent, next_siblings = next(walker)
        except StopIteration:
            # Do one last iteration after we've consumed the entire graph
            done = True

        # Each level prints: the previous level's '*' rows, the block-number
        # row, then any split/fold transition rows into the new level.
        print_cliques(prev_cliques, cliques, node_id_map)

        print_block_num_row(block_num, prev_cliques, cliques)

        print_splits(prev_cliques, cliques)

        print_folds(prev_cliques, cliques)

        prev_cliques = build_ordered_cliques(prev_cliques, cliques)

    # Flush the final level's '*' rows.
    print_cliques(prev_cliques, [], node_id_map)
   376  
   377  
   378  def build_ordered_cliques(cliques, next_cliques):
   379      """Order the new cliques based on the order of their ancestors in the
   380      previous iteration."""
   381      def sort_key(clique):
   382          return -len(clique[1])
   383  
   384      if not cliques:
   385          return list(sorted(
   386              list(next_cliques.values())[0].items(),
   387              key=sort_key))
   388  
   389      ordered_cliques = []
   390      for _, clique in enumerate(cliques):
   391          parent, _ = clique
   392  
   393          # If this fork continues
   394          if parent in next_cliques:
   395              # Sort the cliques in descending order of the size of the
   396              # clique, so that the main chain tends to the left
   397              ordered_cliques.extend(
   398                  sorted(next_cliques[parent].items(), key=sort_key))
   399  
   400          # Else drop it
   401  
   402      return ordered_cliques
   403  
   404  
   405  def print_folds(cliques, next_cliques):
   406      # Need to keep track of which columns each branch is in as we fold
   407      folds = []
   408      for i, clique in enumerate(cliques):
   409          block_id, _ = clique
   410          if block_id not in next_cliques:
   411              folds.append(i)
   412  
   413      n_cliques = len(cliques)
   414      for i, fold in enumerate(folds):
   415          print_fold(fold, n_cliques - i, folds)
   416          folds[i] = None
   417          for j, _ in enumerate(folds):
   418              if folds[j] is not None:
   419                  folds[j] -= 1
   420  
   421  
   422  def print_fold(column_to_fold, total_columns, skips):
   423      """Print a row that removes the given column and shifts all the following
   424      columns."""
   425      format_str = '{:<2}' * (total_columns - 1)
   426      cols = []
   427      for i in range(column_to_fold):
   428          # print(i)
   429          if i in skips:
   430              cols.append("  ")
   431          else:
   432              cols.append("| ")
   433      for i in range(column_to_fold + 1, total_columns):
   434          # print(i)
   435          if i in skips:
   436              cols.append("  ")
   437          else:
   438              cols.append(" /")
   439      print(format_str.format(*cols))
   440  
   441  
   442  def print_block_num_row(block_num, cliques, next_cliques):
   443      """Print out a row of padding and a row with the block number. Includes
   444      the branches prior to this block number."""
   445      n_cliques = len(cliques)
   446      if n_cliques == 0:
   447          print('|  {}'.format(block_num))
   448          return
   449  
   450      def mapper(clique):
   451          block_id, _ = clique
   452          if block_id not in next_cliques:
   453              return ' '
   454          return '|'
   455  
   456      format_str = '{:<' + str(n_cliques * 2) + '} {}'
   457      branches = list(map(mapper, cliques))
   458      for end in ('', block_num):
   459          print(format_str.format(' '.join(branches), end))
   460  
   461  
   462  def print_cliques(cliques, next_cliques, node_id_map):
   463      """Print a '*' on each branch with its block id and the ids of the nodes
   464      that have the block."""
   465      n_cliques = len(cliques)
   466      format_str = '{:<' + str(n_cliques * 2) + '}  {} {}'
   467      branches = ['|'] * len(cliques)
   468      for i, clique in enumerate(cliques):
   469          block_id, nodes = clique
   470          print(format_str.format(
   471              ' '.join(branches[:i] + ['*'] + branches[i + 1:]),
   472              block_id[:8], format_siblings(nodes, node_id_map)))
   473          if block_id not in next_cliques:
   474              branches[i] = ' '
   475  
   476  
   477  def print_splits(cliques, next_cliques):
   478      """Print shifts for new forks."""
   479      splits = 0
   480      for i, clique in enumerate(cliques):
   481          parent, _ = clique
   482  
   483          # If this fork continues
   484          if parent in next_cliques:
   485              # If there is a new fork, print a split
   486              if len(next_cliques[parent]) > 1:
   487                  print_split(i + splits, len(cliques) + splits)
   488                  splits += 1
   489  
   490  
   491  def print_split(column_to_split, total_columns):
   492      """Print a row that splits the given column into two columns while
   493      shifting all the following columns."""
   494      out = ""
   495      for _ in range(column_to_split):
   496          out += "| "
   497      out += "|\\"
   498      for _ in range(column_to_split + 1, total_columns):
   499          out += " \\"
   500      print(out)
   501  
   502  
   503  def format_siblings(nodes, node_id_map):
   504      return "{" + ", ".join(str(node_id_map[n]) for n in nodes) + "}"
   505  
   506  
   507  def get_heads(tails):
   508      return [tail[-1].ident[:8] for tail in tails]
   509  
   510  
   511  def get_heights(tails):
   512      return [tail[-1].num for tail in tails]
   513  
   514  
   515  def get_common_height(tails):
   516      block_ids = set(tail[0].ident[:8] for tail in tails)
   517      return tails[0][0].num, block_ids
   518  
   519  
   520  def get_lags(heights, max_height):
   521      return [max_height - height for height in heights]
   522  
   523  
   524  def get_divergences(heights, root):
   525      return [height - root.num for height in heights]
   526  
   527  
   528  def get_tails(chains):
   529      """
   530      Args:
   531          An ordered collection of block generators.
   532  
   533      Returns
   534          A dictionary of lists of blocks for all chains where:
   535              1. The first block in all the lists has the same block number
   536              2. Each list has all blocks from the common block to the current
   537                 block in increasing order
   538              3. The dictionary key is the index of the chain in `chains` that
   539                 the list was generated from
   540          A list of indexes of the chains that had communication problems.
   541      """
   542  
   543      def get_num_of_oldest(blocks):
   544          return blocks[0].num
   545  
   546      # Get the first block from every chain
   547      tails = {}
   548      bad_chains = []
   549      for i, chain in chains.items():
   550          try:
   551              tails[i] = [next(chain)]
   552          except StopIteration:
   553              bad_chains.append(i)
   554  
   555      # Find the minimum block number between all chains
   556      min_block_num = min(map(get_num_of_oldest, tails.values()))
   557  
   558      # Walk all chains back to the minimum block number, adding blocks to the
   559      # chain lists as we go
   560      for i, chain in chains.items():
   561          if i not in bad_chains:
   562              tail = tails[i]
   563              while get_num_of_oldest(tail) > min_block_num:
   564                  try:
   565                      block = next(chain)
   566                  except StopIteration:
   567                      bad_chains.append(i)
   568                      break
   569                  tail.insert(0, block)
   570  
   571      return tails, bad_chains
   572  
   573  
   574  def _compare_across(collections, key):
   575      """Return whether all the collections return equal values when called with
   576      `key`."""
   577      if len(collections) < 2:
   578          return True
   579      c0 = key(collections[0])
   580      return all(c0 == key(c) for c in collections[1:])
   581  
   582  
   583  def build_fork_graph(chains, tails):
   584      """
   585      Args:
   586          An ordered collection of block generators which have been consumed to
   587          the point where they are all at the same block height and the tails of
   588          the chains from that block height (in the same order).
   589  
   590      Returns:
   591          A ForkGraph
   592          A list of indexes of the chains that had communication problems.
   593      """
   594      graph = ForkGraph()
   595      bad_chains = []
   596  
   597      # Add tails to the graph first
   598      for i, tail in tails.items():
   599          for block in reversed(tail):
   600              graph.add_block(i, block)
   601  
   602      # If we are already at the common ancestor, stop
   603      if _compare_across(
   604          [tail[0] for tail in tails.values()], key=lambda block: block.ident
   605      ):
   606          return graph, bad_chains
   607  
   608      # Chains should now all be at the same height, so we can walk back
   609      # to common ancestor
   610      while True:
   611          heads = []
   612          for i, chain in chains.items():
   613              if i not in bad_chains:
   614                  try:
   615                      head = next(chain)
   616                  except StopIteration:
   617                      bad_chains.append(i)
   618                  heads.append((i, head))
   619  
   620          for i, block in heads:
   621              graph.add_block(i, block)
   622          if _compare_across(heads, key=lambda head: head[1].ident):
   623              break
   624  
   625      prune_unreporting_peers(graph, bad_chains)
   626  
   627      return graph, bad_chains