github.com/apache/beam/sdks/v2@v2.48.2/python/apache_beam/tools/map_fn_microbenchmark.py

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

    17  """A microbenchmark for measuring changes in overhead for critical code paths.
    18  
    19  This runs a sequence of trivial Maps over a variable number of inputs to
    20  estimate the per-element processing time.  It can be useful to run this
    21  benchmark before and after a proposed set of changes.  A typical per-element
    22  cost should be 1-2 microseconds.
    23  
    24  This executes the same codepaths that are run on the Fn API (and Dataflow)
    25  workers, but is generally easier to run (locally) and more stable.  It does
    26  not, on the other hand, excercise any non-trivial amount of IO (e.g. shuffle).
    27  
    28  Run as
    29  
    30     python -m apache_beam.tools.map_fn_microbenchmark
    31  """

# pytype: skip-file

import logging
import time

from scipy import stats

import apache_beam as beam
from apache_beam.tools import utils


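# The regression in run_benchmark below fits the model
#
#   total_time ~= fixed_cost + slope * num_elements,  slope = per_element * num_maps
#
# Worked example (illustrative numbers, not a measurement): with the default
# num_maps=100, a fitted slope of 1e-4 sec/element corresponds to
# 1e-4 / 100 = 1 microsecond per element per map, within the 1-2 microsecond
# range quoted in the module docstring.

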
def run_benchmark(num_maps=100, num_runs=10, num_elements_step=1000):
  # Time a pipeline of num_maps chained FlatMaps over increasing input sizes.
  timings = {}
  for run in range(num_runs):
    num_elements = num_elements_step * run + 1
    start = time.time()
    with beam.Pipeline() as p:
      pc = p | beam.Create(list(range(num_elements)))
      for ix in range(num_maps):
        # Each stage discards its input and emits a single None, so only
        # the per-element dispatch overhead is being measured.
        pc = pc | 'Map%d' % ix >> beam.FlatMap(lambda x: (None, ))
    timings[num_elements] = time.time() - start
    print(
        "%6d element%s %g sec" % (
            num_elements,
            " " if num_elements == 1 else "s",
            timings[num_elements]))

  print()
  # Fit wall time as a linear function of input size: the intercept is the
  # fixed pipeline overhead, and the slope divided by the number of stages
  # is the per-element cost of a single map.
  # pylint: disable=unused-variable
  gradient, intercept, r_value, p_value, std_err = stats.linregress(
      *zip(*timings.items()))
  print("Fixed cost  ", intercept)
  print("Per-element ", gradient / num_maps)
  print("R^2         ", r_value**2)
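

# A minimal sketch, not part of the upstream module: the same harness timed
# with beam.Map (exactly one output per input) instead of beam.FlatMap, e.g.
# to compare the per-element overhead of the two transforms.  The name
# run_map_variant and its default arguments are hypothetical.
def run_map_variant(num_maps=100, num_elements=1000):
  start = time.time()
  with beam.Pipeline() as p:
    pc = p | beam.Create(list(range(num_elements)))
    for ix in range(num_maps):
      # Identity Map: one element in, one element out.
      pc = pc | 'Map%d' % ix >> beam.Map(lambda x: x)
  return time.time() - start

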
if __name__ == '__main__':
  logging.basicConfig()
  utils.check_compiled('apache_beam.runners.common')
  run_benchmark()
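
# For a quicker smoke run, run_benchmark can also be called directly with
# smaller arguments (the values here are illustrative), e.g.
#
#   run_benchmark(num_maps=10, num_runs=5, num_elements_step=100)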