github.com/apache/beam/sdks/v2@v2.48.2/python/apache_beam/examples/cookbook/combiners_test.py

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Simple tests to showcase combiners.

The tests are meant to be "copy/paste" code snippets for the topic they address
(combiners in this case). Most examples use neither sources nor sinks.
The input data is generated simply with a Create transform and the output is
checked directly on the last PCollection produced.
"""

# pytype: skip-file

# beam-playground:
#   name: CombinersTest
#   description: Unit-test to showcase combiners.
#   multifile: false
#   context_line: 47
#   categories:
#     - Combiners
#   complexity: MEDIUM
#   tags:
#     - combine
#     - test

import logging
import unittest

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to


class CombinersTest(unittest.TestCase):
  """Tests showcasing Dataflow combiners."""

  SAMPLE_DATA = [('a', 1), ('b', 10), ('a', 2), ('a', 3), ('b', 20), ('c', 100)]

  def test_combine_per_key_with_callable(self):
    """CombinePerKey using a standard callable reducing iterables.

    A common case for Dataflow combiners is to sum (or max or min) over the
    values of each key. Such standard functions can be used directly as
    combiner functions. In fact, any function "reducing" an iterable to a
    single value can be used.
    """
    with TestPipeline() as p:
      result = (
          p
          | beam.Create(CombinersTest.SAMPLE_DATA)
          | beam.CombinePerKey(sum))

      assert_that(result, equal_to([('a', 6), ('b', 30), ('c', 100)]))

  def test_combine_per_key_with_custom_callable(self):
    """CombinePerKey using a custom function reducing iterables."""
    def multiply(values):
      result = 1
      for v in values:
        result *= v
      return result

    with TestPipeline() as p:
      result = (
          p
          | beam.Create(CombinersTest.SAMPLE_DATA)
          | beam.CombinePerKey(multiply))

      assert_that(result, equal_to([('a', 6), ('b', 200), ('c', 100)]))


if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
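

# ---------------------------------------------------------------------------
# Additional illustrative sketch, not part of the original test suite above:
# the same SAMPLE_DATA can also be aggregated with one of Beam's built-in
# composite combiners instead of a plain callable. This helper assumes
# beam.combiners.Count.PerKey() is available (as in current Beam Python SDK
# releases); the helper name and the expected output are assumptions made for
# illustration only. It is appended after the __main__ guard purely as a
# copy/paste reference and is never invoked by unittest.main().
def _count_per_key_sketch():
  with TestPipeline() as p:
    result = (
        p
        | beam.Create(CombinersTest.SAMPLE_DATA)
        | beam.combiners.Count.PerKey())

    # 'a' occurs three times, 'b' twice and 'c' once in SAMPLE_DATA.
    assert_that(result, equal_to([('a', 3), ('b', 2), ('c', 1)]))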