github.com/apache/beam/sdks/v2@v2.48.2/python/apache_beam/runners/dataflow/template_runner_test.py

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Unit tests for templated pipelines."""

# pytype: skip-file

import json
import tempfile
import unittest

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.pipeline import Pipeline
from apache_beam.runners.dataflow.dataflow_runner import DataflowRunner

# Protect against environments where the apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
  from apache_beam.runners.dataflow.internal import apiclient
except ImportError:
  apiclient = None  # type: ignore
# pylint: enable=wrong-import-order, wrong-import-position


@unittest.skipIf(apiclient is None, 'GCP dependencies are not installed')
class TemplatingDataflowRunnerTest(unittest.TestCase):
  """Tests for creating Dataflow job templates via --template_location."""
  def test_full_completion(self):
    # Create a dummy file and close it immediately.  Windows does not allow
    # a NamedTemporaryFile to be reopened elsewhere while it is still open,
    # so we keep only its name and let the runner write to that path.
    dummy_file = tempfile.NamedTemporaryFile(delete=False)
    dummy_file_name = dummy_file.name
    dummy_file.close()

    dummy_dir = tempfile.mkdtemp()

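    # All paths below are local: the same dummy file stands in for the SDK
    # tarball (--sdk_location) and receives the template output
    # (--template_location), and dummy_dir serves as the staging location.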
    remote_runner = DataflowRunner()
    with Pipeline(remote_runner,
                  options=PipelineOptions(['--dataflow_endpoint=ignored',
                                           '--sdk_location=' + dummy_file_name,
                                           '--job_name=test-job',
                                           '--project=test-project',
                                           '--staging_location=' + dummy_dir,
                                           '--temp_location=/dev/null',
                                           '--template_location=' +
                                           dummy_file_name,
                                           '--no_auth'])) as pipeline:

      pipeline | beam.Create([1, 2, 3]) | beam.Map(lambda x: x)  # pylint: disable=expression-not-assigned

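    # Leaving the `with Pipeline(...)` block runs the pipeline; because
    # --template_location is set, "running" saves the job as a JSON template
    # at dummy_file_name, which we can now inspect.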
    with open(dummy_file_name) as template_file:
      saved_job_dict = json.load(template_file)
      self.assertEqual(
          saved_job_dict['environment']['sdkPipelineOptions']['options']
          ['project'],
          'test-project')
      self.assertEqual(
          saved_job_dict['environment']['sdkPipelineOptions']['options']
          ['job_name'],
          'test-job')


if __name__ == '__main__':
  unittest.main()