github.com/apache/beam/sdks/v2@v2.48.2/python/apache_beam/runners/dataflow/dataflow_job_service_test.py

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
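
"""Smoke test for the Dataflow portable job service.

Submits a trivial pipeline to an in-process job service in dry-run mode,
so nothing actually executes on the Dataflow service. Requires the GCP
dependencies (e.g. an apache-beam[gcp] install); otherwise the test is
skipped.
"""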

import unittest

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.runners.dataflow import dataflow_job_service
from apache_beam.runners.portability import local_job_service

# Protect against environments where the apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
  from apache_beam.runners.dataflow.internal import apiclient
except ImportError:
  apiclient = None  # type: ignore
# pylint: enable=wrong-import-order, wrong-import-position


@unittest.skipIf(apiclient is None, 'GCP dependencies are not installed')
class DataflowJobServiceTest(unittest.TestCase):
  def test_dry_run(self):
    # Not an integration test that actually runs on Dataflow, but this
    # does exercise (most of) the translation and setup code, as well
    # as the connection to the job service.
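    # Serve jobs in-process, handing each one to the Dataflow-specific
    # DataflowBeamJob; port 0 lets gRPC pick a free ephemeral port, which
    # start_grpc_server returns.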
    job_servicer = local_job_service.LocalJobServicer(
        None, beam_job_type=dataflow_job_service.DataflowBeamJob)
    port = job_servicer.start_grpc_server(0)
    try:
      options = PipelineOptions(
          runner='PortableRunner',
          job_endpoint=f'localhost:{port}',
          project='some_project',
          temp_location='gs://bucket/dir',
          region='us-central1',
          dry_run=True,
      )
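      # Exiting the with block submits the pipeline to the local job
      # endpoint; dry_run should stop it once the Dataflow job has been
      # constructed, so nothing runs on the service.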
      with beam.Pipeline(options=options) as p:
        _ = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * x)
    finally:
      # Always shut down the in-process job service, even on failure.
      job_servicer.stop()


if __name__ == '__main__':
  unittest.main()