github.com/apache/beam/sdks/v2@v2.48.2/python/apache_beam/runners/dataflow/internal/names.py

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Various names for properties, transforms, etc."""

# All constants are for internal use only; no backwards-compatibility
# guarantees.

# pytype: skip-file

# Referenced by the Dataflow legacy worker.
from apache_beam.runners.internal.names import PICKLED_MAIN_SESSION_FILE  # pylint: disable=unused-import

# String constants related to the custom sources framework.
SOURCE_FORMAT = 'custom_source'
SOURCE_TYPE = 'CustomSourcesType'
SERIALIZED_SOURCE_KEY = 'serialized_source'
# In a released SDK, container tags are selected based on the SDK version.
# Unreleased versions use container versions based on the values of
# BEAM_CONTAINER_VERSION and BEAM_FNAPI_CONTAINER_VERSION (see below).

# Update this version whenever a change requires updating the legacy
# Dataflow worker execution environment.
BEAM_CONTAINER_VERSION = 'beam-master-20230412'
# Update this version whenever a change requires updating the SDK harness
# container or the SDK harness launcher.
BEAM_FNAPI_CONTAINER_VERSION = 'beam-master-20230422'

DATAFLOW_CONTAINER_IMAGE_REPOSITORY = 'gcr.io/cloud-dataflow/v1beta3'
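
# A full worker image name joins the repository above with a language-specific
# image name and one of the version tags. A minimal sketch, assuming a
# hypothetical image name (the real one is computed elsewhere in the runner):
#   'gcr.io/cloud-dataflow/v1beta3/python38-fnapi:beam-master-20230422'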


class TransformNames(object):
  """For internal use only; no backwards-compatibility guarantees.

  Transform strings as they are expected in the CloudWorkflow protos.
  """
  COLLECTION_TO_SINGLETON = 'CollectionToSingleton'
  COMBINE = 'CombineValues'
  CREATE_PCOLLECTION = 'CreateCollection'
  DO = 'ParallelDo'
  FLATTEN = 'Flatten'
  GROUP = 'GroupByKey'
  READ = 'ParallelRead'
  WRITE = 'ParallelWrite'
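
# A minimal sketch of how these names surface in the workflow description
# (illustrative assumption, not part of this module): each step the runner
# emits carries one of the TransformNames values as its 'kind', e.g.
#
#   step = {
#       'kind': TransformNames.DO,  # 'ParallelDo'
#       'properties': {...},  # keyed by the PropertyNames constants below
#   }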


class PropertyNames(object):
  """For internal use only; no backwards-compatibility guarantees.

  Property strings as they are expected in the CloudWorkflow protos.
  """
  # If uses_keyed_state, whether the state can be sharded.
  ALLOWS_SHARDABLE_STATE = 'allows_shardable_state'
  BIGQUERY_CREATE_DISPOSITION = 'create_disposition'
  BIGQUERY_DATASET = 'dataset'
  BIGQUERY_EXPORT_FORMAT = 'bigquery_export_format'
  BIGQUERY_FLATTEN_RESULTS = 'bigquery_flatten_results'
  BIGQUERY_KMS_KEY = 'bigquery_kms_key'
  BIGQUERY_PROJECT = 'project'
  BIGQUERY_QUERY = 'bigquery_query'
  BIGQUERY_SCHEMA = 'schema'
  BIGQUERY_TABLE = 'table'
  BIGQUERY_USE_LEGACY_SQL = 'bigquery_use_legacy_sql'
  BIGQUERY_WRITE_DISPOSITION = 'write_disposition'
  DISPLAY_DATA = 'display_data'
  ELEMENT = 'element'
  ELEMENTS = 'elements'
  ENCODING = 'encoding'
  FILE_PATTERN = 'filepattern'
  FILE_NAME_PREFIX = 'filename_prefix'
  FILE_NAME_SUFFIX = 'filename_suffix'
  FORMAT = 'format'
  INPUTS = 'inputs'
  IMPULSE_ELEMENT = 'impulse_element'
  NON_PARALLEL_INPUTS = 'non_parallel_inputs'
  NUM_SHARDS = 'num_shards'
  OUT = 'out'
  OUTPUT = 'output'
  OUTPUT_INFO = 'output_info'
  OUTPUT_NAME = 'output_name'
  PARALLEL_INPUT = 'parallel_input'
  PIPELINE_PROTO_TRANSFORM_ID = 'pipeline_proto_transform_id'
  # If the input element is a key/value pair, then the output element(s) all
  # have the same key as the input.
  PRESERVES_KEYS = 'preserves_keys'
  PUBSUB_ID_LABEL = 'pubsub_id_label'
  PUBSUB_SERIALIZED_ATTRIBUTES_FN = 'pubsub_serialized_attributes_fn'
  PUBSUB_SUBSCRIPTION = 'pubsub_subscription'
  PUBSUB_TIMESTAMP_ATTRIBUTE = 'pubsub_timestamp_label'
  PUBSUB_TOPIC = 'pubsub_topic'
  RESOURCE_HINTS = 'resource_hints'
  RESTRICTION_ENCODING = 'restriction_encoding'
  SERIALIZED_FN = 'serialized_fn'
  SHARD_NAME_TEMPLATE = 'shard_template'
  SOURCE_STEP_INPUT = 'custom_source_step_input'
  SERIALIZED_TEST_STREAM = 'serialized_test_stream'
  STEP_NAME = 'step_name'
  USE_INDEXED_FORMAT = 'use_indexed_format'
  USER_FN = 'user_fn'
  USER_NAME = 'user_name'
  USES_KEYED_STATE = 'uses_keyed_state'
  VALIDATE_SINK = 'validate_sink'
  VALIDATE_SOURCE = 'validate_source'
  VALUE = 'value'
  WINDOWING_STRATEGY = 'windowing_strategy'
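
# A minimal usage sketch (illustrative assumption; the values shown are not
# taken from this module): the runner fills a step's properties dict keyed by
# the constants above, e.g.
#
#   properties = {
#       PropertyNames.USER_NAME: 'ReadLines/Read',
#       PropertyNames.FORMAT: SOURCE_FORMAT,  # 'custom_source'
#       PropertyNames.OUTPUT_INFO: [{
#           PropertyNames.OUTPUT_NAME: PropertyNames.OUT,  # 'out'
#           PropertyNames.USER_NAME: 'ReadLines/Read.out',
#           PropertyNames.ENCODING: {...},  # cloud-encoded output coder
#       }],
#   }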