github.com/filecoin-project/bacalhau@v0.3.23-0.20230228154132-45c989550ace/clients/python/bacalhau_apiclient/models/job_sharding_config.py

# coding: utf-8

"""
    Bacalhau API

    This page is the reference of the Bacalhau REST API. Project docs are available at https://docs.bacalhau.org/. Find more information about Bacalhau at https://github.com/filecoin-project/bacalhau.  # noqa: E501

    OpenAPI spec version: 0.3.22.post4
    Contact: team@bacalhau.org
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


import pprint
import re  # noqa: F401

import six

from bacalhau_apiclient.configuration import Configuration


class JobShardingConfig(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'batch_size': 'int',
        'glob_pattern': 'str',
        'glob_pattern_base_path': 'str'
    }

    attribute_map = {
        'batch_size': 'BatchSize',
        'glob_pattern': 'GlobPattern',
        'glob_pattern_base_path': 'GlobPatternBasePath'
    }

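    # Example of the mapping above: the Python attribute `batch_size`
    # corresponds to the JSON key `BatchSize` in API payloads.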
    def __init__(self, batch_size=None, glob_pattern=None, glob_pattern_base_path=None, _configuration=None):  # noqa: E501
        """JobShardingConfig - a model defined in Swagger"""  # noqa: E501
        if _configuration is None:
            _configuration = Configuration()
        self._configuration = _configuration

        self._batch_size = None
        self._glob_pattern = None
        self._glob_pattern_base_path = None
        self.discriminator = None

        if batch_size is not None:
            self.batch_size = batch_size
        if glob_pattern is not None:
            self.glob_pattern = glob_pattern
        if glob_pattern_base_path is not None:
            self.glob_pattern_base_path = glob_pattern_base_path

    @property
    def batch_size(self):
        """Gets the batch_size of this JobShardingConfig.  # noqa: E501

        How many \"items\" are to be processed in each shard. We first apply the glob pattern, which results in a flat list of items; this number decides how to group that flat list into the actual shards run by compute nodes.  # noqa: E501

        :return: The batch_size of this JobShardingConfig.  # noqa: E501
        :rtype: int
        """
        return self._batch_size

    @batch_size.setter
    def batch_size(self, batch_size):
        """Sets the batch_size of this JobShardingConfig.

        How many \"items\" are to be processed in each shard. We first apply the glob pattern, which results in a flat list of items; this number decides how to group that flat list into the actual shards run by compute nodes.  # noqa: E501

        :param batch_size: The batch_size of this JobShardingConfig.  # noqa: E501
        :type: int
        """

        self._batch_size = batch_size

    @property
    def glob_pattern(self):
        """Gets the glob_pattern of this JobShardingConfig.  # noqa: E501

        Divide the inputs up into the smallest possible unit. For example, /* would mean \"all top level files or folders\"; an empty string means \"no sharding\".  # noqa: E501

        :return: The glob_pattern of this JobShardingConfig.  # noqa: E501
        :rtype: str
        """
        return self._glob_pattern

    @glob_pattern.setter
    def glob_pattern(self, glob_pattern):
        """Sets the glob_pattern of this JobShardingConfig.

        Divide the inputs up into the smallest possible unit. For example, /* would mean \"all top level files or folders\"; an empty string means \"no sharding\".  # noqa: E501

        :param glob_pattern: The glob_pattern of this JobShardingConfig.  # noqa: E501
        :type: str
        """

        self._glob_pattern = glob_pattern

    @property
    def glob_pattern_base_path(self):
        """Gets the glob_pattern_base_path of this JobShardingConfig.  # noqa: E501

        When using multiple input volumes, the path we treat as the common mount path to apply the glob pattern to.  # noqa: E501

        :return: The glob_pattern_base_path of this JobShardingConfig.  # noqa: E501
        :rtype: str
        """
        return self._glob_pattern_base_path

    @glob_pattern_base_path.setter
    def glob_pattern_base_path(self, glob_pattern_base_path):
        """Sets the glob_pattern_base_path of this JobShardingConfig.

        When using multiple input volumes, the path we treat as the common mount path to apply the glob pattern to.  # noqa: E501

        :param glob_pattern_base_path: The glob_pattern_base_path of this JobShardingConfig.  # noqa: E501
        :type: str
        """

        self._glob_pattern_base_path = glob_pattern_base_path

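    # Note: the generated setters above assign values as-is; this model
    # performs no client-side validation of its fields.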
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(JobShardingConfig, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, JobShardingConfig):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, JobShardingConfig):
            return True

        return self.to_dict() != other.to_dict()
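

if __name__ == "__main__":
    # Usage sketch (illustrative only; not part of the generated model).
    # The values below are hypothetical examples of a sharding config.
    config = JobShardingConfig(
        batch_size=10,
        glob_pattern="/inputs/*",
        glob_pattern_base_path="/inputs",
    )
    # `to_dict()` keys use the Python attribute names; the JSON keys from
    # `attribute_map` (e.g. `BatchSize`) are applied by the API client's
    # serializer, not here.
    print(config.to_dict())
    # -> {'batch_size': 10, 'glob_pattern': '/inputs/*',
    #     'glob_pattern_base_path': '/inputs'}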