github.com/chnsz/golangsdk@v0.0.0-20240506093406-85a3fbfa605b/openstack/dli/v2/batches/requests.go

package batches

import "github.com/chnsz/golangsdk"

// CreateOpts is a struct used to submit a Spark batch job.
type CreateOpts struct {
	// Name of the package that is of the JAR or pyFile type and has been uploaded to the DLI resource management
	// system. You can also specify an OBS path, for example, obs://Bucket name/Package name.
	File string `json:"file" required:"true"`
	// Queue name. Set this parameter to the name of the created DLI queue.
	// NOTE: This parameter is compatible with the legacy cluster_name parameter; a queue specified through
	//       cluster_name remains valid.
	// You are advised to use the queue parameter. The queue and cluster_name parameters cannot coexist.
	Queue string `json:"queue" required:"true"`
	// Java/Spark main class of the batch processing job.
	ClassName *string `json:"class_name,omitempty"`
	// Queue name. Set this parameter to the created DLI queue name.
	// NOTE: You are advised to use the queue parameter. The queue and cluster_name parameters cannot coexist.
	ClusterName string `json:"cluster_name,omitempty"`
	// Input parameters of the main class, that is, application parameters.
	Arguments []string `json:"args,omitempty"`
	// Compute resource type. Currently, resource types A, B, and C are available.
	// If this parameter is not specified, the minimum configuration (type A) is used.
	Specification string `json:"sc_type,omitempty"`
	// Name of the package that is of the JAR type and has been uploaded to the DLI resource management system.
	// You can also specify an OBS path, for example, obs://Bucket name/Package name.
	Jars []string `json:"jars,omitempty"`
	// Name of the package that is of the PyFile type and has been uploaded to the DLI resource management system.
	// You can also specify an OBS path, for example, obs://Bucket name/Package name.
	PythonFiles []string `json:"python_files,omitempty"`
	// Name of the package that is of the file type and has been uploaded to the DLI resource management system.
	// You can also specify an OBS path, for example, obs://Bucket name/Package name.
	Files []string `json:"files,omitempty"`
	// Name of the dependent system resource module. You can view the module name using the API related to Querying
	// Resource Packages in a Group. DLI provides dependencies for executing datasource jobs.
	// The following table lists the dependency modules corresponding to different services.
	//   CloudTable/MRS HBase: sys.datasource.hbase
	//   CloudTable/MRS OpenTSDB: sys.datasource.opentsdb
	//   RDS MySQL: sys.datasource.rds
	//   RDS PostgreSQL: preset
	//   DWS: preset
	//   CSS: sys.datasource.css
	Modules []string `json:"modules,omitempty"`
	// List of JSON objects, each specifying the name and type of a resource package that has been uploaded to
	// the queue.
	Resources []Resource `json:"resources,omitempty"`
	// List of JSON objects describing package groups. For details about the format, see the request example.
	// If the type of a name in resources is not specified, the package with that name must already exist in
	// the group.
	Groups []Group `json:"groups,omitempty"`
	// Batch configuration item. For details, see Spark Configuration.
	Configurations map[string]interface{} `json:"conf,omitempty"`
	// Batch processing task name. The value contains a maximum of 128 characters.
	Name string `json:"name,omitempty"`
	// Driver memory of the Spark application, for example, 2 GB or 2048 MB. This configuration item replaces the
	// default parameter in sc_type. The unit must be provided. Otherwise, the startup fails.
	DriverMemory string `json:"driver_memory,omitempty"`
	// Number of CPU cores of the Spark application driver.
	// This configuration item replaces the default parameter in sc_type.
	DriverCores int `json:"driver_cores,omitempty"`
	// Executor memory of the Spark application, for example, 2 GB or 2048 MB. This configuration item replaces the
	// default parameter in sc_type. The unit must be provided. Otherwise, the startup fails.
	ExecutorMemory string `json:"executor_memory,omitempty"`
	// Number of CPU cores of each Executor in the Spark application.
	// This configuration item replaces the default parameter in sc_type.
	ExecutorCores int `json:"executor_cores,omitempty"`
	// Number of Executors in a Spark application. This configuration item replaces the default parameter in sc_type.
	NumExecutors int `json:"num_executors,omitempty"`
	// OBS bucket for storing the Spark jobs. Set this parameter when you need to save jobs.
	ObsBucket string `json:"obs_bucket,omitempty"`
	// Whether to enable the retry function.
	// If enabled, Spark jobs will be automatically retried after an exception occurs. The default value is false.
	AutoRecovery bool `json:"auto_recovery,omitempty"`
	// Maximum number of retries. The maximum value is 100, and the default value is 20.
	MaxRetryTimes int `json:"max_retry_times,omitempty"`
	// Job feature. Type of the Spark image used by a job.
	// basic: indicates that the basic Spark image provided by DLI is used.
	// custom: indicates that the user-defined Spark image is used.
	// ai: indicates that the AI image provided by DLI is used.
	Feature string `json:"feature,omitempty"`
	// Version of the Spark component used by a job. Set this parameter when feature is set to basic or ai.
	// If this parameter is not set, the default Spark component version 2.3.2 is used.
	SparkVersion string `json:"spark_version,omitempty"`
	// Custom image. The format is Organization name/Image name:Image version.
	// This parameter is valid only when feature is set to custom.
	// You can use this parameter with the feature parameter to specify a user-defined Spark image for job running.
	Image string `json:"image,omitempty"`
	// To access metadata, set this parameter to DLI.
	CatalogName string `json:"catalog_name,omitempty"`
}
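
// NOTE: exampleOpts below is a hedged usage sketch added for illustration; it is
// not part of the original file. It shows a minimal CreateOpts for a JAR job plus
// the sizing fields that override the sc_type defaults. All queue, bucket, and
// class names are hypothetical placeholders.
var exampleMainClass = "com.example.WordCount" // hypothetical main class

var exampleOpts = CreateOpts{
	File:      "obs://dli-demo/wordcount.jar", // hypothetical OBS package path
	Queue:     "demo_queue",                   // hypothetical DLI queue name
	ClassName: &exampleMainClass,
	Arguments: []string{"obs://dli-demo/input/"},
	// Explicit sizing replaces the sc_type defaults; memory values must carry a unit.
	DriverMemory:   "2GB",
	ExecutorMemory: "2GB",
	ExecutorCores:  2,
	NumExecutors:   4,
}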

// Group is an object that describes a package group.
type Group struct {
	// User group name.
	Name string `json:"name,omitempty"`
	// User group resources.
	Resources []Resource `json:"resources,omitempty"`
}

// Resource is an object that specifies a resource in a package group.
type Resource struct {
	// Resource name. You can also specify an OBS path, for example, obs://Bucket name/Package name.
	Name string `json:"name,omitempty"`
	// Resource type.
	Type string `json:"type,omitempty"`
}
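
// The composite literal below is a hedged sketch added for illustration; it is not
// part of the original file. It shows how Group and Resource nest: each Group names
// a package group on the queue, and its Resources list the packages inside it. The
// group and package names, and the "jar"/"pyFile" type strings, are assumptions
// drawn from the field documentation above.
var exampleGroups = []Group{
	{
		Name: "demo-group", // hypothetical package group name
		Resources: []Resource{
			{Name: "spark-app.jar", Type: "jar"},   // a JAR package in the group
			{Name: "wordcount.py", Type: "pyFile"}, // a PyFile package in the group
		},
	},
}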

// Create is a method to submit a Spark job with the given parameters.
func Create(c *golangsdk.ServiceClient, opts CreateOpts) (*CreateResp, error) {
	b, err := golangsdk.BuildRequestBody(opts, "")
	if err != nil {
		return nil, err
	}

	var rst golangsdk.Result
	_, err = c.Post(rootURL(c), b, &rst.Body, nil)
	if err != nil {
		return nil, err
	}

	var r CreateResp
	// Propagate any unmarshalling error instead of silently discarding it.
	err = rst.ExtractInto(&r)
	return &r, err
}
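
// submitAndFetch is a hedged usage sketch added for illustration; it is not part of
// the original file. It assumes a *golangsdk.ServiceClient that is already
// authenticated against the DLI v2 endpoint, submits exampleOpts (defined above)
// via Create, and reads the job back with Get. It also assumes CreateResp exposes
// the server-assigned job ID as an Id field, per this package's results.go.
func submitAndFetch(client *golangsdk.ServiceClient) (*CreateResp, error) {
	created, err := Create(client, exampleOpts)
	if err != nil {
		return nil, err
	}
	// Read the job back by its server-assigned ID.
	return Get(client, created.Id)
}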

// Get is a method to obtain the specified Spark job by job ID.
func Get(c *golangsdk.ServiceClient, jobId string) (*CreateResp, error) {
	var rst golangsdk.Result
	_, err := c.Get(resourceURL(c, jobId), &rst.Body, nil)
	if err != nil {
		return nil, err
	}

	var r CreateResp
	err = rst.ExtractInto(&r)
	return &r, err
}

// GetState is a method to obtain the state of the specified Spark job by job ID.
func GetState(c *golangsdk.ServiceClient, jobId string) (*StateResp, error) {
	var rst golangsdk.Result
	_, err := c.Get(stateURL(c, jobId), &rst.Body, nil)
	if err != nil {
		return nil, err
	}

	var r StateResp
	err = rst.ExtractInto(&r)
	return &r, err
}
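
// waitForBatchDone is a hedged polling sketch added for illustration; it is not part
// of the original file. It relies on golangsdk.WaitFor, assumed available from the
// SDK root package (as in its gophercloud lineage), which re-evaluates the predicate
// once per second until it returns true or the timeout (in seconds) elapses. The
// terminal state names ("success", "dead") and the State field on StateResp are
// assumptions drawn from the DLI batch job documentation and this package's
// results.go.
func waitForBatchDone(client *golangsdk.ServiceClient, jobId string, timeoutSeconds int) error {
	return golangsdk.WaitFor(timeoutSeconds, func() (bool, error) {
		resp, err := GetState(client, jobId)
		if err != nil {
			return false, err
		}
		// Stop polling once the job reaches a terminal state.
		return resp.State == "success" || resp.State == "dead", nil
	})
}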

// Delete is a method to cancel an unfinished Spark job.
func Delete(c *golangsdk.ServiceClient, jobId string) *golangsdk.ErrResult {
	var r golangsdk.ErrResult
	_, r.Err = c.Delete(resourceURL(c, jobId), nil)
	return &r
}
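
// cancelExampleBatch is a hedged usage sketch added for illustration; it is not part
// of the original file. Delete returns a *golangsdk.ErrResult, whose ExtractErr
// method surfaces any request error, the usual result pattern in this SDK.
func cancelExampleBatch(client *golangsdk.ServiceClient, jobId string) error {
	return Delete(client, jobId).ExtractErr()
}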