github.com/chnsz/golangsdk@v0.0.0-20240506093406-85a3fbfa605b/openstack/dli/v1/sqljob/requests.go (about)

     1  package sqljob
     2  
     3  import (
     4  	"github.com/chnsz/golangsdk"
     5  	"github.com/chnsz/golangsdk/openstack/common/tags"
     6  )
     7  
// Configuration keys accepted in the Conf field of SqlJobOpts / ImportDataOpts.
// Each key maps to a Spark SQL or DLI runtime setting applied to the job.
const (

	// Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit.
	ConfigSparkSqlFilesMaxRecordsPerFile = "spark.sql.files.maxRecordsPerFile"
	// Maximum size of the table that displays all working nodes when a connection is executed.
	// You can set this parameter to -1 to disable the display.
	// NOTE:
	// only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics noscan command and
	// the file-based data source table that directly calculates statistics based on data files are supported.
	ConfigSparkSqlAutoBroadcastJoinThreshold = "spark.sql.autoBroadcastJoinThreshold"
	// Default number of partitions used to filter data for join or aggregation.
	ConfigSparkSqlShufflePartitions = "spark.sql.shuffle.partitions"
	// In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without
	// data during execution.
	ConfigSparkSqlDynamicPartitionOverwriteEnabled = "spark.sql.dynamicPartitionOverwrite.enabled"
	// Maximum number of bytes to be packed into a single partition when a file is read.
	ConfigSparkSqlMaxPartitionBytes = "spark.sql.files.maxPartitionBytes"
	// Path of bad records.
	ConfigSparkSqlBadRecordsPath = "spark.sql.badRecordsPath"
	// Indicates whether DDL and DCL statements are executed asynchronously. The value true indicates that
	// asynchronous execution is enabled.
	ConfigDliSqlasyncEnabled = "dli.sql.sqlasync.enabled"
	// Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms.
	ConfigDliSqljobTimeout = "dli.sql.job.timeout"
)
    33  
// SqlJobOpts is the request body used by Submit to run a SQL statement as a DLI job.
type SqlJobOpts struct {
	// SQL statement that you want to execute.
	Sql string `json:"sql" required:"true"`
	// Database where the SQL is executed. This parameter does not need to be configured during database creation.
	Currentdb string `json:"currentdb,omitempty"`
	// Name of the queue on which the job runs; the default queue is used when omitted.
	QueueName string `json:"queue_name,omitempty"`
	// You can set the configuration parameters for the SQL job in the form of Key/Value
	// (see the Config* constants in this package for recognized keys).
	Conf []string           `json:"conf,omitempty"`
	// Tags attached to the job resource.
	Tags []tags.ResourceTag `json:"tags,omitempty"`
}
    44  
// ListJobsOpts holds the query-string filters accepted by List.
type ListJobsOpts struct {
	// Maximum number of jobs displayed on each page. The value range is as follows: [1, 100]. The default value is 50.
	PageSize *int `q:"page-size"`
	// Current page number. The default value is 1.
	CurrentPage *int `q:"current-page"`
	// Queries the jobs executed later than the time. The time is a UNIX timestamp in milliseconds.
	Start *int `q:"start"`
	// Queries the jobs executed earlier than the time. The time is a UNIX timestamp in milliseconds.
	End *int `q:"end"`
	// Type of a job to be queried. Job types include:DDL、DCL、IMPORT、EXPORT、QUERY、INSERT、DATA_MIGRATION、UPDATE、
	// DELETE、RESTART_QUEUE、SCALE_QUEUE, To query all types of jobs, enter ALL
	JobType   string `q:"job-type"`
	// Status of the jobs to be queried.
	JobStatus string `q:"job-status"`
	// ID of a specific job to be queried.
	JobId     string `q:"job-id"`
	// Filters jobs by the database they operate on.
	DbName    string `q:"db_name"`
	// Filters jobs by the table they operate on.
	TableName string `q:"table_name"`
	// Specifies queue_name as the filter to query jobs running on the specified queue.
	QueueName string `q:"queue_name"`
	// Specifies the SQL segment as the filter. It is case insensitive.
	SqlPattern string `q:"sql_pattern"`
	// Specifies the job sorting mode. The default value is start_time_desc (job submission time in descending order).
	// Four sorting modes are supported: duration_desc (job running duration in descending order),
	// duration_asc (job running duration in ascending order),
	// start_time_desc (job submission time in descending order),
	// and start_time_asc (job submission time in ascending order).
	Order      string `q:"order"`
	// Filters jobs by the execution engine type.
	EngineType string `q:"engine-type"`
}
    74  
// CheckSQLGramarOpts is the request body used by CheckSQLGramar to verify the
// syntax and semantics of a SQL statement without executing it.
// NOTE(review): the exported name misspells "Grammar"; it is kept for
// backward compatibility with existing callers.
type CheckSQLGramarOpts struct {
	// SQL statement that you want to execute.
	Sql string `json:"sql" required:"true"`
	// Database where the SQL statement is executed.
	// NOTE:
	// If the SQL statement contains db_name, for example, select * from db1.t1, you do not need to set this parameter.
	// If the SQL statement does not contain db_name, the semantics check will fail when you do not set this parameter
	// or set this parameter to an incorrect value.
	Currentdb string `json:"currentdb"`
}
    85  
// ExportQueryResultOpts is the request body used by ExportQueryResult to
// export the result set of a finished query job to OBS.
type ExportQueryResultOpts struct {
	// Path for storing the exported data. Currently, data can be stored only on OBS.
	// The OBS path cannot contain folders, for example, the path folder in the sample request.
	DataPath string `json:"data_path" required:"true"`
	// Compression format of exported data. Currently, gzip, bzip2, and deflate are supported.
	// The default value is none, indicating that data is not compressed.
	Compress string `json:"compress,omitempty"`
	// Storage format of exported data. Currently, only CSV and JSON are supported.
	DataType string `json:"data_type" required:"true"`
	// Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used.
	QueueName string `json:"queue_name,omitempty"`
	// Export mode. The parameter value can be ErrorIfExists or Overwrite.
	// If export_mode is not specified, this parameter is set to ErrorIfExists by default.
	// ErrorIfExists: Ensure that the specified export directory does not exist.
	// If the specified export directory exists, an error is reported and the export operation cannot be performed.
	// Overwrite: If you add new files to a specific directory, existing files will be deleted.
	ExportMode string `json:"export_mode,omitempty"`
	// Whether to export column names when exporting CSV and JSON data.
	// If this parameter is set to true, the column names are exported.
	// If this parameter is set to false, the column names are not exported.
	// If this parameter is left blank, the default value false is used.
	WithColumnHeader *bool `json:"with_column_header,omitempty"`
	// Number of data records to be exported. The default value is 0, indicating that all data records are exported.
	LimitNum *int `json:"limit_num,omitempty"`
}
   111  
// ImportDataOpts is the request body used by ImportData to load data from OBS
// into an existing DLI table.
type ImportDataOpts struct {
	// Path to the data to be imported. Currently, only OBS data can be imported.
	DataPath string `json:"data_path" required:"true"`
	// Type of the data to be imported. Currently, data types of CSV, Parquet, ORC, JSON, and Avro are supported.
	// NOTE:
	// Data in Avro format generated by Hive tables cannot be imported.
	DataType string `json:"data_type" required:"true"`
	// Name of the database where the table to which data is imported resides.
	DatabaseName string `json:"database_name" required:"true"`
	// Name of the table to which data is imported.
	TableName string `json:"table_name" required:"true"`
	// Whether the first line of the imported data contains column names, that is, headers. The default value is false, indicating that column names are not contained. This parameter can be specified when CSV data is imported.
	WithColumnHeader *bool `json:"with_column_header,omitempty"`
	// User-defined data delimiter. The default value is a comma (,). This parameter can be specified when CSV data is imported.
	Delimiter string `json:"delimiter,omitempty"`
	// User-defined quotation character. The default value is double quotation marks ("). This parameter can be specified when CSV data is imported.
	QuoteChar string `json:"quote_char,omitempty"`
	// User-defined escape character. The default value is a backslash (\). This parameter can be specified when CSV data is imported.
	EscapeChar string `json:"escape_char,omitempty"`
	// Specified date format. The default value is yyyy-MM-dd. For details about the characters involved in the date format, see Table 3. This parameter can be specified when data in the CSV or JSON format is imported.
	DateFormat string `json:"date_format,omitempty"`
	// Bad records storage directory during job execution. After configuring this item, the bad records is not imported into the target table.
	BadRecordsPath string `json:"bad_records_path,omitempty"`
	// Specified time format. The default value is yyyy-MM-dd HH:mm:ss. For definitions about characters in the time format, see Table 3. This parameter can be specified when data in the CSV or JSON format is imported.
	TimestampFormat string `json:"timestamp_format,omitempty"`
	// Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used.
	QueueName string `json:"queue_name,omitempty"`
	// Whether to overwrite data. The default value is false, indicating appending write. If the value is true, it indicates overwriting.
	Overwrite *bool `json:"overwrite,omitempty"`
	// Partition to which data is to be imported.
	// If this parameter is not set, the entire table data is dynamically imported. The imported data must contain the data in the partition column.
	// If this parameter is set and all partition information is configured during data import, data is imported to the specified partition. The imported data cannot contain data in the partition column.
	// If not all partition information is configured during data import, the imported data must contain all non-specified partition data. Otherwise, abnormal values such as null exist in the partition field column of non-specified data after data import.
	PartitionSpec map[string]string `json:"partition_spec,omitempty"`
	// User-defined parameter that applies to the job. Currently, dli.sql.dynamicPartitionOverwrite.enabled can be set to false by default. If it is set to true, data in a specified partition is overwritten. If it is set to false, data in the entire DataSource table is dynamically overwritten.
	// NOTE:
	// For dynamic overwrite of Hive partition tables, only the involved partition data can be overwritten. The entire table data cannot be overwritten.
	Conf []string `json:"conf,omitempty"`
}
   151  
// ExportDataOpts is the request body used by ExportData to export the content
// of a DLI table to OBS.
type ExportDataOpts struct {
	// Path for storing the exported data. Currently, data can be stored only on OBS.
	// If export_mode is set to errorifexists, the OBS path cannot contain the specified folder,
	// for example, the test folder in the example request.
	DataPath string `json:"data_path" required:"true"`
	// Type of data to be exported. Currently, only CSV and JSON are supported.
	DataType string `json:"data_type" required:"true"`
	// Name of the database where the table from which data is exported resides.
	DatabaseName string `json:"database_name" required:"true"`
	// Name of the table from which data is exported.
	TableName string `json:"table_name" required:"true"`
	// Compression mode for exported data. Currently, the compression modes gzip, bzip2, and deflate are supported. If you do not want to compress data, enter none.
	Compress string `json:"compress" required:"true"`
	// Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used.
	QueueName string `json:"queue_name,omitempty"`
	// Export mode. The parameter value can be ErrorIfExists or Overwrite. If export_mode is not specified, this parameter is set to ErrorIfExists by default.
	// ErrorIfExists: Ensure that the specified export directory does not exist. If the specified export directory exists, an error is reported and the export operation cannot be performed.
	// Overwrite: If you add new files to a specific directory, existing files will be deleted.
	ExportMode string `json:"export_mode,omitempty"`
	// Whether to export column names when exporting CSV and JSON data.
	// If this parameter is set to true, the column names are exported.
	// If this parameter is set to false, the column names are not exported.
	// If this parameter is left blank, the default value false is used.
	WithColumnHeader *bool `json:"with_column_header,omitempty"`
}
   177  
// RequestOpts holds the common HTTP headers (JSON content type, English
// responses) sent with every request issued by this package.
var RequestOpts = golangsdk.RequestOpts{
	MoreHeaders: map[string]string{"Content-Type": "application/json", "X-Language": "en-us"},
}
   181  
   182  // Submit Job
   183  func Submit(c *golangsdk.ServiceClient, opts SqlJobOpts) (*SubmitJobResult, error) {
   184  	b, err := golangsdk.BuildRequestBody(opts, "")
   185  	if err != nil {
   186  		return nil, err
   187  	}
   188  
   189  	var rst SubmitJobResult
   190  	_, err = c.Post(submitURL(c), b, &rst, &golangsdk.RequestOpts{
   191  		MoreHeaders: RequestOpts.MoreHeaders,
   192  	})
   193  	return &rst, err
   194  }
   195  
   196  func Cancel(c *golangsdk.ServiceClient, jobId string) (*CommonResp, error) {
   197  	var rst CommonResp
   198  	_, err := c.DeleteWithResponse(resourceURL(c, jobId), &rst, &golangsdk.RequestOpts{
   199  		MoreHeaders: RequestOpts.MoreHeaders,
   200  	})
   201  	return &rst, err
   202  }
   203  
   204  func List(c *golangsdk.ServiceClient, opts ListJobsOpts) (*ListJobsResp, error) {
   205  	url := listURL(c)
   206  	query, err := golangsdk.BuildQueryString(opts)
   207  	if err != nil {
   208  		return nil, err
   209  	}
   210  	url += query.String()
   211  
   212  	var rst ListJobsResp
   213  	_, err = c.Get(url, &rst, &golangsdk.RequestOpts{
   214  		MoreHeaders: RequestOpts.MoreHeaders,
   215  	})
   216  
   217  	return &rst, err
   218  }
   219  
   220  func Status(c *golangsdk.ServiceClient, jobId string) (*JobStatus, error) {
   221  	var rst JobStatus
   222  	_, err := c.Get(queryStatusURL(c, jobId), &rst, &golangsdk.RequestOpts{
   223  		MoreHeaders: RequestOpts.MoreHeaders,
   224  	})
   225  	return &rst, err
   226  }
   227  
   228  func Get(c *golangsdk.ServiceClient, jobId string) (*JobDetail, error) {
   229  	var rst JobDetail
   230  	_, err := c.Get(detailURL(c, jobId), &rst, &golangsdk.RequestOpts{
   231  		MoreHeaders: RequestOpts.MoreHeaders,
   232  	})
   233  	return &rst, err
   234  }
   235  
   236  func CheckSQLGramar(c *golangsdk.ServiceClient, opts CheckSQLGramarOpts) (*CheckSqlResult, error) {
   237  	b, err := golangsdk.BuildRequestBody(opts, "")
   238  	if err != nil {
   239  		return nil, err
   240  	}
   241  
   242  	var rst CheckSqlResult
   243  	_, err = c.Post(checkSqlURL(c), b, &rst, &golangsdk.RequestOpts{
   244  		MoreHeaders: RequestOpts.MoreHeaders,
   245  	})
   246  	return &rst, err
   247  }
   248  
   249  func ExportQueryResult(c *golangsdk.ServiceClient, jobId string, opts ExportQueryResultOpts) (*JobResp, error) {
   250  	b, err := golangsdk.BuildRequestBody(opts, "")
   251  	if err != nil {
   252  		return nil, err
   253  	}
   254  
   255  	var rst JobResp
   256  	_, err = c.Post(exportResultURL(c, jobId), b, &rst, &golangsdk.RequestOpts{
   257  		MoreHeaders: RequestOpts.MoreHeaders,
   258  	})
   259  
   260  	return &rst, err
   261  }
   262  
   263  func Progress(c *golangsdk.ServiceClient, jobId string) (*JobProgress, error) {
   264  	var rst JobProgress
   265  	_, err := c.Get(progressURL(c, jobId), &rst, &golangsdk.RequestOpts{
   266  		MoreHeaders: RequestOpts.MoreHeaders,
   267  	})
   268  	return &rst, err
   269  }
   270  
   271  func ImportData(c *golangsdk.ServiceClient, opts ImportDataOpts) (*JobResp, error) {
   272  	b, err := golangsdk.BuildRequestBody(opts, "")
   273  	if err != nil {
   274  		return nil, err
   275  	}
   276  
   277  	var rst JobResp
   278  	_, err = c.Post(importTableURL(c), b, &rst, &golangsdk.RequestOpts{
   279  		MoreHeaders: RequestOpts.MoreHeaders,
   280  	})
   281  	return &rst, err
   282  }
   283  
   284  func ExportData(c *golangsdk.ServiceClient, opts ExportDataOpts) (*JobResp, error) {
   285  	b, err := golangsdk.BuildRequestBody(opts, "")
   286  	if err != nil {
   287  		return nil, err
   288  	}
   289  
   290  	var rst JobResp
   291  	_, err = c.Post(exportTableURL(c), b, &rst, &golangsdk.RequestOpts{
   292  		MoreHeaders: RequestOpts.MoreHeaders,
   293  	})
   294  	return &rst, err
   295  }