github.com/chnsz/golangsdk@v0.0.0-20240506093406-85a3fbfa605b/openstack/dli/v1/sqljob/results.go (about)

     1  package sqljob
     2  
     3  import "github.com/chnsz/golangsdk/openstack/common/tags"
     4  
// Values for the job_type, job_mode and status fields of the SQL-job
// response structs defined in this package.
const (
	// Job type values reported in the job_type field.
	JobTypeDDL           = "DDL"
	JobTypeDCL           = "DCL"
	JobTypeImport        = "IMPORT"
	JobTypeExport        = "EXPORT"
	JobTypeQuery         = "QUERY"
	JobTypeInsert        = "INSERT"
	JobTypeDataMigration = "DATA_MIGRATION"
	JobTypeUpdate        = "UPDATE"
	JobTypeDelete        = "DELETE"
	JobTypeRestartQueue  = "RESTART_QUEUE"
	JobTypeScaleQueue    = "SCALE_QUEUE"

	// Job execution modes reported in the job_mode field.
	JobModeSync  = "synchronous"
	JobModeAsync = "asynchronous"

	// Job status values reported in the status field.
	JobStatusLaunching = "LAUNCHING"
	JobStatusRunning   = "RUNNING"
	JobStatusFinished  = "FINISHED"
	JobStatusFailed    = "FAILED"
	JobStatusCancelled = "CANCELLED"
)
    27  
// SubmitJobResult is the response body returned when a SQL job is submitted.
type SubmitJobResult struct {
	// Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent.
	IsSuccess bool `json:"is_success"`
	// System prompt. If execution succeeds, the parameter setting may be left blank.
	Message string `json:"message"`
	// ID of a job returned after a job is generated and submitted by using SQL statements.
	// The job ID can be used to query the job status and results.
	JobId string `json:"job_id"`
	// Type of a job. Job types include the following:
	// DDL
	// DCL
	// IMPORT
	// EXPORT
	// QUERY
	// INSERT
	JobType string `json:"job_type"`
	// If the statement type is DDL, the column name and type of DDL are displayed.
	Schema []map[string]string `json:"schema"`
	// When the statement type is DDL, results of the DDL are displayed.
	Rows [][]string `json:"rows"`
	// Job execution mode. The options are as follows:
	// async: asynchronous
	// sync: synchronous
	// NOTE(review): the JobMode* constants in this package use the long forms
	// "asynchronous"/"synchronous" — confirm which spelling the service returns.
	JobMode string `json:"job_mode"`
}
    53  
// CommonResp is the generic response body shared by APIs that return only a
// success flag and a message.
type CommonResp struct {
	// Indicates whether the request is successfully executed. Value true indicates success.
	IsSuccess bool `json:"is_success"`
	// System prompt. If execution succeeds, this may be left blank.
	Message string `json:"message"`
}
    58  
// ListJobsResp is the response body of the API that lists SQL jobs.
type ListJobsResp struct {
	// Indicates whether the request is successfully executed. Value true indicates success.
	IsSuccess bool `json:"is_success"`
	// System prompt. If execution succeeds, this may be left blank.
	Message string `json:"message"`
	// Total number of jobs matching the query.
	JobCount int `json:"job_count"`
	// List of jobs in the current result page.
	Jobs []Job `json:"jobs"`
}
    65  
// Job describes a single SQL job entry as returned by the job list API.
type Job struct {
	// Job ID.
	JobId string `json:"job_id"`
	// Type of a job.
	JobType string `json:"job_type"`
	// Queue to which a job is submitted.
	QueueName string `json:"queue_name"`
	// User who submits a job.
	Owner string `json:"owner"`
	// Time when a job is started. The timestamp is expressed in milliseconds.
	StartTime int `json:"start_time"`
	// Job running duration (unit: millisecond).
	Duration int `json:"duration"`
	// Status of a job, including LAUNCHING, RUNNING, FINISHED, FAILED, and CANCELLED.
	Status string `json:"status"`
	// Number of records scanned during the Insert job execution.
	InputRowCount int `json:"input_row_count"`
	// Number of error records scanned during the Insert job execution.
	BadRowCount int `json:"bad_row_count"`
	// Size of scanned files during job execution.
	InputSize int `json:"input_size"`
	// Total number of records returned by the current job or total number of records inserted by the Insert job.
	ResultCount int `json:"result_count"`
	// Name of the database where the target table resides.
	// database_name is valid only for jobs of the Import and Export types.
	DatabaseName string `json:"database_name"`
	// Name of the target table. table_name is valid only for jobs of the Import and Export types.
	TableName string `json:"table_name"`
	// Import jobs, which record whether the imported data contains column names.
	WithColumnHeader bool `json:"with_column_header"`
	// JSON character string of related columns queried by using SQL statements.
	Detail string `json:"detail"`
	// SQL statements of a job.
	Statement string `json:"statement"`
	// Tags attached to the job.
	Tags []tags.ResourceTag `json:"tags"`
}
   102  
// JobStatus is the response body of the API that queries the status of a
// single SQL job.
type JobStatus struct {
	// Whether the request is successfully executed. Value true indicates that the request is successfully executed.
	IsSuccess bool `json:"is_success" required:"true"`
	// System prompt. If execution succeeds, the parameter setting may be left blank.
	Message string `json:"message" required:"true"`
	// Job ID.
	JobId string `json:"job_id" required:"true"`
	// Type of a job, Includes DDL, DCL, IMPORT, EXPORT, QUERY, INSERT, DATA_MIGRATION, UPDATE, DELETE, RESTART_QUEUE and SCALE_QUEUE.
	JobType string `json:"job_type" required:"true"`
	// Job execution mode. The options are as follows:
	// async: asynchronous
	// sync: synchronous
	JobMode string `json:"job_mode" required:"true"`
	// Name of the queue where the job is submitted.
	QueueName string `json:"queue_name" required:"true"`
	// User who submits a job.
	Owner string `json:"owner" required:"true"`
	// Time when a job is started. The timestamp is expressed in milliseconds.
	StartTime int `json:"start_time" required:"true"`
	// Job running duration (unit: millisecond).
	Duration int `json:"duration"`
	// Status of a job, including RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, and CANCELLED.
	Status string `json:"status" required:"true"`
	// Number of records scanned during the Insert job execution.
	InputRowCount int `json:"input_row_count"`
	// Number of error records scanned during the Insert job execution.
	BadRowCount int `json:"bad_row_count"`
	// Size of scanned files during job execution (unit: byte).
	InputSize int `json:"input_size" required:"true"`
	// Total number of records returned by the current job or total number of records inserted by the Insert job.
	ResultCount int `json:"result_count" required:"true"`
	// Name of the database where the target table resides. database_name is valid only for jobs of the IMPORT EXPORT, and QUERY types.
	DatabaseName string `json:"database_name"`
	// Name of the target table. table_name is valid only for jobs of the IMPORT EXPORT, and QUERY types.
	TableName string `json:"table_name"`
	// JSON character string for information about related columns.
	Detail string `json:"detail" required:"true"`
	// SQL statements of a job.
	Statement string `json:"statement" required:"true"`
	// Tags attached to the job.
	Tags []tags.ResourceTag `json:"tags"`
}
   144  
// JobDetail is the response body of the API that queries the details of an
// import/export SQL job (paths, formats and parsing options).
type JobDetail struct {
	// Whether the request is successfully executed. Value true indicates that the request is successfully executed.
	IsSuccess bool `json:"is_success" required:"true"`
	// System prompt. If execution succeeds, the parameter setting may be left blank.
	Message string `json:"message" required:"true"`
	// Job ID.
	JobId string `json:"job_id" required:"true"`
	// User who submits a job.
	Owner string `json:"owner" required:"true"`
	// Time when a job is started. The timestamp is expressed in milliseconds.
	StartTime int `json:"start_time" required:"true"`
	// Duration for executing the job (unit: millisecond).
	Duration int `json:"duration" required:"true"`
	// Specified export mode during data export and query result saving.
	ExportMode string `json:"export_mode"`
	// Path to imported or exported files.
	DataPath string `json:"data_path" required:"true"`
	// Type of data to be imported or exported. Currently, only CSV and JSON are supported.
	DataType string `json:"data_type" required:"true"`
	// Name of the database where the table, where data is imported or exported, resides.
	DatabaseName string `json:"database_name" required:"true"`
	// Name of the table where data is imported or exported.
	TableName string `json:"table_name" required:"true"`
	// Whether the imported data contains the column name during the execution of an import job.
	WithColumnHeader bool `json:"with_column_header"`
	// User-defined data delimiter set when the import job is executed.
	Delimiter string `json:"delimiter"`
	// User-defined quotation character set when the import job is executed.
	QuoteChar string `json:"quote_char"`
	// User-defined escape character set when the import job is executed.
	EscapeChar string `json:"escape_char"`
	// Table date format specified when the import job is executed.
	DateFormat string `json:"date_format"`
	// Table time format specified when the import job is executed.
	TimestampFormat string `json:"timestamp_format"`
	// Compression mode specified when the export job is executed.
	Compress string `json:"compress"`
}
   183  
// CheckSqlResult is the response body of the API that checks the syntax of a
// SQL statement without executing it.
type CheckSqlResult struct {
	// Indicates whether the request is successfully executed. Value true indicates success.
	IsSuccess bool `json:"is_success"`
	// System prompt. If execution succeeds, this may be left blank.
	Message string `json:"message"`
	// Type of a job. Job types include the following: DDL, DCL, IMPORT, EXPORT, QUERY, and INSERT.
	JobType string `json:"job_type"`
}
   190  
// JobResp is a generic job-submission response carrying the new job's ID and
// execution mode.
type JobResp struct {
	// Indicates whether the request is successfully executed. Value true indicates success.
	IsSuccess bool `json:"is_success" required:"true"`
	// System prompt. If execution succeeds, this may be left blank.
	Message string `json:"message" required:"true"`
	// ID of a job returned after a job is generated and submitted by using SQL statements.
	// The job ID can be used to query the job status and results.
	JobId string `json:"job_id"`
	// Job execution mode. The options are as follows:
	// async: asynchronous
	// sync: synchronous
	JobMode string `json:"job_mode"`
}
   202  
// JobProgress is the response body of the API that queries the execution
// progress of a running SQL job and its subjobs.
type JobProgress struct {
	// Indicates whether the request is successfully executed. Value true indicates success.
	IsSuccess bool `json:"is_success"`
	// System prompt. If execution succeeds, this may be left blank.
	Message string `json:"message"`
	// ID of the queried job.
	JobId string `json:"job_id"`
	// Current status of the job.
	Status string `json:"status"`
	// ID of a subjob that is running. If the subjob is not running or it is already finished,
	// the subjob ID may be empty.
	SubJobId int `json:"sub_job_id"`
	// Progress of a running subjob or the entire job. The value can only be a rough estimate of the subjob progress
	// and does not indicate the detailed job progress.

	// If the job is just started or being submitted, the progress is displayed as 0. If the job execution is complete,
	//  the progress is displayed as 1. In this case, progress indicates the running progress of the entire job.
	//  Because no subjob is running, sub_job_id is not displayed.
	// If a subjob is running, the running progress of the subjob is displayed. The calculation method of progress is as
	//  follows: Number of completed tasks of the subjob/Total number of tasks of the subjob. In this case,
	//  progress indicates the running progress of the subjob, and sub_job_id indicates the subjob ID.
	Progress int `json:"progress"`
	// Details about the subjobs of a running job. A job may contain multiple subjobs;
	// see the SubJob type for the per-subjob fields.
	SubJobs []SubJob `json:"sub_jobs"`
}
   224  
// SubJob describes one subjob of a running SQL job. Its fields mirror the
// open-source Spark JobData structure.
type SubJob struct {
	// Subjob ID, corresponding to jobId of the open-source spark JobData.
	Id int `json:"id"`
	// Subjob name, corresponding to the name of the open-source spark JobData.
	Name string `json:"name"`
	// Description of a subjob, corresponding to the description of the open-source spark JobData.
	Description string `json:"description"`
	// Submission time of a subjob, corresponding to the submissionTime of open-source Spark JobData.
	SubmissionTime string `json:"submission_time"`
	// Completion time of a subjob, corresponding to the completionTime of the open-source Spark JobData.
	CompletionTime string `json:"completion_time"`
	// Stage ID of the subjob, corresponding to the stageIds of the open-source spark JobData.
	StageIds []int `json:"stage_ids"`
	// ID of a DLI job, corresponding to the jobGroup of open-source Spark JobData.
	JobGroup string `json:"job_group"`
	// Subjob status, corresponding to the status of open-source spark JobData.
	Status string `json:"status"`
	// Number of subjobs, corresponding to numTasks of the open-source Spark JobData.
	NumTasks int `json:"num_tasks"`
	// Number of running tasks in a subjob, corresponding to numActiveTasks of the open-source Spark JobData.
	NumActiveTasks int `json:"num_active_tasks"`
	// Number of tasks that have been completed in a subjob, corresponding to numCompletedTasks of open-source Spark JobData.
	NumCompletedTasks int `json:"num_completed_tasks"`
	// Number of tasks skipped in a subjob, corresponding to numSkippedTasks of open-source Spark JobData.
	NumSkippedTasks int `json:"num_skipped_tasks"`
	// Number of subtasks that fail to be skipped, corresponding to numFailedTasks of open-source Spark JobData.
	NumFailedTasks int `json:"num_failed_tasks"`
	// Number of tasks killed in the subjob, corresponding to numKilledTasks of the open-source Spark JobData.
	NumKilledTasks int `json:"num_killed_tasks"`
	// Subjob completion index, corresponding to the numCompletedIndices of the open-source Spark JobData.
	NumCompletedIndices int `json:"num_completed_indices"`
	// Number of stages that are running in the subjob, corresponding to numActiveStages of the open-source Spark JobData.
	NumActiveStages int `json:"num_active_stages"`
	// Number of stages that have been completed in the subjob, corresponding to numCompletedStages of the open-source Spark JobData.
	NumCompletedStages int `json:"num_completed_stages"`
	// Number of stages skipped in the subjob, corresponding to numSkippedStages of the open-source Spark JobData.
	NumSkippedStages int `json:"num_skipped_stages"`
	// Number of failed stages in a subjob, corresponding to numFailedStages of the open-source Spark JobData.
	NumFailedStages int `json:"num_failed_stages"`
	// Summary of the killed tasks in the subjob, corresponding to killedTasksSummary of open-source spark JobData.
	KilledTasksSummary map[string]int `json:"killed_tasks_summary"`
}