Documentation ¶
Index ¶
- Constants
- type CancelSparkSessionBatchSQLRequest
- type CancelSparkSessionBatchSQLRequestParams
- type CancelSparkSessionBatchSQLResponse
- type CancelSparkSessionBatchSQLResponseParams
- type CancelTaskRequest
- type CancelTaskRequestParams
- type CancelTaskResponse
- type CancelTaskResponseParams
- type Client
- func (c *Client) CancelSparkSessionBatchSQL(request *CancelSparkSessionBatchSQLRequest) (response *CancelSparkSessionBatchSQLResponse, err error)
- func (c *Client) CancelSparkSessionBatchSQLWithContext(ctx context.Context, request *CancelSparkSessionBatchSQLRequest) (response *CancelSparkSessionBatchSQLResponse, err error)
- func (c *Client) CancelTask(request *CancelTaskRequest) (response *CancelTaskResponse, err error)
- func (c *Client) CancelTaskWithContext(ctx context.Context, request *CancelTaskRequest) (response *CancelTaskResponse, err error)
- func (c *Client) CreateDataEngine(request *CreateDataEngineRequest) (response *CreateDataEngineResponse, err error)
- func (c *Client) CreateDataEngineWithContext(ctx context.Context, request *CreateDataEngineRequest) (response *CreateDataEngineResponse, err error)
- func (c *Client) CreateInternalTable(request *CreateInternalTableRequest) (response *CreateInternalTableResponse, err error)
- func (c *Client) CreateInternalTableWithContext(ctx context.Context, request *CreateInternalTableRequest) (response *CreateInternalTableResponse, err error)
- func (c *Client) CreateResultDownload(request *CreateResultDownloadRequest) (response *CreateResultDownloadResponse, err error)
- func (c *Client) CreateResultDownloadWithContext(ctx context.Context, request *CreateResultDownloadRequest) (response *CreateResultDownloadResponse, err error)
- func (c *Client) CreateSparkApp(request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
- func (c *Client) CreateSparkAppTask(request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
- func (c *Client) CreateSparkAppTaskWithContext(ctx context.Context, request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
- func (c *Client) CreateSparkAppWithContext(ctx context.Context, request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
- func (c *Client) CreateSparkSessionBatchSQL(request *CreateSparkSessionBatchSQLRequest) (response *CreateSparkSessionBatchSQLResponse, err error)
- func (c *Client) CreateSparkSessionBatchSQLWithContext(ctx context.Context, request *CreateSparkSessionBatchSQLRequest) (response *CreateSparkSessionBatchSQLResponse, err error)
- func (c *Client) CreateTask(request *CreateTaskRequest) (response *CreateTaskResponse, err error)
- func (c *Client) CreateTaskWithContext(ctx context.Context, request *CreateTaskRequest) (response *CreateTaskResponse, err error)
- func (c *Client) CreateTasks(request *CreateTasksRequest) (response *CreateTasksResponse, err error)
- func (c *Client) CreateTasksWithContext(ctx context.Context, request *CreateTasksRequest) (response *CreateTasksResponse, err error)
- func (c *Client) DeleteSparkApp(request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
- func (c *Client) DeleteSparkAppWithContext(ctx context.Context, request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
- func (c *Client) DescribeEngineUsageInfo(request *DescribeEngineUsageInfoRequest) (response *DescribeEngineUsageInfoResponse, err error)
- func (c *Client) DescribeEngineUsageInfoWithContext(ctx context.Context, request *DescribeEngineUsageInfoRequest) (response *DescribeEngineUsageInfoResponse, err error)
- func (c *Client) DescribeForbiddenTablePro(request *DescribeForbiddenTableProRequest) (response *DescribeForbiddenTableProResponse, err error)
- func (c *Client) DescribeForbiddenTableProWithContext(ctx context.Context, request *DescribeForbiddenTableProRequest) (response *DescribeForbiddenTableProResponse, err error)
- func (c *Client) DescribeLakeFsDirSummary(request *DescribeLakeFsDirSummaryRequest) (response *DescribeLakeFsDirSummaryResponse, err error)
- func (c *Client) DescribeLakeFsDirSummaryWithContext(ctx context.Context, request *DescribeLakeFsDirSummaryRequest) (response *DescribeLakeFsDirSummaryResponse, err error)
- func (c *Client) DescribeLakeFsInfo(request *DescribeLakeFsInfoRequest) (response *DescribeLakeFsInfoResponse, err error)
- func (c *Client) DescribeLakeFsInfoWithContext(ctx context.Context, request *DescribeLakeFsInfoRequest) (response *DescribeLakeFsInfoResponse, err error)
- func (c *Client) DescribeResultDownload(request *DescribeResultDownloadRequest) (response *DescribeResultDownloadResponse, err error)
- func (c *Client) DescribeResultDownloadWithContext(ctx context.Context, request *DescribeResultDownloadRequest) (response *DescribeResultDownloadResponse, err error)
- func (c *Client) DescribeSparkAppJob(request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
- func (c *Client) DescribeSparkAppJobWithContext(ctx context.Context, request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
- func (c *Client) DescribeSparkAppJobs(request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
- func (c *Client) DescribeSparkAppJobsWithContext(ctx context.Context, request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
- func (c *Client) DescribeSparkAppTasks(request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
- func (c *Client) DescribeSparkAppTasksWithContext(ctx context.Context, request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
- func (c *Client) DescribeSparkSessionBatchSqlLog(request *DescribeSparkSessionBatchSqlLogRequest) (response *DescribeSparkSessionBatchSqlLogResponse, err error)
- func (c *Client) DescribeSparkSessionBatchSqlLogWithContext(ctx context.Context, request *DescribeSparkSessionBatchSqlLogRequest) (response *DescribeSparkSessionBatchSqlLogResponse, err error)
- func (c *Client) DescribeTaskResult(request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
- func (c *Client) DescribeTaskResultWithContext(ctx context.Context, request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
- func (c *Client) DescribeTasks(request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
- func (c *Client) DescribeTasksWithContext(ctx context.Context, request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
- func (c *Client) DescribeUserRoles(request *DescribeUserRolesRequest) (response *DescribeUserRolesResponse, err error)
- func (c *Client) DescribeUserRolesWithContext(ctx context.Context, request *DescribeUserRolesRequest) (response *DescribeUserRolesResponse, err error)
- func (c *Client) GenerateCreateMangedTableSql(request *GenerateCreateMangedTableSqlRequest) (response *GenerateCreateMangedTableSqlResponse, err error)
- func (c *Client) GenerateCreateMangedTableSqlWithContext(ctx context.Context, request *GenerateCreateMangedTableSqlRequest) (response *GenerateCreateMangedTableSqlResponse, err error)
- func (c *Client) ModifyGovernEventRule(request *ModifyGovernEventRuleRequest) (response *ModifyGovernEventRuleResponse, err error)
- func (c *Client) ModifyGovernEventRuleWithContext(ctx context.Context, request *ModifyGovernEventRuleRequest) (response *ModifyGovernEventRuleResponse, err error)
- func (c *Client) ModifySparkApp(request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
- func (c *Client) ModifySparkAppBatch(request *ModifySparkAppBatchRequest) (response *ModifySparkAppBatchResponse, err error)
- func (c *Client) ModifySparkAppBatchWithContext(ctx context.Context, request *ModifySparkAppBatchRequest) (response *ModifySparkAppBatchResponse, err error)
- func (c *Client) ModifySparkAppWithContext(ctx context.Context, request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
- func (c *Client) SuspendResumeDataEngine(request *SuspendResumeDataEngineRequest) (response *SuspendResumeDataEngineResponse, err error)
- func (c *Client) SuspendResumeDataEngineWithContext(ctx context.Context, request *SuspendResumeDataEngineRequest) (response *SuspendResumeDataEngineResponse, err error)
- func (c *Client) SwitchDataEngine(request *SwitchDataEngineRequest) (response *SwitchDataEngineResponse, err error)
- func (c *Client) SwitchDataEngineWithContext(ctx context.Context, request *SwitchDataEngineRequest) (response *SwitchDataEngineResponse, err error)
- func (c *Client) UpdateRowFilter(request *UpdateRowFilterRequest) (response *UpdateRowFilterResponse, err error)
- func (c *Client) UpdateRowFilterWithContext(ctx context.Context, request *UpdateRowFilterRequest) (response *UpdateRowFilterResponse, err error)
- type Column
- type CommonMetrics
- type CosPermission
- type CreateDataEngineRequest
- type CreateDataEngineRequestParams
- type CreateDataEngineResponse
- type CreateDataEngineResponseParams
- type CreateInternalTableRequest
- type CreateInternalTableRequestParams
- type CreateInternalTableResponse
- type CreateInternalTableResponseParams
- type CreateResultDownloadRequest
- type CreateResultDownloadRequestParams
- type CreateResultDownloadResponse
- type CreateResultDownloadResponseParams
- type CreateSparkAppRequest
- type CreateSparkAppRequestParams
- type CreateSparkAppResponse
- type CreateSparkAppResponseParams
- type CreateSparkAppTaskRequest
- type CreateSparkAppTaskRequestParams
- type CreateSparkAppTaskResponse
- type CreateSparkAppTaskResponseParams
- type CreateSparkSessionBatchSQLRequest
- type CreateSparkSessionBatchSQLRequestParams
- type CreateSparkSessionBatchSQLResponse
- type CreateSparkSessionBatchSQLResponseParams
- type CreateTaskRequest
- type CreateTaskRequestParams
- type CreateTaskResponse
- type CreateTaskResponseParams
- type CreateTasksRequest
- type CreateTasksRequestParams
- type CreateTasksResponse
- type CreateTasksResponseParams
- type CrontabResumeSuspendStrategy
- type DataEngineConfigPair
- type DataGovernPolicy
- type DeleteSparkAppRequest
- type DeleteSparkAppRequestParams
- type DeleteSparkAppResponse
- type DeleteSparkAppResponseParams
- type DescribeEngineUsageInfoRequest
- type DescribeEngineUsageInfoRequestParams
- type DescribeEngineUsageInfoResponse
- type DescribeEngineUsageInfoResponseParams
- type DescribeForbiddenTableProRequest
- type DescribeForbiddenTableProRequestParams
- type DescribeForbiddenTableProResponse
- type DescribeForbiddenTableProResponseParams
- type DescribeLakeFsDirSummaryRequest
- type DescribeLakeFsDirSummaryRequestParams
- type DescribeLakeFsDirSummaryResponse
- type DescribeLakeFsDirSummaryResponseParams
- type DescribeLakeFsInfoRequest
- type DescribeLakeFsInfoRequestParams
- type DescribeLakeFsInfoResponse
- type DescribeLakeFsInfoResponseParams
- type DescribeResultDownloadRequest
- type DescribeResultDownloadRequestParams
- type DescribeResultDownloadResponse
- type DescribeResultDownloadResponseParams
- type DescribeSparkAppJobRequest
- type DescribeSparkAppJobRequestParams
- type DescribeSparkAppJobResponse
- type DescribeSparkAppJobResponseParams
- type DescribeSparkAppJobsRequest
- type DescribeSparkAppJobsRequestParams
- type DescribeSparkAppJobsResponse
- type DescribeSparkAppJobsResponseParams
- type DescribeSparkAppTasksRequest
- type DescribeSparkAppTasksRequestParams
- type DescribeSparkAppTasksResponse
- type DescribeSparkAppTasksResponseParams
- type DescribeSparkSessionBatchSqlLogRequest
- type DescribeSparkSessionBatchSqlLogRequestParams
- type DescribeSparkSessionBatchSqlLogResponse
- type DescribeSparkSessionBatchSqlLogResponseParams
- type DescribeTaskResultRequest
- type DescribeTaskResultRequestParams
- type DescribeTaskResultResponse
- type DescribeTaskResultResponseParams
- type DescribeTasksRequest
- type DescribeTasksRequestParams
- type DescribeTasksResponse
- type DescribeTasksResponseParams
- type DescribeUserRolesRequest
- type DescribeUserRolesRequestParams
- type DescribeUserRolesResponse
- type DescribeUserRolesResponseParams
- type Execution
- type Filter
- type GenerateCreateMangedTableSqlRequest
- type GenerateCreateMangedTableSqlRequestParams
- type GenerateCreateMangedTableSqlResponse
- type GenerateCreateMangedTableSqlResponseParams
- type KVPair
- type ModifyGovernEventRuleRequest
- type ModifyGovernEventRuleRequestParams
- type ModifyGovernEventRuleResponse
- type ModifyGovernEventRuleResponseParams
- type ModifySparkAppBatchRequest
- type ModifySparkAppBatchRequestParams
- type ModifySparkAppBatchResponse
- type ModifySparkAppBatchResponseParams
- type ModifySparkAppRequest
- type ModifySparkAppRequestParams
- type ModifySparkAppResponse
- type ModifySparkAppResponseParams
- type Policy
- type PrestoMonitorMetrics
- type Property
- type SQLTask
- type SessionResourceTemplate
- type SparkJobInfo
- type SparkMonitorMetrics
- type SparkSessionBatchLog
- type SparkSessionBatchLogOperate
- type StreamingStatistics
- type SuspendResumeDataEngineRequest
- type SuspendResumeDataEngineRequestParams
- type SuspendResumeDataEngineResponse
- type SuspendResumeDataEngineResponseParams
- type SwitchDataEngineRequest
- type SwitchDataEngineRequestParams
- type SwitchDataEngineResponse
- type SwitchDataEngineResponseParams
- type TColumn
- type TPartition
- type TableBaseInfo
- type TagInfo
- type Task
- type TaskResponseInfo
- type TaskResultInfo
- type TasksInfo
- type TasksOverview
- type UpdateRowFilterRequest
- type UpdateRowFilterRequestParams
- type UpdateRowFilterResponse
- type UpdateRowFilterResponseParams
- type UserRole
Constants ¶
const ( // CAM signature/authentication error. AUTHFAILURE = "AuthFailure" // The operation failed. FAILEDOPERATION = "FailedOperation" // Order status exception. FAILEDOPERATION_ABNORMALORDERSTATUS = "FailedOperation.AbnormalOrderStatus" // Another data source is being created. FAILEDOPERATION_ANOTHERCREATEPROCESSRUNNING = "FailedOperation.AnotherCreateProcessRunning" // Another operation is in progress. Please try again later. FAILEDOPERATION_ANOTHERPROCESSRUNNING = "FailedOperation.AnotherProcessRunning" // Another request is being processed. Try again later. FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" // Insufficient account balance. FAILEDOPERATION_BALANCENOTENOUGH = "FailedOperation.BalanceNotEnough" // Billing system exception. FAILEDOPERATION_BILLINGSYSTEMERROR = "FailedOperation.BillingSystemError" // The number of tags set reached the limit. FAILEDOPERATION_BINDTOOMANYTAGS = "FailedOperation.BindTooManyTags" // Failed to create the engine. FAILEDOPERATION_CREATEDATAENGINEFAILED = "FailedOperation.CreateDataEngineFailed" // Failed to deliver the goods. FAILEDOPERATION_DELIVERGOODSFAILED = "FailedOperation.DeliverGoodsFailed" // Duplicate tag keys. FAILEDOPERATION_DUPLICATETAGKEY = "FailedOperation.DuplicateTagKey" // Deduction failed. FAILEDOPERATION_FEEDEDUCTIONFAILED = "FailedOperation.FeeDeductionFailed" // Failed to get product information. FAILEDOPERATION_GETPRODUCTINFORMATIONFAILED = "FailedOperation.GetProductInformationFailed" // The HTTP client request failed. FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" // Invalid resources. FAILEDOPERATION_ILLEGALRESOURCE = "FailedOperation.IllegalResource" // Invalid characters in the tag key. FAILEDOPERATION_ILLEGALTAGKEY = "FailedOperation.IllegalTagKey" // Invalid characters in the tag value. FAILEDOPERATION_ILLEGALTAGVALUE = "FailedOperation.IllegalTagValue" // Price query failed. 
FAILEDOPERATION_INQUIREPRICEFAILED = "FailedOperation.InquirePriceFailed" // Failed to scale up/down the instance. FAILEDOPERATION_MODIFYINSTANCEFAILED = "FailedOperation.ModifyInstanceFailed" // No permission. FAILEDOPERATION_NOPERMISSION = "FailedOperation.NoPermission" // Unverified account. FAILEDOPERATION_NOREALNAMEAUTHENTICATION = "FailedOperation.NoRealNameAuthentication" // Reached the allowed limit of engines. FAILEDOPERATION_NUMBEREXCEEDLIMIT = "FailedOperation.NumberExceedLimit" // Failed to verify parameters. FAILEDOPERATION_PARAMETERVALIDATIONFAILED = "FailedOperation.ParameterValidationFailed" // Refunding failed. FAILEDOPERATION_REFUNDDEPOSITFAILED = "FailedOperation.RefundDepositFailed" // A tag key of the same name has been set for the resource. FAILEDOPERATION_TAGALREADYATTACHED = "FailedOperation.TagAlreadyAttached" // The tag key exceeded the length limit. FAILEDOPERATION_TAGKEYTOOLONG = "FailedOperation.TagKeyTooLong" // The tag does not exist. FAILEDOPERATION_TAGNOTEXIST = "FailedOperation.TagNotExist" // The tag value exceeded the length limit. FAILEDOPERATION_TAGVALUETOOLONG = "FailedOperation.TagValueTooLong" // The number of resources reached the limit. FAILEDOPERATION_TOOMANYRESOURCES = "FailedOperation.TooManyResources" // The number of tags reached the limit. FAILEDOPERATION_TOOMANYTAGS = "FailedOperation.TooManyTags" // An internal error occurred. INTERNALERROR = "InternalError" // A database error occurred. INTERNALERROR_DBERROR = "InternalError.DBError" // The parameter is incorrect. INVALIDPARAMETER = "InvalidParameter" // Duplicate engine name. INVALIDPARAMETER_DUPLICATEDATAENGINENAME = "InvalidParameter.DuplicateDataEngineName" // Invalid data engine mode. INVALIDPARAMETER_INVALIDDATAENGINEMODE = "InvalidParameter.InvalidDataEngineMode" // The data engine name is invalid. INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" // Invalid data engine spec. 
INVALIDPARAMETER_INVALIDDATAENGINESPECS = "InvalidParameter.InvalidDataEngineSpecs" // Invalid engine type. INVALIDPARAMETER_INVALIDENGINETYPE = "InvalidParameter.InvalidEngineType" // The fault tolerance policy is invalid. INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" // Invalid maximum number of results. INVALIDPARAMETER_INVALIDMAXRESULTS = "InvalidParameter.InvalidMaxResults" // Invalid billing mode. INVALIDPARAMETER_INVALIDPAYMODE = "InvalidParameter.InvalidPayMode" // The CAM role arn is invalid. INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn" // SQL parsing failed. INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" // The number of SQL statements does not meet the specification. INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" // The `SparkAppParam` is invalid. INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam" // The storage location is incorrect. INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" // The `taskid` is invalid. INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId" // The task type is invalid. INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" // Invalid billing period. INVALIDPARAMETER_INVALIDTIMESPAN = "InvalidParameter.InvalidTimeSpan" // Invalid unit of billing period. INVALIDPARAMETER_INVALIDTIMEUNIT = "InvalidParameter.InvalidTimeUnit" // The task has ended and cannot be canceled. INVALIDPARAMETER_TASKALREADYFINISHED = "InvalidParameter.TaskAlreadyFinished" // Invalid VPC CIDR format. INVALIDPARAMETER_VPCCIDRFORMATERROR = "InvalidParameter.VpcCidrFormatError" // The parameter value is incorrect. INVALIDPARAMETERVALUE = "InvalidParameterValue" // The quota limit is reached. LIMITEXCEEDED = "LimitExceeded" // Missing parameters. MISSINGPARAMETER = "MissingParameter" // Operation denied. OPERATIONDENIED = "OperationDenied" // The resource does not exist. 
RESOURCENOTFOUND = "ResourceNotFound" // No resources are available to create a session currently. Please try again later or use a monthly subscription cluster. RESOURCENOTFOUND_RESOURCENOTFOUNDCODE_SESSIONINSUFFICIENTRESOURCES = "ResourceNotFound.ResourceNotFoundCode_SessionInsufficientResources" // The result path was not found. RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" // The session does not exist. RESOURCENOTFOUND_SESSIONNOTFOUND = "ResourceNotFound.SessionNotFound" // The session has expired. RESOURCENOTFOUND_SESSIONSTATEDEAD = "ResourceNotFound.SessionStateDead" // The resource is unavailable. RESOURCEUNAVAILABLE = "ResourceUnavailable" // The account balance is insufficient to run the SQL task. RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" // Unauthorized operation. UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // No permission to pay. UNAUTHORIZEDOPERATION_NOPAYMENTAUTHORITY = "UnauthorizedOperation.NoPaymentAuthority" // Unauthorized engine operation by a sub-user. UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine" // The sub-user does not have permission to use the compute engine. UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine" // Unknown parameter error. UNKNOWNPARAMETER = "UnknownParameter" // Unsupported operation. UNSUPPORTEDOPERATION = "UnsupportedOperation" )
const APIVersion = "2021-01-25"
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type CancelSparkSessionBatchSQLRequest ¶
type CancelSparkSessionBatchSQLRequest struct { *tchttp.BaseRequest // The unique identifier of a batch task. BatchId *string `json:"BatchId,omitnil" name:"BatchId"` }
func NewCancelSparkSessionBatchSQLRequest ¶
func NewCancelSparkSessionBatchSQLRequest() (request *CancelSparkSessionBatchSQLRequest)
func (*CancelSparkSessionBatchSQLRequest) FromJsonString ¶
func (r *CancelSparkSessionBatchSQLRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter check or strict type check
func (*CancelSparkSessionBatchSQLRequest) ToJsonString ¶
func (r *CancelSparkSessionBatchSQLRequest) ToJsonString() string
type CancelSparkSessionBatchSQLRequestParams ¶
type CancelSparkSessionBatchSQLRequestParams struct { // The unique identifier of a batch task. BatchId *string `json:"BatchId,omitnil" name:"BatchId"` }
Predefined struct for user
type CancelSparkSessionBatchSQLResponse ¶
type CancelSparkSessionBatchSQLResponse struct { *tchttp.BaseResponse Response *CancelSparkSessionBatchSQLResponseParams `json:"Response"` }
func NewCancelSparkSessionBatchSQLResponse ¶
func NewCancelSparkSessionBatchSQLResponse() (response *CancelSparkSessionBatchSQLResponse)
func (*CancelSparkSessionBatchSQLResponse) FromJsonString ¶
func (r *CancelSparkSessionBatchSQLResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter check or strict type check
func (*CancelSparkSessionBatchSQLResponse) ToJsonString ¶
func (r *CancelSparkSessionBatchSQLResponse) ToJsonString() string
type CancelSparkSessionBatchSQLResponseParams ¶
type CancelSparkSessionBatchSQLResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type CancelTaskRequest ¶
type CancelTaskRequest struct { *tchttp.BaseRequest // Globally unique task ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` }
func NewCancelTaskRequest ¶
func NewCancelTaskRequest() (request *CancelTaskRequest)
func (*CancelTaskRequest) FromJsonString ¶
func (r *CancelTaskRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter check or strict type check
func (*CancelTaskRequest) ToJsonString ¶
func (r *CancelTaskRequest) ToJsonString() string
type CancelTaskRequestParams ¶
type CancelTaskRequestParams struct { // Globally unique task ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` }
Predefined struct for user
type CancelTaskResponse ¶
type CancelTaskResponse struct { *tchttp.BaseResponse Response *CancelTaskResponseParams `json:"Response"` }
func NewCancelTaskResponse ¶
func NewCancelTaskResponse() (response *CancelTaskResponse)
func (*CancelTaskResponse) FromJsonString ¶
func (r *CancelTaskResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter check or strict type check
func (*CancelTaskResponse) ToJsonString ¶
func (r *CancelTaskResponse) ToJsonString() string
type CancelTaskResponseParams ¶
type CancelTaskResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type Client ¶
func NewClient ¶
func NewClient(credential common.CredentialIface, region string, clientProfile *profile.ClientProfile) (client *Client, err error)
func NewClientWithSecretId ¶
Deprecated
func (*Client) CancelSparkSessionBatchSQL ¶
func (c *Client) CancelSparkSessionBatchSQL(request *CancelSparkSessionBatchSQLRequest) (response *CancelSparkSessionBatchSQLResponse, err error)
CancelSparkSessionBatchSQL This API is used to cancel a Spark SQL batch task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) CancelSparkSessionBatchSQLWithContext ¶
func (c *Client) CancelSparkSessionBatchSQLWithContext(ctx context.Context, request *CancelSparkSessionBatchSQLRequest) (response *CancelSparkSessionBatchSQLResponse, err error)
CancelSparkSessionBatchSQLWithContext This API is used to cancel a Spark SQL batch task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) CancelTask ¶
func (c *Client) CancelTask(request *CancelTaskRequest) (response *CancelTaskResponse, err error)
CancelTask This API is used to cancel a task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId" INVALIDPARAMETER_TASKALREADYFINISHED = "InvalidParameter.TaskAlreadyFinished"
func (*Client) CancelTaskWithContext ¶
func (c *Client) CancelTaskWithContext(ctx context.Context, request *CancelTaskRequest) (response *CancelTaskResponse, err error)
CancelTaskWithContext This API is used to cancel a task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId" INVALIDPARAMETER_TASKALREADYFINISHED = "InvalidParameter.TaskAlreadyFinished"
func (*Client) CreateDataEngine ¶
func (c *Client) CreateDataEngine(request *CreateDataEngineRequest) (response *CreateDataEngineResponse, err error)
CreateDataEngine This API is used to create a data engine.
error code that may be returned:
FAILEDOPERATION_ABNORMALORDERSTATUS = "FailedOperation.AbnormalOrderStatus" FAILEDOPERATION_ANOTHERCREATEPROCESSRUNNING = "FailedOperation.AnotherCreateProcessRunning" FAILEDOPERATION_ANOTHERPROCESSRUNNING = "FailedOperation.AnotherProcessRunning" FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" FAILEDOPERATION_BALANCENOTENOUGH = "FailedOperation.BalanceNotEnough" FAILEDOPERATION_BILLINGSYSTEMERROR = "FailedOperation.BillingSystemError" FAILEDOPERATION_BINDTOOMANYTAGS = "FailedOperation.BindTooManyTags" FAILEDOPERATION_CREATEDATAENGINEFAILED = "FailedOperation.CreateDataEngineFailed" FAILEDOPERATION_DELIVERGOODSFAILED = "FailedOperation.DeliverGoodsFailed" FAILEDOPERATION_DUPLICATETAGKEY = "FailedOperation.DuplicateTagKey" FAILEDOPERATION_FEEDEDUCTIONFAILED = "FailedOperation.FeeDeductionFailed" FAILEDOPERATION_GETPRODUCTINFORMATIONFAILED = "FailedOperation.GetProductInformationFailed" FAILEDOPERATION_ILLEGALRESOURCE = "FailedOperation.IllegalResource" FAILEDOPERATION_ILLEGALTAGKEY = "FailedOperation.IllegalTagKey" FAILEDOPERATION_ILLEGALTAGVALUE = "FailedOperation.IllegalTagValue" FAILEDOPERATION_INQUIREPRICEFAILED = "FailedOperation.InquirePriceFailed" FAILEDOPERATION_MODIFYINSTANCEFAILED = "FailedOperation.ModifyInstanceFailed" FAILEDOPERATION_NOPERMISSION = "FailedOperation.NoPermission" FAILEDOPERATION_NOREALNAMEAUTHENTICATION = "FailedOperation.NoRealNameAuthentication" FAILEDOPERATION_NUMBEREXCEEDLIMIT = "FailedOperation.NumberExceedLimit" FAILEDOPERATION_PARAMETERVALIDATIONFAILED = "FailedOperation.ParameterValidationFailed" FAILEDOPERATION_REFUNDDEPOSITFAILED = "FailedOperation.RefundDepositFailed" FAILEDOPERATION_TAGALREADYATTACHED = "FailedOperation.TagAlreadyAttached" FAILEDOPERATION_TAGKEYTOOLONG = "FailedOperation.TagKeyTooLong" FAILEDOPERATION_TAGNOTEXIST = "FailedOperation.TagNotExist" FAILEDOPERATION_TAGVALUETOOLONG = "FailedOperation.TagValueTooLong" FAILEDOPERATION_TOOMANYRESOURCES = 
"FailedOperation.TooManyResources" FAILEDOPERATION_TOOMANYTAGS = "FailedOperation.TooManyTags" INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER_DUPLICATEDATAENGINENAME = "InvalidParameter.DuplicateDataEngineName" INVALIDPARAMETER_INVALIDDATAENGINEMODE = "InvalidParameter.InvalidDataEngineMode" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDDATAENGINESPECS = "InvalidParameter.InvalidDataEngineSpecs" INVALIDPARAMETER_INVALIDENGINETYPE = "InvalidParameter.InvalidEngineType" INVALIDPARAMETER_INVALIDPAYMODE = "InvalidParameter.InvalidPayMode" INVALIDPARAMETER_INVALIDTIMESPAN = "InvalidParameter.InvalidTimeSpan" INVALIDPARAMETER_INVALIDTIMEUNIT = "InvalidParameter.InvalidTimeUnit" INVALIDPARAMETER_VPCCIDRFORMATERROR = "InvalidParameter.VpcCidrFormatError" RESOURCENOTFOUND = "ResourceNotFound" UNAUTHORIZEDOPERATION_NOPAYMENTAUTHORITY = "UnauthorizedOperation.NoPaymentAuthority"
func (*Client) CreateDataEngineWithContext ¶
func (c *Client) CreateDataEngineWithContext(ctx context.Context, request *CreateDataEngineRequest) (response *CreateDataEngineResponse, err error)
CreateDataEngineWithContext This API is used to create a data engine.
error code that may be returned:
FAILEDOPERATION_ABNORMALORDERSTATUS = "FailedOperation.AbnormalOrderStatus" FAILEDOPERATION_ANOTHERCREATEPROCESSRUNNING = "FailedOperation.AnotherCreateProcessRunning" FAILEDOPERATION_ANOTHERPROCESSRUNNING = "FailedOperation.AnotherProcessRunning" FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" FAILEDOPERATION_BALANCENOTENOUGH = "FailedOperation.BalanceNotEnough" FAILEDOPERATION_BILLINGSYSTEMERROR = "FailedOperation.BillingSystemError" FAILEDOPERATION_BINDTOOMANYTAGS = "FailedOperation.BindTooManyTags" FAILEDOPERATION_CREATEDATAENGINEFAILED = "FailedOperation.CreateDataEngineFailed" FAILEDOPERATION_DELIVERGOODSFAILED = "FailedOperation.DeliverGoodsFailed" FAILEDOPERATION_DUPLICATETAGKEY = "FailedOperation.DuplicateTagKey" FAILEDOPERATION_FEEDEDUCTIONFAILED = "FailedOperation.FeeDeductionFailed" FAILEDOPERATION_GETPRODUCTINFORMATIONFAILED = "FailedOperation.GetProductInformationFailed" FAILEDOPERATION_ILLEGALRESOURCE = "FailedOperation.IllegalResource" FAILEDOPERATION_ILLEGALTAGKEY = "FailedOperation.IllegalTagKey" FAILEDOPERATION_ILLEGALTAGVALUE = "FailedOperation.IllegalTagValue" FAILEDOPERATION_INQUIREPRICEFAILED = "FailedOperation.InquirePriceFailed" FAILEDOPERATION_MODIFYINSTANCEFAILED = "FailedOperation.ModifyInstanceFailed" FAILEDOPERATION_NOPERMISSION = "FailedOperation.NoPermission" FAILEDOPERATION_NOREALNAMEAUTHENTICATION = "FailedOperation.NoRealNameAuthentication" FAILEDOPERATION_NUMBEREXCEEDLIMIT = "FailedOperation.NumberExceedLimit" FAILEDOPERATION_PARAMETERVALIDATIONFAILED = "FailedOperation.ParameterValidationFailed" FAILEDOPERATION_REFUNDDEPOSITFAILED = "FailedOperation.RefundDepositFailed" FAILEDOPERATION_TAGALREADYATTACHED = "FailedOperation.TagAlreadyAttached" FAILEDOPERATION_TAGKEYTOOLONG = "FailedOperation.TagKeyTooLong" FAILEDOPERATION_TAGNOTEXIST = "FailedOperation.TagNotExist" FAILEDOPERATION_TAGVALUETOOLONG = "FailedOperation.TagValueTooLong" FAILEDOPERATION_TOOMANYRESOURCES = 
"FailedOperation.TooManyResources" FAILEDOPERATION_TOOMANYTAGS = "FailedOperation.TooManyTags" INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER_DUPLICATEDATAENGINENAME = "InvalidParameter.DuplicateDataEngineName" INVALIDPARAMETER_INVALIDDATAENGINEMODE = "InvalidParameter.InvalidDataEngineMode" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDDATAENGINESPECS = "InvalidParameter.InvalidDataEngineSpecs" INVALIDPARAMETER_INVALIDENGINETYPE = "InvalidParameter.InvalidEngineType" INVALIDPARAMETER_INVALIDPAYMODE = "InvalidParameter.InvalidPayMode" INVALIDPARAMETER_INVALIDTIMESPAN = "InvalidParameter.InvalidTimeSpan" INVALIDPARAMETER_INVALIDTIMEUNIT = "InvalidParameter.InvalidTimeUnit" INVALIDPARAMETER_VPCCIDRFORMATERROR = "InvalidParameter.VpcCidrFormatError" RESOURCENOTFOUND = "ResourceNotFound" UNAUTHORIZEDOPERATION_NOPAYMENTAUTHORITY = "UnauthorizedOperation.NoPaymentAuthority"
func (*Client) CreateInternalTable ¶
func (c *Client) CreateInternalTable(request *CreateInternalTableRequest) (response *CreateInternalTableResponse, err error)
CreateInternalTable This API is used to create a managed internal table. It has been deprecated.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION = "UnauthorizedOperation" UNSUPPORTEDOPERATION = "UnsupportedOperation"
func (*Client) CreateInternalTableWithContext ¶
func (c *Client) CreateInternalTableWithContext(ctx context.Context, request *CreateInternalTableRequest) (response *CreateInternalTableResponse, err error)
CreateInternalTable This API is used to create a managed internal table. It has been deprecated.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION = "UnauthorizedOperation" UNSUPPORTEDOPERATION = "UnsupportedOperation"
func (*Client) CreateResultDownload ¶
func (c *Client) CreateResultDownload(request *CreateResultDownloadRequest) (response *CreateResultDownloadResponse, err error)
CreateResultDownload This API is used to create a query result download task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_NOPERMISSION = "FailedOperation.NoPermission"
func (*Client) CreateResultDownloadWithContext ¶
func (c *Client) CreateResultDownloadWithContext(ctx context.Context, request *CreateResultDownloadRequest) (response *CreateResultDownloadResponse, err error)
CreateResultDownload This API is used to create a query result download task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_NOPERMISSION = "FailedOperation.NoPermission"
func (*Client) CreateSparkApp ¶
func (c *Client) CreateSparkApp(request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
CreateSparkApp This API is used to create a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn"
func (*Client) CreateSparkAppTask ¶
func (c *Client) CreateSparkAppTask(request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
CreateSparkAppTask This API is used to start a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateSparkAppTaskWithContext ¶
func (c *Client) CreateSparkAppTaskWithContext(ctx context.Context, request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
CreateSparkAppTask This API is used to start a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateSparkAppWithContext ¶
func (c *Client) CreateSparkAppWithContext(ctx context.Context, request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
CreateSparkApp This API is used to create a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn"
func (*Client) CreateSparkSessionBatchSQL ¶
func (c *Client) CreateSparkSessionBatchSQL(request *CreateSparkSessionBatchSQLRequest) (response *CreateSparkSessionBatchSQLResponse, err error)
CreateSparkSessionBatchSQL This API is used to submit a Spark SQL batch task to the job engine.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESOURCENOTFOUNDCODE_SESSIONINSUFFICIENTRESOURCES = "ResourceNotFound.ResourceNotFoundCode_SessionInsufficientResources" RESOURCENOTFOUND_SESSIONNOTFOUND = "ResourceNotFound.SessionNotFound" RESOURCENOTFOUND_SESSIONSTATEDEAD = "ResourceNotFound.SessionStateDead"
func (*Client) CreateSparkSessionBatchSQLWithContext ¶
func (c *Client) CreateSparkSessionBatchSQLWithContext(ctx context.Context, request *CreateSparkSessionBatchSQLRequest) (response *CreateSparkSessionBatchSQLResponse, err error)
CreateSparkSessionBatchSQL This API is used to submit a Spark SQL batch task to the job engine.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESOURCENOTFOUNDCODE_SESSIONINSUFFICIENTRESOURCES = "ResourceNotFound.ResourceNotFoundCode_SessionInsufficientResources" RESOURCENOTFOUND_SESSIONNOTFOUND = "ResourceNotFound.SessionNotFound" RESOURCENOTFOUND_SESSIONSTATEDEAD = "ResourceNotFound.SessionStateDead"
func (*Client) CreateTask ¶
func (c *Client) CreateTask(request *CreateTaskRequest) (response *CreateTaskResponse, err error)
CreateTask This API is used to create and execute a SQL task. (`CreateTasks` is recommended.)
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateTaskWithContext ¶
func (c *Client) CreateTaskWithContext(ctx context.Context, request *CreateTaskRequest) (response *CreateTaskResponse, err error)
CreateTask This API is used to create and execute a SQL task. (`CreateTasks` is recommended.)
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateTasks ¶
func (c *Client) CreateTasks(request *CreateTasksRequest) (response *CreateTasksResponse, err error)
CreateTasks This API is used to create and execute SQL tasks in batches.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateTasksWithContext ¶
func (c *Client) CreateTasksWithContext(ctx context.Context, request *CreateTasksRequest) (response *CreateTasksResponse, err error)
CreateTasks This API is used to create and execute SQL tasks in batches.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) DeleteSparkApp ¶
func (c *Client) DeleteSparkApp(request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
DeleteSparkApp This API is used to delete a Spark job.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) DeleteSparkAppWithContext ¶
func (c *Client) DeleteSparkAppWithContext(ctx context.Context, request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
DeleteSparkApp This API is used to delete a Spark job.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) DescribeEngineUsageInfo ¶
func (c *Client) DescribeEngineUsageInfo(request *DescribeEngineUsageInfoRequest) (response *DescribeEngineUsageInfoResponse, err error)
DescribeEngineUsageInfo This API is used to query the resource usage of a data engine based on its ID.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeEngineUsageInfoWithContext ¶
func (c *Client) DescribeEngineUsageInfoWithContext(ctx context.Context, request *DescribeEngineUsageInfoRequest) (response *DescribeEngineUsageInfoResponse, err error)
DescribeEngineUsageInfo This API is used to query the resource usage of a data engine based on its ID.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeForbiddenTablePro ¶
func (c *Client) DescribeForbiddenTablePro(request *DescribeForbiddenTableProRequest) (response *DescribeForbiddenTableProResponse, err error)
DescribeForbiddenTablePro This API is used to get the list of disabled table attributes (new).
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeForbiddenTableProWithContext ¶
func (c *Client) DescribeForbiddenTableProWithContext(ctx context.Context, request *DescribeForbiddenTableProRequest) (response *DescribeForbiddenTableProResponse, err error)
DescribeForbiddenTablePro This API is used to get the list of disabled table attributes (new).
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeLakeFsDirSummary ¶
func (c *Client) DescribeLakeFsDirSummary(request *DescribeLakeFsDirSummaryRequest) (response *DescribeLakeFsDirSummaryResponse, err error)
DescribeLakeFsDirSummary This API is used to query the summary of a specified directory in a managed storage.
error code that may be returned:
AUTHFAILURE = "AuthFailure" FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue" OPERATIONDENIED = "OperationDenied" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeLakeFsDirSummaryWithContext ¶
func (c *Client) DescribeLakeFsDirSummaryWithContext(ctx context.Context, request *DescribeLakeFsDirSummaryRequest) (response *DescribeLakeFsDirSummaryResponse, err error)
DescribeLakeFsDirSummary This API is used to query the summary of a specified directory in a managed storage.
error code that may be returned:
AUTHFAILURE = "AuthFailure" FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue" OPERATIONDENIED = "OperationDenied" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeLakeFsInfo ¶
func (c *Client) DescribeLakeFsInfo(request *DescribeLakeFsInfoRequest) (response *DescribeLakeFsInfoResponse, err error)
DescribeLakeFsInfo This API is used to query managed storage information.
error code that may be returned:
AUTHFAILURE = "AuthFailure" FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue" LIMITEXCEEDED = "LimitExceeded" MISSINGPARAMETER = "MissingParameter" OPERATIONDENIED = "OperationDenied" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeLakeFsInfoWithContext ¶
func (c *Client) DescribeLakeFsInfoWithContext(ctx context.Context, request *DescribeLakeFsInfoRequest) (response *DescribeLakeFsInfoResponse, err error)
DescribeLakeFsInfo This API is used to query managed storage information.
error code that may be returned:
AUTHFAILURE = "AuthFailure" FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue" LIMITEXCEEDED = "LimitExceeded" MISSINGPARAMETER = "MissingParameter" OPERATIONDENIED = "OperationDenied" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeResultDownload ¶
func (c *Client) DescribeResultDownload(request *DescribeResultDownloadRequest) (response *DescribeResultDownloadResponse, err error)
DescribeResultDownload This API is used to get a query result download task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeResultDownloadWithContext ¶
func (c *Client) DescribeResultDownloadWithContext(ctx context.Context, request *DescribeResultDownloadRequest) (response *DescribeResultDownloadResponse, err error)
DescribeResultDownload This API is used to get a query result download task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeSparkAppJob ¶
func (c *Client) DescribeSparkAppJob(request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
DescribeSparkAppJob This API is used to query the information of a Spark job.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam"
func (*Client) DescribeSparkAppJobWithContext ¶
func (c *Client) DescribeSparkAppJobWithContext(ctx context.Context, request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
DescribeSparkAppJob This API is used to query the information of a Spark job.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam"
func (*Client) DescribeSparkAppJobs ¶
func (c *Client) DescribeSparkAppJobs(request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
DescribeSparkAppJobs This API is used to query the list of Spark jobs.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeSparkAppJobsWithContext ¶
func (c *Client) DescribeSparkAppJobsWithContext(ctx context.Context, request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
DescribeSparkAppJobs This API is used to query the list of Spark jobs.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeSparkAppTasks ¶
func (c *Client) DescribeSparkAppTasks(request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
DescribeSparkAppTasks This API is used to query the list of running task instances of a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeSparkAppTasksWithContext ¶
func (c *Client) DescribeSparkAppTasksWithContext(ctx context.Context, request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
DescribeSparkAppTasks This API is used to query the list of running task instances of a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeSparkSessionBatchSqlLog ¶
func (c *Client) DescribeSparkSessionBatchSqlLog(request *DescribeSparkSessionBatchSqlLogRequest) (response *DescribeSparkSessionBatchSqlLogResponse, err error)
DescribeSparkSessionBatchSqlLog This API is used to obtain the logs of a Spark SQL batch task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeSparkSessionBatchSqlLogWithContext ¶
func (c *Client) DescribeSparkSessionBatchSqlLogWithContext(ctx context.Context, request *DescribeSparkSessionBatchSqlLogRequest) (response *DescribeSparkSessionBatchSqlLogResponse, err error)
DescribeSparkSessionBatchSqlLog This API is used to obtain the logs of a Spark SQL batch task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeTaskResult ¶
func (c *Client) DescribeTaskResult(request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
DescribeTaskResult This API is used to query the result of a task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDMAXRESULTS = "InvalidParameter.InvalidMaxResults" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId"
func (*Client) DescribeTaskResultWithContext ¶
func (c *Client) DescribeTaskResultWithContext(ctx context.Context, request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
DescribeTaskResult This API is used to query the result of a task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDMAXRESULTS = "InvalidParameter.InvalidMaxResults" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId"
func (*Client) DescribeTasks ¶
func (c *Client) DescribeTasks(request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
DescribeTasks This API is used to query the list of tasks.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue"
func (*Client) DescribeTasksWithContext ¶
func (c *Client) DescribeTasksWithContext(ctx context.Context, request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
DescribeTasks This API is used to query the list of tasks.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue"
func (*Client) DescribeUserRoles ¶
func (c *Client) DescribeUserRoles(request *DescribeUserRolesRequest) (response *DescribeUserRolesResponse, err error)
DescribeUserRoles This API is used to enumerate user roles.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeUserRolesWithContext ¶
func (c *Client) DescribeUserRolesWithContext(ctx context.Context, request *DescribeUserRolesRequest) (response *DescribeUserRolesResponse, err error)
DescribeUserRoles This API is used to enumerate user roles.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) GenerateCreateMangedTableSql ¶
func (c *Client) GenerateCreateMangedTableSql(request *GenerateCreateMangedTableSqlRequest) (response *GenerateCreateMangedTableSqlResponse, err error)
GenerateCreateMangedTableSql This API is used to generate SQL statements for creating a managed table.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION = "UnauthorizedOperation" UNSUPPORTEDOPERATION = "UnsupportedOperation"
func (*Client) GenerateCreateMangedTableSqlWithContext ¶
func (c *Client) GenerateCreateMangedTableSqlWithContext(ctx context.Context, request *GenerateCreateMangedTableSqlRequest) (response *GenerateCreateMangedTableSqlResponse, err error)
GenerateCreateMangedTableSql This API is used to generate SQL statements for creating a managed table.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION = "UnauthorizedOperation" UNSUPPORTEDOPERATION = "UnsupportedOperation"
func (*Client) ModifyGovernEventRule ¶
func (c *Client) ModifyGovernEventRule(request *ModifyGovernEventRuleRequest) (response *ModifyGovernEventRuleResponse, err error)
ModifyGovernEventRule This API is used to change data governance event thresholds.
error code that may be returned:
AUTHFAILURE = "AuthFailure" FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue" LIMITEXCEEDED = "LimitExceeded" OPERATIONDENIED = "OperationDenied" RESOURCENOTFOUND = "ResourceNotFound" UNKNOWNPARAMETER = "UnknownParameter"
func (*Client) ModifyGovernEventRuleWithContext ¶
func (c *Client) ModifyGovernEventRuleWithContext(ctx context.Context, request *ModifyGovernEventRuleRequest) (response *ModifyGovernEventRuleResponse, err error)
ModifyGovernEventRule This API is used to change data governance event thresholds.
error code that may be returned:
AUTHFAILURE = "AuthFailure" FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue" LIMITEXCEEDED = "LimitExceeded" OPERATIONDENIED = "OperationDenied" RESOURCENOTFOUND = "ResourceNotFound" UNKNOWNPARAMETER = "UnknownParameter"
func (*Client) ModifySparkApp ¶
func (c *Client) ModifySparkApp(request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
ModifySparkApp This API is used to update a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName"
func (*Client) ModifySparkAppBatch ¶
func (c *Client) ModifySparkAppBatch(request *ModifySparkAppBatchRequest) (response *ModifySparkAppBatchResponse, err error)
ModifySparkAppBatch This API is used to modify Spark job parameters in batches.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) ModifySparkAppBatchWithContext ¶
func (c *Client) ModifySparkAppBatchWithContext(ctx context.Context, request *ModifySparkAppBatchRequest) (response *ModifySparkAppBatchResponse, err error)
ModifySparkAppBatch This API is used to modify Spark job parameters in batches.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) ModifySparkAppWithContext ¶
func (c *Client) ModifySparkAppWithContext(ctx context.Context, request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
ModifySparkApp This API is used to update a Spark job.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName"
func (*Client) SuspendResumeDataEngine ¶
func (c *Client) SuspendResumeDataEngine(request *SuspendResumeDataEngineRequest) (response *SuspendResumeDataEngineResponse, err error)
SuspendResumeDataEngine This API is used to suspend or start a data engine.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine"
func (*Client) SuspendResumeDataEngineWithContext ¶
func (c *Client) SuspendResumeDataEngineWithContext(ctx context.Context, request *SuspendResumeDataEngineRequest) (response *SuspendResumeDataEngineResponse, err error)
SuspendResumeDataEngine This API is used to suspend or start a data engine.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine"
func (*Client) SwitchDataEngine ¶
func (c *Client) SwitchDataEngine(request *SwitchDataEngineRequest) (response *SwitchDataEngineResponse, err error)
SwitchDataEngine This API is used to switch between the primary and standby clusters.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine"
func (*Client) SwitchDataEngineWithContext ¶
func (c *Client) SwitchDataEngineWithContext(ctx context.Context, request *SwitchDataEngineRequest) (response *SwitchDataEngineResponse, err error)
SwitchDataEngine This API is used to switch between the primary and standby clusters.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine"
func (*Client) UpdateRowFilter ¶
func (c *Client) UpdateRowFilter(request *UpdateRowFilterRequest) (response *UpdateRowFilterResponse, err error)
UpdateRowFilter This API is used to update row filters. Please note that it updates filters only but not catalogs, databases, or tables.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine"
func (*Client) UpdateRowFilterWithContext ¶
func (c *Client) UpdateRowFilterWithContext(ctx context.Context, request *UpdateRowFilterRequest) (response *UpdateRowFilterResponse, err error)
UpdateRowFilter This API is used to update row filters. Please note that it updates filters only but not catalogs, databases, or tables.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" UNAUTHORIZEDOPERATION_OPERATECOMPUTINGENGINE = "UnauthorizedOperation.OperateComputingEngine"
type Column ¶
type Column struct { // Column name, which is case-insensitive and can contain up to 25 characters. Name *string `json:"Name,omitnil" name:"Name"` // Column type. Valid values: // string|tinyint|smallint|int|bigint|boolean|float|double|decimal|timestamp|date|binary|array<data_type>|map<primitive_type, data_type>|struct<col_name : data_type [COMMENT col_comment], ...>|uniontype<data_type, data_type, ...>. Type *string `json:"Type,omitnil" name:"Type"` // Class comment. // Note: This field may return null, indicating that no valid values can be obtained. Comment *string `json:"Comment,omitnil" name:"Comment"` // Length of the entire numeric value // Note: This field may return null, indicating that no valid values can be obtained. Precision *int64 `json:"Precision,omitnil" name:"Precision"` // Length of the decimal part // Note: This field may return null, indicating that no valid values can be obtained. Scale *int64 `json:"Scale,omitnil" name:"Scale"` // Whether the column is null. // Note: This field may return null, indicating that no valid values can be obtained. Nullable *string `json:"Nullable,omitnil" name:"Nullable"` // Field position // Note: This field may return null, indicating that no valid values can be obtained. Position *int64 `json:"Position,omitnil" name:"Position"` // Field creation time // Note: This field may return null, indicating that no valid values can be obtained. CreateTime *string `json:"CreateTime,omitnil" name:"CreateTime"` // Field modification time // Note: This field may return null, indicating that no valid values can be obtained. ModifiedTime *string `json:"ModifiedTime,omitnil" name:"ModifiedTime"` // Whether the column is the partition field. // Note: This field may return null, indicating that no valid values can be obtained. IsPartition *bool `json:"IsPartition,omitnil" name:"IsPartition"` }
type CommonMetrics ¶
// CommonMetrics carries the common execution metrics reported for a task.
type CommonMetrics struct {
	// The task creation time in ms.
	// Note: This field may return null, indicating that no valid values can be obtained.
	CreateTaskTime *float64 `json:"CreateTaskTime,omitnil" name:"CreateTaskTime"`

	// The processing time in ms.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ProcessTime *float64 `json:"ProcessTime,omitnil" name:"ProcessTime"`

	// The queue time in ms.
	// Note: This field may return null, indicating that no valid values can be obtained.
	QueueTime *float64 `json:"QueueTime,omitnil" name:"QueueTime"`

	// The execution duration in ms.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ExecutionTime *float64 `json:"ExecutionTime,omitnil" name:"ExecutionTime"`

	// Whether the result cache is hit.
	// Note: This field may return null, indicating that no valid values can be obtained.
	IsResultCacheHit *bool `json:"IsResultCacheHit,omitnil" name:"IsResultCacheHit"`

	// The volume of matched materialized views, in bytes.
	// Note: This field may return null, indicating that no valid values can be obtained.
	MatchedMVBytes *int64 `json:"MatchedMVBytes,omitnil" name:"MatchedMVBytes"`

	// The list of matched materialized views.
	// Note: This field may return null, indicating that no valid values can be obtained.
	MatchedMVs *string `json:"MatchedMVs,omitnil" name:"MatchedMVs"`

	// The result data in bytes.
	// Note: This field may return null, indicating that no valid values can be obtained.
	AffectedBytes *string `json:"AffectedBytes,omitnil" name:"AffectedBytes"`

	// The number of rows in the result.
	// Note: This field may return null, indicating that no valid values can be obtained.
	AffectedRows *int64 `json:"AffectedRows,omitnil" name:"AffectedRows"`

	// The volume of the data scanned, in bytes.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ProcessedBytes *int64 `json:"ProcessedBytes,omitnil" name:"ProcessedBytes"`

	// The number of scanned rows.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ProcessedRows *int64 `json:"ProcessedRows,omitnil" name:"ProcessedRows"`
}
type CosPermission ¶
// CosPermission describes the permissions granted on a COS path.
type CosPermission struct {
	// The COS path.
	// Note: This field may return null, indicating that no valid values can be obtained.
	CosPath *string `json:"CosPath,omitnil" name:"CosPath"`

	// The permissions. Valid values: `read` and `write`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Permissions []*string `json:"Permissions,omitnil" name:"Permissions"`
}
type CreateDataEngineRequest ¶
type CreateDataEngineRequest struct { *tchttp.BaseRequest // The engine type. Valid values: `spark` and `presto`. EngineType *string `json:"EngineType,omitnil" name:"EngineType"` // The name of the virtual cluster. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // The cluster type. Valid values: `spark_private`, `presto_private`, `presto_cu`, and `spark_cu`. ClusterType *string `json:"ClusterType,omitnil" name:"ClusterType"` // The billing mode. Valid values: `0` (shared engine), `1` (pay-as-you-go), and `2` (monthly subscription). Mode *int64 `json:"Mode,omitnil" name:"Mode"` // Whether to automatically start the clusters. AutoResume *bool `json:"AutoResume,omitnil" name:"AutoResume"` // The minimum number of clusters. MinClusters *int64 `json:"MinClusters,omitnil" name:"MinClusters"` // The maximum number of clusters. MaxClusters *int64 `json:"MaxClusters,omitnil" name:"MaxClusters"` // Whether the cluster is the default one. DefaultDataEngine *bool `json:"DefaultDataEngine,omitnil" name:"DefaultDataEngine"` // The VPC CIDR block. CidrBlock *string `json:"CidrBlock,omitnil" name:"CidrBlock"` // The description. Message *string `json:"Message,omitnil" name:"Message"` // The cluster size. Size *int64 `json:"Size,omitnil" name:"Size"` // The pay mode. Valid value: `0` (postpaid, default) and `1` (prepaid) (currently not available). PayMode *int64 `json:"PayMode,omitnil" name:"PayMode"` // The resource period. For the postpaid mode, the value is 3600 (default); for the prepaid mode, the value must be in the range of 1–120, representing purchasing the resource for 1–120 months. TimeSpan *int64 `json:"TimeSpan,omitnil" name:"TimeSpan"` // The unit of the resource period. Valid values: `s` (default) for the postpaid mode and `m` for the prepaid mode. TimeUnit *string `json:"TimeUnit,omitnil" name:"TimeUnit"` // The auto-renewal status of the resource. For the postpaid mode, no renewal is required, and the value is fixed to `0`. 
For the prepaid mode, valid values are `0` (manual), `1` (auto), and `2` (no renewal). If this parameter is set to `0` for a key account in the prepaid mode, auto-renewal applies. It defaults to `0`. AutoRenew *int64 `json:"AutoRenew,omitnil" name:"AutoRenew"` // The tags to be set for the resource being created. Tags []*TagInfo `json:"Tags,omitnil" name:"Tags"` // Whether to automatically suspend clusters. Valid values: `false` (default, no) and `true` (yes). AutoSuspend *bool `json:"AutoSuspend,omitnil" name:"AutoSuspend"` // Whether to enable scheduled start and suspension of clusters. Valid values: `0` (disable) and `1` (enable). Note: This policy and the auto-suspension policy are mutually exclusive. CrontabResumeSuspend *int64 `json:"CrontabResumeSuspend,omitnil" name:"CrontabResumeSuspend"` // The complex policy for scheduled start and suspension, including the start/suspension time and suspension policy. CrontabResumeSuspendStrategy *CrontabResumeSuspendStrategy `json:"CrontabResumeSuspendStrategy,omitnil" name:"CrontabResumeSuspendStrategy"` // The type of tasks to be executed by the engine, which defaults to SQL. Valid values: `SQL` and `BATCH`. EngineExecType *string `json:"EngineExecType,omitnil" name:"EngineExecType"` // The max task concurrency of a cluster, which defaults to 5. MaxConcurrency *int64 `json:"MaxConcurrency,omitnil" name:"MaxConcurrency"` // The task queue time limit, which defaults to 0. When the actual queue time exceeds the value set here, scale-out may be triggered. Setting this parameter to 0 represents that scale-out may be triggered immediately after a task queues up. TolerableQueueTime *int64 `json:"TolerableQueueTime,omitnil" name:"TolerableQueueTime"` // The cluster auto-suspension time, which defaults to 10 min. AutoSuspendTime *int64 `json:"AutoSuspendTime,omitnil" name:"AutoSuspendTime"` // The resource type. Valid values: `Standard_CU` (standard) and `Memory_CU` (memory). 
ResourceType *string `json:"ResourceType,omitnil" name:"ResourceType"` // The advanced configurations of clusters. DataEngineConfigPairs []*DataEngineConfigPair `json:"DataEngineConfigPairs,omitnil" name:"DataEngineConfigPairs"` // The version name of cluster image, such as SuperSQL-P 1.1 and SuperSQL-S 3.2. If no value is passed in, a cluster is created using the latest image version. ImageVersionName *string `json:"ImageVersionName,omitnil" name:"ImageVersionName"` // The primary cluster, which is specified when a failover cluster is created. MainClusterName *string `json:"MainClusterName,omitnil" name:"MainClusterName"` // Whether to enable the scaling feature for a monthly subscribed Spark job cluster. ElasticSwitch *bool `json:"ElasticSwitch,omitnil" name:"ElasticSwitch"` // The upper limit (in CUs) for scaling of the monthly subscribed Spark job cluster. ElasticLimit *int64 `json:"ElasticLimit,omitnil" name:"ElasticLimit"` // The session resource configuration template for a Spark job cluster. SessionResourceTemplate *SessionResourceTemplate `json:"SessionResourceTemplate,omitnil" name:"SessionResourceTemplate"` }
func NewCreateDataEngineRequest ¶
func NewCreateDataEngineRequest() (request *CreateDataEngineRequest)
func (*CreateDataEngineRequest) FromJsonString ¶
func (r *CreateDataEngineRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateDataEngineRequest) ToJsonString ¶
func (r *CreateDataEngineRequest) ToJsonString() string
type CreateDataEngineRequestParams ¶
type CreateDataEngineRequestParams struct { // The engine type. Valid values: `spark` and `presto`. EngineType *string `json:"EngineType,omitnil" name:"EngineType"` // The name of the virtual cluster. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // The cluster type. Valid values: `spark_private`, `presto_private`, `presto_cu`, and `spark_cu`. ClusterType *string `json:"ClusterType,omitnil" name:"ClusterType"` // The billing mode. Valid values: `0` (shared engine), `1` (pay-as-you-go), and `2` (monthly subscription). Mode *int64 `json:"Mode,omitnil" name:"Mode"` // Whether to automatically start the clusters. AutoResume *bool `json:"AutoResume,omitnil" name:"AutoResume"` // The minimum number of clusters. MinClusters *int64 `json:"MinClusters,omitnil" name:"MinClusters"` // The maximum number of clusters. MaxClusters *int64 `json:"MaxClusters,omitnil" name:"MaxClusters"` // Whether the cluster is the default one. // // Deprecated: DefaultDataEngine is deprecated. DefaultDataEngine *bool `json:"DefaultDataEngine,omitnil" name:"DefaultDataEngine"` // The VPC CIDR block. CidrBlock *string `json:"CidrBlock,omitnil" name:"CidrBlock"` // The description. Message *string `json:"Message,omitnil" name:"Message"` // The cluster size. Size *int64 `json:"Size,omitnil" name:"Size"` // The pay mode. Valid value: `0` (postpaid, default) and `1` (prepaid) (currently not available). PayMode *int64 `json:"PayMode,omitnil" name:"PayMode"` // The resource period. For the postpaid mode, the value is 3600 (default); for the prepaid mode, the value must be in the range of 1–120, representing purchasing the resource for 1–120 months. TimeSpan *int64 `json:"TimeSpan,omitnil" name:"TimeSpan"` // The unit of the resource period. Valid values: `s` (default) for the postpaid mode and `m` for the prepaid mode. TimeUnit *string `json:"TimeUnit,omitnil" name:"TimeUnit"` // The auto-renewal status of the resource. 
For the postpaid mode, no renewal is required, and the value is fixed to `0`. For the prepaid mode, valid values are `0` (manual), `1` (auto), and `2` (no renewal). If this parameter is set to `0` for a key account in the prepaid mode, auto-renewal applies. It defaults to `0`. AutoRenew *int64 `json:"AutoRenew,omitnil" name:"AutoRenew"` // The tags to be set for the resource being created. Tags []*TagInfo `json:"Tags,omitnil" name:"Tags"` // Whether to automatically suspend clusters. Valid values: `false` (default, no) and `true` (yes). AutoSuspend *bool `json:"AutoSuspend,omitnil" name:"AutoSuspend"` // Whether to enable scheduled start and suspension of clusters. Valid values: `0` (disable) and `1` (enable). Note: This policy and the auto-suspension policy are mutually exclusive. CrontabResumeSuspend *int64 `json:"CrontabResumeSuspend,omitnil" name:"CrontabResumeSuspend"` // The complex policy for scheduled start and suspension, including the start/suspension time and suspension policy. CrontabResumeSuspendStrategy *CrontabResumeSuspendStrategy `json:"CrontabResumeSuspendStrategy,omitnil" name:"CrontabResumeSuspendStrategy"` // The type of tasks to be executed by the engine, which defaults to SQL. Valid values: `SQL` and `BATCH`. EngineExecType *string `json:"EngineExecType,omitnil" name:"EngineExecType"` // The max task concurrency of a cluster, which defaults to 5. MaxConcurrency *int64 `json:"MaxConcurrency,omitnil" name:"MaxConcurrency"` // The task queue time limit, which defaults to 0. When the actual queue time exceeds the value set here, scale-out may be triggered. Setting this parameter to 0 represents that scale-out may be triggered immediately after a task queues up. TolerableQueueTime *int64 `json:"TolerableQueueTime,omitnil" name:"TolerableQueueTime"` // The cluster auto-suspension time, which defaults to 10 min. AutoSuspendTime *int64 `json:"AutoSuspendTime,omitnil" name:"AutoSuspendTime"` // The resource type. 
Valid values: `Standard_CU` (standard) and `Memory_CU` (memory). ResourceType *string `json:"ResourceType,omitnil" name:"ResourceType"` // The advanced configurations of clusters. DataEngineConfigPairs []*DataEngineConfigPair `json:"DataEngineConfigPairs,omitnil" name:"DataEngineConfigPairs"` // The version name of cluster image, such as SuperSQL-P 1.1 and SuperSQL-S 3.2. If no value is passed in, a cluster is created using the latest image version. ImageVersionName *string `json:"ImageVersionName,omitnil" name:"ImageVersionName"` // The primary cluster, which is specified when a failover cluster is created. MainClusterName *string `json:"MainClusterName,omitnil" name:"MainClusterName"` // Whether to enable the scaling feature for a monthly subscribed Spark job cluster. ElasticSwitch *bool `json:"ElasticSwitch,omitnil" name:"ElasticSwitch"` // The upper limit (in CUs) for scaling of the monthly subscribed Spark job cluster. ElasticLimit *int64 `json:"ElasticLimit,omitnil" name:"ElasticLimit"` // The session resource configuration template for a Spark job cluster. SessionResourceTemplate *SessionResourceTemplate `json:"SessionResourceTemplate,omitnil" name:"SessionResourceTemplate"` }
Predefined struct for user
type CreateDataEngineResponse ¶
type CreateDataEngineResponse struct { *tchttp.BaseResponse Response *CreateDataEngineResponseParams `json:"Response"` }
func NewCreateDataEngineResponse ¶
func NewCreateDataEngineResponse() (response *CreateDataEngineResponse)
func (*CreateDataEngineResponse) FromJsonString ¶
func (r *CreateDataEngineResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateDataEngineResponse) ToJsonString ¶
func (r *CreateDataEngineResponse) ToJsonString() string
type CreateDataEngineResponseParams ¶
// CreateDataEngineResponseParams holds the response parameters of the CreateDataEngine API.
// Predefined struct for user.
type CreateDataEngineResponseParams struct {
	// The ID of the virtual engine.
	DataEngineId *string `json:"DataEngineId,omitnil" name:"DataEngineId"`

	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type CreateInternalTableRequest ¶
type CreateInternalTableRequest struct { *tchttp.BaseRequest // The basic table information. TableBaseInfo *TableBaseInfo `json:"TableBaseInfo,omitnil" name:"TableBaseInfo"` // The table fields. Columns []*TColumn `json:"Columns,omitnil" name:"Columns"` // The table partitions. Partitions []*TPartition `json:"Partitions,omitnil" name:"Partitions"` // The table properties. Properties []*Property `json:"Properties,omitnil" name:"Properties"` }
func NewCreateInternalTableRequest ¶
func NewCreateInternalTableRequest() (request *CreateInternalTableRequest)
func (*CreateInternalTableRequest) FromJsonString ¶
func (r *CreateInternalTableRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateInternalTableRequest) ToJsonString ¶
func (r *CreateInternalTableRequest) ToJsonString() string
type CreateInternalTableRequestParams ¶
type CreateInternalTableRequestParams struct { // The basic table information. TableBaseInfo *TableBaseInfo `json:"TableBaseInfo,omitnil" name:"TableBaseInfo"` // The table fields. Columns []*TColumn `json:"Columns,omitnil" name:"Columns"` // The table partitions. Partitions []*TPartition `json:"Partitions,omitnil" name:"Partitions"` // The table properties. Properties []*Property `json:"Properties,omitnil" name:"Properties"` }
Predefined struct for user
type CreateInternalTableResponse ¶
type CreateInternalTableResponse struct { *tchttp.BaseResponse Response *CreateInternalTableResponseParams `json:"Response"` }
func NewCreateInternalTableResponse ¶
func NewCreateInternalTableResponse() (response *CreateInternalTableResponse)
func (*CreateInternalTableResponse) FromJsonString ¶
func (r *CreateInternalTableResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateInternalTableResponse) ToJsonString ¶
func (r *CreateInternalTableResponse) ToJsonString() string
type CreateInternalTableResponseParams ¶
// CreateInternalTableResponseParams holds the response parameters of the CreateInternalTable API.
// Predefined struct for user.
type CreateInternalTableResponseParams struct {
	// The SQL statements for creating the managed internal table.
	Execution *string `json:"Execution,omitnil" name:"Execution"`

	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type CreateResultDownloadRequest ¶
type CreateResultDownloadRequest struct { *tchttp.BaseRequest // The result query task ID. TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The result format. Format *string `json:"Format,omitnil" name:"Format"` // Whether to re-generate a file to download. This parameter applies only when the last task is `timeout` or `error`. Force *bool `json:"Force,omitnil" name:"Force"` }
func NewCreateResultDownloadRequest ¶
func NewCreateResultDownloadRequest() (request *CreateResultDownloadRequest)
func (*CreateResultDownloadRequest) FromJsonString ¶
func (r *CreateResultDownloadRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateResultDownloadRequest) ToJsonString ¶
func (r *CreateResultDownloadRequest) ToJsonString() string
type CreateResultDownloadRequestParams ¶
// CreateResultDownloadRequestParams holds the parameters of the CreateResultDownload API.
// Predefined struct for user.
type CreateResultDownloadRequestParams struct {
	// The result query task ID.
	TaskId *string `json:"TaskId,omitnil" name:"TaskId"`

	// The result format.
	Format *string `json:"Format,omitnil" name:"Format"`

	// Whether to re-generate a file to download. This parameter applies only when the last task is `timeout` or `error`.
	Force *bool `json:"Force,omitnil" name:"Force"`
}
Predefined struct for user
type CreateResultDownloadResponse ¶
type CreateResultDownloadResponse struct { *tchttp.BaseResponse Response *CreateResultDownloadResponseParams `json:"Response"` }
func NewCreateResultDownloadResponse ¶
func NewCreateResultDownloadResponse() (response *CreateResultDownloadResponse)
func (*CreateResultDownloadResponse) FromJsonString ¶
func (r *CreateResultDownloadResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateResultDownloadResponse) ToJsonString ¶
func (r *CreateResultDownloadResponse) ToJsonString() string
type CreateResultDownloadResponseParams ¶
// CreateResultDownloadResponseParams holds the response parameters of the CreateResultDownload API.
// Predefined struct for user.
type CreateResultDownloadResponseParams struct {
	// The download task ID.
	DownloadId *string `json:"DownloadId,omitnil" name:"DownloadId"`

	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type CreateSparkAppRequest ¶
type CreateSparkAppRequest struct { *tchttp.BaseRequest // The Spark job name. AppName *string `json:"AppName,omitnil" name:"AppName"` // The Spark job type. Valid values: `1` for Spark JAR job and `2` for Spark streaming job. AppType *int64 `json:"AppType,omitnil" name:"AppType"` // The data engine executing the Spark job. DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"` // The path of the Spark job package. AppFile *string `json:"AppFile,omitnil" name:"AppFile"` // The data access policy (CAM role arn). RoleArn *int64 `json:"RoleArn,omitnil" name:"RoleArn"` // The driver size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). AppDriverSize *string `json:"AppDriverSize,omitnil" name:"AppDriverSize"` // The executor size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). AppExecutorSize *string `json:"AppExecutorSize,omitnil" name:"AppExecutorSize"` // Number of Spark job executors AppExecutorNums *int64 `json:"AppExecutorNums,omitnil" name:"AppExecutorNums"` // This field has been disused. Use the `Datasource` field instead. Eni *string `json:"Eni,omitnil" name:"Eni"` // The source of the Spark job package. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocal *string `json:"IsLocal,omitnil" name:"IsLocal"` // The main class of the Spark job. MainClass *string `json:"MainClass,omitnil" name:"MainClass"` // Spark configurations separated by line break AppConf *string `json:"AppConf,omitnil" name:"AppConf"` // The source of the dependency JAR packages of the Spark job. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalJars *string `json:"IsLocalJars,omitnil" name:"IsLocalJars"` // The dependency JAR packages of the Spark JAR job (JAR packages), separated by comma. 
AppJars *string `json:"AppJars,omitnil" name:"AppJars"` // The source of the dependency files of the Spark job. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalFiles *string `json:"IsLocalFiles,omitnil" name:"IsLocalFiles"` // The dependency files of the Spark job (files other than JAR and ZIP packages) separated by comma. AppFiles *string `json:"AppFiles,omitnil" name:"AppFiles"` // The input parameters of the Spark job, separated by comma. CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"` // The maximum number of retries, valid for Spark streaming tasks only. MaxRetries *int64 `json:"MaxRetries,omitnil" name:"MaxRetries"` // The data source name. DataSource *string `json:"DataSource,omitnil" name:"DataSource"` // The source of the PySpark dependencies. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitnil" name:"IsLocalPythonFiles"` // The PySpark dependencies (Python files), separated by comma, with .py, .zip, and .egg formats supported. AppPythonFiles *string `json:"AppPythonFiles,omitnil" name:"AppPythonFiles"` // The source of the dependency archives of the Spark job. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalArchives *string `json:"IsLocalArchives,omitnil" name:"IsLocalArchives"` // The dependency archives of the Spark job, separated by comma, with tar.gz, .tgz, and .tar formats supported. AppArchives *string `json:"AppArchives,omitnil" name:"AppArchives"` // The Spark image version. SparkImage *string `json:"SparkImage,omitnil" name:"SparkImage"` // The Spark image version name. 
SparkImageVersion *string `json:"SparkImageVersion,omitnil" name:"SparkImageVersion"` // The specified executor count (max), which defaults to 1. This parameter applies if the "Dynamic" mode is selected. If the "Dynamic" mode is not selected, the executor count is equal to `AppExecutorNums`. AppExecutorMaxNumbers *int64 `json:"AppExecutorMaxNumbers,omitnil" name:"AppExecutorMaxNumbers"` // The ID of the associated Data Lake Compute query script. SessionId *string `json:"SessionId,omitnil" name:"SessionId"` // Whether to inherit the task resource configuration from the cluster template. Valid values: `0` (default): No; `1`: Yes. IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"` // Whether to run the task with the session SQLs. Valid values: `false` for no and `true` for yes. IsSessionStarted *bool `json:"IsSessionStarted,omitnil" name:"IsSessionStarted"` }
func NewCreateSparkAppRequest ¶
func NewCreateSparkAppRequest() (request *CreateSparkAppRequest)
func (*CreateSparkAppRequest) FromJsonString ¶
func (r *CreateSparkAppRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateSparkAppRequest) ToJsonString ¶
func (r *CreateSparkAppRequest) ToJsonString() string
type CreateSparkAppRequestParams ¶
// CreateSparkAppRequestParams holds the parameters of the CreateSparkApp API.
// Predefined struct for user.
type CreateSparkAppRequestParams struct {
	// The Spark job name.
	AppName *string `json:"AppName,omitnil" name:"AppName"`

	// The Spark job type. Valid values: `1` for Spark JAR job and `2` for Spark streaming job.
	AppType *int64 `json:"AppType,omitnil" name:"AppType"`

	// The data engine executing the Spark job.
	DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"`

	// The path of the Spark job package.
	AppFile *string `json:"AppFile,omitnil" name:"AppFile"`

	// The data access policy (CAM role arn).
	RoleArn *int64 `json:"RoleArn,omitnil" name:"RoleArn"`

	// The driver size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs).
	AppDriverSize *string `json:"AppDriverSize,omitnil" name:"AppDriverSize"`

	// The executor size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs).
	AppExecutorSize *string `json:"AppExecutorSize,omitnil" name:"AppExecutorSize"`

	// Number of Spark job executors.
	AppExecutorNums *int64 `json:"AppExecutorNums,omitnil" name:"AppExecutorNums"`

	// This field has been disused. Use the `Datasource` field instead.
	Eni *string `json:"Eni,omitnil" name:"Eni"`

	// The source of the Spark job package. Valid values: `cos` for COS and `lakefs` for the local system
	// (for use in the console, but this method does not support direct API calls).
	IsLocal *string `json:"IsLocal,omitnil" name:"IsLocal"`

	// The main class of the Spark job.
	MainClass *string `json:"MainClass,omitnil" name:"MainClass"`

	// Spark configurations separated by line break.
	AppConf *string `json:"AppConf,omitnil" name:"AppConf"`

	// The source of the dependency JAR packages of the Spark job. Valid values: `cos` for COS and `lakefs`
	// for the local system (for use in the console, but this method does not support direct API calls).
	IsLocalJars *string `json:"IsLocalJars,omitnil" name:"IsLocalJars"`

	// The dependency JAR packages of the Spark JAR job (JAR packages), separated by comma.
	AppJars *string `json:"AppJars,omitnil" name:"AppJars"`

	// The source of the dependency files of the Spark job. Valid values: `cos` for COS and `lakefs`
	// for the local system (for use in the console, but this method does not support direct API calls).
	IsLocalFiles *string `json:"IsLocalFiles,omitnil" name:"IsLocalFiles"`

	// The dependency files of the Spark job (files other than JAR and ZIP packages) separated by comma.
	AppFiles *string `json:"AppFiles,omitnil" name:"AppFiles"`

	// The input parameters of the Spark job, separated by comma.
	CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"`

	// The maximum number of retries, valid for Spark streaming tasks only.
	MaxRetries *int64 `json:"MaxRetries,omitnil" name:"MaxRetries"`

	// The data source name.
	DataSource *string `json:"DataSource,omitnil" name:"DataSource"`

	// The source of the PySpark dependencies. Valid values: `cos` for COS and `lakefs` for the local system
	// (for use in the console, but this method does not support direct API calls).
	IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitnil" name:"IsLocalPythonFiles"`

	// The PySpark dependencies (Python files), separated by comma, with .py, .zip, and .egg formats supported.
	AppPythonFiles *string `json:"AppPythonFiles,omitnil" name:"AppPythonFiles"`

	// The source of the dependency archives of the Spark job. Valid values: `cos` for COS and `lakefs`
	// for the local system (for use in the console, but this method does not support direct API calls).
	IsLocalArchives *string `json:"IsLocalArchives,omitnil" name:"IsLocalArchives"`

	// The dependency archives of the Spark job, separated by comma, with tar.gz, .tgz, and .tar formats supported.
	AppArchives *string `json:"AppArchives,omitnil" name:"AppArchives"`

	// The Spark image version.
	SparkImage *string `json:"SparkImage,omitnil" name:"SparkImage"`

	// The Spark image version name.
	SparkImageVersion *string `json:"SparkImageVersion,omitnil" name:"SparkImageVersion"`

	// The specified executor count (max), which defaults to 1. This parameter applies if the "Dynamic" mode
	// is selected. If the "Dynamic" mode is not selected, the executor count is equal to `AppExecutorNums`.
	AppExecutorMaxNumbers *int64 `json:"AppExecutorMaxNumbers,omitnil" name:"AppExecutorMaxNumbers"`

	// The ID of the associated Data Lake Compute query script.
	SessionId *string `json:"SessionId,omitnil" name:"SessionId"`

	// Whether to inherit the task resource configuration from the cluster template. Valid values: `0` (default): No; `1`: Yes.
	IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"`

	// Whether to run the task with the session SQLs. Valid values: `false` for no and `true` for yes.
	IsSessionStarted *bool `json:"IsSessionStarted,omitnil" name:"IsSessionStarted"`
}
Predefined struct for user
type CreateSparkAppResponse ¶
type CreateSparkAppResponse struct { *tchttp.BaseResponse Response *CreateSparkAppResponseParams `json:"Response"` }
func NewCreateSparkAppResponse ¶
func NewCreateSparkAppResponse() (response *CreateSparkAppResponse)
func (*CreateSparkAppResponse) FromJsonString ¶
func (r *CreateSparkAppResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateSparkAppResponse) ToJsonString ¶
func (r *CreateSparkAppResponse) ToJsonString() string
type CreateSparkAppResponseParams ¶
// CreateSparkAppResponseParams holds the response parameters of the CreateSparkApp API.
// Predefined struct for user.
type CreateSparkAppResponseParams struct {
	// The unique ID of the application.
	// Note: This field may return null, indicating that no valid values can be obtained.
	SparkAppId *string `json:"SparkAppId,omitnil" name:"SparkAppId"`

	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type CreateSparkAppTaskRequest ¶
type CreateSparkAppTaskRequest struct { *tchttp.BaseRequest // Spark job name JobName *string `json:"JobName,omitnil" name:"JobName"` // The input parameters of the Spark job, separated by space. They are generally used for periodic calls. CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"` }
func NewCreateSparkAppTaskRequest ¶
func NewCreateSparkAppTaskRequest() (request *CreateSparkAppTaskRequest)
func (*CreateSparkAppTaskRequest) FromJsonString ¶
func (r *CreateSparkAppTaskRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateSparkAppTaskRequest) ToJsonString ¶
func (r *CreateSparkAppTaskRequest) ToJsonString() string
type CreateSparkAppTaskRequestParams ¶
// CreateSparkAppTaskRequestParams holds the parameters of the CreateSparkAppTask API.
// Predefined struct for user.
type CreateSparkAppTaskRequestParams struct {
	// Spark job name.
	JobName *string `json:"JobName,omitnil" name:"JobName"`

	// The input parameters of the Spark job, separated by space. They are generally used for periodic calls.
	CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"`
}
Predefined struct for user
type CreateSparkAppTaskResponse ¶
type CreateSparkAppTaskResponse struct { *tchttp.BaseResponse Response *CreateSparkAppTaskResponseParams `json:"Response"` }
func NewCreateSparkAppTaskResponse ¶
func NewCreateSparkAppTaskResponse() (response *CreateSparkAppTaskResponse)
func (*CreateSparkAppTaskResponse) FromJsonString ¶
func (r *CreateSparkAppTaskResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking.
func (*CreateSparkAppTaskResponse) ToJsonString ¶
func (r *CreateSparkAppTaskResponse) ToJsonString() string
type CreateSparkAppTaskResponseParams ¶
type CreateSparkAppTaskResponseParams struct { // Batch ID BatchId *string `json:"BatchId,omitnil" name:"BatchId"` // Task ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type CreateSparkSessionBatchSQLRequest ¶
type CreateSparkSessionBatchSQLRequest struct { *tchttp.BaseRequest // The name of the engine for executing the Spark job. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // The SQL statement to execute. ExecuteSQL *string `json:"ExecuteSQL,omitnil" name:"ExecuteSQL"` // The driver size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). DriverSize *string `json:"DriverSize,omitnil" name:"DriverSize"` // The executor size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). ExecutorSize *string `json:"ExecutorSize,omitnil" name:"ExecutorSize"` // The executor count, which defaults to 1. ExecutorNumbers *uint64 `json:"ExecutorNumbers,omitnil" name:"ExecutorNumbers"` // The maximum executor count, which defaults to 1. This parameter applies if the "Dynamic" mode is selected. If the "Dynamic" mode is not selected, the value of this parameter is the same as that of `ExecutorNumbers`. ExecutorMaxNumbers *uint64 `json:"ExecutorMaxNumbers,omitnil" name:"ExecutorMaxNumbers"` // The session timeout period in seconds. Default value: 3600 TimeoutInSecond *int64 `json:"TimeoutInSecond,omitnil" name:"TimeoutInSecond"` // The unique ID of a session. If this parameter is specified, the task will be run using the specified session. SessionId *string `json:"SessionId,omitnil" name:"SessionId"` // The name of the session to create. SessionName *string `json:"SessionName,omitnil" name:"SessionName"` // The session configurations. Valid values: `1.dlc.eni` for user-defined ENI gateway information; // `2.dlc.role.arn` for user-defined roleArn configurations; // and `3.dlc.sql.set.config` for user-defined cluster configurations. Arguments []*KVPair `json:"Arguments,omitnil" name:"Arguments"` // Whether to inherit the resource configurations from the cluster. Valid values: `0` for no (default) and `1` for yes. IsInherit *int64 `json:"IsInherit,omitnil" name:"IsInherit"` }
func NewCreateSparkSessionBatchSQLRequest ¶
func NewCreateSparkSessionBatchSQLRequest() (request *CreateSparkSessionBatchSQLRequest)
func (*CreateSparkSessionBatchSQLRequest) FromJsonString ¶
func (r *CreateSparkSessionBatchSQLRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*CreateSparkSessionBatchSQLRequest) ToJsonString ¶
func (r *CreateSparkSessionBatchSQLRequest) ToJsonString() string
type CreateSparkSessionBatchSQLRequestParams ¶
type CreateSparkSessionBatchSQLRequestParams struct { // The name of the engine for executing the Spark job. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // The SQL statement to execute. ExecuteSQL *string `json:"ExecuteSQL,omitnil" name:"ExecuteSQL"` // The driver size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). DriverSize *string `json:"DriverSize,omitnil" name:"DriverSize"` // The executor size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). ExecutorSize *string `json:"ExecutorSize,omitnil" name:"ExecutorSize"` // The executor count, which defaults to 1. ExecutorNumbers *uint64 `json:"ExecutorNumbers,omitnil" name:"ExecutorNumbers"` // The maximum executor count, which defaults to 1. This parameter applies if the "Dynamic" mode is selected. If the "Dynamic" mode is not selected, the value of this parameter is the same as that of `ExecutorNumbers`. ExecutorMaxNumbers *uint64 `json:"ExecutorMaxNumbers,omitnil" name:"ExecutorMaxNumbers"` // The session timeout period in seconds. Default value: 3600 TimeoutInSecond *int64 `json:"TimeoutInSecond,omitnil" name:"TimeoutInSecond"` // The unique ID of a session. If this parameter is specified, the task will be run using the specified session. SessionId *string `json:"SessionId,omitnil" name:"SessionId"` // The name of the session to create. SessionName *string `json:"SessionName,omitnil" name:"SessionName"` // The session configurations. Valid values: `1.dlc.eni` for user-defined ENI gateway information; // `2.dlc.role.arn` for user-defined roleArn configurations; // and `3.dlc.sql.set.config` for user-defined cluster configurations. Arguments []*KVPair `json:"Arguments,omitnil" name:"Arguments"` // Whether to inherit the resource configurations from the cluster. Valid values: `0` for no (default) and `1` for yes. IsInherit *int64 `json:"IsInherit,omitnil" name:"IsInherit"` }
Predefined struct for user
type CreateSparkSessionBatchSQLResponse ¶
type CreateSparkSessionBatchSQLResponse struct { *tchttp.BaseResponse Response *CreateSparkSessionBatchSQLResponseParams `json:"Response"` }
func NewCreateSparkSessionBatchSQLResponse ¶
func NewCreateSparkSessionBatchSQLResponse() (response *CreateSparkSessionBatchSQLResponse)
func (*CreateSparkSessionBatchSQLResponse) FromJsonString ¶
func (r *CreateSparkSessionBatchSQLResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*CreateSparkSessionBatchSQLResponse) ToJsonString ¶
func (r *CreateSparkSessionBatchSQLResponse) ToJsonString() string
type CreateSparkSessionBatchSQLResponseParams ¶
type CreateSparkSessionBatchSQLResponseParams struct { // The unique identifier of a batch task. BatchId *string `json:"BatchId,omitnil" name:"BatchId"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type CreateTaskRequest ¶
type CreateTaskRequest struct { *tchttp.BaseRequest // Computing task. This parameter contains the task type and related configuration information. Task *Task `json:"Task,omitnil" name:"Task"` // Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field). DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // Name of the default data source DatasourceConnectionName *string `json:"DatasourceConnectionName,omitnil" name:"DatasourceConnectionName"` // Data engine name. If this parameter is not specified, the task will be submitted to the default engine. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` }
func NewCreateTaskRequest ¶
func NewCreateTaskRequest() (request *CreateTaskRequest)
func (*CreateTaskRequest) FromJsonString ¶
func (r *CreateTaskRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*CreateTaskRequest) ToJsonString ¶
func (r *CreateTaskRequest) ToJsonString() string
type CreateTaskRequestParams ¶
type CreateTaskRequestParams struct { // Computing task. This parameter contains the task type and related configuration information. Task *Task `json:"Task,omitnil" name:"Task"` // Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field). DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // Name of the default data source DatasourceConnectionName *string `json:"DatasourceConnectionName,omitnil" name:"DatasourceConnectionName"` // Data engine name. If this parameter is not specified, the task will be submitted to the default engine. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` }
Predefined struct for user
type CreateTaskResponse ¶
type CreateTaskResponse struct { *tchttp.BaseResponse Response *CreateTaskResponseParams `json:"Response"` }
func NewCreateTaskResponse ¶
func NewCreateTaskResponse() (response *CreateTaskResponse)
func (*CreateTaskResponse) FromJsonString ¶
func (r *CreateTaskResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*CreateTaskResponse) ToJsonString ¶
func (r *CreateTaskResponse) ToJsonString() string
type CreateTaskResponseParams ¶
type CreateTaskResponseParams struct { // Task ID // Note: This field may return null, indicating that no valid values can be obtained. TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type CreateTasksRequest ¶
type CreateTasksRequest struct { *tchttp.BaseRequest // Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field). DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // SQL task information Tasks *TasksInfo `json:"Tasks,omitnil" name:"Tasks"` // Data source name. Default value: DataLakeCatalog. DatasourceConnectionName *string `json:"DatasourceConnectionName,omitnil" name:"DatasourceConnectionName"` // Compute engine name. If this parameter is not specified, the task will be submitted to the default engine. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` }
func NewCreateTasksRequest ¶
func NewCreateTasksRequest() (request *CreateTasksRequest)
func (*CreateTasksRequest) FromJsonString ¶
func (r *CreateTasksRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*CreateTasksRequest) ToJsonString ¶
func (r *CreateTasksRequest) ToJsonString() string
type CreateTasksRequestParams ¶
type CreateTasksRequestParams struct { // Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field). DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // SQL task information Tasks *TasksInfo `json:"Tasks,omitnil" name:"Tasks"` // Data source name. Default value: DataLakeCatalog. DatasourceConnectionName *string `json:"DatasourceConnectionName,omitnil" name:"DatasourceConnectionName"` // Compute engine name. If this parameter is not specified, the task will be submitted to the default engine. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` }
Predefined struct for user
type CreateTasksResponse ¶
type CreateTasksResponse struct { *tchttp.BaseResponse Response *CreateTasksResponseParams `json:"Response"` }
func NewCreateTasksResponse ¶
func NewCreateTasksResponse() (response *CreateTasksResponse)
func (*CreateTasksResponse) FromJsonString ¶
func (r *CreateTasksResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*CreateTasksResponse) ToJsonString ¶
func (r *CreateTasksResponse) ToJsonString() string
type CreateTasksResponseParams ¶
type CreateTasksResponseParams struct { // ID of the current batch of submitted tasks BatchId *string `json:"BatchId,omitnil" name:"BatchId"` // Collection of task IDs arranged in order of execution TaskIdSet []*string `json:"TaskIdSet,omitnil" name:"TaskIdSet"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type CrontabResumeSuspendStrategy ¶
type CrontabResumeSuspendStrategy struct { // The scheduled start time, such as 8:00 AM every Monday. // Note: This field may return null, indicating that no valid values can be obtained. ResumeTime *string `json:"ResumeTime,omitnil" name:"ResumeTime"` // The scheduled suspension time, such as 8:00 PM every Monday. // Note: This field may return null, indicating that no valid values can be obtained. SuspendTime *string `json:"SuspendTime,omitnil" name:"SuspendTime"` // The suspension setting. Valid values: `0` (suspension after task end, default) and `1` (force suspension). // Note: This field may return null, indicating that no valid values can be obtained. SuspendStrategy *int64 `json:"SuspendStrategy,omitnil" name:"SuspendStrategy"` }
type DataEngineConfigPair ¶
type DataEngineConfigPair struct { }
type DataGovernPolicy ¶
type DataGovernPolicy struct { // Governance rule type. Valid values: `Customize` (custom) and `Intelligence` (intelligent). // Note: This field may return null, indicating that no valid values can be obtained. RuleType *string `json:"RuleType,omitnil" name:"RuleType"` // The governance engine. // Note: This field may return null, indicating that no valid values can be obtained. GovernEngine *string `json:"GovernEngine,omitnil" name:"GovernEngine"` }
type DeleteSparkAppRequest ¶
type DeleteSparkAppRequest struct { *tchttp.BaseRequest // The Spark job name. AppName *string `json:"AppName,omitnil" name:"AppName"` }
func NewDeleteSparkAppRequest ¶
func NewDeleteSparkAppRequest() (request *DeleteSparkAppRequest)
func (*DeleteSparkAppRequest) FromJsonString ¶
func (r *DeleteSparkAppRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DeleteSparkAppRequest) ToJsonString ¶
func (r *DeleteSparkAppRequest) ToJsonString() string
type DeleteSparkAppRequestParams ¶
type DeleteSparkAppRequestParams struct { // The Spark job name. AppName *string `json:"AppName,omitnil" name:"AppName"` }
Predefined struct for user
type DeleteSparkAppResponse ¶
type DeleteSparkAppResponse struct { *tchttp.BaseResponse Response *DeleteSparkAppResponseParams `json:"Response"` }
func NewDeleteSparkAppResponse ¶
func NewDeleteSparkAppResponse() (response *DeleteSparkAppResponse)
func (*DeleteSparkAppResponse) FromJsonString ¶
func (r *DeleteSparkAppResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DeleteSparkAppResponse) ToJsonString ¶
func (r *DeleteSparkAppResponse) ToJsonString() string
type DeleteSparkAppResponseParams ¶
type DeleteSparkAppResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeEngineUsageInfoRequest ¶
type DescribeEngineUsageInfoRequest struct { *tchttp.BaseRequest // The data engine ID. DataEngineId *string `json:"DataEngineId,omitnil" name:"DataEngineId"` }
func NewDescribeEngineUsageInfoRequest ¶
func NewDescribeEngineUsageInfoRequest() (request *DescribeEngineUsageInfoRequest)
func (*DescribeEngineUsageInfoRequest) FromJsonString ¶
func (r *DescribeEngineUsageInfoRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeEngineUsageInfoRequest) ToJsonString ¶
func (r *DescribeEngineUsageInfoRequest) ToJsonString() string
type DescribeEngineUsageInfoRequestParams ¶
type DescribeEngineUsageInfoRequestParams struct { // The data engine ID. DataEngineId *string `json:"DataEngineId,omitnil" name:"DataEngineId"` }
Predefined struct for user
type DescribeEngineUsageInfoResponse ¶
type DescribeEngineUsageInfoResponse struct { *tchttp.BaseResponse Response *DescribeEngineUsageInfoResponseParams `json:"Response"` }
func NewDescribeEngineUsageInfoResponse ¶
func NewDescribeEngineUsageInfoResponse() (response *DescribeEngineUsageInfoResponse)
func (*DescribeEngineUsageInfoResponse) FromJsonString ¶
func (r *DescribeEngineUsageInfoResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeEngineUsageInfoResponse) ToJsonString ¶
func (r *DescribeEngineUsageInfoResponse) ToJsonString() string
type DescribeEngineUsageInfoResponseParams ¶
type DescribeEngineUsageInfoResponseParams struct { // The total cluster spec. Total *int64 `json:"Total,omitnil" name:"Total"` // The used cluster spec. Used *int64 `json:"Used,omitnil" name:"Used"` // The available cluster spec. Available *int64 `json:"Available,omitnil" name:"Available"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeForbiddenTableProRequest ¶
type DescribeForbiddenTableProRequest struct {
*tchttp.BaseRequest
}
func NewDescribeForbiddenTableProRequest ¶
func NewDescribeForbiddenTableProRequest() (request *DescribeForbiddenTableProRequest)
func (*DescribeForbiddenTableProRequest) FromJsonString ¶
func (r *DescribeForbiddenTableProRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeForbiddenTableProRequest) ToJsonString ¶
func (r *DescribeForbiddenTableProRequest) ToJsonString() string
type DescribeForbiddenTableProRequestParams ¶
type DescribeForbiddenTableProRequestParams struct { }
Predefined struct for user
type DescribeForbiddenTableProResponse ¶
type DescribeForbiddenTableProResponse struct { *tchttp.BaseResponse Response *DescribeForbiddenTableProResponseParams `json:"Response"` }
func NewDescribeForbiddenTableProResponse ¶
func NewDescribeForbiddenTableProResponse() (response *DescribeForbiddenTableProResponse)
func (*DescribeForbiddenTableProResponse) FromJsonString ¶
func (r *DescribeForbiddenTableProResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeForbiddenTableProResponse) ToJsonString ¶
func (r *DescribeForbiddenTableProResponse) ToJsonString() string
type DescribeForbiddenTableProResponseParams ¶
type DescribeForbiddenTableProResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeLakeFsDirSummaryRequest ¶
type DescribeLakeFsDirSummaryRequest struct {
*tchttp.BaseRequest
}
func NewDescribeLakeFsDirSummaryRequest ¶
func NewDescribeLakeFsDirSummaryRequest() (request *DescribeLakeFsDirSummaryRequest)
func (*DescribeLakeFsDirSummaryRequest) FromJsonString ¶
func (r *DescribeLakeFsDirSummaryRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeLakeFsDirSummaryRequest) ToJsonString ¶
func (r *DescribeLakeFsDirSummaryRequest) ToJsonString() string
type DescribeLakeFsDirSummaryRequestParams ¶
type DescribeLakeFsDirSummaryRequestParams struct { }
Predefined struct for user
type DescribeLakeFsDirSummaryResponse ¶
type DescribeLakeFsDirSummaryResponse struct { *tchttp.BaseResponse Response *DescribeLakeFsDirSummaryResponseParams `json:"Response"` }
func NewDescribeLakeFsDirSummaryResponse ¶
func NewDescribeLakeFsDirSummaryResponse() (response *DescribeLakeFsDirSummaryResponse)
func (*DescribeLakeFsDirSummaryResponse) FromJsonString ¶
func (r *DescribeLakeFsDirSummaryResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeLakeFsDirSummaryResponse) ToJsonString ¶
func (r *DescribeLakeFsDirSummaryResponse) ToJsonString() string
type DescribeLakeFsDirSummaryResponseParams ¶
type DescribeLakeFsDirSummaryResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeLakeFsInfoRequest ¶
type DescribeLakeFsInfoRequest struct {
*tchttp.BaseRequest
}
func NewDescribeLakeFsInfoRequest ¶
func NewDescribeLakeFsInfoRequest() (request *DescribeLakeFsInfoRequest)
func (*DescribeLakeFsInfoRequest) FromJsonString ¶
func (r *DescribeLakeFsInfoRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeLakeFsInfoRequest) ToJsonString ¶
func (r *DescribeLakeFsInfoRequest) ToJsonString() string
type DescribeLakeFsInfoRequestParams ¶
type DescribeLakeFsInfoRequestParams struct { }
Predefined struct for user
type DescribeLakeFsInfoResponse ¶
type DescribeLakeFsInfoResponse struct { *tchttp.BaseResponse Response *DescribeLakeFsInfoResponseParams `json:"Response"` }
func NewDescribeLakeFsInfoResponse ¶
func NewDescribeLakeFsInfoResponse() (response *DescribeLakeFsInfoResponse)
func (*DescribeLakeFsInfoResponse) FromJsonString ¶
func (r *DescribeLakeFsInfoResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeLakeFsInfoResponse) ToJsonString ¶
func (r *DescribeLakeFsInfoResponse) ToJsonString() string
type DescribeLakeFsInfoResponseParams ¶
type DescribeLakeFsInfoResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeResultDownloadRequest ¶
type DescribeResultDownloadRequest struct { *tchttp.BaseRequest // The query task ID. DownloadId *string `json:"DownloadId,omitnil" name:"DownloadId"` }
func NewDescribeResultDownloadRequest ¶
func NewDescribeResultDownloadRequest() (request *DescribeResultDownloadRequest)
func (*DescribeResultDownloadRequest) FromJsonString ¶
func (r *DescribeResultDownloadRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeResultDownloadRequest) ToJsonString ¶
func (r *DescribeResultDownloadRequest) ToJsonString() string
type DescribeResultDownloadRequestParams ¶
type DescribeResultDownloadRequestParams struct { // The query task ID. DownloadId *string `json:"DownloadId,omitnil" name:"DownloadId"` }
Predefined struct for user
type DescribeResultDownloadResponse ¶
type DescribeResultDownloadResponse struct { *tchttp.BaseResponse Response *DescribeResultDownloadResponseParams `json:"Response"` }
func NewDescribeResultDownloadResponse ¶
func NewDescribeResultDownloadResponse() (response *DescribeResultDownloadResponse)
func (*DescribeResultDownloadResponse) FromJsonString ¶
func (r *DescribeResultDownloadResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeResultDownloadResponse) ToJsonString ¶
func (r *DescribeResultDownloadResponse) ToJsonString() string
type DescribeResultDownloadResponseParams ¶
type DescribeResultDownloadResponseParams struct { // The file save path. // Note: This field may return null, indicating that no valid values can be obtained. Path *string `json:"Path,omitnil" name:"Path"` // The task status. Valid values: `init`, `queue`, `format`, `compress`, `success`, `timeout`, and `error`. Status *string `json:"Status,omitnil" name:"Status"` // The task exception cause. // Note: This field may return null, indicating that no valid values can be obtained. Reason *string `json:"Reason,omitnil" name:"Reason"` // The temporary secret ID. // Note: This field may return null, indicating that no valid values can be obtained. SecretId *string `json:"SecretId,omitnil" name:"SecretId"` // The temporary secret key. // Note: This field may return null, indicating that no valid values can be obtained. SecretKey *string `json:"SecretKey,omitnil" name:"SecretKey"` // The temporary token. // Note: This field may return null, indicating that no valid values can be obtained. Token *string `json:"Token,omitnil" name:"Token"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeSparkAppJobRequest ¶
type DescribeSparkAppJobRequest struct { *tchttp.BaseRequest // The Spark job ID. If it co-exists with `JobName`, `JobName` is invalid. At least one of `JobId` and `JobName` must be specified. JobId *string `json:"JobId,omitnil" name:"JobId"` // Spark job name JobName *string `json:"JobName,omitnil" name:"JobName"` }
func NewDescribeSparkAppJobRequest ¶
func NewDescribeSparkAppJobRequest() (request *DescribeSparkAppJobRequest)
func (*DescribeSparkAppJobRequest) FromJsonString ¶
func (r *DescribeSparkAppJobRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeSparkAppJobRequest) ToJsonString ¶
func (r *DescribeSparkAppJobRequest) ToJsonString() string
type DescribeSparkAppJobRequestParams ¶
type DescribeSparkAppJobRequestParams struct { // The Spark job ID. If it co-exists with `JobName`, `JobName` is invalid. At least one of `JobId` and `JobName` must be specified. JobId *string `json:"JobId,omitnil" name:"JobId"` // Spark job name JobName *string `json:"JobName,omitnil" name:"JobName"` }
Predefined struct for user
type DescribeSparkAppJobResponse ¶
type DescribeSparkAppJobResponse struct { *tchttp.BaseResponse Response *DescribeSparkAppJobResponseParams `json:"Response"` }
func NewDescribeSparkAppJobResponse ¶
func NewDescribeSparkAppJobResponse() (response *DescribeSparkAppJobResponse)
func (*DescribeSparkAppJobResponse) FromJsonString ¶
func (r *DescribeSparkAppJobResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeSparkAppJobResponse) ToJsonString ¶
func (r *DescribeSparkAppJobResponse) ToJsonString() string
type DescribeSparkAppJobResponseParams ¶
type DescribeSparkAppJobResponseParams struct { // Spark job details // Note: This field may return null, indicating that no valid values can be obtained. Job *SparkJobInfo `json:"Job,omitnil" name:"Job"` // Whether the queried Spark job exists IsExists *bool `json:"IsExists,omitnil" name:"IsExists"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeSparkAppJobsRequest ¶
type DescribeSparkAppJobsRequest struct { *tchttp.BaseRequest // The returned results are sorted by this field. SortBy *string `json:"SortBy,omitnil" name:"SortBy"` // Descending or ascending order, such as `desc`. Sorting *string `json:"Sorting,omitnil" name:"Sorting"` // The filters. The following types are supported, and `Name` of the parameter passed in must be one of them: `spark-job-name` (job name), `spark-job-id` (job ID), `spark-app-type` (job type: `1` for batch, `2` for streaming, and `4` for SQL), `user-name` (creator), and `key-word` (job name or ID keywords for fuzzy search). Filters []*Filter `json:"Filters,omitnil" name:"Filters"` // The update start time in the format of yyyy-mm-dd HH:MM:SS. StartTime *string `json:"StartTime,omitnil" name:"StartTime"` // The update end time in the format of yyyy-mm-dd HH:MM:SS. EndTime *string `json:"EndTime,omitnil" name:"EndTime"` // The query list offset, which defaults to 0. Offset *int64 `json:"Offset,omitnil" name:"Offset"` // The maximum number of queries allowed in the list, which defaults to 100. Limit *int64 `json:"Limit,omitnil" name:"Limit"` }
func NewDescribeSparkAppJobsRequest ¶
func NewDescribeSparkAppJobsRequest() (request *DescribeSparkAppJobsRequest)
func (*DescribeSparkAppJobsRequest) FromJsonString ¶
func (r *DescribeSparkAppJobsRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeSparkAppJobsRequest) ToJsonString ¶
func (r *DescribeSparkAppJobsRequest) ToJsonString() string
type DescribeSparkAppJobsRequestParams ¶
type DescribeSparkAppJobsRequestParams struct { // The returned results are sorted by this field. SortBy *string `json:"SortBy,omitnil" name:"SortBy"` // Descending or ascending order, such as `desc`. Sorting *string `json:"Sorting,omitnil" name:"Sorting"` // The filters. The following types are supported, and `Name` of the parameter passed in must be one of them: `spark-job-name` (job name), `spark-job-id` (job ID), `spark-app-type` (job type: `1` for batch, `2` for streaming, and `4` for SQL), `user-name` (creator), and `key-word` (job name or ID keywords for fuzzy search). Filters []*Filter `json:"Filters,omitnil" name:"Filters"` // The update start time in the format of yyyy-mm-dd HH:MM:SS. StartTime *string `json:"StartTime,omitnil" name:"StartTime"` // The update end time in the format of yyyy-mm-dd HH:MM:SS. EndTime *string `json:"EndTime,omitnil" name:"EndTime"` // The query list offset, which defaults to 0. Offset *int64 `json:"Offset,omitnil" name:"Offset"` // The maximum number of queries allowed in the list, which defaults to 100. Limit *int64 `json:"Limit,omitnil" name:"Limit"` }
Predefined struct for user
type DescribeSparkAppJobsResponse ¶
type DescribeSparkAppJobsResponse struct { *tchttp.BaseResponse Response *DescribeSparkAppJobsResponseParams `json:"Response"` }
func NewDescribeSparkAppJobsResponse ¶
func NewDescribeSparkAppJobsResponse() (response *DescribeSparkAppJobsResponse)
func (*DescribeSparkAppJobsResponse) FromJsonString ¶
func (r *DescribeSparkAppJobsResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks or strict type checks.
func (*DescribeSparkAppJobsResponse) ToJsonString ¶
func (r *DescribeSparkAppJobsResponse) ToJsonString() string
type DescribeSparkAppJobsResponseParams ¶
type DescribeSparkAppJobsResponseParams struct { // Detailed list of Spark jobs SparkAppJobs []*SparkJobInfo `json:"SparkAppJobs,omitnil" name:"SparkAppJobs"` // Total number of Spark jobs TotalCount *int64 `json:"TotalCount,omitnil" name:"TotalCount"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeSparkAppTasksRequest ¶
type DescribeSparkAppTasksRequest struct { *tchttp.BaseRequest // Spark job ID JobId *string `json:"JobId,omitnil" name:"JobId"` // Paginated query offset Offset *int64 `json:"Offset,omitnil" name:"Offset"` // Paginated query limit Limit *int64 `json:"Limit,omitnil" name:"Limit"` // Execution instance ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The update start time in the format of yyyy-MM-dd HH:mm:ss. StartTime *string `json:"StartTime,omitnil" name:"StartTime"` // The update end time in the format of yyyy-MM-dd HH:mm:ss. EndTime *string `json:"EndTime,omitnil" name:"EndTime"` // Filter by this parameter, which can be `task-state`. Filters []*Filter `json:"Filters,omitnil" name:"Filters"` }
func NewDescribeSparkAppTasksRequest ¶
func NewDescribeSparkAppTasksRequest() (request *DescribeSparkAppTasksRequest)
func (*DescribeSparkAppTasksRequest) FromJsonString ¶
func (r *DescribeSparkAppTasksRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeSparkAppTasksRequest) ToJsonString ¶
func (r *DescribeSparkAppTasksRequest) ToJsonString() string
type DescribeSparkAppTasksRequestParams ¶
type DescribeSparkAppTasksRequestParams struct { // Spark job ID JobId *string `json:"JobId,omitnil" name:"JobId"` // Paginated query offset Offset *int64 `json:"Offset,omitnil" name:"Offset"` // Paginated query limit Limit *int64 `json:"Limit,omitnil" name:"Limit"` // Execution instance ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The update start time in the format of yyyy-MM-dd HH:mm:ss. StartTime *string `json:"StartTime,omitnil" name:"StartTime"` // The update end time in the format of yyyy-MM-dd HH:mm:ss. EndTime *string `json:"EndTime,omitnil" name:"EndTime"` // Filter by this parameter, which can be `task-state`. Filters []*Filter `json:"Filters,omitnil" name:"Filters"` }
Predefined struct for user
type DescribeSparkAppTasksResponse ¶
type DescribeSparkAppTasksResponse struct { *tchttp.BaseResponse Response *DescribeSparkAppTasksResponseParams `json:"Response"` }
func NewDescribeSparkAppTasksResponse ¶
func NewDescribeSparkAppTasksResponse() (response *DescribeSparkAppTasksResponse)
func (*DescribeSparkAppTasksResponse) FromJsonString ¶
func (r *DescribeSparkAppTasksResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeSparkAppTasksResponse) ToJsonString ¶
func (r *DescribeSparkAppTasksResponse) ToJsonString() string
type DescribeSparkAppTasksResponseParams ¶
type DescribeSparkAppTasksResponseParams struct { // Task result (this field has been disused) // Note: This field may return null, indicating that no valid values can be obtained. Tasks *TaskResponseInfo `json:"Tasks,omitnil" name:"Tasks"` // Total number of tasks TotalCount *int64 `json:"TotalCount,omitnil" name:"TotalCount"` // List of task results // Note: This field may return null, indicating that no valid values can be obtained. SparkAppTasks []*TaskResponseInfo `json:"SparkAppTasks,omitnil" name:"SparkAppTasks"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeSparkSessionBatchSqlLogRequest ¶
type DescribeSparkSessionBatchSqlLogRequest struct { *tchttp.BaseRequest // The unique ID of a Spark SQL job. BatchId *string `json:"BatchId,omitnil" name:"BatchId"` }
func NewDescribeSparkSessionBatchSqlLogRequest ¶
func NewDescribeSparkSessionBatchSqlLogRequest() (request *DescribeSparkSessionBatchSqlLogRequest)
func (*DescribeSparkSessionBatchSqlLogRequest) FromJsonString ¶
func (r *DescribeSparkSessionBatchSqlLogRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeSparkSessionBatchSqlLogRequest) ToJsonString ¶
func (r *DescribeSparkSessionBatchSqlLogRequest) ToJsonString() string
type DescribeSparkSessionBatchSqlLogRequestParams ¶
type DescribeSparkSessionBatchSqlLogRequestParams struct { // The unique ID of a Spark SQL job. BatchId *string `json:"BatchId,omitnil" name:"BatchId"` }
Predefined struct for user
type DescribeSparkSessionBatchSqlLogResponse ¶
type DescribeSparkSessionBatchSqlLogResponse struct { *tchttp.BaseResponse Response *DescribeSparkSessionBatchSqlLogResponseParams `json:"Response"` }
func NewDescribeSparkSessionBatchSqlLogResponse ¶
func NewDescribeSparkSessionBatchSqlLogResponse() (response *DescribeSparkSessionBatchSqlLogResponse)
func (*DescribeSparkSessionBatchSqlLogResponse) FromJsonString ¶
func (r *DescribeSparkSessionBatchSqlLogResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeSparkSessionBatchSqlLogResponse) ToJsonString ¶
func (r *DescribeSparkSessionBatchSqlLogResponse) ToJsonString() string
type DescribeSparkSessionBatchSqlLogResponseParams ¶
type DescribeSparkSessionBatchSqlLogResponseParams struct { // The status. Valid values: `0` (initializing), `1` (successful), `2` (failed), `3` (canceled), and `4` (exception). State *uint64 `json:"State,omitnil" name:"State"` // The log information list. // Note: This field may return null, indicating that no valid values can be obtained. LogSet []*SparkSessionBatchLog `json:"LogSet,omitnil" name:"LogSet"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeTaskResultRequest ¶
type DescribeTaskResultRequest struct { *tchttp.BaseRequest // Unique task ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The pagination information returned by the last response. This parameter can be omitted in the first request, where the data will be returned from the beginning. The data with a volume set by the `MaxResults` field is returned each time. NextToken *string `json:"NextToken,omitnil" name:"NextToken"` // Maximum number of returned rows. Value range: 0–1,000. Default value: 1,000. MaxResults *int64 `json:"MaxResults,omitnil" name:"MaxResults"` }
func NewDescribeTaskResultRequest ¶
func NewDescribeTaskResultRequest() (request *DescribeTaskResultRequest)
func (*DescribeTaskResultRequest) FromJsonString ¶
func (r *DescribeTaskResultRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeTaskResultRequest) ToJsonString ¶
func (r *DescribeTaskResultRequest) ToJsonString() string
type DescribeTaskResultRequestParams ¶
type DescribeTaskResultRequestParams struct { // Unique task ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // The pagination information returned by the last response. This parameter can be omitted in the first request, where the data will be returned from the beginning. The data with a volume set by the `MaxResults` field is returned each time. NextToken *string `json:"NextToken,omitnil" name:"NextToken"` // Maximum number of returned rows. Value range: 0–1,000. Default value: 1,000. MaxResults *int64 `json:"MaxResults,omitnil" name:"MaxResults"` }
Predefined struct for user
type DescribeTaskResultResponse ¶
type DescribeTaskResultResponse struct { *tchttp.BaseResponse Response *DescribeTaskResultResponseParams `json:"Response"` }
func NewDescribeTaskResultResponse ¶
func NewDescribeTaskResultResponse() (response *DescribeTaskResultResponse)
func (*DescribeTaskResultResponse) FromJsonString ¶
func (r *DescribeTaskResultResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeTaskResultResponse) ToJsonString ¶
func (r *DescribeTaskResultResponse) ToJsonString() string
type DescribeTaskResultResponseParams ¶
type DescribeTaskResultResponseParams struct { // The queried task information. If the returned value is empty, the task with the entered task ID does not exist. The task result will be returned only if the task status is `2` (succeeded). // Note: This field may return null, indicating that no valid values can be obtained. TaskInfo *TaskResultInfo `json:"TaskInfo,omitnil" name:"TaskInfo"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeTasksRequest ¶
type DescribeTasksRequest struct { *tchttp.BaseRequest // Number of returned results. Default value: 10. Maximum value: 100. Limit *int64 `json:"Limit,omitnil" name:"Limit"` // Offset. Default value: 0. Offset *int64 `json:"Offset,omitnil" name:"Offset"` // Filter. The following filters are supported, and the `Name` input parameter must be one of them. Up to 50 `task-id` values can be filtered, while up to 5 other parameters can be filtered in total. // task-id - String - (filter by task ID). `task-id` format: e386471f-139a-4e59-877f-50ece8135b99. // task-state - String - (filter exactly by task status). Valid values: `0` (initial), `1` (running), `2` (succeeded), `-1` (failed). // task-sql-keyword - String - (filter fuzzily by SQL statement keyword, such as `DROP TABLE`). // task-operator- string (filter by sub-UIN) // task-kind - string (filter by task type) Filters []*Filter `json:"Filters,omitnil" name:"Filters"` // Sorting field. Valid values: `create-time` (default value), `update-time`. SortBy *string `json:"SortBy,omitnil" name:"SortBy"` // Sorting order. Valid values: `asc` (ascending order), `desc` (descending order). Default value: `asc`. Sorting *string `json:"Sorting,omitnil" name:"Sorting"` // Start time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time seven days ago by default. StartTime *string `json:"StartTime,omitnil" name:"StartTime"` // End time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time by default. The time span is (0, 30] days. Data in the last 45 days can be queried. EndTime *string `json:"EndTime,omitnil" name:"EndTime"` // The data engine name for filtering. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` }
func NewDescribeTasksRequest ¶
func NewDescribeTasksRequest() (request *DescribeTasksRequest)
func (*DescribeTasksRequest) FromJsonString ¶
func (r *DescribeTasksRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeTasksRequest) ToJsonString ¶
func (r *DescribeTasksRequest) ToJsonString() string
type DescribeTasksRequestParams ¶
type DescribeTasksRequestParams struct { // Number of returned results. Default value: 10. Maximum value: 100. Limit *int64 `json:"Limit,omitnil" name:"Limit"` // Offset. Default value: 0. Offset *int64 `json:"Offset,omitnil" name:"Offset"` // Filter. The following filters are supported, and the `Name` input parameter must be one of them. Up to 50 `task-id` values can be filtered, while up to 5 other parameters can be filtered in total. // task-id - String - (filter by task ID). `task-id` format: e386471f-139a-4e59-877f-50ece8135b99. // task-state - String - (filter exactly by task status). Valid values: `0` (initial), `1` (running), `2` (succeeded), `-1` (failed). // task-sql-keyword - String - (filter fuzzily by SQL statement keyword, such as `DROP TABLE`). // task-operator- string (filter by sub-UIN) // task-kind - string (filter by task type) Filters []*Filter `json:"Filters,omitnil" name:"Filters"` // Sorting field. Valid values: `create-time` (default value), `update-time`. SortBy *string `json:"SortBy,omitnil" name:"SortBy"` // Sorting order. Valid values: `asc` (ascending order), `desc` (descending order). Default value: `asc`. Sorting *string `json:"Sorting,omitnil" name:"Sorting"` // Start time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time seven days ago by default. StartTime *string `json:"StartTime,omitnil" name:"StartTime"` // End time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time by default. The time span is (0, 30] days. Data in the last 45 days can be queried. EndTime *string `json:"EndTime,omitnil" name:"EndTime"` // The data engine name for filtering. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` }
Predefined struct for user
type DescribeTasksResponse ¶
type DescribeTasksResponse struct { *tchttp.BaseResponse Response *DescribeTasksResponseParams `json:"Response"` }
func NewDescribeTasksResponse ¶
func NewDescribeTasksResponse() (response *DescribeTasksResponse)
func (*DescribeTasksResponse) FromJsonString ¶
func (r *DescribeTasksResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeTasksResponse) ToJsonString ¶
func (r *DescribeTasksResponse) ToJsonString() string
type DescribeTasksResponseParams ¶
type DescribeTasksResponseParams struct { // List of task objects. TaskList []*TaskResponseInfo `json:"TaskList,omitnil" name:"TaskList"` // Total number of instances TotalCount *uint64 `json:"TotalCount,omitnil" name:"TotalCount"` // The task overview. // Note: This field may return null, indicating that no valid values can be obtained. TasksOverview *TasksOverview `json:"TasksOverview,omitnil" name:"TasksOverview"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type DescribeUserRolesRequest ¶
type DescribeUserRolesRequest struct { *tchttp.BaseRequest // The number limit of enumerated user roles. Limit *int64 `json:"Limit,omitnil" name:"Limit"` // The offset for starting enumeration. Offset *int64 `json:"Offset,omitnil" name:"Offset"` // Fuzzy enumeration by arn. Fuzzy *string `json:"Fuzzy,omitnil" name:"Fuzzy"` // The field for sorting the returned results. SortBy *string `json:"SortBy,omitnil" name:"SortBy"` // The sorting order, descending or ascending, such as `desc`. Sorting *string `json:"Sorting,omitnil" name:"Sorting"` }
func NewDescribeUserRolesRequest ¶
func NewDescribeUserRolesRequest() (request *DescribeUserRolesRequest)
func (*DescribeUserRolesRequest) FromJsonString ¶
func (r *DescribeUserRolesRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeUserRolesRequest) ToJsonString ¶
func (r *DescribeUserRolesRequest) ToJsonString() string
type DescribeUserRolesRequestParams ¶
type DescribeUserRolesRequestParams struct { // The number limit of enumerated user roles. Limit *int64 `json:"Limit,omitnil" name:"Limit"` // The offset for starting enumeration. Offset *int64 `json:"Offset,omitnil" name:"Offset"` // Fuzzy enumeration by arn. Fuzzy *string `json:"Fuzzy,omitnil" name:"Fuzzy"` // The field for sorting the returned results. SortBy *string `json:"SortBy,omitnil" name:"SortBy"` // The sorting order, descending or ascending, such as `desc`. Sorting *string `json:"Sorting,omitnil" name:"Sorting"` }
Predefined struct for user
type DescribeUserRolesResponse ¶
type DescribeUserRolesResponse struct { *tchttp.BaseResponse Response *DescribeUserRolesResponseParams `json:"Response"` }
func NewDescribeUserRolesResponse ¶
func NewDescribeUserRolesResponse() (response *DescribeUserRolesResponse)
func (*DescribeUserRolesResponse) FromJsonString ¶
func (r *DescribeUserRolesResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*DescribeUserRolesResponse) ToJsonString ¶
func (r *DescribeUserRolesResponse) ToJsonString() string
type DescribeUserRolesResponseParams ¶
type DescribeUserRolesResponseParams struct { // The total number of user roles meeting the enumeration conditions. Total *int64 `json:"Total,omitnil" name:"Total"` // The user roles. UserRoles []*UserRole `json:"UserRoles,omitnil" name:"UserRoles"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type Execution ¶
type Execution struct { // The automatically generated SQL statements. SQL *string `json:"SQL,omitnil" name:"SQL"` }
type Filter ¶
type Filter struct { // Attribute name. If more than one filter exists, the logical relationship between these filters is `OR`. Name *string `json:"Name,omitnil" name:"Name"` // Attribute value. If multiple values exist in one filter, the logical relationship between these values is `OR`. Values []*string `json:"Values,omitnil" name:"Values"` }
type GenerateCreateMangedTableSqlRequest ¶
type GenerateCreateMangedTableSqlRequest struct { *tchttp.BaseRequest // The basic table information. TableBaseInfo *TableBaseInfo `json:"TableBaseInfo,omitnil" name:"TableBaseInfo"` // The table fields. Columns []*TColumn `json:"Columns,omitnil" name:"Columns"` // The table partitions. Partitions []*TPartition `json:"Partitions,omitnil" name:"Partitions"` // The table properties. Properties []*Property `json:"Properties,omitnil" name:"Properties"` // The Upsert key for a v2 table (in Upsert mode). UpsertKeys []*string `json:"UpsertKeys,omitnil" name:"UpsertKeys"` }
func NewGenerateCreateMangedTableSqlRequest ¶
func NewGenerateCreateMangedTableSqlRequest() (request *GenerateCreateMangedTableSqlRequest)
func (*GenerateCreateMangedTableSqlRequest) FromJsonString ¶
func (r *GenerateCreateMangedTableSqlRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*GenerateCreateMangedTableSqlRequest) ToJsonString ¶
func (r *GenerateCreateMangedTableSqlRequest) ToJsonString() string
type GenerateCreateMangedTableSqlRequestParams ¶
type GenerateCreateMangedTableSqlRequestParams struct { // The basic table information. TableBaseInfo *TableBaseInfo `json:"TableBaseInfo,omitnil" name:"TableBaseInfo"` // The table fields. Columns []*TColumn `json:"Columns,omitnil" name:"Columns"` // The table partitions. Partitions []*TPartition `json:"Partitions,omitnil" name:"Partitions"` // The table properties. Properties []*Property `json:"Properties,omitnil" name:"Properties"` // The Upsert key for a v2 table (in Upsert mode). UpsertKeys []*string `json:"UpsertKeys,omitnil" name:"UpsertKeys"` }
Predefined struct for user
type GenerateCreateMangedTableSqlResponse ¶
type GenerateCreateMangedTableSqlResponse struct { *tchttp.BaseResponse Response *GenerateCreateMangedTableSqlResponseParams `json:"Response"` }
func NewGenerateCreateMangedTableSqlResponse ¶
func NewGenerateCreateMangedTableSqlResponse() (response *GenerateCreateMangedTableSqlResponse)
func (*GenerateCreateMangedTableSqlResponse) FromJsonString ¶
func (r *GenerateCreateMangedTableSqlResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*GenerateCreateMangedTableSqlResponse) ToJsonString ¶
func (r *GenerateCreateMangedTableSqlResponse) ToJsonString() string
type GenerateCreateMangedTableSqlResponseParams ¶
type GenerateCreateMangedTableSqlResponseParams struct { // The SQL statements for creating the managed internal table. Execution *Execution `json:"Execution,omitnil" name:"Execution"` // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type KVPair ¶
type KVPair struct { // Configured key // Note: This field may return null, indicating that no valid values can be obtained. Key *string `json:"Key,omitnil" name:"Key"` // Configured value // Note: This field may return null, indicating that no valid values can be obtained. Value *string `json:"Value,omitnil" name:"Value"` }
type ModifyGovernEventRuleRequest ¶
type ModifyGovernEventRuleRequest struct {
*tchttp.BaseRequest
}
func NewModifyGovernEventRuleRequest ¶
func NewModifyGovernEventRuleRequest() (request *ModifyGovernEventRuleRequest)
func (*ModifyGovernEventRuleRequest) FromJsonString ¶
func (r *ModifyGovernEventRuleRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*ModifyGovernEventRuleRequest) ToJsonString ¶
func (r *ModifyGovernEventRuleRequest) ToJsonString() string
type ModifyGovernEventRuleRequestParams ¶
type ModifyGovernEventRuleRequestParams struct { }
Predefined struct for user
type ModifyGovernEventRuleResponse ¶
type ModifyGovernEventRuleResponse struct { *tchttp.BaseResponse Response *ModifyGovernEventRuleResponseParams `json:"Response"` }
func NewModifyGovernEventRuleResponse ¶
func NewModifyGovernEventRuleResponse() (response *ModifyGovernEventRuleResponse)
func (*ModifyGovernEventRuleResponse) FromJsonString ¶
func (r *ModifyGovernEventRuleResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*ModifyGovernEventRuleResponse) ToJsonString ¶
func (r *ModifyGovernEventRuleResponse) ToJsonString() string
type ModifyGovernEventRuleResponseParams ¶
type ModifyGovernEventRuleResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type ModifySparkAppBatchRequest ¶
type ModifySparkAppBatchRequest struct { *tchttp.BaseRequest // The list of the IDs of the Spark job tasks to be modified in batches. SparkAppId []*string `json:"SparkAppId,omitnil" name:"SparkAppId"` // The engine ID. DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"` // The driver size. // Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`. // Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`. AppDriverSize *string `json:"AppDriverSize,omitnil" name:"AppDriverSize"` // The executor size. // Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`. // Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`. AppExecutorSize *string `json:"AppExecutorSize,omitnil" name:"AppExecutorSize"` // The executor count. The minimum value is 1 and the maximum value is less than the cluster specification. AppExecutorNums *uint64 `json:"AppExecutorNums,omitnil" name:"AppExecutorNums"` // The maximum executor count (in dynamic configuration scenarios). The minimum value is 1 and the maximum value is less than the cluster specification. If you set `ExecutorMaxNumbers` to a value smaller than that of `ExecutorNums`, the value of `ExecutorMaxNumbers` is automatically changed to that of `ExecutorNums`. AppExecutorMaxNumbers *uint64 `json:"AppExecutorMaxNumbers,omitnil" name:"AppExecutorMaxNumbers"` // Whether to inherit the task resource configuration from the cluster template. Valid values: `0` (default): No; `1`: Yes. IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"` }
func NewModifySparkAppBatchRequest ¶
func NewModifySparkAppBatchRequest() (request *ModifySparkAppBatchRequest)
func (*ModifySparkAppBatchRequest) FromJsonString ¶
func (r *ModifySparkAppBatchRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*ModifySparkAppBatchRequest) ToJsonString ¶
func (r *ModifySparkAppBatchRequest) ToJsonString() string
type ModifySparkAppBatchRequestParams ¶
type ModifySparkAppBatchRequestParams struct { // The list of the IDs of the Spark job tasks to be modified in batches. SparkAppId []*string `json:"SparkAppId,omitnil" name:"SparkAppId"` // The engine ID. DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"` // The driver size. // Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`. // Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`. AppDriverSize *string `json:"AppDriverSize,omitnil" name:"AppDriverSize"` // The executor size. // Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`. // Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`. AppExecutorSize *string `json:"AppExecutorSize,omitnil" name:"AppExecutorSize"` // The executor count. The minimum value is 1 and the maximum value is less than the cluster specification. AppExecutorNums *uint64 `json:"AppExecutorNums,omitnil" name:"AppExecutorNums"` // The maximum executor count (in dynamic configuration scenarios). The minimum value is 1 and the maximum value is less than the cluster specification. If you set `ExecutorMaxNumbers` to a value smaller than that of `ExecutorNums`, the value of `ExecutorMaxNumbers` is automatically changed to that of `ExecutorNums`. AppExecutorMaxNumbers *uint64 `json:"AppExecutorMaxNumbers,omitnil" name:"AppExecutorMaxNumbers"` // Whether to inherit the task resource configuration from the cluster template. Valid values: `0` (default): No; `1`: Yes. IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"` }
Predefined struct for user
type ModifySparkAppBatchResponse ¶
type ModifySparkAppBatchResponse struct { *tchttp.BaseResponse Response *ModifySparkAppBatchResponseParams `json:"Response"` }
func NewModifySparkAppBatchResponse ¶
func NewModifySparkAppBatchResponse() (response *ModifySparkAppBatchResponse)
func (*ModifySparkAppBatchResponse) FromJsonString ¶
func (r *ModifySparkAppBatchResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*ModifySparkAppBatchResponse) ToJsonString ¶
func (r *ModifySparkAppBatchResponse) ToJsonString() string
type ModifySparkAppBatchResponseParams ¶
type ModifySparkAppBatchResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type ModifySparkAppRequest ¶
type ModifySparkAppRequest struct { *tchttp.BaseRequest // The Spark job name. AppName *string `json:"AppName,omitnil" name:"AppName"` // The Spark job type. Valid values: `1` for Spark JAR job and `2` for Spark streaming job. AppType *int64 `json:"AppType,omitnil" name:"AppType"` // The data engine executing the Spark job. DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"` // The path of the Spark job package. AppFile *string `json:"AppFile,omitnil" name:"AppFile"` // The data access policy (CAM role arn). RoleArn *int64 `json:"RoleArn,omitnil" name:"RoleArn"` // The driver size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). AppDriverSize *string `json:"AppDriverSize,omitnil" name:"AppDriverSize"` // The executor size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs). AppExecutorSize *string `json:"AppExecutorSize,omitnil" name:"AppExecutorSize"` // Number of Spark job executors AppExecutorNums *int64 `json:"AppExecutorNums,omitnil" name:"AppExecutorNums"` // The Spark job ID. SparkAppId *string `json:"SparkAppId,omitnil" name:"SparkAppId"` // This field has been disused. Use the `Datasource` field instead. Eni *string `json:"Eni,omitnil" name:"Eni"` // The source of the Spark job package. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocal *string `json:"IsLocal,omitnil" name:"IsLocal"` // The main class of the Spark job. MainClass *string `json:"MainClass,omitnil" name:"MainClass"` // Spark configurations separated by line break AppConf *string `json:"AppConf,omitnil" name:"AppConf"` // The source of the dependency JAR packages of the Spark job. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). 
IsLocalJars *string `json:"IsLocalJars,omitnil" name:"IsLocalJars"` // The dependency JAR packages of the Spark JAR job (JAR packages), separated by comma. AppJars *string `json:"AppJars,omitnil" name:"AppJars"` // The source of the dependency files of the Spark job. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalFiles *string `json:"IsLocalFiles,omitnil" name:"IsLocalFiles"` // The dependency files of the Spark job (files other than JAR and ZIP packages), separated by comma. AppFiles *string `json:"AppFiles,omitnil" name:"AppFiles"` // The source of the PySpark dependencies. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitnil" name:"IsLocalPythonFiles"` // The PySpark dependencies (Python files), separated by comma, with .py, .zip, and .egg formats supported. AppPythonFiles *string `json:"AppPythonFiles,omitnil" name:"AppPythonFiles"` // The input parameters of the Spark job, separated by comma. CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"` // The maximum number of retries, valid for Spark streaming tasks only. MaxRetries *int64 `json:"MaxRetries,omitnil" name:"MaxRetries"` // Data source name DataSource *string `json:"DataSource,omitnil" name:"DataSource"` // The source of the dependency archives of the Spark job. Valid values: `cos` for COS and `lakefs` for the local system (for use in the console, but this method does not support direct API calls). IsLocalArchives *string `json:"IsLocalArchives,omitnil" name:"IsLocalArchives"` // The dependency archives of the Spark job, separated by comma, with tar.gz, .tgz, and .tar formats supported. AppArchives *string `json:"AppArchives,omitnil" name:"AppArchives"` // The Spark image version. 
SparkImage *string `json:"SparkImage,omitnil" name:"SparkImage"` // The Spark image version name. SparkImageVersion *string `json:"SparkImageVersion,omitnil" name:"SparkImageVersion"` // The specified executor count (max), which defaults to 1. This parameter applies if the "Dynamic" mode is selected. If the "Dynamic" mode is not selected, the executor count is equal to `AppExecutorNums`. AppExecutorMaxNumbers *int64 `json:"AppExecutorMaxNumbers,omitnil" name:"AppExecutorMaxNumbers"` // The associated Data Lake Compute query script. SessionId *string `json:"SessionId,omitnil" name:"SessionId"` // Whether to inherit the task resource configuration from the cluster configuration template. Valid values: `0` (default): No; `1`: Yes. IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"` // Whether to run the task with the session SQLs. Valid values: `false` for no and `true` for yes. IsSessionStarted *bool `json:"IsSessionStarted,omitnil" name:"IsSessionStarted"` }
func NewModifySparkAppRequest ¶
func NewModifySparkAppRequest() (request *ModifySparkAppRequest)
func (*ModifySparkAppRequest) FromJsonString ¶
func (r *ModifySparkAppRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs neither parameter checks nor strict type checks.
func (*ModifySparkAppRequest) ToJsonString ¶
func (r *ModifySparkAppRequest) ToJsonString() string
type ModifySparkAppRequestParams ¶
// ModifySparkAppRequestParams carries the input parameters of the
// ModifySparkApp API call. All fields are optional pointers; nil means
// "not set" (the `omitnil` tag option drops unset fields on serialization).
type ModifySparkAppRequestParams struct {
	// The Spark job name.
	AppName *string `json:"AppName,omitnil" name:"AppName"`
	// The Spark job type. Valid values: `1` for Spark JAR job and `2` for Spark streaming job.
	AppType *int64 `json:"AppType,omitnil" name:"AppType"`
	// The data engine executing the Spark job.
	DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"`
	// The path of the Spark job package.
	AppFile *string `json:"AppFile,omitnil" name:"AppFile"`
	// The data access policy (CAM role arn).
	RoleArn *int64 `json:"RoleArn,omitnil" name:"RoleArn"`
	// The driver size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs).
	AppDriverSize *string `json:"AppDriverSize,omitnil" name:"AppDriverSize"`
	// The executor size. Valid values: `small` (default, 1 CU), `medium` (2 CUs), `large` (4 CUs), and `xlarge` (8 CUs).
	AppExecutorSize *string `json:"AppExecutorSize,omitnil" name:"AppExecutorSize"`
	// Number of Spark job executors.
	AppExecutorNums *int64 `json:"AppExecutorNums,omitnil" name:"AppExecutorNums"`
	// The Spark job ID.
	SparkAppId *string `json:"SparkAppId,omitnil" name:"SparkAppId"`
	// This field has been disused. Use the `Datasource` field instead.
	Eni *string `json:"Eni,omitnil" name:"Eni"`
	// The source of the Spark job package. Valid values: `cos` for COS and `lakefs` for the local system (console-only; not supported by direct API calls).
	IsLocal *string `json:"IsLocal,omitnil" name:"IsLocal"`
	// The main class of the Spark job.
	MainClass *string `json:"MainClass,omitnil" name:"MainClass"`
	// Spark configurations, separated by line break.
	AppConf *string `json:"AppConf,omitnil" name:"AppConf"`
	// The source of the dependency JAR packages. Valid values: `cos` for COS and `lakefs` for the local system (console-only; not supported by direct API calls).
	IsLocalJars *string `json:"IsLocalJars,omitnil" name:"IsLocalJars"`
	// The dependency JAR packages of the Spark JAR job, separated by comma.
	AppJars *string `json:"AppJars,omitnil" name:"AppJars"`
	// The source of the dependency files. Valid values: `cos` for COS and `lakefs` for the local system (console-only; not supported by direct API calls).
	IsLocalFiles *string `json:"IsLocalFiles,omitnil" name:"IsLocalFiles"`
	// The dependency files of the Spark job (files other than JAR and ZIP packages), separated by comma.
	AppFiles *string `json:"AppFiles,omitnil" name:"AppFiles"`
	// The source of the PySpark dependencies. Valid values: `cos` for COS and `lakefs` for the local system (console-only; not supported by direct API calls).
	IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitnil" name:"IsLocalPythonFiles"`
	// The PySpark dependencies (Python files), separated by comma, with .py, .zip, and .egg formats supported.
	AppPythonFiles *string `json:"AppPythonFiles,omitnil" name:"AppPythonFiles"`
	// The input parameters of the Spark job, separated by comma.
	CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"`
	// The maximum number of retries, valid for Spark streaming tasks only.
	MaxRetries *int64 `json:"MaxRetries,omitnil" name:"MaxRetries"`
	// Data source name.
	DataSource *string `json:"DataSource,omitnil" name:"DataSource"`
	// The source of the dependency archives. Valid values: `cos` for COS and `lakefs` for the local system (console-only; not supported by direct API calls).
	IsLocalArchives *string `json:"IsLocalArchives,omitnil" name:"IsLocalArchives"`
	// The dependency archives of the Spark job, separated by comma, with tar.gz, .tgz, and .tar formats supported.
	AppArchives *string `json:"AppArchives,omitnil" name:"AppArchives"`
	// The Spark image version.
	SparkImage *string `json:"SparkImage,omitnil" name:"SparkImage"`
	// The Spark image version name.
	SparkImageVersion *string `json:"SparkImageVersion,omitnil" name:"SparkImageVersion"`
	// The specified executor count (max), which defaults to 1. Applies only when the "Dynamic" mode is selected; otherwise the executor count equals `AppExecutorNums`.
	AppExecutorMaxNumbers *int64 `json:"AppExecutorMaxNumbers,omitnil" name:"AppExecutorMaxNumbers"`
	// The associated Data Lake Compute query script.
	SessionId *string `json:"SessionId,omitnil" name:"SessionId"`
	// Whether to inherit the task resource configuration from the cluster configuration template. Valid values: `0` (default): No; `1`: Yes.
	IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"`
	// Whether to run the task with the session SQLs. Valid values: `false` for no and `true` for yes.
	IsSessionStarted *bool `json:"IsSessionStarted,omitnil" name:"IsSessionStarted"`
}
Predefined struct for user
type ModifySparkAppResponse ¶
type ModifySparkAppResponse struct { *tchttp.BaseResponse Response *ModifySparkAppResponseParams `json:"Response"` }
func NewModifySparkAppResponse ¶
func NewModifySparkAppResponse() (response *ModifySparkAppResponse)
func (*ModifySparkAppResponse) FromJsonString ¶
func (r *ModifySparkAppResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checks
func (*ModifySparkAppResponse) ToJsonString ¶
func (r *ModifySparkAppResponse) ToJsonString() string
type ModifySparkAppResponseParams ¶
// ModifySparkAppResponseParams is the payload of a ModifySparkApp response.
type ModifySparkAppResponseParams struct {
	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type Policy ¶
// Policy describes a single Data Lake Compute permission policy entry:
// the target resource (catalog/database/table/view/function/column/engine),
// the granted operation, and bookkeeping about where the grant came from.
type Policy struct {
	// The name of the target database. `*` represents all databases in the current catalog. For admin permissions it must be `*`; for data connection permissions it must be null; for other permissions it can be any database.
	Database *string `json:"Database,omitnil" name:"Database"`
	// The name of the target data source. For admin permission it must be `*`; for data source and database permissions it must be `COSDataCatalog` or `*`; for table permissions it can be a custom data source; if empty, `DataLakeCatalog` is used.
	// Note: For a custom data source, the permissions manageable in the console are subsets of the account permissions granted when the data source was connected.
	Catalog *string `json:"Catalog,omitnil" name:"Catalog"`
	// The name of the target table. `*` represents all tables in the current database. For admin permissions it must be `*`; for data connection and database permissions it must be null; for other permissions it can be any table.
	Table *string `json:"Table,omitnil" name:"Table"`
	// The target permissions, which vary by level. Admin: `ALL` (default); data connection: `CREATE`; database: `ALL`, `CREATE`, `ALTER`, `DROP`; table: `ALL`, `SELECT`, `INSERT`, `ALTER`, `DELETE`, `DROP`, `UPDATE`.
	// Note: For table permissions on a data source other than `COSDataCatalog`, only `SELECT` can be granted here.
	Operation *string `json:"Operation,omitnil" name:"Operation"`
	// The permission type. Valid values: `ADMIN`, `DATASOURCE`, `DATABASE`, `TABLE`, `VIEW`, `FUNCTION`, `COLUMN`, and `ENGINE`. If empty, `ADMIN` is used.
	PolicyType *string `json:"PolicyType,omitnil" name:"PolicyType"`
	// The name of the target function. `*` represents all functions in the current catalog.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Function *string `json:"Function,omitnil" name:"Function"`
	// The name of the target view. `*` represents all views in the current database.
	// Note: This field may return null, indicating that no valid values can be obtained.
	View *string `json:"View,omitnil" name:"View"`
	// The name of the target column. `*` represents all columns. For admin permissions it must be `*`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Column *string `json:"Column,omitnil" name:"Column"`
	// The name of the target data engine. `*` represents all engines. For admin permissions it must be `*`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"`
	// Whether the grantee may further grant the permissions. Valid values: `false` (default) and `true`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ReAuth *bool `json:"ReAuth,omitnil" name:"ReAuth"`
	// The permission source, not required as an input parameter. Valid values: `USER` and `WORKGROUP`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Source *string `json:"Source,omitnil" name:"Source"`
	// The grant mode, not required as an input parameter. Valid values: `COMMON` and `SENIOR`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Mode *string `json:"Mode,omitnil" name:"Mode"`
	// The operator, not required as an input parameter.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Operator *string `json:"Operator,omitnil" name:"Operator"`
	// The permission policy creation time, not required as an input parameter.
	// Note: This field may return null, indicating that no valid values can be obtained.
	CreateTime *string `json:"CreateTime,omitnil" name:"CreateTime"`
	// The ID of the work group; applies only when `Source` is `WORKGROUP`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	SourceId *int64 `json:"SourceId,omitnil" name:"SourceId"`
	// The name of the work group; applies only when `Source` is `WORKGROUP`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	SourceName *string `json:"SourceName,omitnil" name:"SourceName"`
	// The policy ID.
	// Note: This field may return null, indicating that no valid values can be obtained.
	Id *int64 `json:"Id,omitnil" name:"Id"`
}
type PrestoMonitorMetrics ¶
// PrestoMonitorMetrics reports cache hit-rate metrics for a Presto engine.
type PrestoMonitorMetrics struct {
	// The Alluxio cache hit rate.
	// Note: This field may return null, indicating that no valid values can be obtained.
	LocalCacheHitRate *float64 `json:"LocalCacheHitRate,omitnil" name:"LocalCacheHitRate"`
	// The Fragment cache hit rate.
	// Note: This field may return null, indicating that no valid values can be obtained.
	FragmentCacheHitRate *float64 `json:"FragmentCacheHitRate,omitnil" name:"FragmentCacheHitRate"`
}
type SessionResourceTemplate ¶
// SessionResourceTemplate defines the default driver/executor resource
// configuration applied to sessions on a cluster.
type SessionResourceTemplate struct {
	// The driver size.
	// Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`.
	// Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	DriverSize *string `json:"DriverSize,omitnil" name:"DriverSize"`
	// The executor size.
	// Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`.
	// Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ExecutorSize *string `json:"ExecutorSize,omitnil" name:"ExecutorSize"`
	// The executor count. The minimum value is 1 and the maximum value is less than the cluster specification.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ExecutorNums *uint64 `json:"ExecutorNums,omitnil" name:"ExecutorNums"`
	// The maximum executor count (in dynamic mode). The minimum value is 1 and the maximum value is less than the cluster specification. If `ExecutorMaxNumbers` is set smaller than `ExecutorNums`, it is automatically raised to `ExecutorNums`.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ExecutorMaxNumbers *uint64 `json:"ExecutorMaxNumbers,omitnil" name:"ExecutorMaxNumbers"`
}
type SparkJobInfo ¶
type SparkJobInfo struct { // Spark job ID JobId *string `json:"JobId,omitnil" name:"JobId"` // Spark job name JobName *string `json:"JobName,omitnil" name:"JobName"` // Spark job type. Valid values: `1` (batch job), `2` (streaming job). JobType *int64 `json:"JobType,omitnil" name:"JobType"` // Engine name DataEngine *string `json:"DataEngine,omitnil" name:"DataEngine"` // This field has been disused. Use the `Datasource` field instead. Eni *string `json:"Eni,omitnil" name:"Eni"` // Whether the program package is uploaded locally. Valid values: `cos`, `lakefs`. IsLocal *string `json:"IsLocal,omitnil" name:"IsLocal"` // Program package path JobFile *string `json:"JobFile,omitnil" name:"JobFile"` // Role ID RoleArn *int64 `json:"RoleArn,omitnil" name:"RoleArn"` // Main class of Spark job execution MainClass *string `json:"MainClass,omitnil" name:"MainClass"` // Command line parameters of the Spark job separated by space CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"` // Native Spark configurations separated by line break JobConf *string `json:"JobConf,omitnil" name:"JobConf"` // Whether the dependency JAR packages are uploaded locally. Valid values: `cos`, `lakefs`. IsLocalJars *string `json:"IsLocalJars,omitnil" name:"IsLocalJars"` // Dependency JAR packages of the Spark job separated by comma JobJars *string `json:"JobJars,omitnil" name:"JobJars"` // Whether the dependency file is uploaded locally. Valid values: `cos`, `lakefs`. 
IsLocalFiles *string `json:"IsLocalFiles,omitnil" name:"IsLocalFiles"` // Dependency files of the Spark job separated by comma JobFiles *string `json:"JobFiles,omitnil" name:"JobFiles"` // Driver resource size of the Spark job JobDriverSize *string `json:"JobDriverSize,omitnil" name:"JobDriverSize"` // Executor resource size of the Spark job JobExecutorSize *string `json:"JobExecutorSize,omitnil" name:"JobExecutorSize"` // Number of Spark job executors JobExecutorNums *int64 `json:"JobExecutorNums,omitnil" name:"JobExecutorNums"` // Maximum number of retries of the Spark flow task JobMaxAttempts *int64 `json:"JobMaxAttempts,omitnil" name:"JobMaxAttempts"` // Spark job creator JobCreator *string `json:"JobCreator,omitnil" name:"JobCreator"` // Spark job creation time JobCreateTime *int64 `json:"JobCreateTime,omitnil" name:"JobCreateTime"` // Spark job update time JobUpdateTime *uint64 `json:"JobUpdateTime,omitnil" name:"JobUpdateTime"` // Last task ID of the Spark job CurrentTaskId *string `json:"CurrentTaskId,omitnil" name:"CurrentTaskId"` // Last status of the Spark job JobStatus *int64 `json:"JobStatus,omitnil" name:"JobStatus"` // Spark streaming job statistics // Note: This field may return null, indicating that no valid values can be obtained. StreamingStat *StreamingStatistics `json:"StreamingStat,omitnil" name:"StreamingStat"` // Data source name // Note: This field may return null, indicating that no valid values can be obtained. DataSource *string `json:"DataSource,omitnil" name:"DataSource"` // PySpark: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs). // Note: This field may return null, indicating that no valid values can be obtained. IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitnil" name:"IsLocalPythonFiles"` // Note: This returned value has been disused. // Note: This field may return null, indicating that no valid values can be obtained. 
AppPythonFiles *string `json:"AppPythonFiles,omitnil" name:"AppPythonFiles"` // Archives: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs). // Note: This field may return null, indicating that no valid values can be obtained. IsLocalArchives *string `json:"IsLocalArchives,omitnil" name:"IsLocalArchives"` // Archives: Dependency resources // Note: This field may return null, indicating that no valid values can be obtained. JobArchives *string `json:"JobArchives,omitnil" name:"JobArchives"` // The Spark image version. // Note: This field may return null, indicating that no valid values can be obtained. SparkImage *string `json:"SparkImage,omitnil" name:"SparkImage"` // PySpark: Python dependency, which can be in .py, .zip, or .egg format. Multiple files should be separated by comma. // Note: This field may return null, indicating that no valid values can be obtained. JobPythonFiles *string `json:"JobPythonFiles,omitnil" name:"JobPythonFiles"` // Number of tasks running or ready to run under the current job // Note: This field may return null, indicating that no valid values can be obtained. TaskNum *int64 `json:"TaskNum,omitnil" name:"TaskNum"` // Engine status. -100 (default value): unknown; -2–11: normal. // Note: This field may return null, indicating that no valid values can be obtained. DataEngineStatus *int64 `json:"DataEngineStatus,omitnil" name:"DataEngineStatus"` // The specified executor count (max), which defaults to 1. This parameter applies if the "Dynamic" mode is selected. If the "Dynamic" mode is not selected, the executor count is equal to `JobExecutorNums`. // Note: This field may return null, indicating that no valid values can be obtained. JobExecutorMaxNumbers *int64 `json:"JobExecutorMaxNumbers,omitnil" name:"JobExecutorMaxNumbers"` // The image version. // Note: This field may return null, indicating that no valid values can be obtained. 
SparkImageVersion *string `json:"SparkImageVersion,omitnil" name:"SparkImageVersion"` // The ID of the associated Data Lake Compute query script. // Note: This field may return null, indicating that no valid values can be obtained. SessionId *string `json:"SessionId,omitnil" name:"SessionId"` // `spark_emr_livy` indicates to create an EMR cluster. // Note: This field may return null, indicating that no valid values can be obtained. DataEngineClusterType *string `json:"DataEngineClusterType,omitnil" name:"DataEngineClusterType"` // `Spark 3.2-EMR` indicates to use the Spark 3.2 image. // Note: This field may return null, indicating that no valid values can be obtained. DataEngineImageVersion *string `json:"DataEngineImageVersion,omitnil" name:"DataEngineImageVersion"` // Whether the task resource configuration is inherited from the cluster template. Valid values: `0` (default): No; `1`: Yes. // Note: This field may return null, indicating that no valid values can be obtained. IsInherit *uint64 `json:"IsInherit,omitnil" name:"IsInherit"` // Whether the task runs with the session SQLs. Valid values: `false` for no and `true` for yes. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. IsSessionStarted *bool `json:"IsSessionStarted,omitnil" name:"IsSessionStarted"` }
type SparkMonitorMetrics ¶
// SparkMonitorMetrics reports shuffle-write metrics for a Spark engine.
type SparkMonitorMetrics struct {
	// The shuffle data (in bytes) that overflows to COS.
	// Note: This field may return null, indicating that no valid values can be obtained.
	ShuffleWriteBytesCos *int64 `json:"ShuffleWriteBytesCos,omitnil" name:"ShuffleWriteBytesCos"`
	// The total shuffle data (in bytes).
	// Note: This field may return null, indicating that no valid values can be obtained.
	ShuffleWriteBytesTotal *int64 `json:"ShuffleWriteBytesTotal,omitnil" name:"ShuffleWriteBytesTotal"`
}
type SparkSessionBatchLog ¶
type SparkSessionBatchLog struct { // The log step. Valid values: `BEG`, `CS`, `DS`, `DSS`, `DSF`, `FINF`, `RTO`, `CANCEL`, `CT`, `DT`, `DTS`, `DTF`, `FINT`, and `EXCE`. // Note: This field may return null, indicating that no valid values can be obtained. Step *string `json:"Step,omitnil" name:"Step"` // Time. // Note: This field may return null, indicating that no valid values can be obtained. Time *string `json:"Time,omitnil" name:"Time"` // The log message. // Note: This field may return null, indicating that no valid values can be obtained. Message *string `json:"Message,omitnil" name:"Message"` // The operation. // Note: This field may return null, indicating that no valid values can be obtained. Operate []*SparkSessionBatchLogOperate `json:"Operate,omitnil" name:"Operate"` }
type SparkSessionBatchLogOperate ¶
type SparkSessionBatchLogOperate struct { // The operation message. // Note: This field may return null, indicating that no valid values can be obtained. Text *string `json:"Text,omitnil" name:"Text"` // The operation type. Valid values: `COPY`, `LOG`, `UI`, `RESULT`, `List`, and `TAB`. // Note: This field may return null, indicating that no valid values can be obtained. Operate *string `json:"Operate,omitnil" name:"Operate"` // Additional information, such as taskid, sessionid, and sparkui. // Note: This field may return null, indicating that no valid values can be obtained. Supplement []*KVPair `json:"Supplement,omitnil" name:"Supplement"` }
type StreamingStatistics ¶
// StreamingStatistics aggregates runtime metrics of a Spark streaming task.
type StreamingStatistics struct {
	// Task start time.
	StartTime *string `json:"StartTime,omitnil" name:"StartTime"`
	// Number of data receivers.
	Receivers *int64 `json:"Receivers,omitnil" name:"Receivers"`
	// Number of receivers in service.
	NumActiveReceivers *int64 `json:"NumActiveReceivers,omitnil" name:"NumActiveReceivers"`
	// Number of inactive receivers.
	NumInactiveReceivers *int64 `json:"NumInactiveReceivers,omitnil" name:"NumInactiveReceivers"`
	// Number of running batches.
	NumActiveBatches *int64 `json:"NumActiveBatches,omitnil" name:"NumActiveBatches"`
	// Number of batches to be processed.
	NumRetainedCompletedBatches *int64 `json:"NumRetainedCompletedBatches,omitnil" name:"NumRetainedCompletedBatches"`
	// Number of completed batches.
	NumTotalCompletedBatches *int64 `json:"NumTotalCompletedBatches,omitnil" name:"NumTotalCompletedBatches"`
	// Average input speed.
	AverageInputRate *float64 `json:"AverageInputRate,omitnil" name:"AverageInputRate"`
	// Average queue time.
	AverageSchedulingDelay *float64 `json:"AverageSchedulingDelay,omitnil" name:"AverageSchedulingDelay"`
	// Average processing time.
	AverageProcessingTime *float64 `json:"AverageProcessingTime,omitnil" name:"AverageProcessingTime"`
	// Average latency.
	AverageTotalDelay *float64 `json:"AverageTotalDelay,omitnil" name:"AverageTotalDelay"`
}
type SuspendResumeDataEngineRequest ¶
type SuspendResumeDataEngineRequest struct { *tchttp.BaseRequest // The name of a virtual cluster. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // The operation type: `suspend` or `resume`. Operate *string `json:"Operate,omitnil" name:"Operate"` }
func NewSuspendResumeDataEngineRequest ¶
func NewSuspendResumeDataEngineRequest() (request *SuspendResumeDataEngineRequest)
func (*SuspendResumeDataEngineRequest) FromJsonString ¶
func (r *SuspendResumeDataEngineRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checks
func (*SuspendResumeDataEngineRequest) ToJsonString ¶
func (r *SuspendResumeDataEngineRequest) ToJsonString() string
type SuspendResumeDataEngineRequestParams ¶
// SuspendResumeDataEngineRequestParams holds the input parameters of the
// SuspendResumeDataEngine API call.
type SuspendResumeDataEngineRequestParams struct {
	// The name of a virtual cluster.
	DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"`
	// The operation type: `suspend` or `resume`.
	Operate *string `json:"Operate,omitnil" name:"Operate"`
}
Predefined struct for user
type SuspendResumeDataEngineResponse ¶
type SuspendResumeDataEngineResponse struct { *tchttp.BaseResponse Response *SuspendResumeDataEngineResponseParams `json:"Response"` }
func NewSuspendResumeDataEngineResponse ¶
func NewSuspendResumeDataEngineResponse() (response *SuspendResumeDataEngineResponse)
func (*SuspendResumeDataEngineResponse) FromJsonString ¶
func (r *SuspendResumeDataEngineResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checks
func (*SuspendResumeDataEngineResponse) ToJsonString ¶
func (r *SuspendResumeDataEngineResponse) ToJsonString() string
type SuspendResumeDataEngineResponseParams ¶
// SuspendResumeDataEngineResponseParams is the payload of a
// SuspendResumeDataEngine response.
type SuspendResumeDataEngineResponseParams struct {
	// The details of the virtual cluster.
	DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"`
	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type SwitchDataEngineRequest ¶
type SwitchDataEngineRequest struct { *tchttp.BaseRequest // The name of the primary cluster. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // Whether to start the standby cluster. StartStandbyCluster *bool `json:"StartStandbyCluster,omitnil" name:"StartStandbyCluster"` }
func NewSwitchDataEngineRequest ¶
func NewSwitchDataEngineRequest() (request *SwitchDataEngineRequest)
func (*SwitchDataEngineRequest) FromJsonString ¶
func (r *SwitchDataEngineRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checks
func (*SwitchDataEngineRequest) ToJsonString ¶
func (r *SwitchDataEngineRequest) ToJsonString() string
type SwitchDataEngineRequestParams ¶
// SwitchDataEngineRequestParams holds the input parameters of the
// SwitchDataEngine API call.
type SwitchDataEngineRequestParams struct {
	// The name of the primary cluster.
	DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"`
	// Whether to start the standby cluster.
	StartStandbyCluster *bool `json:"StartStandbyCluster,omitnil" name:"StartStandbyCluster"`
}
Predefined struct for user
type SwitchDataEngineResponse ¶
type SwitchDataEngineResponse struct { *tchttp.BaseResponse Response *SwitchDataEngineResponseParams `json:"Response"` }
func NewSwitchDataEngineResponse ¶
func NewSwitchDataEngineResponse() (response *SwitchDataEngineResponse)
func (*SwitchDataEngineResponse) FromJsonString ¶
func (r *SwitchDataEngineResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checks
func (*SwitchDataEngineResponse) ToJsonString ¶
func (r *SwitchDataEngineResponse) ToJsonString() string
type SwitchDataEngineResponseParams ¶
// SwitchDataEngineResponseParams is the payload of a SwitchDataEngine response.
type SwitchDataEngineResponseParams struct {
	// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
	RequestId *string `json:"RequestId,omitnil" name:"RequestId"`
}
Predefined struct for user
type TColumn ¶
// TColumn describes a single table column definition.
type TColumn struct {
	// The field name.
	Name *string `json:"Name,omitnil" name:"Name"`
	// The field type.
	Type *string `json:"Type,omitnil" name:"Type"`
	// The field description.
	Comment *string `json:"Comment,omitnil" name:"Comment"`
	// The default field value.
	Default *string `json:"Default,omitnil" name:"Default"`
	// Whether the field is not null.
	NotNull *bool `json:"NotNull,omitnil" name:"NotNull"`
}
type TPartition ¶
// TPartition describes a table partition field and its conversion policy.
type TPartition struct {
	// The field name.
	Name *string `json:"Name,omitnil" name:"Name"`
	// The field type.
	Type *string `json:"Type,omitnil" name:"Type"`
	// The field description.
	Comment *string `json:"Comment,omitnil" name:"Comment"`
	// The partition type.
	PartitionType *string `json:"PartitionType,omitnil" name:"PartitionType"`
	// The partition format.
	PartitionFormat *string `json:"PartitionFormat,omitnil" name:"PartitionFormat"`
	// The separator count of the partition conversion policy.
	PartitionDot *int64 `json:"PartitionDot,omitnil" name:"PartitionDot"`
	// The partition conversion policy.
	Transform *string `json:"Transform,omitnil" name:"Transform"`
	// The policy parameters.
	TransformArgs []*string `json:"TransformArgs,omitnil" name:"TransformArgs"`
}
type TableBaseInfo ¶
type TableBaseInfo struct { // The database name. DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // The table name. TableName *string `json:"TableName,omitnil" name:"TableName"` // The data source name. // Note: This field may return null, indicating that no valid values can be obtained. DatasourceConnectionName *string `json:"DatasourceConnectionName,omitnil" name:"DatasourceConnectionName"` // The table remarks. // Note: This field may return null, indicating that no valid values can be obtained. TableComment *string `json:"TableComment,omitnil" name:"TableComment"` // The specific type: `table` or `view`. // Note: This field may return null, indicating that no valid values can be obtained. Type *string `json:"Type,omitnil" name:"Type"` // The data format type, such as `hive` and `iceberg`. // Note: This field may return null, indicating that no valid values can be obtained. TableFormat *string `json:"TableFormat,omitnil" name:"TableFormat"` // The table creator name. // Note: This field may return null, indicating that no valid values can be obtained. UserAlias *string `json:"UserAlias,omitnil" name:"UserAlias"` // The table creator ID. // Note: This field may return null, indicating that no valid values can be obtained. UserSubUin *string `json:"UserSubUin,omitnil" name:"UserSubUin"` // The data governance configuration. // Note: This field may return null, indicating that no valid values can be obtained. GovernPolicy *DataGovernPolicy `json:"GovernPolicy,omitnil" name:"GovernPolicy"` // Whether database data governance is disabled. Valid values: `true` (disabled) and `false` (not disabled). // Note: This field may return null, indicating that no valid values can be obtained. DbGovernPolicyIsDisable *string `json:"DbGovernPolicyIsDisable,omitnil" name:"DbGovernPolicyIsDisable"` }
type TagInfo ¶
// TagInfo is a single key/value resource tag.
type TagInfo struct {
	// The tag key.
	// Note: This field may return null, indicating that no valid values can be obtained.
	TagKey *string `json:"TagKey,omitnil" name:"TagKey"`
	// The tag value.
	// Note: This field may return null, indicating that no valid values can be obtained.
	TagValue *string `json:"TagValue,omitnil" name:"TagValue"`
}
type TaskResponseInfo ¶
type TaskResponseInfo struct { // Database name of the task DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // Data volume of the task DataAmount *int64 `json:"DataAmount,omitnil" name:"DataAmount"` // Task ID Id *string `json:"Id,omitnil" name:"Id"` // The compute time in ms. UsedTime *int64 `json:"UsedTime,omitnil" name:"UsedTime"` // Task output path OutputPath *string `json:"OutputPath,omitnil" name:"OutputPath"` // Task creation time CreateTime *string `json:"CreateTime,omitnil" name:"CreateTime"` // The task status. Valid values: `0` (initializing), `1` (executing), `2` (executed), `3` (writing data), `4` (queuing), `-1` (failed), and `-3` (canceled). State *int64 `json:"State,omitnil" name:"State"` // SQL statement type of the task, such as DDL and DML. SQLType *string `json:"SQLType,omitnil" name:"SQLType"` // SQL statement of the task SQL *string `json:"SQL,omitnil" name:"SQL"` // Whether the result has expired ResultExpired *bool `json:"ResultExpired,omitnil" name:"ResultExpired"` // Number of affected data rows RowAffectInfo *string `json:"RowAffectInfo,omitnil" name:"RowAffectInfo"` // Dataset of task results // Note: This field may return null, indicating that no valid values can be obtained. DataSet *string `json:"DataSet,omitnil" name:"DataSet"` // Failure information, such as `errorMessage`. This field has been disused. Error *string `json:"Error,omitnil" name:"Error"` // Task progress (%) Percentage *int64 `json:"Percentage,omitnil" name:"Percentage"` // Output information of task execution OutputMessage *string `json:"OutputMessage,omitnil" name:"OutputMessage"` // Type of the engine executing the SQL statement TaskType *string `json:"TaskType,omitnil" name:"TaskType"` // Task progress details // Note: This field may return null, indicating that no valid values can be obtained. 
ProgressDetail *string `json:"ProgressDetail,omitnil" name:"ProgressDetail"` // Task end time // Note: This field may return null, indicating that no valid values can be obtained. UpdateTime *string `json:"UpdateTime,omitnil" name:"UpdateTime"` // Compute resource ID // Note: This field may return null, indicating that no valid values can be obtained. DataEngineId *string `json:"DataEngineId,omitnil" name:"DataEngineId"` // Sub-UIN that executes the SQL statement // Note: This field may return null, indicating that no valid values can be obtained. OperateUin *string `json:"OperateUin,omitnil" name:"OperateUin"` // Compute resource name // Note: This field may return null, indicating that no valid values can be obtained. DataEngineName *string `json:"DataEngineName,omitnil" name:"DataEngineName"` // Whether the import type is local import or COS // Note: This field may return null, indicating that no valid values can be obtained. InputType *string `json:"InputType,omitnil" name:"InputType"` // Import configuration // Note: This field may return null, indicating that no valid values can be obtained. InputConf *string `json:"InputConf,omitnil" name:"InputConf"` // Number of data entries // Note: This field may return null, indicating that no valid values can be obtained. DataNumber *int64 `json:"DataNumber,omitnil" name:"DataNumber"` // Whether the data can be downloaded // Note: This field may return null, indicating that no valid values can be obtained. CanDownload *bool `json:"CanDownload,omitnil" name:"CanDownload"` // User alias // Note: This field may return null, indicating that no valid values can be obtained. UserAlias *string `json:"UserAlias,omitnil" name:"UserAlias"` // Spark application job name // Note: This field may return null, indicating that no valid values can be obtained. 
SparkJobName *string `json:"SparkJobName,omitnil" name:"SparkJobName"` // Spark application job ID // Note: This field may return null, indicating that no valid values can be obtained. SparkJobId *string `json:"SparkJobId,omitnil" name:"SparkJobId"` // JAR file of the Spark application entry // Note: This field may return null, indicating that no valid values can be obtained. SparkJobFile *string `json:"SparkJobFile,omitnil" name:"SparkJobFile"` // Spark UI URL // Note: This field may return null, indicating that no valid values can be obtained. UiUrl *string `json:"UiUrl,omitnil" name:"UiUrl"` // The task time in ms. // Note: This field may return null, indicating that no valid values can be obtained. TotalTime *int64 `json:"TotalTime,omitnil" name:"TotalTime"` // The program entry parameter for running a task under a Spark job. // Note: This field may return null, indicating that no valid values can be obtained. CmdArgs *string `json:"CmdArgs,omitnil" name:"CmdArgs"` // The image version of the cluster. // Note: This field may return null, indicating that no valid values can be obtained. ImageVersion *string `json:"ImageVersion,omitnil" name:"ImageVersion"` // The driver size. // Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`. // Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`. // Note: This field may return null, indicating that no valid values can be obtained. DriverSize *string `json:"DriverSize,omitnil" name:"DriverSize"` // The executor size. // Valid values for the standard resource type: `small`, `medium`, `large`, and `xlarge`. // Valid values for the memory resource type: `m.small`, `m.medium`, `m.large`, and `m.xlarge`. // Note: This field may return null, indicating that no valid values can be obtained. ExecutorSize *string `json:"ExecutorSize,omitnil" name:"ExecutorSize"` // The executor count. 
The minimum value is 1 and the maximum value is less than the cluster specification. // Note: This field may return null, indicating that no valid values can be obtained. ExecutorNums *uint64 `json:"ExecutorNums,omitnil" name:"ExecutorNums"` // The maximum executor count (in dynamic mode). The minimum value is 1 and the maximum value is less than the cluster specification. If you set `ExecutorMaxNumbers` to a value smaller than that of `ExecutorNums`, the value of `ExecutorMaxNumbers` is automatically changed to that of `ExecutorNums`. // Note: This field may return null, indicating that no valid values can be obtained. ExecutorMaxNumbers *uint64 `json:"ExecutorMaxNumbers,omitnil" name:"ExecutorMaxNumbers"` // Common task metrics // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. CommonMetrics *CommonMetrics `json:"CommonMetrics,omitnil" name:"CommonMetrics"` // The Spark task metrics. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. SparkMonitorMetrics *SparkMonitorMetrics `json:"SparkMonitorMetrics,omitnil" name:"SparkMonitorMetrics"` // The Presto task metrics. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. PrestoMonitorMetrics *PrestoMonitorMetrics `json:"PrestoMonitorMetrics,omitnil" name:"PrestoMonitorMetrics"` }
type TaskResultInfo ¶
type TaskResultInfo struct { // Unique task ID TaskId *string `json:"TaskId,omitnil" name:"TaskId"` // Name of the default selected data source when the current job is executed // Note: This field may return null, indicating that no valid values can be obtained. DatasourceConnectionName *string `json:"DatasourceConnectionName,omitnil" name:"DatasourceConnectionName"` // Name of the default selected database when the current job is executed // Note: This field may return null, indicating that no valid values can be obtained. DatabaseName *string `json:"DatabaseName,omitnil" name:"DatabaseName"` // The currently executed SQL statement. Each task contains one SQL statement. SQL *string `json:"SQL,omitnil" name:"SQL"` // Type of the executed task. Valid values: `DDL`, `DML`, `DQL`. SQLType *string `json:"SQLType,omitnil" name:"SQLType"` // u200cThe current task status. Valid values: `0` (initializing), `1` (executing), `2` (executed), `3` (writing data), `4` (queuing), u200c`-1` (failed), and `-3` (canceled). Only when the task is successfully executed, a task execution result will be returned. State *int64 `json:"State,omitnil" name:"State"` // Amount of the data scanned in bytes DataAmount *int64 `json:"DataAmount,omitnil" name:"DataAmount"` // The compute time in ms. UsedTime *int64 `json:"UsedTime,omitnil" name:"UsedTime"` // Address of the COS bucket for storing the task result OutputPath *string `json:"OutputPath,omitnil" name:"OutputPath"` // Task creation timestamp CreateTime *string `json:"CreateTime,omitnil" name:"CreateTime"` // Task execution information. `success` will be returned if the task succeeds; otherwise, the failure cause will be returned. OutputMessage *string `json:"OutputMessage,omitnil" name:"OutputMessage"` // Number of affected rows RowAffectInfo *string `json:"RowAffectInfo,omitnil" name:"RowAffectInfo"` // Schema information of the result // Note: This field may return null, indicating that no valid values can be obtained. 
ResultSchema []*Column `json:"ResultSchema,omitnil" name:"ResultSchema"` // Result information. After it is unescaped, each element of the outer array is a data row. // Note: This field may return null, indicating that no valid values can be obtained. ResultSet *string `json:"ResultSet,omitnil" name:"ResultSet"` // Pagination information. If there is no more result data, `nextToken` will be empty. NextToken *string `json:"NextToken,omitnil" name:"NextToken"` // Task progress (%) Percentage *int64 `json:"Percentage,omitnil" name:"Percentage"` // Task progress details ProgressDetail *string `json:"ProgressDetail,omitnil" name:"ProgressDetail"` // Console display format. Valid values: `table`, `text`. DisplayFormat *string `json:"DisplayFormat,omitnil" name:"DisplayFormat"` // The task time in ms. TotalTime *int64 `json:"TotalTime,omitnil" name:"TotalTime"` }
type TasksInfo ¶
type TasksInfo struct { // Task type. Valid values: `SQLTask` (SQL query task), `SparkSQLTask` (Spark SQL query task). TaskType *string `json:"TaskType,omitnil" name:"TaskType"` // Fault tolerance policy. `Proceed`: continues to execute subsequent tasks after the current task fails or is canceled. `Terminate`: terminates the execution of subsequent tasks after the current task fails or is canceled, and marks all subsequent tasks as canceled. FailureTolerance *string `json:"FailureTolerance,omitnil" name:"FailureTolerance"` // Base64-encrypted SQL statements separated by ";". Up to 50 tasks can be submitted at a time, and they will be executed strictly in sequence. SQL *string `json:"SQL,omitnil" name:"SQL"` // Configuration information of the task. Currently, only `SparkSQLTask` tasks are supported. Config []*KVPair `json:"Config,omitnil" name:"Config"` // User-defined parameters of the task Params []*KVPair `json:"Params,omitnil" name:"Params"` }
type TasksOverview ¶
type TasksOverview struct { // The number of tasks in queue. TaskQueuedCount *int64 `json:"TaskQueuedCount,omitnil" name:"TaskQueuedCount"` // The number of initialized tasks. TaskInitCount *int64 `json:"TaskInitCount,omitnil" name:"TaskInitCount"` // The number of tasks in progress. TaskRunningCount *int64 `json:"TaskRunningCount,omitnil" name:"TaskRunningCount"` // The total number of tasks in this time range. TotalTaskCount *int64 `json:"TotalTaskCount,omitnil" name:"TotalTaskCount"` }
type UpdateRowFilterRequest ¶
type UpdateRowFilterRequest struct { *tchttp.BaseRequest // The ID of the row filter policy, which can be obtained using the `DescribeUserInfo` or `DescribeWorkGroupInfo` API. PolicyId *int64 `json:"PolicyId,omitnil" name:"PolicyId"` // The new filter policy. Policy *Policy `json:"Policy,omitnil" name:"Policy"` }
func NewUpdateRowFilterRequest ¶
func NewUpdateRowFilterRequest() (request *UpdateRowFilterRequest)
func (*UpdateRowFilterRequest) FromJsonString ¶
func (r *UpdateRowFilterRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking
func (*UpdateRowFilterRequest) ToJsonString ¶
func (r *UpdateRowFilterRequest) ToJsonString() string
type UpdateRowFilterRequestParams ¶
type UpdateRowFilterRequestParams struct { // The ID of the row filter policy, which can be obtained using the `DescribeUserInfo` or `DescribeWorkGroupInfo` API. PolicyId *int64 `json:"PolicyId,omitnil" name:"PolicyId"` // The new filter policy. Policy *Policy `json:"Policy,omitnil" name:"Policy"` }
Predefined struct for user
type UpdateRowFilterResponse ¶
type UpdateRowFilterResponse struct { *tchttp.BaseResponse Response *UpdateRowFilterResponseParams `json:"Response"` }
func NewUpdateRowFilterResponse ¶
func NewUpdateRowFilterResponse() (response *UpdateRowFilterResponse)
func (*UpdateRowFilterResponse) FromJsonString ¶
func (r *UpdateRowFilterResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it performs no parameter checks and no strict type checking
func (*UpdateRowFilterResponse) ToJsonString ¶
func (r *UpdateRowFilterResponse) ToJsonString() string
type UpdateRowFilterResponseParams ¶
type UpdateRowFilterResponseParams struct { // The unique request ID, which is returned for each request. RequestId is required for locating a problem. RequestId *string `json:"RequestId,omitnil" name:"RequestId"` }
Predefined struct for user
type UserRole ¶
type UserRole struct { // The role ID. RoleId *int64 `json:"RoleId,omitnil" name:"RoleId"` // The user's app ID. AppId *string `json:"AppId,omitnil" name:"AppId"` // The user ID. Uin *string `json:"Uin,omitnil" name:"Uin"` // The role permission. Arn *string `json:"Arn,omitnil" name:"Arn"` // The last modified timestamp. ModifyTime *int64 `json:"ModifyTime,omitnil" name:"ModifyTime"` // The role description. Desc *string `json:"Desc,omitnil" name:"Desc"` // The role name. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. RoleName *string `json:"RoleName,omitnil" name:"RoleName"` // The creator UIN. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. Creator *string `json:"Creator,omitnil" name:"Creator"` // The COS permission list. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. CosPermissionList []*CosPermission `json:"CosPermissionList,omitnil" name:"CosPermissionList"` // The CAM policy in JSON. // Note: u200dThis field may returnu200d·nullu200d, indicating that no valid values can be obtained. PermissionJson *string `json:"PermissionJson,omitnil" name:"PermissionJson"` }