Documentation ¶
Index ¶
- func Create(createJob jobs.CreateJob, w *databricks.WorkspaceClient, ctx context.Context) (int64, error)
- func DataSourceJob() common.Resource
- func DataSourceJobs() common.Resource
- func Read(jobID int64, w *databricks.WorkspaceClient, ctx context.Context) (job *jobs.Job, err error)
- func ResourceJob() common.Resource
- func Start(jobID int64, timeout time.Duration, w *databricks.WorkspaceClient, ...) error
- func StopActiveRun(jobID int64, timeout time.Duration, w *databricks.WorkspaceClient, ...) error
- func Update(jobID int64, js JobSettingsResource, w *databricks.WorkspaceClient, ...) error
- type ContinuousConf
- type CronSchedule
- type DbtTask
- type FileArrival
- type ForEachNestedTask
- type ForEachTask
- type GitSource
- type Job
- type JobCluster
- type JobCreateStruct
- type JobHealth
- type JobHealthRule
- type JobListResponse
- type JobParameter
- type JobRun
- type JobRunAs
- type JobRunsList
- type JobRunsListRequest
- type JobSettings
- type JobSettingsResource
- type JobTaskSettings
- type JobsAPI
- func (a JobsAPI) Create(jobSettings JobSettings) (Job, error)
- func (a JobsAPI) Delete(id string) error
- func (a JobsAPI) List() (l []Job, err error)
- func (a JobsAPI) ListByName(name string, expandTasks bool) ([]Job, error)
- func (a JobsAPI) Read(id string) (job Job, err error)
- func (a JobsAPI) RunNow(jobID int64) (int64, error)
- func (a JobsAPI) RunsCancel(runID int64, timeout time.Duration) error
- func (a JobsAPI) RunsGet(runID int64) (JobRun, error)
- func (a JobsAPI) RunsList(r JobRunsListRequest) (jrl JobRunsList, err error)
- func (a JobsAPI) Start(jobID int64, timeout time.Duration) error
- func (a JobsAPI) StopActiveRun(jobID int64, timeout time.Duration) error
- func (a JobsAPI) Update(id string, jobSettings JobSettings) error
- type NotebookTask
- type Periodic
- type PipelineTask
- type PythonWheelTask
- type RunJobTask
- type RunParameters
- type RunState
- type SparkJarTask
- type SparkPythonTask
- type SparkSubmitTask
- type SqlAlertTask
- type SqlDashboardTask
- type SqlFileTask
- type SqlQueryTask
- type SqlSubscription
- type SqlTask
- type TableUpdate
- type Trigger
- type UpdateJobRequest
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func DataSourceJob ¶ added in v1.4.0
func DataSourceJob() common.Resource
func DataSourceJobs ¶
func DataSourceJobs() common.Resource
func ResourceJob ¶
func ResourceJob() common.Resource
func StopActiveRun ¶ added in v1.46.0
func StopActiveRun(jobID int64, timeout time.Duration, w *databricks.WorkspaceClient, ...) error
Types ¶
type ContinuousConf ¶ added in v1.10.0
type ContinuousConf struct {
    PauseStatus string `json:"pause_status,omitempty" tf:"default:UNPAUSED"`
}
type CronSchedule ¶
type CronSchedule struct {
    QuartzCronExpression string `json:"quartz_cron_expression"`
    TimezoneID string `json:"timezone_id"`
    PauseStatus string `json:"pause_status,omitempty" tf:"default:UNPAUSED"`
}
CronSchedule contains the Quartz cron expression and timezone that define a job schedule.
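For illustration, a schedule that fires daily at 02:00 in UTC might be declared as follows (a sketch; the expression and timezone are arbitrary examples, and the package is assumed to be imported as jobs from github.com/databricks/terraform-provider-databricks/jobs):

schedule := jobs.CronSchedule{
    // Quartz syntax: seconds minutes hours day-of-month month day-of-week
    QuartzCronExpression: "0 0 2 * * ?",
    TimezoneID:           "UTC",
    // PauseStatus can be omitted: the tf tag defaults it to UNPAUSED.
}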
type DbtTask ¶
type DbtTask struct {
    Commands []string `json:"commands"`
    ProfilesDirectory string `json:"profiles_directory,omitempty"`
    ProjectDirectory string `json:"project_directory,omitempty"`
    Schema string `json:"schema,omitempty" tf:"default:default"`
    Catalog string `json:"catalog,omitempty"`
    WarehouseId string `json:"warehouse_id,omitempty"`
    Source string `json:"source,omitempty" tf:"suppress_diff"`
}
DbtTask contains the information for dbt tasks. TODO: add validation for non-empty commands
type FileArrival ¶ added in v1.14.0
type ForEachNestedTask ¶ added in v1.37.0
type ForEachNestedTask struct {
    TaskKey string `json:"task_key"`
    Description string `json:"description,omitempty"`
    DependsOn []jobs.TaskDependency `json:"depends_on,omitempty"`
    RunIf string `json:"run_if,omitempty"`
    ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
    NewCluster *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
    JobClusterKey string `json:"job_cluster_key,omitempty" tf:"group:cluster_type"`
    Libraries []compute.Library `json:"libraries,omitempty" tf:"alias:library"`
    NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"`
    SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"`
    SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
    SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
    PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
    PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
    SqlTask *SqlTask `json:"sql_task,omitempty" tf:"group:task_type"`
    DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"`
    RunJobTask *RunJobTask `json:"run_job_task,omitempty" tf:"group:task_type"`
    ConditionTask *jobs.ConditionTask `json:"condition_task,omitempty" tf:"group:task_type"`
    EmailNotifications *jobs.TaskEmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"`
    WebhookNotifications *jobs.WebhookNotifications `json:"webhook_notifications,omitempty" tf:"suppress_diff"`
    NotificationSettings *jobs.TaskNotificationSettings `json:"notification_settings,omitempty"`
    TimeoutSeconds int32 `json:"timeout_seconds,omitempty"`
    MaxRetries int32 `json:"max_retries,omitempty"`
    MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"`
    RetryOnTimeout bool `json:"retry_on_timeout,omitempty" tf:"computed"`
    Health *JobHealth `json:"health,omitempty"`
    EnvironmentKey string `json:"environment_key,omitempty"`
}
type ForEachTask ¶ added in v1.37.0
type ForEachTask struct {
    Concurrency int `json:"concurrency,omitempty"`
    Inputs string `json:"inputs"`
    Task ForEachNestedTask `json:"task"`
}
TODO: As TF does not support recursive nesting, limit the nesting depth. Example: https://github.com/hashicorp/terraform-provider-aws/blob/b4a9f93a2b7323202c8904e86cff03d3f2cb006b/internal/service/wafv2/rule_group.go#L110
type GitSource ¶
type GitSource struct {
    Url string `json:"git_url" tf:"alias:url"`
    Provider string `json:"git_provider,omitempty" tf:"alias:provider"`
    Branch string `json:"git_branch,omitempty" tf:"alias:branch"`
    Tag string `json:"git_tag,omitempty" tf:"alias:tag"`
    Commit string `json:"git_commit,omitempty" tf:"alias:commit"`
    JobSource *jobs.JobSource `json:"job_source,omitempty"`
}
GitSource contains the Git repository information for a job (part of the Jobs + Repos integration preview).
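For example, pinning a job's source code to a Git tag might look like this (a sketch; the repository URL, provider value, and tag are hypothetical, and the tf aliases mean these fields surface as url, provider, and tag in the Terraform schema):

src := jobs.GitSource{
    Url:      "https://github.com/example/repo", // hypothetical repository
    Provider: "gitHub",                          // assumed provider value
    Tag:      "v1.2.3",                          // hypothetical tag
}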
type Job ¶
type Job struct {
    JobID int64 `json:"job_id,omitempty"`
    CreatorUserName string `json:"creator_user_name,omitempty"`
    RunAsUserName string `json:"run_as_user_name,omitempty" tf:"computed"`
    Settings *JobSettings `json:"settings,omitempty"`
    CreatedTime int64 `json:"created_time,omitempty"`
}
Job contains the information returned by a GET request to the Databricks Jobs API.
type JobCluster ¶
type JobCreateStruct ¶ added in v1.46.0
JobCreateStruct wraps the create request because the create method needs field aliases that jobs.CreateJob does not define on its own.
func (JobCreateStruct) Aliases ¶ added in v1.46.0
func (JobCreateStruct) Aliases() map[string]map[string]string
func (JobCreateStruct) CustomizeSchema ¶ added in v1.46.0
func (JobCreateStruct) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema
type JobHealth ¶ added in v1.22.0
type JobHealth struct {
    Rules []JobHealthRule `json:"rules"`
}
type JobHealthRule ¶ added in v1.22.0
type JobListResponse ¶ added in v1.6.5
type JobListResponse struct {
    Jobs []Job `json:"jobs"`
    HasMore bool `json:"has_more,omitempty"`
    NextPageToken string `json:"next_page_token,omitempty"`
    PrevPageToken string `json:"prev_page_token,omitempty"`
}
JobListResponse is the response returned when listing jobs.
type JobParameter ¶ added in v1.23.0
type JobParameter struct {
    Name string `json:"name,omitempty"`
    Default string `json:"default,omitempty"`
    Value string `json:"value,omitempty"`
}
JobParameter is a job-level parameter.
type JobRun ¶
type JobRun struct {
    JobID int64 `json:"job_id,omitempty"`
    RunID int64 `json:"run_id,omitempty"`
    NumberInJob int64 `json:"number_in_job,omitempty"`
    StartTime int64 `json:"start_time,omitempty"`
    State RunState `json:"state,omitempty"`
    Trigger string `json:"trigger,omitempty"`
    RuntType string `json:"run_type,omitempty"`
    OverridingParameters RunParameters `json:"overriding_parameters,omitempty"`
    JobParameters []JobParameter `json:"job_parameters,omitempty"`
}
JobRun is a simplified representation of the corresponding Jobs API entity.
type JobRunsList ¶
JobRunsList is a single page of job runs.
type JobRunsListRequest ¶
type JobRunsListRequest struct {
    JobID int64 `url:"job_id,omitempty"`
    ActiveOnly bool `url:"active_only,omitempty"`
    CompletedOnly bool `url:"completed_only,omitempty"`
    Offset int32 `url:"offset,omitempty"`
    Limit int32 `url:"limit,omitempty"`
}
JobRunsListRequest describes the filters and pagination options for listing job runs.
type JobSettings ¶
type JobSettings struct {
    Name string `json:"name,omitempty" tf:"default:Untitled"`
    Description string `json:"description,omitempty"`

    // BEGIN Jobs API 2.0
    ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
    NewCluster *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
    NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"`
    SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"`
    SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
    SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
    PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
    PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
    DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"`
    RunJobTask *RunJobTask `json:"run_job_task,omitempty" tf:"group:task_type"`
    Libraries []compute.Library `json:"libraries,omitempty" tf:"alias:library"`
    TimeoutSeconds int32 `json:"timeout_seconds,omitempty"`
    MaxRetries int32 `json:"max_retries,omitempty"`
    MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"`
    RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`

    // BEGIN Jobs API 2.1
    Tasks []JobTaskSettings `json:"tasks,omitempty" tf:"alias:task"`
    Format string `json:"format,omitempty" tf:"computed"`
    JobClusters []JobCluster `json:"job_clusters,omitempty" tf:"alias:job_cluster"`
    Environments []jobs.JobEnvironment `json:"environments,omitempty" tf:"alias:environment"`

    // BEGIN Jobs + Repo integration preview
    GitSource *GitSource `json:"git_source,omitempty"`

    Schedule *CronSchedule `json:"schedule,omitempty"`
    Continuous *ContinuousConf `json:"continuous,omitempty"`
    Trigger *Trigger `json:"trigger,omitempty"`
    MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty"`
    EmailNotifications *jobs.JobEmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"`
    WebhookNotifications *jobs.WebhookNotifications `json:"webhook_notifications,omitempty" tf:"suppress_diff"`
    NotificationSettings *jobs.JobNotificationSettings `json:"notification_settings,omitempty"`
    Tags map[string]string `json:"tags,omitempty"`
    Queue *jobs.QueueSettings `json:"queue,omitempty"`
    RunAs *JobRunAs `json:"run_as,omitempty" tf:"computed"`
    Health *JobHealth `json:"health,omitempty"`
    Parameters []jobs.JobParameterDefinition `json:"parameters,omitempty" tf:"alias:parameter"`
    Deployment *jobs.JobDeployment `json:"deployment,omitempty"`
    EditMode jobs.JobEditMode `json:"edit_mode,omitempty"`
}
JobSettings contains the information for configuring a job on Databricks.
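As a sketch, a minimal multi-task (Jobs API 2.1) configuration could look like this; the job name, cluster ID, and notebook path are hypothetical:

settings := jobs.JobSettings{
    Name:              "nightly-etl", // hypothetical job name
    MaxConcurrentRuns: 1,
    Schedule: &jobs.CronSchedule{
        QuartzCronExpression: "0 0 2 * * ?",
        TimezoneID:           "UTC",
    },
    Tasks: []jobs.JobTaskSettings{{
        TaskKey:           "main",
        ExistingClusterID: "1234-567890-abcde123", // hypothetical cluster ID
        NotebookTask: &jobs.NotebookTask{
            NotebookPath: "/Repos/example/etl", // hypothetical path
        },
    }},
}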
type JobSettingsResource ¶ added in v1.43.0
type JobSettingsResource struct {
    jobs.JobSettings

    // BEGIN Jobs API 2.0
    ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
    NewCluster *compute.ClusterSpec `json:"new_cluster,omitempty" tf:"group:cluster_type"`
    NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"`
    SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"`
    SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
    SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
    PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
    PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
    DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"`
    RunJobTask *RunJobTask `json:"run_job_task,omitempty" tf:"group:task_type"`
    Libraries []compute.Library `json:"libraries,omitempty" tf:"alias:library"`
    MaxRetries int32 `json:"max_retries,omitempty"`
    MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"`
    RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
}
func (JobSettingsResource) Aliases ¶ added in v1.43.0
func (JobSettingsResource) Aliases() map[string]map[string]string
func (JobSettingsResource) CustomizeSchema ¶ added in v1.43.0
func (JobSettingsResource) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema
func (JobSettingsResource) MaxDepthForTypes ¶ added in v1.43.0
func (JobSettingsResource) MaxDepthForTypes() map[string]int
type JobTaskSettings ¶
type JobTaskSettings struct {
    TaskKey string `json:"task_key"`
    Description string `json:"description,omitempty"`
    DependsOn []jobs.TaskDependency `json:"depends_on,omitempty"`
    RunIf string `json:"run_if,omitempty"`
    ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
    NewCluster *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
    JobClusterKey string `json:"job_cluster_key,omitempty" tf:"group:cluster_type"`
    Libraries []compute.Library `json:"libraries,omitempty" tf:"alias:library"`
    NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"`
    SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"`
    SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
    SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
    PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
    PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
    SqlTask *SqlTask `json:"sql_task,omitempty" tf:"group:task_type"`
    DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"`
    RunJobTask *RunJobTask `json:"run_job_task,omitempty" tf:"group:task_type"`
    ConditionTask *jobs.ConditionTask `json:"condition_task,omitempty" tf:"group:task_type"`
    ForEachTask *ForEachTask `json:"for_each_task,omitempty" tf:"group:task_type"`
    EmailNotifications *jobs.TaskEmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"`
    WebhookNotifications *jobs.WebhookNotifications `json:"webhook_notifications,omitempty" tf:"suppress_diff"`
    NotificationSettings *jobs.TaskNotificationSettings `json:"notification_settings,omitempty"`
    TimeoutSeconds int32 `json:"timeout_seconds,omitempty"`
    MaxRetries int32 `json:"max_retries,omitempty"`
    MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"`
    RetryOnTimeout bool `json:"retry_on_timeout,omitempty" tf:"computed"`
    Health *JobHealth `json:"health,omitempty"`
    EnvironmentKey string `json:"environment_key,omitempty"`
}
type JobsAPI ¶
type JobsAPI struct {
    // contains filtered or unexported fields
}
JobsAPI exposes the Jobs API
func NewJobsAPI ¶
NewJobsAPI creates a JobsAPI instance from the provider meta.
func (JobsAPI) Create ¶
func (a JobsAPI) Create(jobSettings JobSettings) (Job, error)
Create creates a job in the workspace with the given job settings.
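A minimal usage sketch. The import path and the exact NewJobsAPI signature are assumptions (neither is shown in full above); client stands for the provider meta mentioned in the NewJobsAPI doc comment:

import (
    "context"
    "log"

    "github.com/databricks/terraform-provider-databricks/jobs"
)

func createNightlyJob(client any) {
    ctx := context.Background()
    // Assumed signature: NewJobsAPI(ctx, meta); see the doc comment above.
    api := jobs.NewJobsAPI(ctx, client)

    created, err := api.Create(jobs.JobSettings{
        Name: "nightly-etl", // hypothetical job name
    })
    if err != nil {
        log.Fatalf("create job: %v", err)
    }
    log.Printf("created job %d", created.JobID)
}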
func (JobsAPI) ListByName ¶ added in v1.6.5
func (a JobsAPI) ListByName(name string, expandTasks bool) ([]Job, error)
ListByName lists all jobs matching the given name. If name is empty, it returns all jobs.
func (JobsAPI) RunsCancel ¶
func (a JobsAPI) RunsCancel(runID int64, timeout time.Duration) error
RunsCancel cancels a job run and waits until it finishes.
func (JobsAPI) RunsList ¶
func (a JobsAPI) RunsList(r JobRunsListRequest) (jrl JobRunsList, err error)
RunsList returns a page of job runs matching the request.
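For example, fetching the active runs of a job might look like this (a sketch, reusing the api value from the Create example above; it also assumes JobRunsList exposes the page as a Runs slice, since its body is not shown here):

page, err := api.RunsList(jobs.JobRunsListRequest{
    JobID:      123, // hypothetical job ID
    ActiveOnly: true,
    Limit:      25,
})
if err != nil {
    log.Fatalf("list runs: %v", err)
}
for _, run := range page.Runs { // Runs field is an assumption
    log.Printf("run %d is %s", run.RunID, run.State.LifeCycleState)
}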
func (JobsAPI) StopActiveRun ¶ added in v1.22.0
func (a JobsAPI) StopActiveRun(jobID int64, timeout time.Duration) error
type NotebookTask ¶
type NotebookTask struct {
    NotebookPath string `json:"notebook_path"`
    Source string `json:"source,omitempty" tf:"suppress_diff"`
    BaseParameters map[string]string `json:"base_parameters,omitempty"`
    WarehouseId string `json:"warehouse_id,omitempty"`
}
NotebookTask contains the information for notebook jobs
type PipelineTask ¶
type PipelineTask struct {
    PipelineID string `json:"pipeline_id"`
    FullRefresh bool `json:"full_refresh,omitempty"`
}
PipelineTask contains the information for pipeline jobs
type PythonWheelTask ¶
type PythonWheelTask struct {
    EntryPoint string `json:"entry_point,omitempty"`
    PackageName string `json:"package_name,omitempty"`
    Parameters []string `json:"parameters,omitempty"`
    NamedParameters map[string]string `json:"named_parameters,omitempty"`
}
PythonWheelTask contains the information for Python wheel jobs.
type RunJobTask ¶ added in v1.23.0
type RunJobTask struct {
    JobID int64 `json:"job_id"`
    JobParameters map[string]string `json:"job_parameters,omitempty"`
}
RunJobTask contains the information for tasks that run another job.
type RunParameters ¶
type RunParameters struct {
    // a shortcut field to reuse this type for RunNow
    JobID int64 `json:"job_id,omitempty"`
    NotebookParams map[string]string `json:"notebook_params,omitempty"`
    JarParams []string `json:"jar_params,omitempty"`
    PythonParams []string `json:"python_params,omitempty"`
    SparkSubmitParams []string `json:"spark_submit_params,omitempty"`
}
RunParameters is used to pass parameters to tasks.
type RunState ¶
type RunState struct {
    ResultState string `json:"result_state,omitempty"`
    LifeCycleState string `json:"life_cycle_state,omitempty"`
    StateMessage string `json:"state_message,omitempty"`
}
RunState describes the current state of a job run.
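LifeCycleState and ResultState are plain strings, so callers compare them against the values documented by the Databricks Jobs API; a small helper sketch:

// isTerminal reports whether a run has reached a terminal lifecycle state.
// TERMINATED, SKIPPED, and INTERNAL_ERROR are the terminal values
// documented by the Databricks Jobs API; ResultState is only meaningful
// once one of them has been reached.
func isTerminal(s jobs.RunState) bool {
    switch s.LifeCycleState {
    case "TERMINATED", "SKIPPED", "INTERNAL_ERROR":
        return true
    default:
        return false
    }
}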
type SparkJarTask ¶
type SparkJarTask struct {
    JarURI string `json:"jar_uri,omitempty"`
    MainClassName string `json:"main_class_name,omitempty"`
    Parameters []string `json:"parameters,omitempty"`
}
SparkJarTask contains the information for JAR jobs.
type SparkPythonTask ¶
type SparkPythonTask struct {
    PythonFile string `json:"python_file"`
    Source string `json:"source,omitempty" tf:"suppress_diff"`
    Parameters []string `json:"parameters,omitempty"`
}
SparkPythonTask contains the information for Python jobs.
type SparkSubmitTask ¶
type SparkSubmitTask struct {
    Parameters []string `json:"parameters,omitempty"`
}
SparkSubmitTask contains the information for Spark Submit jobs.
type SqlAlertTask ¶ added in v1.2.0
type SqlAlertTask struct {
    AlertID string `json:"alert_id"`
    Subscriptions []SqlSubscription `json:"subscriptions,omitempty"`
    PauseSubscriptions bool `json:"pause_subscriptions,omitempty"`
}
type SqlDashboardTask ¶ added in v1.2.0
type SqlDashboardTask struct {
    DashboardID string `json:"dashboard_id"`
    Subscriptions []SqlSubscription `json:"subscriptions,omitempty"`
    CustomSubject string `json:"custom_subject,omitempty"`
    PauseSubscriptions bool `json:"pause_subscriptions,omitempty"`
}
type SqlFileTask ¶ added in v1.14.2
type SqlQueryTask ¶ added in v1.2.0
type SqlQueryTask struct {
    QueryID string `json:"query_id"`
}
type SqlSubscription ¶ added in v1.21.0
type SqlTask ¶
type SqlTask struct {
    Query *SqlQueryTask `json:"query,omitempty"`
    Dashboard *SqlDashboardTask `json:"dashboard,omitempty"`
    Alert *SqlAlertTask `json:"alert,omitempty"`
    File *SqlFileTask `json:"file,omitempty"`
    WarehouseID string `json:"warehouse_id"`
    Parameters map[string]string `json:"parameters,omitempty"`
}
SqlTask contains the information for a Databricks SQL task. TODO: add validation & conflictsWith
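A sketch of a query-backed SQL task; the warehouse ID, query ID, and parameter are hypothetical. Presumably exactly one of Query, Dashboard, Alert, or File is meant to be set, which is what the pending conflictsWith validation in the TODO would enforce:

task := jobs.SqlTask{
    WarehouseID: "abcdef1234567890",                  // hypothetical SQL warehouse ID
    Query:       &jobs.SqlQueryTask{QueryID: "12345"}, // hypothetical query ID
    Parameters:  map[string]string{"env": "prod"},     // hypothetical parameter
}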
type TableUpdate ¶ added in v1.39.0
type Trigger ¶ added in v1.14.0
type Trigger struct {
    FileArrival *FileArrival `json:"file_arrival,omitempty"`
    TableUpdate *TableUpdate `json:"table_update,omitempty"`
    Periodic *Periodic `json:"periodic,omitempty"`
    PauseStatus string `json:"pause_status,omitempty" tf:"default:UNPAUSED"`
}
type UpdateJobRequest ¶
type UpdateJobRequest struct {
    JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"`
    NewSettings *JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
}
UpdateJobRequest describes a request to update an existing job's settings.