Documentation
¶
Index ¶
- func DataSourceJob() *schema.Resource
- func DataSourceJobs() *schema.Resource
- func ResourceJob() *schema.Resource
- type ContinuousConf
- type CronSchedule
- type DbtTask
- type EmailNotifications
- type GitSource
- type Job
- type JobCluster
- type JobListResponse
- type JobRun
- type JobRunsList
- type JobRunsListRequest
- type JobSettings
- type JobTaskSettings
- type JobsAPI
- func (a JobsAPI) Create(jobSettings JobSettings) (Job, error)
- func (a JobsAPI) Delete(id string) error
- func (a JobsAPI) List() (l []Job, err error)
- func (a JobsAPI) ListByName(name string, expandTasks bool) ([]Job, error)
- func (a JobsAPI) Read(id string) (job Job, err error)
- func (a JobsAPI) Restart(id string, timeout time.Duration) error
- func (a JobsAPI) RunNow(jobID int64) (int64, error)
- func (a JobsAPI) RunsCancel(runID int64, timeout time.Duration) error
- func (a JobsAPI) RunsGet(runID int64) (JobRun, error)
- func (a JobsAPI) RunsList(r JobRunsListRequest) (jrl JobRunsList, err error)
- func (a JobsAPI) Start(jobID int64, timeout time.Duration) error
- func (a JobsAPI) Update(id string, jobSettings JobSettings) error
- type NotebookTask
- type PipelineTask
- type PythonWheelTask
- type RunParameters
- type RunState
- type SparkJarTask
- type SparkPythonTask
- type SparkSubmitTask
- type SqlAlertTask
- type SqlDashboardTask
- type SqlQueryTask
- type SqlTask
- type TaskDependency
- type UpdateJobRequest
- type Webhook
- type WebhookNotifications
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func DataSourceJob ¶ added in v1.4.0
func DataSourceJobs ¶
func ResourceJob ¶
Types ¶
type ContinuousConf ¶ added in v1.10.0
type ContinuousConf struct {
PauseStatus string `json:"pause_status,omitempty" tf:"computed"`
}
type CronSchedule ¶
type CronSchedule struct { QuartzCronExpression string `json:"quartz_cron_expression"` TimezoneID string `json:"timezone_id"` PauseStatus string `json:"pause_status,omitempty" tf:"computed"` }
CronSchedule contains the information for the quartz cron expression, its time zone, and the pause status of the schedule.
type DbtTask ¶
type DbtTask struct { Commands []string `json:"commands"` ProfilesDirectory string `json:"profiles_directory,omitempty"` ProjectDirectory string `json:"project_directory,omitempty"` Schema string `json:"schema,omitempty" tf:"default:default"` Catalog string `json:"catalog,omitempty"` WarehouseId string `json:"warehouse_id,omitempty"` }
DbtTask contains the information for a dbt task. TODO: add validation for non-empty commands.
type EmailNotifications ¶
type EmailNotifications struct { OnStart []string `json:"on_start,omitempty"` OnSuccess []string `json:"on_success,omitempty"` OnFailure []string `json:"on_failure,omitempty"` NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` }
EmailNotifications contains the information for email notifications after job or task run start or completion
type GitSource ¶
type GitSource struct { Url string `json:"git_url" tf:"alias:url"` Provider string `json:"git_provider,omitempty" tf:"alias:provider"` Branch string `json:"git_branch,omitempty" tf:"alias:branch"` Tag string `json:"git_tag,omitempty" tf:"alias:tag"` Commit string `json:"git_commit,omitempty" tf:"alias:commit"` }
GitSource contains the remote Git repository reference (URL, provider, and branch, tag, or commit) for a job. Part of the Jobs + Repos integration preview.
type Job ¶
type Job struct { JobID int64 `json:"job_id,omitempty"` CreatorUserName string `json:"creator_user_name,omitempty"` Settings *JobSettings `json:"settings,omitempty"` CreatedTime int64 `json:"created_time,omitempty"` }
Job contains the information when using a GET request from the Databricks Jobs api
type JobCluster ¶
type JobListResponse ¶ added in v1.6.5
JobListResponse returns a list of all jobs
type JobRun ¶
type JobRun struct { JobID int64 `json:"job_id"` RunID int64 `json:"run_id"` NumberInJob int64 `json:"number_in_job"` StartTime int64 `json:"start_time,omitempty"` State RunState `json:"state"` Trigger string `json:"trigger,omitempty"` RuntType string `json:"run_type,omitempty"` OverridingParameters RunParameters `json:"overriding_parameters,omitempty"` }
JobRun is a simplified representation of the corresponding Jobs API entity.
type JobRunsList ¶
JobRunsList returns a page of job runs
type JobRunsListRequest ¶
type JobRunsListRequest struct { JobID int64 `url:"job_id,omitempty"` ActiveOnly bool `url:"active_only,omitempty"` CompletedOnly bool `url:"completed_only,omitempty"` Offset int32 `url:"offset,omitempty"` Limit int32 `url:"limit,omitempty"` }
JobRunsListRequest describes the filters and pagination parameters for listing job runs.
type JobSettings ¶
type JobSettings struct { Name string `json:"name,omitempty" tf:"default:Untitled"` // BEGIN Jobs API 2.0 ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"` NewCluster *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"` NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"` SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"` SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"` SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"` PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"` PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"` DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"` Libraries []libraries.Library `json:"libraries,omitempty" tf:"slice_set,alias:library"` TimeoutSeconds int32 `json:"timeout_seconds,omitempty"` MaxRetries int32 `json:"max_retries,omitempty"` MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"` RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` // BEGIN Jobs API 2.1 Tasks []JobTaskSettings `json:"tasks,omitempty" tf:"alias:task"` Format string `json:"format,omitempty" tf:"computed"` JobClusters []JobCluster `json:"job_clusters,omitempty" tf:"alias:job_cluster"` // BEGIN Jobs + Repo integration preview GitSource *GitSource `json:"git_source,omitempty"` Schedule *CronSchedule `json:"schedule,omitempty"` Continuous *ContinuousConf `json:"continuous,omitempty"` MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty"` EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` WebhookNotifications *WebhookNotifications `json:"webhook_notifications,omitempty" tf:"suppress_diff"` Tags map[string]string `json:"tags,omitempty"` }
JobSettings contains the information for configuring a job on databricks
type JobTaskSettings ¶
type JobTaskSettings struct { TaskKey string `json:"task_key,omitempty"` Description string `json:"description,omitempty"` DependsOn []TaskDependency `json:"depends_on,omitempty"` ExistingClusterID string `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"` NewCluster *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"` JobClusterKey string `json:"job_cluster_key,omitempty" tf:"group:cluster_type"` Libraries []libraries.Library `json:"libraries,omitempty" tf:"slice_set,alias:library"` NotebookTask *NotebookTask `json:"notebook_task,omitempty" tf:"group:task_type"` SparkJarTask *SparkJarTask `json:"spark_jar_task,omitempty" tf:"group:task_type"` SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"` SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"` PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"` PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"` SqlTask *SqlTask `json:"sql_task,omitempty" tf:"group:task_type"` DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"` EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` TimeoutSeconds int32 `json:"timeout_seconds,omitempty"` MaxRetries int32 `json:"max_retries,omitempty"` MinRetryIntervalMillis int32 `json:"min_retry_interval_millis,omitempty"` RetryOnTimeout bool `json:"retry_on_timeout,omitempty" tf:"computed"` }
type JobsAPI ¶
type JobsAPI struct {
// contains filtered or unexported fields
}
JobsAPI exposes the Jobs API
func NewJobsAPI ¶
NewJobsAPI creates JobsAPI instance from provider meta
func (JobsAPI) Create ¶
func (a JobsAPI) Create(jobSettings JobSettings) (Job, error)
Create creates a job on the workspace given the job settings
func (JobsAPI) ListByName ¶ added in v1.6.5
ListByName lists all jobs matching the given name. If name is empty, it returns all jobs.
func (JobsAPI) RunsCancel ¶
RunsCancel cancels a job run and waits until it finishes.
func (JobsAPI) RunsList ¶
func (a JobsAPI) RunsList(r JobRunsListRequest) (jrl JobRunsList, err error)
RunsList returns a job runs list
type NotebookTask ¶
type NotebookTask struct { NotebookPath string `json:"notebook_path"` Source string `json:"source,omitempty" tf:"suppress_diff"` BaseParameters map[string]string `json:"base_parameters,omitempty"` }
NotebookTask contains the information for notebook jobs
type PipelineTask ¶
type PipelineTask struct {
PipelineID string `json:"pipeline_id"`
}
PipelineTask contains the information for pipeline jobs
type PythonWheelTask ¶
type PythonWheelTask struct { EntryPoint string `json:"entry_point,omitempty"` PackageName string `json:"package_name,omitempty"` Parameters []string `json:"parameters,omitempty"` NamedParameters map[string]string `json:"named_parameters,omitempty"` }
PythonWheelTask contains the information for python wheel jobs
type RunParameters ¶
type RunParameters struct { // a shortcut field to reuse this type for RunNow JobID int64 `json:"job_id,omitempty"` NotebookParams map[string]string `json:"notebook_params,omitempty"` JarParams []string `json:"jar_params,omitempty"` PythonParams []string `json:"python_params,omitempty"` SparkSubmitParams []string `json:"spark_submit_params,omitempty"` }
RunParameters used to pass params to tasks
type RunState ¶
type RunState struct { ResultState string `json:"result_state,omitempty"` LifeCycleState string `json:"life_cycle_state,omitempty"` StateMessage string `json:"state_message,omitempty"` }
RunState of the job
type SparkJarTask ¶
type SparkJarTask struct { JarURI string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` Parameters []string `json:"parameters,omitempty"` }
SparkJarTask contains the information for jar jobs
type SparkPythonTask ¶
type SparkPythonTask struct { PythonFile string `json:"python_file"` Parameters []string `json:"parameters,omitempty"` }
SparkPythonTask contains the information for python jobs
type SparkSubmitTask ¶
type SparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}
SparkSubmitTask contains the information for spark submit jobs
type SqlAlertTask ¶ added in v1.2.0
type SqlAlertTask struct {
AlertID string `json:"alert_id"`
}
type SqlDashboardTask ¶ added in v1.2.0
type SqlDashboardTask struct {
DashboardID string `json:"dashboard_id"`
}
type SqlQueryTask ¶ added in v1.2.0
type SqlQueryTask struct {
QueryID string `json:"query_id"`
}
type SqlTask ¶
type SqlTask struct { Query *SqlQueryTask `json:"query,omitempty"` Dashboard *SqlDashboardTask `json:"dashboard,omitempty"` Alert *SqlAlertTask `json:"alert,omitempty"` WarehouseID string `json:"warehouse_id,omitempty"` Parameters map[string]string `json:"parameters,omitempty"` }
SqlTask contains the information for a DBSQL task. TODO: add validation & conflictsWith.
type TaskDependency ¶
type TaskDependency struct {
TaskKey string `json:"task_key,omitempty"`
}
type UpdateJobRequest ¶
type UpdateJobRequest struct { JobID int64 `json:"job_id,omitempty" url:"job_id,omitempty"` NewSettings *JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"` }
UpdateJobRequest describes a request to replace a job's settings with new ones.
type Webhook ¶ added in v1.6.1
type Webhook struct {
ID string `json:"id"`
}
Webhook contains a reference by id to one of the centrally configured webhooks.
type WebhookNotifications ¶ added in v1.6.1
type WebhookNotifications struct { OnStart []Webhook `json:"on_start,omitempty"` OnSuccess []Webhook `json:"on_success,omitempty"` OnFailure []Webhook `json:"on_failure,omitempty"` }
WebhookNotifications contains the information for webhook notifications sent after job start or completion.
func (*WebhookNotifications) Sort ¶ added in v1.6.1
func (wn *WebhookNotifications) Sort()