jobs

package
v1.52.0

Published: Sep 17, 2024 License: Apache-2.0 Imports: 18 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func Create added in v1.46.0

func Create(createJob jobs.CreateJob, w *databricks.WorkspaceClient, ctx context.Context) (int64, error)
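
For example, a job can be created and read back through these helpers. The following is a minimal sketch, assuming the package import path github.com/databricks/terraform-provider-databricks/jobs, environment-based workspace credentials, and placeholder cluster ID and notebook path.

package main

import (
	"context"
	"log"

	"github.com/databricks/databricks-sdk-go"
	sdkjobs "github.com/databricks/databricks-sdk-go/service/jobs"
	tfjobs "github.com/databricks/terraform-provider-databricks/jobs" // assumed import path
)

func main() {
	ctx := context.Background()

	// WorkspaceClient reads credentials from the environment (DATABRICKS_HOST, ...).
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}

	// Create a single-task notebook job; Create returns the new job ID.
	jobID, err := tfjobs.Create(sdkjobs.CreateJob{
		Name: "example-job",
		Tasks: []sdkjobs.Task{{
			TaskKey:           "main",
			ExistingClusterId: "1234-567890-abcde123", // placeholder cluster ID
			NotebookTask:      &sdkjobs.NotebookTask{NotebookPath: "/Shared/example"},
		}},
	}, w, ctx)
	if err != nil {
		log.Fatal(err)
	}

	// Read returns the job as the SDK's jobs.Job.
	job, err := tfjobs.Read(jobID, w, ctx)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("created job %d (%s)", jobID, job.Settings.Name)
}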

func DataSourceJob added in v1.4.0

func DataSourceJob() common.Resource

func DataSourceJobs

func DataSourceJobs() common.Resource

func Read added in v1.46.0

func Read(jobID int64, w *databricks.WorkspaceClient, ctx context.Context) (job *jobs.Job, err error)

func ResourceJob

func ResourceJob() common.Resource

func Start added in v1.46.0

func Start(jobID int64, timeout time.Duration, w *databricks.WorkspaceClient, ctx context.Context) error

func StopActiveRun added in v1.46.0

func StopActiveRun(jobID int64, timeout time.Duration, w *databricks.WorkspaceClient, ctx context.Context) error

func Update added in v1.46.0

func Update(jobID int64, js JobSettingsResource, w *databricks.WorkspaceClient, ctx context.Context) error
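
A sketch of renaming an existing job and then triggering it, assuming the same imports as the Create example above plus the standard library time package; the new name and the 20-minute timeout are arbitrary example values.

func renameAndRun(ctx context.Context, w *databricks.WorkspaceClient, jobID int64) error {
	// Update replaces the job settings; only Name is set here for brevity.
	settings := tfjobs.JobSettingsResource{
		JobSettings: sdkjobs.JobSettings{Name: "example-job-renamed"},
	}
	if err := tfjobs.Update(jobID, settings, w, ctx); err != nil {
		return err
	}

	// Start kicks off the job with a 20-minute timeout (value is arbitrary).
	return tfjobs.Start(jobID, 20*time.Minute, w, ctx)
}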

Types

type ContinuousConf added in v1.10.0

type ContinuousConf struct {
	PauseStatus string `json:"pause_status,omitempty" tf:"default:UNPAUSED"`
}

type CronSchedule

type CronSchedule struct {
	QuartzCronExpression string `json:"quartz_cron_expression"`
	TimezoneID           string `json:"timezone_id"`
	PauseStatus          string `json:"pause_status,omitempty" tf:"default:UNPAUSED"`
}

CronSchedule contains the information for the quartz cron expression
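
As an illustration, a schedule that runs every day at 02:30 UTC could look like the sketch below; the cron expression and timezone are example values and the import path is assumed.

package example

import tfjobs "github.com/databricks/terraform-provider-databricks/jobs" // assumed import path

// DailySchedule runs every day at 02:30 in UTC.
var DailySchedule = tfjobs.CronSchedule{
	QuartzCronExpression: "0 30 2 * * ?",
	TimezoneID:           "UTC",
	PauseStatus:          "UNPAUSED",
}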

type DbtTask

type DbtTask struct {
	Commands          []string `json:"commands"`
	ProfilesDirectory string   `json:"profiles_directory,omitempty"`
	ProjectDirectory  string   `json:"project_directory,omitempty"`
	Schema            string   `json:"schema,omitempty" tf:"default:default"`
	Catalog           string   `json:"catalog,omitempty"`
	WarehouseId       string   `json:"warehouse_id,omitempty"`
	Source            string   `json:"source,omitempty" tf:"suppress_diff"`
}

DbtTask contains the information for dbt tasks. TODO: add validation for non-empty commands
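
For instance, a dbt task that installs dependencies and then runs the project against a SQL warehouse might be declared as follows (continuing the example package above; the directory, schema, and warehouse ID are placeholders).

// ExampleDbtTask is an illustrative dbt task definition.
var ExampleDbtTask = tfjobs.DbtTask{
	Commands:         []string{"dbt deps", "dbt run"},
	ProjectDirectory: "dbt/project",      // placeholder path
	Schema:           "analytics",        // placeholder schema
	WarehouseId:      "abcdef1234567890", // placeholder warehouse ID
}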

type FileArrival added in v1.14.0

type FileArrival struct {
	URL                           string `json:"url"`
	MinTimeBetweenTriggersSeconds int32  `json:"min_time_between_triggers_seconds,omitempty"`
	WaitAfterLastChangeSeconds    int32  `json:"wait_after_last_change_seconds,omitempty"`
}

type ForEachNestedTask added in v1.37.0

type ForEachNestedTask struct {
	TaskKey     string                `json:"task_key"`
	Description string                `json:"description,omitempty"`
	DependsOn   []jobs.TaskDependency `json:"depends_on,omitempty"`
	RunIf       string                `json:"run_if,omitempty"`

	ExistingClusterID string            `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
	NewCluster        *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
	JobClusterKey     string            `json:"job_cluster_key,omitempty" tf:"group:cluster_type"`
	Libraries         []compute.Library `json:"libraries,omitempty" tf:"alias:library"`

	NotebookTask    *NotebookTask       `json:"notebook_task,omitempty" tf:"group:task_type"`
	SparkJarTask    *SparkJarTask       `json:"spark_jar_task,omitempty" tf:"group:task_type"`
	SparkPythonTask *SparkPythonTask    `json:"spark_python_task,omitempty" tf:"group:task_type"`
	SparkSubmitTask *SparkSubmitTask    `json:"spark_submit_task,omitempty" tf:"group:task_type"`
	PipelineTask    *PipelineTask       `json:"pipeline_task,omitempty" tf:"group:task_type"`
	PythonWheelTask *PythonWheelTask    `json:"python_wheel_task,omitempty" tf:"group:task_type"`
	SqlTask         *SqlTask            `json:"sql_task,omitempty" tf:"group:task_type"`
	DbtTask         *DbtTask            `json:"dbt_task,omitempty" tf:"group:task_type"`
	RunJobTask      *RunJobTask         `json:"run_job_task,omitempty" tf:"group:task_type"`
	ConditionTask   *jobs.ConditionTask `json:"condition_task,omitempty" tf:"group:task_type"`

	EmailNotifications     *jobs.TaskEmailNotifications   `json:"email_notifications,omitempty" tf:"suppress_diff"`
	WebhookNotifications   *jobs.WebhookNotifications     `json:"webhook_notifications,omitempty" tf:"suppress_diff"`
	NotificationSettings   *jobs.TaskNotificationSettings `json:"notification_settings,omitempty"`
	TimeoutSeconds         int32                          `json:"timeout_seconds,omitempty"`
	MaxRetries             int32                          `json:"max_retries,omitempty"`
	MinRetryIntervalMillis int32                          `json:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool                           `json:"retry_on_timeout,omitempty" tf:"computed"`
	Health                 *JobHealth                     `json:"health,omitempty"`

	EnvironmentKey string `json:"environment_key,omitempty"`
}

type ForEachTask added in v1.37.0

type ForEachTask struct {
	Concurrency int               `json:"concurrency,omitempty"`
	Inputs      string            `json:"inputs"`
	Task        ForEachNestedTask `json:"task"`
}

TODO: As TF does not support recursive nesting, limit the nesting depth. Example: https://github.com/hashicorp/terraform-provider-aws/blob/b4a9f93a2b7323202c8904e86cff03d3f2cb006b/internal/service/wafv2/rule_group.go#L110
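
A sketch of a for-each task that runs a notebook once per input value, two at a time (continuing the example package; the task key, cluster key, and notebook path are placeholders).

// ExampleForEach runs the nested notebook task once per input, two at a time.
var ExampleForEach = tfjobs.ForEachTask{
	Concurrency: 2,
	Inputs:      `["alpha", "beta", "gamma"]`,
	Task: tfjobs.ForEachNestedTask{
		TaskKey:       "process-input",
		JobClusterKey: "shared-cluster", // placeholder job cluster key
		NotebookTask:  &tfjobs.NotebookTask{NotebookPath: "/Shared/process"},
	},
}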

type GitSource

type GitSource struct {
	Url       string          `json:"git_url" tf:"alias:url"`
	Provider  string          `json:"git_provider,omitempty" tf:"alias:provider"`
	Branch    string          `json:"git_branch,omitempty" tf:"alias:branch"`
	Tag       string          `json:"git_tag,omitempty" tf:"alias:tag"`
	Commit    string          `json:"git_commit,omitempty" tf:"alias:commit"`
	JobSource *jobs.JobSource `json:"job_source,omitempty"`
}

GitSource contains the Git repository information for a job (Jobs + Repo integration preview)

type Job

type Job struct {
	JobID           int64        `json:"job_id,omitempty"`
	CreatorUserName string       `json:"creator_user_name,omitempty"`
	RunAsUserName   string       `json:"run_as_user_name,omitempty" tf:"computed"`
	Settings        *JobSettings `json:"settings,omitempty"`
	CreatedTime     int64        `json:"created_time,omitempty"`
}

Job contains the information returned by a GET request to the Databricks Jobs API

func (Job) ID

func (j Job) ID() string

ID returns job id as string

type JobCluster

type JobCluster struct {
	JobClusterKey string            `json:"job_cluster_key" tf:"group:cluster_type"`
	NewCluster    *clusters.Cluster `json:"new_cluster" tf:"group:cluster_type"`
}

type JobCreateStruct added in v1.46.0

type JobCreateStruct struct {
	jobs.CreateJob
}

JobCreateStruct wraps jobs.CreateJob because field aliases are needed and the wrapper is used in the create method.

func (JobCreateStruct) Aliases added in v1.46.0

func (JobCreateStruct) Aliases() map[string]map[string]string

func (JobCreateStruct) CustomizeSchema added in v1.46.0

type JobHealth added in v1.22.0

type JobHealth struct {
	Rules []JobHealthRule `json:"rules"`
}

type JobHealthRule added in v1.22.0

type JobHealthRule struct {
	Metric    string `json:"metric"`
	Operation string `json:"op"`
	Value     int64  `json:"value"`
}

type JobListResponse added in v1.6.5

type JobListResponse struct {
	Jobs          []Job  `json:"jobs"`
	HasMore       bool   `json:"has_more,omitempty"`
	NextPageToken string `json:"next_page_token,omitempty"`
	PrevPageToken string `json:"prev_page_token,omitempty"`
}

JobListResponse contains a list of jobs together with pagination tokens

type JobParameter added in v1.23.0

type JobParameter struct {
	Name    string `json:"name,omitempty"`
	Default string `json:"default,omitempty"`
	Value   string `json:"value,omitempty"`
}

Job-level parameter

type JobRun

type JobRun struct {
	JobID       int64    `json:"job_id,omitempty"`
	RunID       int64    `json:"run_id,omitempty"`
	NumberInJob int64    `json:"number_in_job,omitempty"`
	StartTime   int64    `json:"start_time,omitempty"`
	State       RunState `json:"state,omitempty"`
	Trigger     string   `json:"trigger,omitempty"`
	RuntType    string   `json:"run_type,omitempty"`

	OverridingParameters RunParameters  `json:"overriding_parameters,omitempty"`
	JobParameters        []JobParameter `json:"job_parameters,omitempty"`
}

JobRun is a simplified representation of the corresponding API entity

type JobRunAs added in v1.19.0

type JobRunAs struct {
	UserName             string `json:"user_name,omitempty"`
	ServicePrincipalName string `json:"service_principal_name,omitempty"`
}

type JobRunsList

type JobRunsList struct {
	Runs    []JobRun `json:"runs"`
	HasMore bool     `json:"has_more"`
}

JobRunsList contains a page of job runs

type JobRunsListRequest

type JobRunsListRequest struct {
	JobID         int64 `url:"job_id,omitempty"`
	ActiveOnly    bool  `url:"active_only,omitempty"`
	CompletedOnly bool  `url:"completed_only,omitempty"`
	Offset        int32 `url:"offset,omitempty"`
	Limit         int32 `url:"limit,omitempty"`
}

JobRunsListRequest holds the filter and pagination parameters for listing job runs

type JobSettings

type JobSettings struct {
	Name        string `json:"name,omitempty" tf:"default:Untitled"`
	Description string `json:"description,omitempty"`

	// BEGIN Jobs API 2.0
	ExistingClusterID      string            `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
	NewCluster             *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
	NotebookTask           *NotebookTask     `json:"notebook_task,omitempty" tf:"group:task_type"`
	SparkJarTask           *SparkJarTask     `json:"spark_jar_task,omitempty" tf:"group:task_type"`
	SparkPythonTask        *SparkPythonTask  `json:"spark_python_task,omitempty" tf:"group:task_type"`
	SparkSubmitTask        *SparkSubmitTask  `json:"spark_submit_task,omitempty" tf:"group:task_type"`
	PipelineTask           *PipelineTask     `json:"pipeline_task,omitempty" tf:"group:task_type"`
	PythonWheelTask        *PythonWheelTask  `json:"python_wheel_task,omitempty" tf:"group:task_type"`
	DbtTask                *DbtTask          `json:"dbt_task,omitempty" tf:"group:task_type"`
	RunJobTask             *RunJobTask       `json:"run_job_task,omitempty" tf:"group:task_type"`
	Libraries              []compute.Library `json:"libraries,omitempty" tf:"alias:library"`
	TimeoutSeconds         int32             `json:"timeout_seconds,omitempty"`
	MaxRetries             int32             `json:"max_retries,omitempty"`
	MinRetryIntervalMillis int32             `json:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool              `json:"retry_on_timeout,omitempty"`

	// BEGIN Jobs API 2.1
	Tasks        []JobTaskSettings     `json:"tasks,omitempty" tf:"alias:task"`
	Format       string                `json:"format,omitempty" tf:"computed"`
	JobClusters  []JobCluster          `json:"job_clusters,omitempty" tf:"alias:job_cluster"`
	Environments []jobs.JobEnvironment `json:"environments,omitempty" tf:"alias:environment"`

	// BEGIN Jobs + Repo integration preview
	GitSource *GitSource `json:"git_source,omitempty"`

	Schedule             *CronSchedule                 `json:"schedule,omitempty"`
	Continuous           *ContinuousConf               `json:"continuous,omitempty"`
	Trigger              *Trigger                      `json:"trigger,omitempty"`
	MaxConcurrentRuns    int32                         `json:"max_concurrent_runs,omitempty"`
	EmailNotifications   *jobs.JobEmailNotifications   `json:"email_notifications,omitempty" tf:"suppress_diff"`
	WebhookNotifications *jobs.WebhookNotifications    `json:"webhook_notifications,omitempty" tf:"suppress_diff"`
	NotificationSettings *jobs.JobNotificationSettings `json:"notification_settings,omitempty"`
	Tags                 map[string]string             `json:"tags,omitempty"`
	Queue                *jobs.QueueSettings           `json:"queue,omitempty"`
	RunAs                *JobRunAs                     `json:"run_as,omitempty" tf:"computed"`
	Health               *JobHealth                    `json:"health,omitempty"`
	Parameters           []jobs.JobParameterDefinition `json:"parameters,omitempty" tf:"alias:parameter"`
	Deployment           *jobs.JobDeployment           `json:"deployment,omitempty"`
	EditMode             jobs.JobEditMode              `json:"edit_mode,omitempty"`
}

JobSettings contains the information for configuring a job on Databricks
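
A sketch of a two-task job configuration using this legacy JobSettings type, reusing DailySchedule from the CronSchedule example and assuming the databricks-sdk-go jobs package is imported as sdkjobs; all names, paths, and IDs are placeholders.

// ExampleJobSettings describes a two-task job that runs on the daily schedule.
var ExampleJobSettings = tfjobs.JobSettings{
	Name:              "nightly-etl",
	MaxConcurrentRuns: 1,
	Schedule:          &DailySchedule,
	Tasks: []tfjobs.JobTaskSettings{
		{
			TaskKey:           "extract",
			ExistingClusterID: "1234-567890-abcde123", // placeholder cluster ID
			NotebookTask:      &tfjobs.NotebookTask{NotebookPath: "/Shared/extract"},
		},
		{
			TaskKey:           "transform",
			DependsOn:         []sdkjobs.TaskDependency{{TaskKey: "extract"}},
			ExistingClusterID: "1234-567890-abcde123",
			NotebookTask:      &tfjobs.NotebookTask{NotebookPath: "/Shared/transform"},
		},
	},
}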

type JobSettingsResource added in v1.43.0

type JobSettingsResource struct {
	jobs.JobSettings

	// BEGIN Jobs API 2.0
	ExistingClusterID      string               `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
	NewCluster             *compute.ClusterSpec `json:"new_cluster,omitempty" tf:"group:cluster_type"`
	NotebookTask           *NotebookTask        `json:"notebook_task,omitempty" tf:"group:task_type"`
	SparkJarTask           *SparkJarTask        `json:"spark_jar_task,omitempty" tf:"group:task_type"`
	SparkPythonTask        *SparkPythonTask     `json:"spark_python_task,omitempty" tf:"group:task_type"`
	SparkSubmitTask        *SparkSubmitTask     `json:"spark_submit_task,omitempty" tf:"group:task_type"`
	PipelineTask           *PipelineTask        `json:"pipeline_task,omitempty" tf:"group:task_type"`
	PythonWheelTask        *PythonWheelTask     `json:"python_wheel_task,omitempty" tf:"group:task_type"`
	DbtTask                *DbtTask             `json:"dbt_task,omitempty" tf:"group:task_type"`
	RunJobTask             *RunJobTask          `json:"run_job_task,omitempty" tf:"group:task_type"`
	Libraries              []compute.Library    `json:"libraries,omitempty" tf:"alias:library"`
	MaxRetries             int32                `json:"max_retries,omitempty"`
	MinRetryIntervalMillis int32                `json:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool                 `json:"retry_on_timeout,omitempty"`
}

func (JobSettingsResource) Aliases added in v1.43.0

func (JobSettingsResource) Aliases() map[string]map[string]string

func (JobSettingsResource) CustomizeSchema added in v1.43.0

func (JobSettingsResource) MaxDepthForTypes added in v1.43.0

func (JobSettingsResource) MaxDepthForTypes() map[string]int

type JobTaskSettings

type JobTaskSettings struct {
	TaskKey     string                `json:"task_key"`
	Description string                `json:"description,omitempty"`
	DependsOn   []jobs.TaskDependency `json:"depends_on,omitempty"`
	RunIf       string                `json:"run_if,omitempty"`

	ExistingClusterID string            `json:"existing_cluster_id,omitempty" tf:"group:cluster_type"`
	NewCluster        *clusters.Cluster `json:"new_cluster,omitempty" tf:"group:cluster_type"`
	JobClusterKey     string            `json:"job_cluster_key,omitempty" tf:"group:cluster_type"`
	Libraries         []compute.Library `json:"libraries,omitempty" tf:"alias:library"`

	NotebookTask    *NotebookTask       `json:"notebook_task,omitempty" tf:"group:task_type"`
	SparkJarTask    *SparkJarTask       `json:"spark_jar_task,omitempty" tf:"group:task_type"`
	SparkPythonTask *SparkPythonTask    `json:"spark_python_task,omitempty" tf:"group:task_type"`
	SparkSubmitTask *SparkSubmitTask    `json:"spark_submit_task,omitempty" tf:"group:task_type"`
	PipelineTask    *PipelineTask       `json:"pipeline_task,omitempty" tf:"group:task_type"`
	PythonWheelTask *PythonWheelTask    `json:"python_wheel_task,omitempty" tf:"group:task_type"`
	SqlTask         *SqlTask            `json:"sql_task,omitempty" tf:"group:task_type"`
	DbtTask         *DbtTask            `json:"dbt_task,omitempty" tf:"group:task_type"`
	RunJobTask      *RunJobTask         `json:"run_job_task,omitempty" tf:"group:task_type"`
	ConditionTask   *jobs.ConditionTask `json:"condition_task,omitempty" tf:"group:task_type"`
	ForEachTask     *ForEachTask        `json:"for_each_task,omitempty" tf:"group:task_type"`

	EmailNotifications     *jobs.TaskEmailNotifications   `json:"email_notifications,omitempty" tf:"suppress_diff"`
	WebhookNotifications   *jobs.WebhookNotifications     `json:"webhook_notifications,omitempty" tf:"suppress_diff"`
	NotificationSettings   *jobs.TaskNotificationSettings `json:"notification_settings,omitempty"`
	TimeoutSeconds         int32                          `json:"timeout_seconds,omitempty"`
	MaxRetries             int32                          `json:"max_retries,omitempty"`
	MinRetryIntervalMillis int32                          `json:"min_retry_interval_millis,omitempty"`
	RetryOnTimeout         bool                           `json:"retry_on_timeout,omitempty" tf:"computed"`
	Health                 *JobHealth                     `json:"health,omitempty"`

	EnvironmentKey string `json:"environment_key,omitempty"`
}

type JobsAPI

type JobsAPI struct {
	// contains filtered or unexported fields
}

JobsAPI exposes the Jobs API

func NewJobsAPI

func NewJobsAPI(ctx context.Context, m any) JobsAPI

NewJobsAPI creates a JobsAPI instance from the provider meta
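
A sketch of driving the legacy JobsAPI wrapper; m is assumed to be the provider meta that the Terraform provider passes to resources, and the fmt, log, and context imports are assumed alongside the tfjobs alias used above.

func runLatestJobByName(ctx context.Context, m any, name string) error {
	api := tfjobs.NewJobsAPI(ctx, m)

	// Look up jobs by exact name; an empty name would return every job.
	matches, err := api.ListByName(name, false)
	if err != nil {
		return err
	}
	if len(matches) == 0 {
		return fmt.Errorf("no job named %q", name)
	}

	// RunNow triggers the first match and returns the new run ID.
	runID, err := api.RunNow(matches[0].JobID)
	if err != nil {
		return err
	}

	// RunsGet fetches the run's current state.
	run, err := api.RunsGet(runID)
	if err != nil {
		return err
	}
	log.Printf("run %d is %s", runID, run.State.LifeCycleState)
	return nil
}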

func (JobsAPI) Create

func (a JobsAPI) Create(jobSettings JobSettings) (Job, error)

Create creates a job on the workspace given the job settings

func (JobsAPI) Delete

func (a JobsAPI) Delete(id string) error

Delete deletes the job given a job id

func (JobsAPI) List

func (a JobsAPI) List() (l []Job, err error)

List all jobs

func (JobsAPI) ListByName added in v1.6.5

func (a JobsAPI) ListByName(name string, expandTasks bool) ([]Job, error)

ListByName lists all jobs matching the given name. If name is empty, it returns all jobs

func (JobsAPI) Read

func (a JobsAPI) Read(id string) (job Job, err error)

Read returns the job object with all the attributes

func (JobsAPI) RunNow

func (a JobsAPI) RunNow(jobID int64) (int64, error)

RunNow triggers the job and returns a run ID

func (JobsAPI) RunsCancel

func (a JobsAPI) RunsCancel(runID int64, timeout time.Duration) error

RunsCancel cancels a job run and waits until it has finished

func (JobsAPI) RunsGet

func (a JobsAPI) RunsGet(runID int64) (JobRun, error)

RunsGet retrieves information about the run

func (JobsAPI) RunsList

func (a JobsAPI) RunsList(r JobRunsListRequest) (jrl JobRunsList, err error)

RunsList returns a job runs list
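
For instance, listing the currently active runs of a single job could look like the sketch below (continuing the JobsAPI example above; the page size is arbitrary).

func activeRuns(api tfjobs.JobsAPI, jobID int64) ([]tfjobs.JobRun, error) {
	// Fetch up to 25 currently active runs of the given job.
	page, err := api.RunsList(tfjobs.JobRunsListRequest{
		JobID:      jobID,
		ActiveOnly: true,
		Limit:      25,
	})
	if err != nil {
		return nil, err
	}
	return page.Runs, nil
}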

func (JobsAPI) Start

func (a JobsAPI) Start(jobID int64, timeout time.Duration) error

func (JobsAPI) StopActiveRun added in v1.22.0

func (a JobsAPI) StopActiveRun(jobID int64, timeout time.Duration) error

func (JobsAPI) Update

func (a JobsAPI) Update(id string, jobSettings JobSettings) error

Update updates a job given the id and a new set of job settings

type NotebookTask

type NotebookTask struct {
	NotebookPath   string            `json:"notebook_path"`
	Source         string            `json:"source,omitempty" tf:"suppress_diff"`
	BaseParameters map[string]string `json:"base_parameters,omitempty"`
	WarehouseId    string            `json:"warehouse_id,omitempty"`
}

NotebookTask contains the information for notebook jobs

type Periodic added in v1.48.2

type Periodic struct {
	Interval int32  `json:"interval"`
	Unit     string `json:"unit"`
}

type PipelineTask

type PipelineTask struct {
	PipelineID  string `json:"pipeline_id"`
	FullRefresh bool   `json:"full_refresh,omitempty"`
}

PipelineTask contains the information for pipeline jobs

type PythonWheelTask

type PythonWheelTask struct {
	EntryPoint      string            `json:"entry_point,omitempty"`
	PackageName     string            `json:"package_name,omitempty"`
	Parameters      []string          `json:"parameters,omitempty"`
	NamedParameters map[string]string `json:"named_parameters,omitempty"`
}

PythonWheelTask contains the information for python wheel jobs

type RunJobTask added in v1.23.0

type RunJobTask struct {
	JobID         int64             `json:"job_id"`
	JobParameters map[string]string `json:"job_parameters,omitempty"`
}

RunJobTask contains the information for run-job tasks

type RunParameters

type RunParameters struct {
	// a shortcut field to reuse this type for RunNow
	JobID int64 `json:"job_id,omitempty"`

	NotebookParams    map[string]string `json:"notebook_params,omitempty"`
	JarParams         []string          `json:"jar_params,omitempty"`
	PythonParams      []string          `json:"python_params,omitempty"`
	SparkSubmitParams []string          `json:"spark_submit_params,omitempty"`
}

RunParameters is used to pass parameters to tasks

type RunState

type RunState struct {
	ResultState    string `json:"result_state,omitempty"`
	LifeCycleState string `json:"life_cycle_state,omitempty"`
	StateMessage   string `json:"state_message,omitempty"`
}

RunState of the job

type SparkJarTask

type SparkJarTask struct {
	JarURI        string   `json:"jar_uri,omitempty"`
	MainClassName string   `json:"main_class_name,omitempty"`
	Parameters    []string `json:"parameters,omitempty"`
}

SparkJarTask contains the information for jar jobs

type SparkPythonTask

type SparkPythonTask struct {
	PythonFile string   `json:"python_file"`
	Source     string   `json:"source,omitempty" tf:"suppress_diff"`
	Parameters []string `json:"parameters,omitempty"`
}

SparkPythonTask contains the information for python jobs

type SparkSubmitTask

type SparkSubmitTask struct {
	Parameters []string `json:"parameters,omitempty"`
}

SparkSubmitTask contains the information for spark submit jobs

type SqlAlertTask added in v1.2.0

type SqlAlertTask struct {
	AlertID            string            `json:"alert_id"`
	Subscriptions      []SqlSubscription `json:"subscriptions,omitempty"`
	PauseSubscriptions bool              `json:"pause_subscriptions,omitempty"`
}

type SqlDashboardTask added in v1.2.0

type SqlDashboardTask struct {
	DashboardID        string            `json:"dashboard_id"`
	Subscriptions      []SqlSubscription `json:"subscriptions,omitempty"`
	CustomSubject      string            `json:"custom_subject,omitempty"`
	PauseSubscriptions bool              `json:"pause_subscriptions,omitempty"`
}

type SqlFileTask added in v1.14.2

type SqlFileTask struct {
	Path   string `json:"path"`
	Source string `json:"source,omitempty" tf:"suppress_diff"`
}

type SqlQueryTask added in v1.2.0

type SqlQueryTask struct {
	QueryID string `json:"query_id"`
}

type SqlSubscription added in v1.21.0

type SqlSubscription struct {
	UserName      string `json:"user_name,omitempty"`
	DestinationID string `json:"destination_id,omitempty"`
}

type SqlTask

type SqlTask struct {
	Query       *SqlQueryTask     `json:"query,omitempty"`
	Dashboard   *SqlDashboardTask `json:"dashboard,omitempty"`
	Alert       *SqlAlertTask     `json:"alert,omitempty"`
	File        *SqlFileTask      `json:"file,omitempty"`
	WarehouseID string            `json:"warehouse_id"`
	Parameters  map[string]string `json:"parameters,omitempty"`
}

SqlTask contains the information for DBSQL tasks. TODO: add validation & conflictsWith
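
As an example, a SQL task that runs a saved query on a SQL warehouse with one parameter (continuing the example package; the query ID, warehouse ID, and parameter are placeholders).

// ExampleSqlTask runs a saved DBSQL query on a SQL warehouse.
var ExampleSqlTask = tfjobs.SqlTask{
	Query:       &tfjobs.SqlQueryTask{QueryID: "12345678-abcd-ef01-2345-67890abcdef0"}, // placeholder query ID
	WarehouseID: "abcdef1234567890",                                                    // placeholder warehouse ID
	Parameters:  map[string]string{"run_date": "2024-09-17"},
}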

type TableUpdate added in v1.39.0

type TableUpdate struct {
	TableNames                    []string `json:"table_names"`
	Condition                     string   `json:"condition,omitempty"`
	MinTimeBetweenTriggersSeconds int32    `json:"min_time_between_triggers_seconds,omitempty"`
	WaitAfterLastChangeSeconds    int32    `json:"wait_after_last_change_seconds,omitempty"`
}

type Trigger added in v1.14.0

type Trigger struct {
	FileArrival *FileArrival `json:"file_arrival,omitempty"`
	TableUpdate *TableUpdate `json:"table_update,omitempty"`
	Periodic    *Periodic    `json:"periodic,omitempty"`
	PauseStatus string       `json:"pause_status,omitempty" tf:"default:UNPAUSED"`
}
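
A sketch of a file-arrival trigger that fires when new files land in a storage location, throttled to at most one trigger every ten minutes (continuing the example package; the URL is a placeholder).

// ExampleTrigger fires when new files arrive, at most once every 10 minutes.
var ExampleTrigger = tfjobs.Trigger{
	FileArrival: &tfjobs.FileArrival{
		URL:                           "s3://example-bucket/incoming/", // placeholder location
		MinTimeBetweenTriggersSeconds: 600,
	},
	PauseStatus: "UNPAUSED",
}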

type UpdateJobRequest

type UpdateJobRequest struct {
	JobID       int64        `json:"job_id,omitempty" url:"job_id,omitempty"`
	NewSettings *JobSettings `json:"new_settings,omitempty" url:"new_settings,omitempty"`
}

UpdateJobRequest contains the job ID and the new settings used to update a job
