billing

package
v0.41.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 21, 2024 License: Apache-2.0 Imports: 8 Imported by: 6

Documentation

Overview

These APIs allow you to manage Billable Usage, Budgets, Log Delivery, etc.

Index

Examples

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type BillableUsageAPI

type BillableUsageAPI struct {
	// contains filtered or unexported fields
}

This API allows you to download billable usage logs for the specified account and date range. This feature works with all account types.

func NewBillableUsage

func NewBillableUsage(client *client.DatabricksClient) *BillableUsageAPI

func (*BillableUsageAPI) Download

func (a *BillableUsageAPI) Download(ctx context.Context, request DownloadRequest) (*DownloadResponse, error)

Return billable usage logs.

Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see CSV file schema. Note that this method might take multiple minutes to complete.

**Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges.

Example (UsageDownload)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

resp, err := a.BillableUsage.Download(ctx, billing.DownloadRequest{
	StartMonth: "2023-01",
	EndMonth:   "2023-02",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", resp)
Output:

func (*BillableUsageAPI) Impl

Impl returns low-level BillableUsage API implementation Deprecated: use MockBillableUsageInterface instead.

func (*BillableUsageAPI) WithImpl

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks. Deprecated: use MockBillableUsageInterface instead.

type BillableUsageInterface added in v0.29.0

type BillableUsageInterface interface {
	// WithImpl could be used to override low-level API implementations for unit
	// testing purposes with [github.com/golang/mock] or other mocking frameworks.
	// Deprecated: use MockBillableUsageInterface instead.
	WithImpl(impl BillableUsageService) BillableUsageInterface

	// Impl returns low-level BillableUsage API implementation
	// Deprecated: use MockBillableUsageInterface instead.
	Impl() BillableUsageService

	// Return billable usage logs.
	//
	// Returns billable usage logs in CSV format for the specified account and date
	// range. For the data schema, see [CSV file schema]. Note that this method
	// might take multiple minutes to complete.
	//
	// **Warning**: Depending on the queried date range, the number of workspaces in
	// the account, the size of the response and the internet speed of the caller,
	// this API may hit a timeout after a few minutes. If you experience this, try
	// to mitigate by calling the API with narrower date ranges.
	//
	// [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
	Download(ctx context.Context, request DownloadRequest) (*DownloadResponse, error)
}

type BillableUsageService

type BillableUsageService interface {

	// Return billable usage logs.
	//
	// Returns billable usage logs in CSV format for the specified account and
	// date range. For the data schema, see [CSV file schema]. Note that this
	// method might take multiple minutes to complete.
	//
	// **Warning**: Depending on the queried date range, the number of
	// workspaces in the account, the size of the response and the internet
	// speed of the caller, this API may hit a timeout after a few minutes. If
	// you experience this, try to mitigate by calling the API with narrower
	// date ranges.
	//
	// [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
	Download(ctx context.Context, request DownloadRequest) (*DownloadResponse, error)
}

This API allows you to download billable usage logs for the specified account and date range. This feature works with all account types.

type Budget

type Budget struct {
	Alerts []BudgetAlert `json:"alerts,omitempty"`
	// Optional end date of the budget.
	EndDate string `json:"end_date,omitempty"`
	// SQL-like filter expression with workspaceId, SKU and tag. Usage in your
	// account that matches this expression will be counted in this budget.
	//
	// Supported properties on left-hand side of comparison: * `workspaceId` -
	// the ID of the workspace * `sku` - SKU of the cluster, e.g.
	// `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag name'` - tag of
	// the cluster
	//
	// Supported comparison operators: * `=` - equal * `!=` - not equal
	//
	// Supported logical operators: `AND`, `OR`.
	//
	// Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND
	// tag.'my tag'='my value')` * `workspaceId!=456` *
	// `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR sku='PREMIUM_ALL_PURPOSE_COMPUTE'`
	// * `tag.name1='value1' AND tag.name2='value2'`
	Filter string `json:"filter"`
	// Human-readable name of the budget.
	Name string `json:"name"`
	// Period length in years, months, weeks and/or days. Examples: `1 month`,
	// `30 days`, `1 year, 2 months, 1 week, 2 days`
	Period string `json:"period"`
	// Start date of the budget period calculation.
	StartDate string `json:"start_date"`
	// Target amount of the budget per period in USD.
	TargetAmount string `json:"target_amount"`

	ForceSendFields []string `json:"-"`
}

Budget configuration to be created.

func (Budget) MarshalJSON added in v0.23.0

func (s Budget) MarshalJSON() ([]byte, error)

func (*Budget) UnmarshalJSON added in v0.23.0

func (s *Budget) UnmarshalJSON(b []byte) error

type BudgetAlert

type BudgetAlert struct {
	// List of email addresses to be notified when budget percentage is exceeded
	// in the given period.
	EmailNotifications []string `json:"email_notifications,omitempty"`
	// Percentage of the target amount used in the current period that will
	// trigger a notification.
	MinPercentage int `json:"min_percentage,omitempty"`

	ForceSendFields []string `json:"-"`
}

func (BudgetAlert) MarshalJSON added in v0.23.0

func (s BudgetAlert) MarshalJSON() ([]byte, error)

func (*BudgetAlert) UnmarshalJSON added in v0.23.0

func (s *BudgetAlert) UnmarshalJSON(b []byte) error

type BudgetList

type BudgetList struct {
	Budgets []BudgetWithStatus `json:"budgets,omitempty"`
}

List of budgets.

type BudgetWithStatus

type BudgetWithStatus struct {
	Alerts []BudgetAlert `json:"alerts,omitempty"`

	BudgetId string `json:"budget_id,omitempty"`

	CreationTime string `json:"creation_time,omitempty"`
	// Optional end date of the budget.
	EndDate string `json:"end_date,omitempty"`
	// SQL-like filter expression with workspaceId, SKU and tag. Usage in your
	// account that matches this expression will be counted in this budget.
	//
	// Supported properties on left-hand side of comparison: * `workspaceId` -
	// the ID of the workspace * `sku` - SKU of the cluster, e.g.
	// `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag name'` - tag of
	// the cluster
	//
	// Supported comparison operators: * `=` - equal * `!=` - not equal
	//
	// Supported logical operators: `AND`, `OR`.
	//
	// Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND
	// tag.'my tag'='my value')` * `workspaceId!=456` *
	// `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR sku='PREMIUM_ALL_PURPOSE_COMPUTE'`
	// * `tag.name1='value1' AND tag.name2='value2'`
	Filter string `json:"filter,omitempty"`
	// Human-readable name of the budget.
	Name string `json:"name,omitempty"`
	// Period length in years, months, weeks and/or days. Examples: `1 month`,
	// `30 days`, `1 year, 2 months, 1 week, 2 days`
	Period string `json:"period,omitempty"`
	// Start date of the budget period calculation.
	StartDate string `json:"start_date,omitempty"`
	// Amount used in the budget for each day (noncumulative).
	StatusDaily []BudgetWithStatusStatusDailyItem `json:"status_daily,omitempty"`
	// Target amount of the budget per period in USD.
	TargetAmount string `json:"target_amount,omitempty"`

	UpdateTime string `json:"update_time,omitempty"`

	ForceSendFields []string `json:"-"`
}

Budget configuration with daily status.

func (BudgetWithStatus) MarshalJSON added in v0.23.0

func (s BudgetWithStatus) MarshalJSON() ([]byte, error)

func (*BudgetWithStatus) UnmarshalJSON added in v0.23.0

func (s *BudgetWithStatus) UnmarshalJSON(b []byte) error

type BudgetWithStatusStatusDailyItem

type BudgetWithStatusStatusDailyItem struct {
	// Amount used in this day in USD.
	Amount string `json:"amount,omitempty"`

	Date string `json:"date,omitempty"`

	ForceSendFields []string `json:"-"`
}

func (BudgetWithStatusStatusDailyItem) MarshalJSON added in v0.23.0

func (s BudgetWithStatusStatusDailyItem) MarshalJSON() ([]byte, error)

func (*BudgetWithStatusStatusDailyItem) UnmarshalJSON added in v0.23.0

func (s *BudgetWithStatusStatusDailyItem) UnmarshalJSON(b []byte) error

type BudgetsAPI

type BudgetsAPI struct {
	// contains filtered or unexported fields
}

These APIs manage budget configuration including notifications for exceeding a budget for a period. They can also retrieve the status of each budget.

func NewBudgets

func NewBudgets(client *client.DatabricksClient) *BudgetsAPI

func (*BudgetsAPI) BudgetWithStatusNameToBudgetIdMap

func (a *BudgetsAPI) BudgetWithStatusNameToBudgetIdMap(ctx context.Context) (map[string]string, error)

BudgetWithStatusNameToBudgetIdMap calls BudgetsAPI.ListAll and creates a map of results with BudgetWithStatus.Name as key and BudgetWithStatus.BudgetId as value.

Returns an error if there's more than one BudgetWithStatus with the same .Name.

Note: All BudgetWithStatus instances are loaded into memory before creating a map.

This method is generated by Databricks SDK Code Generator.

func (*BudgetsAPI) Create

Create a new budget.

Creates a new budget in the specified account.

Example (Budgets)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

created, err := a.Budgets.Create(ctx, billing.WrappedBudget{
	Budget: billing.Budget{
		Name:         fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
		Filter:       "tag.tagName = 'all'",
		Period:       "1 month",
		StartDate:    "2022-01-01",
		TargetAmount: "100",
		Alerts: []billing.BudgetAlert{billing.BudgetAlert{
			EmailNotifications: []string{"admin@example.com"},
			MinPercentage:      50,
		}},
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", created)

// cleanup

err = a.Budgets.DeleteByBudgetId(ctx, created.Budget.BudgetId)
if err != nil {
	panic(err)
}
Output:

func (*BudgetsAPI) Delete

func (a *BudgetsAPI) Delete(ctx context.Context, request DeleteBudgetRequest) error

Delete budget.

Deletes the budget specified by its UUID.

func (*BudgetsAPI) DeleteByBudgetId

func (a *BudgetsAPI) DeleteByBudgetId(ctx context.Context, budgetId string) error

Delete budget.

Deletes the budget specified by its UUID.

func (*BudgetsAPI) Get

Get budget and its status.

Gets the budget specified by its UUID, including noncumulative status for each day that the budget is configured to include.

Example (Budgets)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

created, err := a.Budgets.Create(ctx, billing.WrappedBudget{
	Budget: billing.Budget{
		Name:         fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
		Filter:       "tag.tagName = 'all'",
		Period:       "1 month",
		StartDate:    "2022-01-01",
		TargetAmount: "100",
		Alerts: []billing.BudgetAlert{billing.BudgetAlert{
			EmailNotifications: []string{"admin@example.com"},
			MinPercentage:      50,
		}},
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", created)

byId, err := a.Budgets.GetByBudgetId(ctx, created.Budget.BudgetId)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", byId)

// cleanup

err = a.Budgets.DeleteByBudgetId(ctx, created.Budget.BudgetId)
if err != nil {
	panic(err)
}
Output:

func (*BudgetsAPI) GetByBudgetId

func (a *BudgetsAPI) GetByBudgetId(ctx context.Context, budgetId string) (*WrappedBudgetWithStatus, error)

Get budget and its status.

Gets the budget specified by its UUID, including noncumulative status for each day that the budget is configured to include.

func (*BudgetsAPI) GetByName

func (a *BudgetsAPI) GetByName(ctx context.Context, name string) (*BudgetWithStatus, error)

GetByName calls BudgetsAPI.BudgetWithStatusNameToBudgetIdMap and returns a single BudgetWithStatus.

Returns an error if there's more than one BudgetWithStatus with the same .Name.

Note: All BudgetWithStatus instances are loaded into memory before returning matching by name.

This method is generated by Databricks SDK Code Generator.

func (*BudgetsAPI) Impl

func (a *BudgetsAPI) Impl() BudgetsService

Impl returns low-level Budgets API implementation Deprecated: use MockBudgetsInterface instead.

func (*BudgetsAPI) List added in v0.24.0

Get all budgets.

Gets all budgets associated with this account, including noncumulative status for each day that the budget is configured to include.

This method is generated by Databricks SDK Code Generator.

func (*BudgetsAPI) ListAll

func (a *BudgetsAPI) ListAll(ctx context.Context) ([]BudgetWithStatus, error)

Get all budgets.

Gets all budgets associated with this account, including noncumulative status for each day that the budget is configured to include.

This method is generated by Databricks SDK Code Generator.

Example (Budgets)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

all, err := a.Budgets.ListAll(ctx)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", all)
Output:

func (*BudgetsAPI) Update

func (a *BudgetsAPI) Update(ctx context.Context, request WrappedBudget) error

Modify budget.

Modifies a budget in this account. Budget properties are completely overwritten.

Example (Budgets)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

created, err := a.Budgets.Create(ctx, billing.WrappedBudget{
	Budget: billing.Budget{
		Name:         fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
		Filter:       "tag.tagName = 'all'",
		Period:       "1 month",
		StartDate:    "2022-01-01",
		TargetAmount: "100",
		Alerts: []billing.BudgetAlert{billing.BudgetAlert{
			EmailNotifications: []string{"admin@example.com"},
			MinPercentage:      50,
		}},
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", created)

err = a.Budgets.Update(ctx, billing.WrappedBudget{
	BudgetId: created.Budget.BudgetId,
	Budget: billing.Budget{
		Name:         fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
		Filter:       "tag.tagName = 'all'",
		Period:       "1 month",
		StartDate:    "2022-01-01",
		TargetAmount: "100",
		Alerts: []billing.BudgetAlert{billing.BudgetAlert{
			EmailNotifications: []string{"admin@example.com"},
			MinPercentage:      70,
		}},
	},
})
if err != nil {
	panic(err)
}

// cleanup

err = a.Budgets.DeleteByBudgetId(ctx, created.Budget.BudgetId)
if err != nil {
	panic(err)
}
Output:

func (*BudgetsAPI) WithImpl

func (a *BudgetsAPI) WithImpl(impl BudgetsService) BudgetsInterface

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks. Deprecated: use MockBudgetsInterface instead.

type BudgetsInterface added in v0.29.0

type BudgetsInterface interface {
	// WithImpl could be used to override low-level API implementations for unit
	// testing purposes with [github.com/golang/mock] or other mocking frameworks.
	// Deprecated: use MockBudgetsInterface instead.
	WithImpl(impl BudgetsService) BudgetsInterface

	// Impl returns low-level Budgets API implementation
	// Deprecated: use MockBudgetsInterface instead.
	Impl() BudgetsService

	// Create a new budget.
	//
	// Creates a new budget in the specified account.
	Create(ctx context.Context, request WrappedBudget) (*WrappedBudgetWithStatus, error)

	// Delete budget.
	//
	// Deletes the budget specified by its UUID.
	Delete(ctx context.Context, request DeleteBudgetRequest) error

	// Delete budget.
	//
	// Deletes the budget specified by its UUID.
	DeleteByBudgetId(ctx context.Context, budgetId string) error

	// Get budget and its status.
	//
	// Gets the budget specified by its UUID, including noncumulative status for
	// each day that the budget is configured to include.
	Get(ctx context.Context, request GetBudgetRequest) (*WrappedBudgetWithStatus, error)

	// Get budget and its status.
	//
	// Gets the budget specified by its UUID, including noncumulative status for
	// each day that the budget is configured to include.
	GetByBudgetId(ctx context.Context, budgetId string) (*WrappedBudgetWithStatus, error)

	// Get all budgets.
	//
	// Gets all budgets associated with this account, including noncumulative status
	// for each day that the budget is configured to include.
	//
	// This method is generated by Databricks SDK Code Generator.
	List(ctx context.Context) listing.Iterator[BudgetWithStatus]

	// Get all budgets.
	//
	// Gets all budgets associated with this account, including noncumulative status
	// for each day that the budget is configured to include.
	//
	// This method is generated by Databricks SDK Code Generator.
	ListAll(ctx context.Context) ([]BudgetWithStatus, error)

	// BudgetWithStatusNameToBudgetIdMap calls [BudgetsAPI.ListAll] and creates a map of results with [BudgetWithStatus].Name as key and [BudgetWithStatus].BudgetId as value.
	//
	// Returns an error if there's more than one [BudgetWithStatus] with the same .Name.
	//
	// Note: All [BudgetWithStatus] instances are loaded into memory before creating a map.
	//
	// This method is generated by Databricks SDK Code Generator.
	BudgetWithStatusNameToBudgetIdMap(ctx context.Context) (map[string]string, error)

	// GetByName calls [BudgetsAPI.BudgetWithStatusNameToBudgetIdMap] and returns a single [BudgetWithStatus].
	//
	// Returns an error if there's more than one [BudgetWithStatus] with the same .Name.
	//
	// Note: All [BudgetWithStatus] instances are loaded into memory before returning matching by name.
	//
	// This method is generated by Databricks SDK Code Generator.
	GetByName(ctx context.Context, name string) (*BudgetWithStatus, error)

	// Modify budget.
	//
	// Modifies a budget in this account. Budget properties are completely
	// overwritten.
	Update(ctx context.Context, request WrappedBudget) error
}

type BudgetsService

type BudgetsService interface {

	// Create a new budget.
	//
	// Creates a new budget in the specified account.
	Create(ctx context.Context, request WrappedBudget) (*WrappedBudgetWithStatus, error)

	// Delete budget.
	//
	// Deletes the budget specified by its UUID.
	Delete(ctx context.Context, request DeleteBudgetRequest) error

	// Get budget and its status.
	//
	// Gets the budget specified by its UUID, including noncumulative status for
	// each day that the budget is configured to include.
	Get(ctx context.Context, request GetBudgetRequest) (*WrappedBudgetWithStatus, error)

	// Get all budgets.
	//
	// Gets all budgets associated with this account, including noncumulative
	// status for each day that the budget is configured to include.
	//
	// Use ListAll() to get all BudgetWithStatus instances
	List(ctx context.Context) (*BudgetList, error)

	// Modify budget.
	//
	// Modifies a budget in this account. Budget properties are completely
	// overwritten.
	Update(ctx context.Context, request WrappedBudget) error
}

These APIs manage budget configuration including notifications for exceeding a budget for a period. They can also retrieve the status of each budget.

type CreateLogDeliveryConfigurationParams

type CreateLogDeliveryConfigurationParams struct {
	// The optional human-readable name of the log delivery configuration.
	// Defaults to empty.
	ConfigName string `json:"config_name,omitempty"`
	// The ID for a method:credentials/create that represents the AWS IAM role
	// with policy and trust relationship as described in the main billable
	// usage documentation page. See [Configure billable usage delivery].
	//
	// [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	CredentialsId string `json:"credentials_id"`
	// The optional delivery path prefix within Amazon S3 storage. Defaults to
	// empty, which means that logs are delivered to the root of the bucket.
	// This must be a valid S3 object key. This must not start or end with a
	// slash character.
	DeliveryPathPrefix string `json:"delivery_path_prefix,omitempty"`
	// This field applies only if `log_type` is `BILLABLE_USAGE`. This is the
	// optional start month and year for delivery, specified in `YYYY-MM`
	// format. Defaults to current year and month. `BILLABLE_USAGE` logs are not
	// available for usage before March 2019 (`2019-03`).
	DeliveryStartTime string `json:"delivery_start_time,omitempty"`
	// Log delivery type. Supported values are:
	//
	// * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the
	// CSV schema, see the [View billable usage].
	//
	// * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema,
	// see [Configure audit logging]
	//
	// [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
	// [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	LogType LogType `json:"log_type"`
	// The file type of log delivery.
	//
	// * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the
	// CSV (comma-separated values) format is supported. For the schema, see the
	// [View billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be
	// `JSON`. Only the JSON (JavaScript Object Notation) format is supported.
	// For the schema, see the [Configuring audit logs].
	//
	// [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
	OutputFormat OutputFormat `json:"output_format"`
	// Status of log delivery configuration. Set to `ENABLED` (enabled) or
	// `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable
	// the configuration](#operation/patch-log-delivery-config-status) later.
	// Deletion of a configuration is not supported, so disable a log delivery
	// configuration that is no longer needed.
	Status LogDeliveryConfigStatus `json:"status,omitempty"`
	// The ID for a method:storage/create that represents the S3 bucket with
	// bucket policy as described in the main billable usage documentation page.
	// See [Configure billable usage delivery].
	//
	// [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	StorageConfigurationId string `json:"storage_configuration_id"`
	// Optional filter that specifies workspace IDs to deliver logs for. By
	// default the workspace filter is empty and log delivery applies at the
	// account level, delivering workspace-level logs for all workspaces in your
	// account, plus account level logs. You can optionally set this field to an
	// array of workspace IDs (each one is an `int64`) to which log delivery
	// should apply, in which case only workspace-level logs relating to the
	// specified workspaces are delivered. If you plan to use different log
	// delivery configurations for different workspaces, set this field
	// explicitly. Be aware that delivery configurations mentioning specific
	// workspaces won't apply to new workspaces created in the future, and
	// delivery won't include account level logs. For some types of Databricks
	// deployments there is only one workspace per account ID, so this field is
	// unnecessary.
	WorkspaceIdsFilter []int64 `json:"workspace_ids_filter,omitempty"`

	ForceSendFields []string `json:"-"`
}

func (CreateLogDeliveryConfigurationParams) MarshalJSON added in v0.23.0

func (s CreateLogDeliveryConfigurationParams) MarshalJSON() ([]byte, error)

func (*CreateLogDeliveryConfigurationParams) UnmarshalJSON added in v0.23.0

func (s *CreateLogDeliveryConfigurationParams) UnmarshalJSON(b []byte) error

type DeleteBudgetRequest

type DeleteBudgetRequest struct {
	// Budget ID
	BudgetId string `json:"-" url:"-"`
}

Delete budget

type DeleteResponse added in v0.34.0

type DeleteResponse struct {
}

type DeliveryStatus

type DeliveryStatus string

The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.

const DeliveryStatusCreated DeliveryStatus = `CREATED`

There were no log delivery attempts since the config was created.

const DeliveryStatusNotFound DeliveryStatus = `NOT_FOUND`

The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.

const DeliveryStatusSucceeded DeliveryStatus = `SUCCEEDED`

The latest attempt of log delivery has succeeded completely.

const DeliveryStatusSystemFailure DeliveryStatus = `SYSTEM_FAILURE`

The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon.

const DeliveryStatusUserFailure DeliveryStatus = `USER_FAILURE`

The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage.

func (*DeliveryStatus) Set added in v0.2.0

func (f *DeliveryStatus) Set(v string) error

Set raw string value and validate it against allowed values

func (*DeliveryStatus) String added in v0.2.0

func (f *DeliveryStatus) String() string

String representation for fmt.Print

func (*DeliveryStatus) Type added in v0.2.0

func (f *DeliveryStatus) Type() string

Type always returns DeliveryStatus to satisfy [pflag.Value] interface

type DownloadRequest

type DownloadRequest struct {
	// Format: `YYYY-MM`. Last month to return billable usage logs for. This
	// field is required.
	EndMonth string `json:"-" url:"end_month"`
	// Specify whether to include personally identifiable information in the
	// billable usage logs, for example the email addresses of cluster creators.
	// Handle this information with care. Defaults to false.
	PersonalData bool `json:"-" url:"personal_data,omitempty"`
	// Format: `YYYY-MM`. First month to return billable usage logs for. This
	// field is required.
	StartMonth string `json:"-" url:"start_month"`

	ForceSendFields []string `json:"-"`
}

Return billable usage logs

func (DownloadRequest) MarshalJSON added in v0.23.0

func (s DownloadRequest) MarshalJSON() ([]byte, error)

func (*DownloadRequest) UnmarshalJSON added in v0.23.0

func (s *DownloadRequest) UnmarshalJSON(b []byte) error

type DownloadResponse added in v0.23.0

type DownloadResponse struct {
	Contents io.ReadCloser `json:"-"`
}

type GetBudgetRequest

type GetBudgetRequest struct {
	// Budget ID
	BudgetId string `json:"-" url:"-"`
}

Get budget and its status

type GetLogDeliveryRequest

type GetLogDeliveryRequest struct {
	// Databricks log delivery configuration ID
	LogDeliveryConfigurationId string `json:"-" url:"-"`
}

Get log delivery configuration

type ListLogDeliveryRequest

type ListLogDeliveryRequest struct {
	// Filter by credential configuration ID.
	CredentialsId string `json:"-" url:"credentials_id,omitempty"`
	// Filter by status `ENABLED` or `DISABLED`.
	Status LogDeliveryConfigStatus `json:"-" url:"status,omitempty"`
	// Filter by storage configuration ID.
	StorageConfigurationId string `json:"-" url:"storage_configuration_id,omitempty"`

	ForceSendFields []string `json:"-"`
}

Get all log delivery configurations

func (ListLogDeliveryRequest) MarshalJSON added in v0.23.0

func (s ListLogDeliveryRequest) MarshalJSON() ([]byte, error)

func (*ListLogDeliveryRequest) UnmarshalJSON added in v0.23.0

func (s *ListLogDeliveryRequest) UnmarshalJSON(b []byte) error

type LogDeliveryAPI

type LogDeliveryAPI struct {
	// contains filtered or unexported fields
}

These APIs manage log delivery configurations for this account. The two supported log types for this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all account ID types.

Log delivery works with all account types. However, if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account, you can optionally configure different storage destinations for each workspace. Log delivery status is also provided to know the latest status of log delivery attempts. The high-level flow of billable usage delivery:

1. **Create storage**: In AWS, create a new AWS S3 bucket with a specific bucket policy. Using Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details, including the required IAM role policies and trust relationship, see Billable usage log delivery. Using Databricks APIs, call the Account API to create a [credential configuration object](:method:Credentials/Create) that uses the IAM role's ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from previous steps. You can specify if the logs should include all events of that log type in your account (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery applies to all current and future workspaces plus account level logs, while workspace level log delivery solely delivers logs related to the specified workspaces. You can create multiple types of delivery configurations per account.

For billable usage delivery: * For more information about billable usage logs, see Billable usage log delivery. For the CSV schema, see the Usage page. * The delivery location is `<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path prefix you set up during log delivery configuration. Files are named `workspaceId=<workspace-id>-usageMonth=<month>.csv`. * All billable usage logs apply to specific workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an _account level_ delivery configuration that delivers logs for all current and future workspaces in your account. * The files are delivered daily by overwriting the month's CSV file for each workspace.

For audit log delivery: * For more information about audit log delivery, see Audit log delivery, which includes information about the used JSON schema. * The delivery location is `<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`. Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for those workspaces are delivered. If the log delivery configuration applies to the entire account (_account level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all workspaces in the account as well as account-level audit logs. See Audit log delivery for details. * Auditable events are typically available in logs within 15 minutes.

func NewLogDelivery

func NewLogDelivery(client *client.DatabricksClient) *LogDeliveryAPI

func (*LogDeliveryAPI) Create

Create a new log delivery configuration.

Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).

For full details, including the required IAM role policies and bucket policies, see Deliver and access billable usage logs or Configure audit logging.

**Note**: There is a limit on the number of log delivery configurations available per account (each limit applies separately to each log type including billable usage and audit logs). You can create a maximum of two enabled account-level delivery configurations (configurations without a workspace filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per workspace for each log type, which means that the same workspace ID can occur in the workspace filter for no more than two delivery configurations per log type.

You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery configuration](:method:LogDelivery/PatchStatus)).

Example (LogDelivery)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

bucket, err := a.Storage.Create(ctx, provisioning.CreateStorageConfigurationRequest{
	StorageConfigurationName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
	RootBucketInfo: provisioning.RootBucketInfo{
		BucketName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", bucket)

creds, err := a.Credentials.Create(ctx, provisioning.CreateCredentialRequest{
	CredentialsName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
	AwsCredentials: provisioning.CreateCredentialAwsCredentials{
		StsRole: &provisioning.CreateCredentialStsRole{
			RoleArn: os.Getenv("TEST_LOGDELIVERY_ARN"),
		},
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", creds)

created, err := a.LogDelivery.Create(ctx, billing.WrappedCreateLogDeliveryConfiguration{
	LogDeliveryConfiguration: &billing.CreateLogDeliveryConfigurationParams{
		ConfigName:             fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
		CredentialsId:          creds.CredentialsId,
		StorageConfigurationId: bucket.StorageConfigurationId,
		LogType:                billing.LogTypeAuditLogs,
		OutputFormat:           billing.OutputFormatJson,
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", created)

// cleanup

err = a.Storage.DeleteByStorageConfigurationId(ctx, bucket.StorageConfigurationId)
if err != nil {
	panic(err)
}
err = a.Credentials.DeleteByCredentialsId(ctx, creds.CredentialsId)
if err != nil {
	panic(err)
}
err = a.LogDelivery.PatchStatus(ctx, billing.UpdateLogDeliveryConfigurationStatusRequest{
	LogDeliveryConfigurationId: created.LogDeliveryConfiguration.ConfigId,
	Status:                     billing.LogDeliveryConfigStatusDisabled,
})
if err != nil {
	panic(err)
}
Output:

func (*LogDeliveryAPI) Get

Get log delivery configuration.

Gets a Databricks log delivery configuration object for an account, both specified by ID.

Example (LogDelivery)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

bucket, err := a.Storage.Create(ctx, provisioning.CreateStorageConfigurationRequest{
	StorageConfigurationName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
	RootBucketInfo: provisioning.RootBucketInfo{
		BucketName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", bucket)

creds, err := a.Credentials.Create(ctx, provisioning.CreateCredentialRequest{
	CredentialsName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
	AwsCredentials: provisioning.CreateCredentialAwsCredentials{
		StsRole: &provisioning.CreateCredentialStsRole{
			RoleArn: os.Getenv("TEST_LOGDELIVERY_ARN"),
		},
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", creds)

created, err := a.LogDelivery.Create(ctx, billing.WrappedCreateLogDeliveryConfiguration{
	LogDeliveryConfiguration: &billing.CreateLogDeliveryConfigurationParams{
		ConfigName:             fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
		CredentialsId:          creds.CredentialsId,
		StorageConfigurationId: bucket.StorageConfigurationId,
		LogType:                billing.LogTypeAuditLogs,
		OutputFormat:           billing.OutputFormatJson,
	},
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", created)

byId, err := a.LogDelivery.GetByLogDeliveryConfigurationId(ctx, created.LogDeliveryConfiguration.ConfigId)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", byId)

// cleanup

err = a.Storage.DeleteByStorageConfigurationId(ctx, bucket.StorageConfigurationId)
if err != nil {
	panic(err)
}
err = a.Credentials.DeleteByCredentialsId(ctx, creds.CredentialsId)
if err != nil {
	panic(err)
}
err = a.LogDelivery.PatchStatus(ctx, billing.UpdateLogDeliveryConfigurationStatusRequest{
	LogDeliveryConfigurationId: created.LogDeliveryConfiguration.ConfigId,
	Status:                     billing.LogDeliveryConfigStatusDisabled,
})
if err != nil {
	panic(err)
}
Output:

func (*LogDeliveryAPI) GetByConfigName

func (a *LogDeliveryAPI) GetByConfigName(ctx context.Context, name string) (*LogDeliveryConfiguration, error)

GetByConfigName calls LogDeliveryAPI.LogDeliveryConfigurationConfigNameToConfigIdMap and returns a single LogDeliveryConfiguration.

Returns an error if there's more than one LogDeliveryConfiguration with the same .ConfigName.

Note: All LogDeliveryConfiguration instances are loaded into memory before returning matching by name.

This method is generated by Databricks SDK Code Generator.

func (*LogDeliveryAPI) GetByLogDeliveryConfigurationId

func (a *LogDeliveryAPI) GetByLogDeliveryConfigurationId(ctx context.Context, logDeliveryConfigurationId string) (*WrappedLogDeliveryConfiguration, error)

Get log delivery configuration.

Gets a Databricks log delivery configuration object for an account, both specified by ID.

func (*LogDeliveryAPI) Impl

Impl returns low-level LogDelivery API implementation Deprecated: use MockLogDeliveryInterface instead.

func (*LogDeliveryAPI) List added in v0.24.0

Get all log delivery configurations.

Gets all Databricks log delivery configurations associated with an account specified by ID.

This method is generated by Databricks SDK Code Generator.

func (*LogDeliveryAPI) ListAll

Get all log delivery configurations.

Gets all Databricks log delivery configurations associated with an account specified by ID.

This method is generated by Databricks SDK Code Generator.

Example (LogDelivery)
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

all, err := a.LogDelivery.ListAll(ctx, billing.ListLogDeliveryRequest{})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", all)
Output:

func (*LogDeliveryAPI) LogDeliveryConfigurationConfigNameToConfigIdMap

func (a *LogDeliveryAPI) LogDeliveryConfigurationConfigNameToConfigIdMap(ctx context.Context, request ListLogDeliveryRequest) (map[string]string, error)

LogDeliveryConfigurationConfigNameToConfigIdMap calls LogDeliveryAPI.ListAll and creates a map of results with LogDeliveryConfiguration.ConfigName as key and LogDeliveryConfiguration.ConfigId as value.

Returns an error if there's more than one LogDeliveryConfiguration with the same .ConfigName.

Note: All LogDeliveryConfiguration instances are loaded into memory before creating a map.

This method is generated by Databricks SDK Code Generator.

func (*LogDeliveryAPI) PatchStatus

Enable or disable log delivery configuration.

Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create).

func (*LogDeliveryAPI) WithImpl

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks. Deprecated: use MockLogDeliveryInterface instead.

type LogDeliveryConfigStatus

// LogDeliveryConfigStatus is the enabled/disabled state of a log delivery
// configuration. Since configurations cannot be deleted, disabling is the
// way to retire one that is no longer needed.
type LogDeliveryConfigStatus string

Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.

// LogDeliveryConfigStatusDisabled marks a configuration as disabled.
const LogDeliveryConfigStatusDisabled LogDeliveryConfigStatus = `DISABLED`

// LogDeliveryConfigStatusEnabled marks a configuration as enabled (the default).
const LogDeliveryConfigStatusEnabled LogDeliveryConfigStatus = `ENABLED`

func (*LogDeliveryConfigStatus) Set added in v0.2.0

Set raw string value and validate it against allowed values

func (*LogDeliveryConfigStatus) String added in v0.2.0

func (f *LogDeliveryConfigStatus) String() string

String representation for fmt.Print

func (*LogDeliveryConfigStatus) Type added in v0.2.0

func (f *LogDeliveryConfigStatus) Type() string

Type always returns LogDeliveryConfigStatus to satisfy [pflag.Value] interface

type LogDeliveryConfiguration

// LogDeliveryConfiguration describes a single Databricks log delivery
// configuration: the log type being delivered, the AWS credential and S3
// storage configuration objects used for delivery, an optional workspace
// filter, and the configuration's current status.
type LogDeliveryConfiguration struct {
	// The Databricks account ID that hosts the log delivery configuration.
	AccountId string `json:"account_id,omitempty"`
	// Databricks log delivery configuration ID.
	ConfigId string `json:"config_id,omitempty"`
	// The optional human-readable name of the log delivery configuration.
	// Defaults to empty.
	ConfigName string `json:"config_name,omitempty"`
	// Time in epoch milliseconds when the log delivery configuration was
	// created.
	CreationTime int64 `json:"creation_time,omitempty"`
	// The ID of a [credential configuration object](:method:Credentials/Create)
	// that represents the AWS IAM role with policy and trust relationship as
	// described in the main billable usage documentation page. See [Configure
	// billable usage delivery].
	//
	// [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	CredentialsId string `json:"credentials_id,omitempty"`
	// The optional delivery path prefix within Amazon S3 storage. Defaults to
	// empty, which means that logs are delivered to the root of the bucket.
	// This must be a valid S3 object key. This must not start or end with a
	// slash character.
	DeliveryPathPrefix string `json:"delivery_path_prefix,omitempty"`
	// This field applies only if `log_type` is `BILLABLE_USAGE`. This is the
	// optional start month and year for delivery, specified in `YYYY-MM`
	// format. Defaults to current year and month. `BILLABLE_USAGE` logs are not
	// available for usage before March 2019 (`2019-03`).
	DeliveryStartTime string `json:"delivery_start_time,omitempty"`
	// Databricks log delivery status.
	LogDeliveryStatus *LogDeliveryStatus `json:"log_delivery_status,omitempty"`
	// Log delivery type. Supported values are:
	//
	// * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the
	// CSV schema, see the [View billable usage].
	//
	// * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema,
	// see [Configure audit logging]
	//
	// [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
	// [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	LogType LogType `json:"log_type,omitempty"`
	// The file type of log delivery.
	//
	// * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the
	// CSV (comma-separated values) format is supported. For the schema, see the
	// [View billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be
	// `JSON`. Only the JSON (JavaScript Object Notation) format is supported.
	// For the schema, see the [Configuring audit logs].
	//
	// [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
	OutputFormat OutputFormat `json:"output_format,omitempty"`
	// Status of log delivery configuration. Set to `ENABLED` (enabled) or
	// `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable
	// the configuration](#operation/patch-log-delivery-config-status) later.
	// Deletion of a configuration is not supported, so disable a log delivery
	// configuration that is no longer needed.
	Status LogDeliveryConfigStatus `json:"status,omitempty"`
	// The ID of a [storage configuration object](:method:Storage/Create) that
	// represents the S3 bucket with bucket policy as described in the main
	// billable usage documentation page. See [Configure billable usage
	// delivery].
	//
	// [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	StorageConfigurationId string `json:"storage_configuration_id,omitempty"`
	// Time in epoch milliseconds when the log delivery configuration was
	// updated.
	UpdateTime int64 `json:"update_time,omitempty"`
	// Optional filter that specifies workspace IDs to deliver logs for. By
	// default the workspace filter is empty and log delivery applies at the
	// account level, delivering workspace-level logs for all workspaces in your
	// account, plus account level logs. You can optionally set this field to an
	// array of workspace IDs (each one is an `int64`) to which log delivery
	// should apply, in which case only workspace-level logs relating to the
	// specified workspaces are delivered. If you plan to use different log
	// delivery configurations for different workspaces, set this field
	// explicitly. Be aware that delivery configurations mentioning specific
	// workspaces won't apply to new workspaces created in the future, and
	// delivery won't include account level logs. For some types of Databricks
	// deployments there is only one workspace per account ID, so this field is
	// unnecessary.
	WorkspaceIdsFilter []int64 `json:"workspace_ids_filter,omitempty"`

	// ForceSendFields lists fields to serialize even when they hold their
	// zero value (otherwise suppressed by `omitempty`).
	ForceSendFields []string `json:"-"`
}

func (LogDeliveryConfiguration) MarshalJSON added in v0.23.0

func (s LogDeliveryConfiguration) MarshalJSON() ([]byte, error)

func (*LogDeliveryConfiguration) UnmarshalJSON added in v0.23.0

func (s *LogDeliveryConfiguration) UnmarshalJSON(b []byte) error

type LogDeliveryInterface added in v0.29.0

// LogDeliveryInterface is the high-level interface for the LogDelivery API.
// It combines the service operations (Create, Get, List, PatchStatus) with
// convenience methods generated by the Databricks SDK Code Generator, such as
// by-ID and by-name lookups.
type LogDeliveryInterface interface {
	// WithImpl could be used to override low-level API implementations for unit
	// testing purposes with [github.com/golang/mock] or other mocking frameworks.
	// Deprecated: use MockLogDeliveryInterface instead.
	WithImpl(impl LogDeliveryService) LogDeliveryInterface

	// Impl returns low-level LogDelivery API implementation
	// Deprecated: use MockLogDeliveryInterface instead.
	Impl() LogDeliveryService

	// Create a new log delivery configuration.
	//
	// Creates a new Databricks log delivery configuration to enable delivery of the
	// specified type of logs to your storage location. This requires that you
	// already created a [credential object](:method:Credentials/Create) (which
	// encapsulates a cross-account service IAM role) and a [storage configuration
	// object](:method:Storage/Create) (which encapsulates an S3 bucket).
	//
	// For full details, including the required IAM role policies and bucket
	// policies, see [Deliver and access billable usage logs] or [Configure audit
	// logging].
	//
	// **Note**: There is a limit on the number of log delivery configurations
	// available per account (each limit applies separately to each log type
	// including billable usage and audit logs). You can create a maximum of two
	// enabled account-level delivery configurations (configurations without a
	// workspace filter) per type. Additionally, you can create two enabled
	// workspace-level delivery configurations per workspace for each log type,
	// which means that the same workspace ID can occur in the workspace filter for
	// no more than two delivery configurations per log type.
	//
	// You cannot delete a log delivery configuration, but you can disable it (see
	// [Enable or disable log delivery
	// configuration](:method:LogDelivery/PatchStatus)).
	//
	// [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	Create(ctx context.Context, request WrappedCreateLogDeliveryConfiguration) (*WrappedLogDeliveryConfiguration, error)

	// Get log delivery configuration.
	//
	// Gets a Databricks log delivery configuration object for an account, both
	// specified by ID.
	Get(ctx context.Context, request GetLogDeliveryRequest) (*WrappedLogDeliveryConfiguration, error)

	// Get log delivery configuration.
	//
	// Gets a Databricks log delivery configuration object for an account, both
	// specified by ID.
	GetByLogDeliveryConfigurationId(ctx context.Context, logDeliveryConfigurationId string) (*WrappedLogDeliveryConfiguration, error)

	// Get all log delivery configurations.
	//
	// Gets all Databricks log delivery configurations associated with an account
	// specified by ID.
	//
	// This method is generated by Databricks SDK Code Generator.
	List(ctx context.Context, request ListLogDeliveryRequest) listing.Iterator[LogDeliveryConfiguration]

	// Get all log delivery configurations.
	//
	// Gets all Databricks log delivery configurations associated with an account
	// specified by ID.
	//
	// This method is generated by Databricks SDK Code Generator.
	ListAll(ctx context.Context, request ListLogDeliveryRequest) ([]LogDeliveryConfiguration, error)

	// LogDeliveryConfigurationConfigNameToConfigIdMap calls [LogDeliveryAPI.ListAll] and creates a map of results with [LogDeliveryConfiguration].ConfigName as key and [LogDeliveryConfiguration].ConfigId as value.
	//
	// Returns an error if there's more than one [LogDeliveryConfiguration] with the same .ConfigName.
	//
	// Note: All [LogDeliveryConfiguration] instances are loaded into memory before creating a map.
	//
	// This method is generated by Databricks SDK Code Generator.
	LogDeliveryConfigurationConfigNameToConfigIdMap(ctx context.Context, request ListLogDeliveryRequest) (map[string]string, error)

	// GetByConfigName calls [LogDeliveryAPI.LogDeliveryConfigurationConfigNameToConfigIdMap] and returns a single [LogDeliveryConfiguration].
	//
	// Returns an error if there's more than one [LogDeliveryConfiguration] with the same .ConfigName.
	//
	// Note: All [LogDeliveryConfiguration] instances are loaded into memory before returning matching by name.
	//
	// This method is generated by Databricks SDK Code Generator.
	GetByConfigName(ctx context.Context, name string) (*LogDeliveryConfiguration, error)

	// Enable or disable log delivery configuration.
	//
	// Enables or disables a log delivery configuration. Deletion of delivery
	// configurations is not supported, so disable log delivery configurations that
	// are no longer needed. Note that you can't re-enable a delivery configuration
	// if this would violate the delivery configuration limits described under
	// [Create log delivery](:method:LogDelivery/Create).
	PatchStatus(ctx context.Context, request UpdateLogDeliveryConfigurationStatusRequest) error
}

type LogDeliveryService

// LogDeliveryService is the low-level service contract for the LogDelivery
// API. It exposes only the raw operations (Create, Get, List, PatchStatus);
// higher-level conveniences live on LogDeliveryInterface.
type LogDeliveryService interface {

	// Create a new log delivery configuration.
	//
	// Creates a new Databricks log delivery configuration to enable delivery of
	// the specified type of logs to your storage location. This requires that
	// you already created a [credential object](:method:Credentials/Create)
	// (which encapsulates a cross-account service IAM role) and a [storage
	// configuration object](:method:Storage/Create) (which encapsulates an S3
	// bucket).
	//
	// For full details, including the required IAM role policies and bucket
	// policies, see [Deliver and access billable usage logs] or [Configure
	// audit logging].
	//
	// **Note**: There is a limit on the number of log delivery configurations
	// available per account (each limit applies separately to each log type
	// including billable usage and audit logs). You can create a maximum of two
	// enabled account-level delivery configurations (configurations without a
	// workspace filter) per type. Additionally, you can create two enabled
	// workspace-level delivery configurations per workspace for each log type,
	// which means that the same workspace ID can occur in the workspace filter
	// for no more than two delivery configurations per log type.
	//
	// You cannot delete a log delivery configuration, but you can disable it
	// (see [Enable or disable log delivery
	// configuration](:method:LogDelivery/PatchStatus)).
	//
	// [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
	// [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
	Create(ctx context.Context, request WrappedCreateLogDeliveryConfiguration) (*WrappedLogDeliveryConfiguration, error)

	// Get log delivery configuration.
	//
	// Gets a Databricks log delivery configuration object for an account, both
	// specified by ID.
	Get(ctx context.Context, request GetLogDeliveryRequest) (*WrappedLogDeliveryConfiguration, error)

	// Get all log delivery configurations.
	//
	// Gets all Databricks log delivery configurations associated with an
	// account specified by ID.
	//
	// Use ListAll() to get all LogDeliveryConfiguration instances
	List(ctx context.Context, request ListLogDeliveryRequest) (*WrappedLogDeliveryConfigurations, error)

	// Enable or disable log delivery configuration.
	//
	// Enables or disables a log delivery configuration. Deletion of delivery
	// configurations is not supported, so disable log delivery configurations
	// that are no longer needed. Note that you can't re-enable a delivery
	// configuration if this would violate the delivery configuration limits
	// described under [Create log delivery](:method:LogDelivery/Create).
	PatchStatus(ctx context.Context, request UpdateLogDeliveryConfigurationStatusRequest) error
}

These APIs manage log delivery configurations for this account. The two supported log types for this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all account ID types.

Log delivery works with all account types. However, if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account, you can optionally configure different storage destinations for each workspace. Log delivery status is also provided to know the latest status of log delivery attempts. The high-level flow of billable usage delivery:

1. **Create storage**: In AWS, create a new AWS S3 bucket with a specific bucket policy. Using Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details, including the required IAM role policies and trust relationship, see Billable usage log delivery. Using Databricks APIs, call the Account API to create a [credential configuration object](:method:Credentials/Create) that uses the IAM role's ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from previous steps. You can specify if the logs should include all events of that log type in your account (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery applies to all current and future workspaces plus account level logs, while workspace level log delivery solely delivers logs related to the specified workspaces. You can create multiple types of delivery configurations per account.

For billable usage delivery: * For more information about billable usage logs, see Billable usage log delivery. For the CSV schema, see the Usage page. * The delivery location is `<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path prefix you set up during log delivery configuration. Files are named `workspaceId=<workspace-id>-usageMonth=<month>.csv`. * All billable usage logs apply to specific workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an _account level_ delivery configuration that delivers logs for all current and future workspaces in your account. * The files are delivered daily by overwriting the month's CSV file for each workspace.

For audit log delivery: * For more information about audit log delivery, see Audit log delivery, which includes information about the used JSON schema. * The delivery location is `<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`. Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for those workspaces are delivered. If the log delivery configuration applies to the entire account (_account level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all workspaces in the account as well as account-level audit logs. See Audit log delivery for details. * Auditable events are typically available in logs within 15 minutes.

type LogDeliveryStatus

// LogDeliveryStatus reports the outcome of the most recent log delivery
// attempts for a configuration: timestamps of the last attempt and last
// success, an informative message, and an overall status code.
type LogDeliveryStatus struct {
	// The UTC time for the latest log delivery attempt.
	LastAttemptTime string `json:"last_attempt_time,omitempty"`
	// The UTC time for the latest successful log delivery.
	LastSuccessfulAttemptTime string `json:"last_successful_attempt_time,omitempty"`
	// Informative message about the latest log delivery attempt. If the log
	// delivery fails with USER_FAILURE, error details will be provided for
	// fixing misconfigurations in cloud permissions.
	Message string `json:"message,omitempty"`
	// The status string for log delivery. Possible values are: * `CREATED`:
	// There were no log delivery attempts since the config was created. *
	// `SUCCEEDED`: The latest attempt of log delivery has succeeded completely.
	// * `USER_FAILURE`: The latest attempt of log delivery failed because of
	// misconfiguration of customer provided permissions on role or storage. *
	// `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of a
	// Databricks internal error. Contact support if it doesn't go away soon. *
	// `NOT_FOUND`: The log delivery status as the configuration has been
	// disabled since the release of this feature or there are no workspaces in
	// the account.
	Status DeliveryStatus `json:"status,omitempty"`

	// ForceSendFields lists fields to serialize even when they hold their
	// zero value (otherwise suppressed by `omitempty`).
	ForceSendFields []string `json:"-"`
}

Databricks log delivery status.

func (LogDeliveryStatus) MarshalJSON added in v0.23.0

func (s LogDeliveryStatus) MarshalJSON() ([]byte, error)

func (*LogDeliveryStatus) UnmarshalJSON added in v0.23.0

func (s *LogDeliveryStatus) UnmarshalJSON(b []byte) error

type LogType

// LogType identifies which category of logs a delivery configuration
// delivers: billable usage (CSV) or audit logs (JSON).
type LogType string

Log delivery type. Supported values are:

* `BILLABLE_USAGE` — Configure billable usage log delivery. For the CSV schema, see the View billable usage.

* `AUDIT_LOGS` — Configure audit log delivery. For the JSON schema, see Configure audit logging

// LogTypeAuditLogs selects audit log delivery (JSON output).
const LogTypeAuditLogs LogType = `AUDIT_LOGS`

// LogTypeBillableUsage selects billable usage log delivery (CSV output).
const LogTypeBillableUsage LogType = `BILLABLE_USAGE`

func (*LogType) Set added in v0.2.0

func (f *LogType) Set(v string) error

Set raw string value and validate it against allowed values

func (*LogType) String added in v0.2.0

func (f *LogType) String() string

String representation for fmt.Print

func (*LogType) Type added in v0.2.0

func (f *LogType) Type() string

Type always returns LogType to satisfy [pflag.Value] interface

type OutputFormat

// OutputFormat is the file format of delivered logs: `CSV` for billable
// usage, `JSON` for audit logs.
type OutputFormat string

The file type of log delivery.

* If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated values) format is supported. For the schema, see the View billable usage * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is supported. For the schema, see the Configuring audit logs.

// OutputFormatCsv is the comma-separated-values format (billable usage logs).
const OutputFormatCsv OutputFormat = `CSV`

// OutputFormatJson is the JSON format (audit logs).
const OutputFormatJson OutputFormat = `JSON`

func (*OutputFormat) Set added in v0.2.0

func (f *OutputFormat) Set(v string) error

Set raw string value and validate it against allowed values

func (*OutputFormat) String added in v0.2.0

func (f *OutputFormat) String() string

String representation for fmt.Print

func (*OutputFormat) Type added in v0.2.0

func (f *OutputFormat) Type() string

Type always returns OutputFormat to satisfy [pflag.Value] interface

type PatchStatusResponse added in v0.34.0

// PatchStatusResponse is the (empty) response body of a PatchStatus call.
type PatchStatusResponse struct {
}

type UpdateLogDeliveryConfigurationStatusRequest

// UpdateLogDeliveryConfigurationStatusRequest enables or disables an existing
// log delivery configuration identified by its ID.
type UpdateLogDeliveryConfigurationStatusRequest struct {
	// Databricks log delivery configuration ID
	LogDeliveryConfigurationId string `json:"-" url:"-"`
	// Status of log delivery configuration. Set to `ENABLED` (enabled) or
	// `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable
	// the configuration](#operation/patch-log-delivery-config-status) later.
	// Deletion of a configuration is not supported, so disable a log delivery
	// configuration that is no longer needed.
	Status LogDeliveryConfigStatus `json:"status"`
}

type UpdateResponse added in v0.34.0

// UpdateResponse is the (empty) response body of an update call.
type UpdateResponse struct {
}

type WrappedBudget

// WrappedBudget wraps a budget configuration together with its ID for
// create/update requests.
type WrappedBudget struct {
	// Budget configuration to be created.
	Budget Budget `json:"budget"`
	// Budget ID
	BudgetId string `json:"-" url:"-"`
}

type WrappedBudgetWithStatus

// WrappedBudgetWithStatus wraps a budget configuration that also carries its
// daily status.
type WrappedBudgetWithStatus struct {
	// Budget configuration with daily status.
	Budget BudgetWithStatus `json:"budget"`
}

type WrappedCreateLogDeliveryConfiguration

// WrappedCreateLogDeliveryConfiguration is the request envelope for creating
// a log delivery configuration.
type WrappedCreateLogDeliveryConfiguration struct {
	// Parameters of the configuration to create.
	LogDeliveryConfiguration *CreateLogDeliveryConfigurationParams `json:"log_delivery_configuration,omitempty"`
}

type WrappedLogDeliveryConfiguration

// WrappedLogDeliveryConfiguration is the response envelope carrying a single
// log delivery configuration.
type WrappedLogDeliveryConfiguration struct {
	// The returned configuration.
	LogDeliveryConfiguration *LogDeliveryConfiguration `json:"log_delivery_configuration,omitempty"`
}

type WrappedLogDeliveryConfigurations

// WrappedLogDeliveryConfigurations is the response envelope carrying a list
// of log delivery configurations.
type WrappedLogDeliveryConfigurations struct {
	// The returned configurations.
	LogDeliveryConfigurations []LogDeliveryConfiguration `json:"log_delivery_configurations,omitempty"`
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL