Documentation ¶
Overview ¶
These APIs allow you to manage Billable Usage, Budgets, Log Delivery, etc.
Index ¶
- type BillableUsageAPI
- type BillableUsageService
- type Budget
- type BudgetAlert
- type BudgetList
- type BudgetWithStatus
- type BudgetWithStatusStatusDailyItem
- type BudgetsAPI
- func (a *BudgetsAPI) BudgetWithStatusNameToBudgetIdMap(ctx context.Context) (map[string]string, error)
- func (a *BudgetsAPI) Create(ctx context.Context, request WrappedBudget) (*WrappedBudgetWithStatus, error)
- func (a *BudgetsAPI) Delete(ctx context.Context, request DeleteBudgetRequest) error
- func (a *BudgetsAPI) DeleteByBudgetId(ctx context.Context, budgetId string) error
- func (a *BudgetsAPI) Get(ctx context.Context, request GetBudgetRequest) (*WrappedBudgetWithStatus, error)
- func (a *BudgetsAPI) GetByBudgetId(ctx context.Context, budgetId string) (*WrappedBudgetWithStatus, error)
- func (a *BudgetsAPI) GetByName(ctx context.Context, name string) (*BudgetWithStatus, error)
- func (a *BudgetsAPI) Impl() BudgetsService
- func (a *BudgetsAPI) ListAll(ctx context.Context) ([]BudgetWithStatus, error)
- func (a *BudgetsAPI) Update(ctx context.Context, request WrappedBudget) error
- func (a *BudgetsAPI) WithImpl(impl BudgetsService) *BudgetsAPI
- type BudgetsService
- type CreateLogDeliveryConfigurationParams
- type DeleteBudgetRequest
- type DeliveryStatus
- type DownloadRequest
- type GetBudgetRequest
- type GetLogDeliveryRequest
- type ListLogDeliveryRequest
- type LogDeliveryAPI
- func (a *LogDeliveryAPI) Create(ctx context.Context, request WrappedCreateLogDeliveryConfiguration) (*WrappedLogDeliveryConfiguration, error)
- func (a *LogDeliveryAPI) Get(ctx context.Context, request GetLogDeliveryRequest) (*WrappedLogDeliveryConfiguration, error)
- func (a *LogDeliveryAPI) GetByConfigName(ctx context.Context, name string) (*LogDeliveryConfiguration, error)
- func (a *LogDeliveryAPI) GetByLogDeliveryConfigurationId(ctx context.Context, logDeliveryConfigurationId string) (*WrappedLogDeliveryConfiguration, error)
- func (a *LogDeliveryAPI) Impl() LogDeliveryService
- func (a *LogDeliveryAPI) ListAll(ctx context.Context, request ListLogDeliveryRequest) ([]LogDeliveryConfiguration, error)
- func (a *LogDeliveryAPI) LogDeliveryConfigurationConfigNameToConfigIdMap(ctx context.Context, request ListLogDeliveryRequest) (map[string]string, error)
- func (a *LogDeliveryAPI) PatchStatus(ctx context.Context, request UpdateLogDeliveryConfigurationStatusRequest) error
- func (a *LogDeliveryAPI) WithImpl(impl LogDeliveryService) *LogDeliveryAPI
- type LogDeliveryConfigStatus
- type LogDeliveryConfiguration
- type LogDeliveryService
- type LogDeliveryStatus
- type LogType
- type OutputFormat
- type UpdateLogDeliveryConfigurationStatusRequest
- type WrappedBudget
- type WrappedBudgetWithStatus
- type WrappedCreateLogDeliveryConfiguration
- type WrappedLogDeliveryConfiguration
- type WrappedLogDeliveryConfigurations
Examples ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type BillableUsageAPI ¶
type BillableUsageAPI struct {
// contains filtered or unexported fields
}
This API allows you to download billable usage logs for the specified account and date range. This feature works with all account types.
func NewBillableUsage ¶
func NewBillableUsage(client *client.DatabricksClient) *BillableUsageAPI
func (*BillableUsageAPI) Download ¶
func (a *BillableUsageAPI) Download(ctx context.Context, request DownloadRequest) error
Return billable usage logs.
Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see CSV file schema. Note that this method might take multiple minutes to complete.
**Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges.
Example (UsageDownload) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } err = a.BillableUsage.Download(ctx, billing.DownloadRequest{ StartMonth: "2022-01", EndMonth: "2022-02", }) if err != nil { panic(err) }
Output:
func (*BillableUsageAPI) Impl ¶
func (a *BillableUsageAPI) Impl() BillableUsageService
Impl returns low-level BillableUsage API implementation
func (*BillableUsageAPI) WithImpl ¶
func (a *BillableUsageAPI) WithImpl(impl BillableUsageService) *BillableUsageAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type BillableUsageService ¶
type BillableUsageService interface { // Return billable usage logs. // // Returns billable usage logs in CSV format for the specified account and // date range. For the data schema, see [CSV file schema]. Note that this // method might take multiple minutes to complete. // // **Warning**: Depending on the queried date range, the number of // workspaces in the account, the size of the response and the internet // speed of the caller, this API may hit a timeout after a few minutes. If // you experience this, try to mitigate by calling the API with narrower // date ranges. // // [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema Download(ctx context.Context, request DownloadRequest) error }
This API allows you to download billable usage logs for the specified account and date range. This feature works with all account types.
type Budget ¶
type Budget struct { Alerts []BudgetAlert `json:"alerts,omitempty"` // Optional end date of the budget. EndDate string `json:"end_date,omitempty"` // SQL-like filter expression with workspaceId, SKU and tag. Usage in your // account that matches this expression will be counted in this budget. // // Supported properties on left-hand side of comparison: * `workspaceId` - // the ID of the workspace * `sku` - SKU of the cluster, e.g. // `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag name'` - tag of // the cluster // // Supported comparison operators: * `=` - equal * `!=` - not equal // // Supported logical operators: `AND`, `OR`. // // Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND // tag.'my tag'='my value')` * `workspaceId!=456` * // `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR sku='PREMIUM_ALL_PURPOSE_COMPUTE'` // * `tag.name1='value1' AND tag.name2='value2'` Filter string `json:"filter"` // Human-readable name of the budget. Name string `json:"name"` // Period length in years, months, weeks and/or days. Examples: `1 month`, // `30 days`, `1 year, 2 months, 1 week, 2 days` Period string `json:"period"` // Start date of the budget period calculation. StartDate string `json:"start_date"` // Target amount of the budget per period in USD. TargetAmount string `json:"target_amount"` }
Budget configuration to be created.
type BudgetAlert ¶
type BudgetAlert struct { // List of email addresses to be notified when budget percentage is exceeded // in the given period. EmailNotifications []string `json:"email_notifications,omitempty"` // Percentage of the target amount used in the current period that will // trigger a notification. MinPercentage int `json:"min_percentage,omitempty"` }
type BudgetList ¶
type BudgetList struct {
Budgets []BudgetWithStatus `json:"budgets,omitempty"`
}
List of budgets.
type BudgetWithStatus ¶
type BudgetWithStatus struct { Alerts []BudgetAlert `json:"alerts,omitempty"` BudgetId string `json:"budget_id,omitempty"` CreationTime string `json:"creation_time,omitempty"` // Optional end date of the budget. EndDate string `json:"end_date,omitempty"` // SQL-like filter expression with workspaceId, SKU and tag. Usage in your // account that matches this expression will be counted in this budget. // // Supported properties on left-hand side of comparison: * `workspaceId` - // the ID of the workspace * `sku` - SKU of the cluster, e.g. // `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag name'` - tag of // the cluster // // Supported comparison operators: * `=` - equal * `!=` - not equal // // Supported logical operators: `AND`, `OR`. // // Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND // tag.'my tag'='my value')` * `workspaceId!=456` * // `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR sku='PREMIUM_ALL_PURPOSE_COMPUTE'` // * `tag.name1='value1' AND tag.name2='value2'` Filter string `json:"filter,omitempty"` // Human-readable name of the budget. Name string `json:"name,omitempty"` // Period length in years, months, weeks and/or days. Examples: `1 month`, // `30 days`, `1 year, 2 months, 1 week, 2 days` Period string `json:"period,omitempty"` // Start date of the budget period calculation. StartDate string `json:"start_date,omitempty"` // Amount used in the budget for each day (noncumulative). StatusDaily []BudgetWithStatusStatusDailyItem `json:"status_daily,omitempty"` // Target amount of the budget per period in USD. TargetAmount string `json:"target_amount,omitempty"` UpdateTime string `json:"update_time,omitempty"` }
Budget configuration with daily status.
type BudgetsAPI ¶
type BudgetsAPI struct {
// contains filtered or unexported fields
}
These APIs manage budget configuration including notifications for exceeding a budget for a period. They can also retrieve the status of each budget.
func NewBudgets ¶
func NewBudgets(client *client.DatabricksClient) *BudgetsAPI
func (*BudgetsAPI) BudgetWithStatusNameToBudgetIdMap ¶
func (a *BudgetsAPI) BudgetWithStatusNameToBudgetIdMap(ctx context.Context) (map[string]string, error)
BudgetWithStatusNameToBudgetIdMap calls BudgetsAPI.ListAll and creates a map of results with BudgetWithStatus.Name as key and BudgetWithStatus.BudgetId as value.
Returns an error if there's more than one BudgetWithStatus with the same .Name.
Note: All BudgetWithStatus instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
func (*BudgetsAPI) Create ¶
func (a *BudgetsAPI) Create(ctx context.Context, request WrappedBudget) (*WrappedBudgetWithStatus, error)
Create a new budget.
Creates a new budget in the specified account.
Example (Budgets) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } created, err := a.Budgets.Create(ctx, billing.WrappedBudget{ Budget: billing.Budget{ Name: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), Filter: "tag.tagName = 'all'", Period: "1 month", StartDate: "2022-01-01", TargetAmount: "100", Alerts: []billing.BudgetAlert{billing.BudgetAlert{ EmailNotifications: []string{"admin@example.com"}, MinPercentage: 50, }}, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", created) // cleanup err = a.Budgets.DeleteByBudgetId(ctx, created.Budget.BudgetId) if err != nil { panic(err) }
Output:
func (*BudgetsAPI) Delete ¶
func (a *BudgetsAPI) Delete(ctx context.Context, request DeleteBudgetRequest) error
Delete budget.
Deletes the budget specified by its UUID.
func (*BudgetsAPI) DeleteByBudgetId ¶
func (a *BudgetsAPI) DeleteByBudgetId(ctx context.Context, budgetId string) error
Delete budget.
Deletes the budget specified by its UUID.
func (*BudgetsAPI) Get ¶
func (a *BudgetsAPI) Get(ctx context.Context, request GetBudgetRequest) (*WrappedBudgetWithStatus, error)
Get budget and its status.
Gets the budget specified by its UUID, including noncumulative status for each day that the budget is configured to include.
Example (Budgets) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } created, err := a.Budgets.Create(ctx, billing.WrappedBudget{ Budget: billing.Budget{ Name: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), Filter: "tag.tagName = 'all'", Period: "1 month", StartDate: "2022-01-01", TargetAmount: "100", Alerts: []billing.BudgetAlert{billing.BudgetAlert{ EmailNotifications: []string{"admin@example.com"}, MinPercentage: 50, }}, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", created) byId, err := a.Budgets.GetByBudgetId(ctx, created.Budget.BudgetId) if err != nil { panic(err) } logger.Infof(ctx, "found %v", byId) // cleanup err = a.Budgets.DeleteByBudgetId(ctx, created.Budget.BudgetId) if err != nil { panic(err) }
Output:
func (*BudgetsAPI) GetByBudgetId ¶
func (a *BudgetsAPI) GetByBudgetId(ctx context.Context, budgetId string) (*WrappedBudgetWithStatus, error)
Get budget and its status.
Gets the budget specified by its UUID, including noncumulative status for each day that the budget is configured to include.
func (*BudgetsAPI) GetByName ¶
func (a *BudgetsAPI) GetByName(ctx context.Context, name string) (*BudgetWithStatus, error)
GetByName calls BudgetsAPI.BudgetWithStatusNameToBudgetIdMap and returns a single BudgetWithStatus.
Returns an error if there's more than one BudgetWithStatus with the same .Name.
Note: All BudgetWithStatus instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*BudgetsAPI) Impl ¶
func (a *BudgetsAPI) Impl() BudgetsService
Impl returns low-level Budgets API implementation
func (*BudgetsAPI) ListAll ¶
func (a *BudgetsAPI) ListAll(ctx context.Context) ([]BudgetWithStatus, error)
Get all budgets.
Gets all budgets associated with this account, including noncumulative status for each day that the budget is configured to include.
This method is generated by Databricks SDK Code Generator.
Example (Budgets) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } all, err := a.Budgets.ListAll(ctx) if err != nil { panic(err) } logger.Infof(ctx, "found %v", all)
Output:
func (*BudgetsAPI) Update ¶
func (a *BudgetsAPI) Update(ctx context.Context, request WrappedBudget) error
Modify budget.
Modifies a budget in this account. Budget properties are completely overwritten.
Example (Budgets) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } created, err := a.Budgets.Create(ctx, billing.WrappedBudget{ Budget: billing.Budget{ Name: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), Filter: "tag.tagName = 'all'", Period: "1 month", StartDate: "2022-01-01", TargetAmount: "100", Alerts: []billing.BudgetAlert{billing.BudgetAlert{ EmailNotifications: []string{"admin@example.com"}, MinPercentage: 50, }}, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", created) err = a.Budgets.Update(ctx, billing.WrappedBudget{ BudgetId: created.Budget.BudgetId, Budget: billing.Budget{ Name: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), Filter: "tag.tagName = 'all'", Period: "1 month", StartDate: "2022-01-01", TargetAmount: "100", Alerts: []billing.BudgetAlert{billing.BudgetAlert{ EmailNotifications: []string{"admin@example.com"}, MinPercentage: 70, }}, }, }) if err != nil { panic(err) } // cleanup err = a.Budgets.DeleteByBudgetId(ctx, created.Budget.BudgetId) if err != nil { panic(err) }
Output:
func (*BudgetsAPI) WithImpl ¶
func (a *BudgetsAPI) WithImpl(impl BudgetsService) *BudgetsAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type BudgetsService ¶
type BudgetsService interface { // Create a new budget. // // Creates a new budget in the specified account. Create(ctx context.Context, request WrappedBudget) (*WrappedBudgetWithStatus, error) // Delete budget. // // Deletes the budget specified by its UUID. Delete(ctx context.Context, request DeleteBudgetRequest) error // Get budget and its status. // // Gets the budget specified by its UUID, including noncumulative status for // each day that the budget is configured to include. Get(ctx context.Context, request GetBudgetRequest) (*WrappedBudgetWithStatus, error) // Get all budgets. // // Gets all budgets associated with this account, including noncumulative // status for each day that the budget is configured to include. // // Use ListAll() to get all BudgetWithStatus instances List(ctx context.Context) (*BudgetList, error) // Modify budget. // // Modifies a budget in this account. Budget properties are completely // overwritten. Update(ctx context.Context, request WrappedBudget) error }
These APIs manage budget configuration including notifications for exceeding a budget for a period. They can also retrieve the status of each budget.
type CreateLogDeliveryConfigurationParams ¶
type CreateLogDeliveryConfigurationParams struct { // The optional human-readable name of the log delivery configuration. // Defaults to empty. ConfigName string `json:"config_name,omitempty"` // The ID for a method:credentials/create that represents the AWS IAM role // with policy and trust relationship as described in the main billable // usage documentation page. See [Configure billable usage delivery]. // // [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html CredentialsId string `json:"credentials_id"` // The optional delivery path prefix within Amazon S3 storage. Defaults to // empty, which means that logs are delivered to the root of the bucket. // This must be a valid S3 object key. This must not start or end with a // slash character. DeliveryPathPrefix string `json:"delivery_path_prefix,omitempty"` // This field applies only if `log_type` is `BILLABLE_USAGE`. This is the // optional start month and year for delivery, specified in `YYYY-MM` // format. Defaults to current year and month. `BILLABLE_USAGE` logs are not // available for usage before March 2019 (`2019-03`). DeliveryStartTime string `json:"delivery_start_time,omitempty"` // Log delivery type. Supported values are: // // * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the // CSV schema, see the [View billable usage]. // // * `AUDIT_LOGS` — Configure [audit log delivery]. 
For the JSON schema, // see [Configure audit logging] // // [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html // [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html LogType LogType `json:"log_type"` // The file type of log delivery. // // * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the // CSV (comma-separated values) format is supported. For the schema, see the // [View billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be // `JSON`. Only the JSON (JavaScript Object Notation) format is supported. // For the schema, see the [Configuring audit logs]. // // [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html OutputFormat OutputFormat `json:"output_format"` // Status of log delivery configuration. Set to `ENABLED` (enabled) or // `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable // the configuration](#operation/patch-log-delivery-config-status) later. // Deletion of a configuration is not supported, so disable a log delivery // configuration that is no longer needed. Status LogDeliveryConfigStatus `json:"status,omitempty"` // The ID for a method:storage/create that represents the S3 bucket with // bucket policy as described in the main billable usage documentation page. // See [Configure billable usage delivery]. 
// // [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html StorageConfigurationId string `json:"storage_configuration_id"` // Optional filter that specifies workspace IDs to deliver logs for. By // default the workspace filter is empty and log delivery applies at the // account level, delivering workspace-level logs for all workspaces in your // account, plus account level logs. You can optionally set this field to an // array of workspace IDs (each one is an `int64`) to which log delivery // should apply, in which case only workspace-level logs relating to the // specified workspaces are delivered. If you plan to use different log // delivery configurations for different workspaces, set this field // explicitly. Be aware that delivery configurations mentioning specific // workspaces won't apply to new workspaces created in the future, and // delivery won't include account level logs. For some types of Databricks // deployments there is only one workspace per account ID, so this field is // unnecessary. WorkspaceIdsFilter []int64 `json:"workspace_ids_filter,omitempty"` }
type DeleteBudgetRequest ¶
type DeleteBudgetRequest struct { // Budget ID BudgetId string `json:"-" url:"-"` }
Delete budget
type DeliveryStatus ¶
type DeliveryStatus string
The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.
const DeliveryStatusCreated DeliveryStatus = `CREATED`
There were no log delivery attempts since the config was created.
const DeliveryStatusNotFound DeliveryStatus = `NOT_FOUND`
The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.
const DeliveryStatusSucceeded DeliveryStatus = `SUCCEEDED`
The latest attempt of log delivery has succeeded completely.
const DeliveryStatusSystemFailure DeliveryStatus = `SYSTEM_FAILURE`
The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon.
const DeliveryStatusUserFailure DeliveryStatus = `USER_FAILURE`
The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage.
func (*DeliveryStatus) Set ¶ added in v0.2.0
func (f *DeliveryStatus) Set(v string) error
Set raw string value and validate it against allowed values
func (*DeliveryStatus) String ¶ added in v0.2.0
func (f *DeliveryStatus) String() string
String representation for fmt.Print
func (*DeliveryStatus) Type ¶ added in v0.2.0
func (f *DeliveryStatus) Type() string
Type always returns DeliveryStatus to satisfy [pflag.Value] interface
type DownloadRequest ¶
type DownloadRequest struct { // Format: `YYYY-MM`. Last month to return billable usage logs for. This // field is required. EndMonth string `json:"-" url:"end_month"` // Specify whether to include personally identifiable information in the // billable usage logs, for example the email addresses of cluster creators. // Handle this information with care. Defaults to false. PersonalData bool `json:"-" url:"personal_data,omitempty"` // Format: `YYYY-MM`. First month to return billable usage logs for. This // field is required. StartMonth string `json:"-" url:"start_month"` }
Return billable usage logs
type GetBudgetRequest ¶
type GetBudgetRequest struct { // Budget ID BudgetId string `json:"-" url:"-"` }
Get budget and its status
type GetLogDeliveryRequest ¶
type GetLogDeliveryRequest struct { // Databricks log delivery configuration ID LogDeliveryConfigurationId string `json:"-" url:"-"` }
Get log delivery configuration
type ListLogDeliveryRequest ¶
type ListLogDeliveryRequest struct { // Filter by credential configuration ID. CredentialsId string `json:"-" url:"credentials_id,omitempty"` // Filter by status `ENABLED` or `DISABLED`. Status LogDeliveryConfigStatus `json:"-" url:"status,omitempty"` // Filter by storage configuration ID. StorageConfigurationId string `json:"-" url:"storage_configuration_id,omitempty"` }
Get all log delivery configurations
type LogDeliveryAPI ¶
type LogDeliveryAPI struct {
// contains filtered or unexported fields
}
These APIs manage log delivery configurations for this account. The two supported log types for this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all account ID types.
Log delivery works with all account types. However, if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account, you can optionally configure different storage destinations for each workspace. Log delivery status is also provided to know the latest status of log delivery attempts. The high-level flow of billable usage delivery:
1. **Create storage**: In AWS, create a new AWS S3 bucket with a specific bucket policy. Using Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details, including the required IAM role policies and trust relationship, see Billable usage log delivery. Using Databricks APIs, call the Account API to create a [credential configuration object](:method:Credentials/Create) that uses the IAM role's ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from previous steps. You can specify if the logs should include all events of that log type in your account (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery applies to all current and future workspaces plus account level logs, while workspace level log delivery solely delivers logs related to the specified workspaces. You can create multiple types of delivery configurations per account.
For billable usage delivery: * For more information about billable usage logs, see Billable usage log delivery. For the CSV schema, see the Usage page. * The delivery location is `<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path prefix you set up during log delivery configuration. Files are named `workspaceId=<workspace-id>-usageMonth=<month>.csv`. * All billable usage logs apply to specific workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an _account level_ delivery configuration that delivers logs for all current and future workspaces in your account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
For audit log delivery: * For more information about audit log delivery, see Audit log delivery, which includes information about the used JSON schema. * The delivery location is `<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`. Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for those workspaces are delivered. If the log delivery configuration applies to the entire account (_account level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all workspaces in the account as well as account-level audit logs. See Audit log delivery for details. * Auditable events are typically available in logs within 15 minutes.
func NewLogDelivery ¶
func NewLogDelivery(client *client.DatabricksClient) *LogDeliveryAPI
func (*LogDeliveryAPI) Create ¶
func (a *LogDeliveryAPI) Create(ctx context.Context, request WrappedCreateLogDeliveryConfiguration) (*WrappedLogDeliveryConfiguration, error)
Create a new log delivery configuration.
Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
For full details, including the required IAM role policies and bucket policies, see Deliver and access billable usage logs or Configure audit logging.
**Note**: There is a limit on the number of log delivery configurations available per account (each limit applies separately to each log type including billable usage and audit logs). You can create a maximum of two enabled account-level delivery configurations (configurations without a workspace filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per workspace for each log type, which means that the same workspace ID can occur in the workspace filter for no more than two delivery configurations per log type.
You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery configuration](:method:LogDelivery/PatchStatus)).
Example (LogDelivery) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } bucket, err := a.Storage.Create(ctx, provisioning.CreateStorageConfigurationRequest{ StorageConfigurationName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), RootBucketInfo: provisioning.RootBucketInfo{ BucketName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", bucket) creds, err := a.Credentials.Create(ctx, provisioning.CreateCredentialRequest{ CredentialsName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), AwsCredentials: provisioning.CreateCredentialAwsCredentials{ StsRole: &provisioning.CreateCredentialStsRole{ RoleArn: os.Getenv("TEST_LOGDELIVERY_ARN"), }, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", creds) created, err := a.LogDelivery.Create(ctx, billing.WrappedCreateLogDeliveryConfiguration{ LogDeliveryConfiguration: &billing.CreateLogDeliveryConfigurationParams{ ConfigName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), CredentialsId: creds.CredentialsId, StorageConfigurationId: bucket.StorageConfigurationId, LogType: billing.LogTypeAuditLogs, OutputFormat: billing.OutputFormatJson, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", created) // cleanup err = a.Storage.DeleteByStorageConfigurationId(ctx, bucket.StorageConfigurationId) if err != nil { panic(err) } err = a.Credentials.DeleteByCredentialsId(ctx, creds.CredentialsId) if err != nil { panic(err) } err = a.LogDelivery.PatchStatus(ctx, billing.UpdateLogDeliveryConfigurationStatusRequest{ LogDeliveryConfigurationId: created.LogDeliveryConfiguration.ConfigId, Status: billing.LogDeliveryConfigStatusDisabled, }) if err != nil { panic(err) }
Output:
func (*LogDeliveryAPI) Get ¶
func (a *LogDeliveryAPI) Get(ctx context.Context, request GetLogDeliveryRequest) (*WrappedLogDeliveryConfiguration, error)
Get log delivery configuration.
Gets a Databricks log delivery configuration object for an account, both specified by ID.
Example (LogDelivery) ¶
ctx := context.Background() a, err := databricks.NewAccountClient() if err != nil { panic(err) } bucket, err := a.Storage.Create(ctx, provisioning.CreateStorageConfigurationRequest{ StorageConfigurationName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), RootBucketInfo: provisioning.RootBucketInfo{ BucketName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", bucket) creds, err := a.Credentials.Create(ctx, provisioning.CreateCredentialRequest{ CredentialsName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), AwsCredentials: provisioning.CreateCredentialAwsCredentials{ StsRole: &provisioning.CreateCredentialStsRole{ RoleArn: os.Getenv("TEST_LOGDELIVERY_ARN"), }, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", creds) created, err := a.LogDelivery.Create(ctx, billing.WrappedCreateLogDeliveryConfiguration{ LogDeliveryConfiguration: &billing.CreateLogDeliveryConfigurationParams{ ConfigName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()), CredentialsId: creds.CredentialsId, StorageConfigurationId: bucket.StorageConfigurationId, LogType: billing.LogTypeAuditLogs, OutputFormat: billing.OutputFormatJson, }, }) if err != nil { panic(err) } logger.Infof(ctx, "found %v", created) byId, err := a.LogDelivery.GetByLogDeliveryConfigurationId(ctx, created.LogDeliveryConfiguration.ConfigId) if err != nil { panic(err) } logger.Infof(ctx, "found %v", byId) // cleanup err = a.Storage.DeleteByStorageConfigurationId(ctx, bucket.StorageConfigurationId) if err != nil { panic(err) } err = a.Credentials.DeleteByCredentialsId(ctx, creds.CredentialsId) if err != nil { panic(err) } err = a.LogDelivery.PatchStatus(ctx, billing.UpdateLogDeliveryConfigurationStatusRequest{ LogDeliveryConfigurationId: created.LogDeliveryConfiguration.ConfigId, Status: billing.LogDeliveryConfigStatusDisabled, }) if err != nil { panic(err) }
Output:
func (*LogDeliveryAPI) GetByConfigName ¶
func (a *LogDeliveryAPI) GetByConfigName(ctx context.Context, name string) (*LogDeliveryConfiguration, error)
GetByConfigName calls LogDeliveryAPI.LogDeliveryConfigurationConfigNameToConfigIdMap and returns a single LogDeliveryConfiguration.
Returns an error if there's more than one LogDeliveryConfiguration with the same .ConfigName.
Note: All LogDeliveryConfiguration instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*LogDeliveryAPI) GetByLogDeliveryConfigurationId ¶
func (a *LogDeliveryAPI) GetByLogDeliveryConfigurationId(ctx context.Context, logDeliveryConfigurationId string) (*WrappedLogDeliveryConfiguration, error)
Get log delivery configuration.
Gets a Databricks log delivery configuration object for an account, both specified by ID.
func (*LogDeliveryAPI) Impl ¶
func (a *LogDeliveryAPI) Impl() LogDeliveryService
Impl returns low-level LogDelivery API implementation
func (*LogDeliveryAPI) ListAll ¶
func (a *LogDeliveryAPI) ListAll(ctx context.Context, request ListLogDeliveryRequest) ([]LogDeliveryConfiguration, error)
Get all log delivery configurations.
Gets all Databricks log delivery configurations associated with an account specified by ID.
This method is generated by Databricks SDK Code Generator.
Example (LogDelivery) ¶
ctx := context.Background()
a, err := databricks.NewAccountClient()
if err != nil {
	panic(err)
}

// List every log delivery configuration in the account.
all, err := a.LogDelivery.ListAll(ctx, billing.ListLogDeliveryRequest{})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", all)
Output:
func (*LogDeliveryAPI) LogDeliveryConfigurationConfigNameToConfigIdMap ¶
func (a *LogDeliveryAPI) LogDeliveryConfigurationConfigNameToConfigIdMap(ctx context.Context, request ListLogDeliveryRequest) (map[string]string, error)
LogDeliveryConfigurationConfigNameToConfigIdMap calls LogDeliveryAPI.ListAll and creates a map of results with LogDeliveryConfiguration.ConfigName as key and LogDeliveryConfiguration.ConfigId as value.
Returns an error if there's more than one LogDeliveryConfiguration with the same .ConfigName.
Note: All LogDeliveryConfiguration instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
func (*LogDeliveryAPI) PatchStatus ¶
func (a *LogDeliveryAPI) PatchStatus(ctx context.Context, request UpdateLogDeliveryConfigurationStatusRequest) error
Enable or disable log delivery configuration.
Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create).
func (*LogDeliveryAPI) WithImpl ¶
func (a *LogDeliveryAPI) WithImpl(impl LogDeliveryService) *LogDeliveryAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type LogDeliveryConfigStatus ¶
// LogDeliveryConfigStatus is the status of a log delivery configuration. Set
// to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You
// can [enable or disable the
// configuration](#operation/patch-log-delivery-config-status) later. Deletion
// of a configuration is not supported, so disable a log delivery
// configuration that is no longer needed.
type LogDeliveryConfigStatus string

const (
	LogDeliveryConfigStatusDisabled LogDeliveryConfigStatus = `DISABLED`
	LogDeliveryConfigStatusEnabled  LogDeliveryConfigStatus = `ENABLED`
)
func (*LogDeliveryConfigStatus) Set ¶ added in v0.2.0
func (f *LogDeliveryConfigStatus) Set(v string) error
Set raw string value and validate it against allowed values
func (*LogDeliveryConfigStatus) String ¶ added in v0.2.0
func (f *LogDeliveryConfigStatus) String() string
String representation for fmt.Print
func (*LogDeliveryConfigStatus) Type ¶ added in v0.2.0
func (f *LogDeliveryConfigStatus) Type() string
Type always returns LogDeliveryConfigStatus to satisfy [pflag.Value] interface
type LogDeliveryConfiguration ¶
type LogDeliveryConfiguration struct { // The Databricks account ID that hosts the log delivery configuration. AccountId string `json:"account_id,omitempty"` // Databricks log delivery configuration ID. ConfigId string `json:"config_id,omitempty"` // The optional human-readable name of the log delivery configuration. // Defaults to empty. ConfigName string `json:"config_name,omitempty"` // Time in epoch milliseconds when the log delivery configuration was // created. CreationTime int64 `json:"creation_time,omitempty"` // The ID for a method:credentials/create that represents the AWS IAM role // with policy and trust relationship as described in the main billable // usage documentation page. See [Configure billable usage delivery]. // // [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html CredentialsId string `json:"credentials_id,omitempty"` // The optional delivery path prefix within Amazon S3 storage. Defaults to // empty, which means that logs are delivered to the root of the bucket. // This must be a valid S3 object key. This must not start or end with a // slash character. DeliveryPathPrefix string `json:"delivery_path_prefix,omitempty"` // This field applies only if `log_type` is `BILLABLE_USAGE`. This is the // optional start month and year for delivery, specified in `YYYY-MM` // format. Defaults to current year and month. `BILLABLE_USAGE` logs are not // available for usage before March 2019 (`2019-03`). DeliveryStartTime string `json:"delivery_start_time,omitempty"` // Databricks log delivery status. LogDeliveryStatus *LogDeliveryStatus `json:"log_delivery_status,omitempty"` // Log delivery type. Supported values are: // // * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the // CSV schema, see the [View billable usage]. // // * `AUDIT_LOGS` — Configure [audit log delivery]. 
For the JSON schema, // see [Configure audit logging] // // [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html // [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html LogType LogType `json:"log_type,omitempty"` // The file type of log delivery. // // * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the // CSV (comma-separated values) format is supported. For the schema, see the // [View billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be // `JSON`. Only the JSON (JavaScript Object Notation) format is supported. // For the schema, see the [Configuring audit logs]. // // [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html OutputFormat OutputFormat `json:"output_format,omitempty"` // Status of log delivery configuration. Set to `ENABLED` (enabled) or // `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable // the configuration](#operation/patch-log-delivery-config-status) later. // Deletion of a configuration is not supported, so disable a log delivery // configuration that is no longer needed. Status LogDeliveryConfigStatus `json:"status,omitempty"` // The ID for a method:storage/create that represents the S3 bucket with // bucket policy as described in the main billable usage documentation page. // See [Configure billable usage delivery]. 
// // [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html StorageConfigurationId string `json:"storage_configuration_id,omitempty"` // Time in epoch milliseconds when the log delivery configuration was // updated. UpdateTime int64 `json:"update_time,omitempty"` // Optional filter that specifies workspace IDs to deliver logs for. By // default the workspace filter is empty and log delivery applies at the // account level, delivering workspace-level logs for all workspaces in your // account, plus account level logs. You can optionally set this field to an // array of workspace IDs (each one is an `int64`) to which log delivery // should apply, in which case only workspace-level logs relating to the // specified workspaces are delivered. If you plan to use different log // delivery configurations for different workspaces, set this field // explicitly. Be aware that delivery configurations mentioning specific // workspaces won't apply to new workspaces created in the future, and // delivery won't include account level logs. For some types of Databricks // deployments there is only one workspace per account ID, so this field is // unnecessary. WorkspaceIdsFilter []int64 `json:"workspace_ids_filter,omitempty"` }
type LogDeliveryService ¶
type LogDeliveryService interface { // Create a new log delivery configuration. // // Creates a new Databricks log delivery configuration to enable delivery of // the specified type of logs to your storage location. This requires that // you already created a [credential object](:method:Credentials/Create) // (which encapsulates a cross-account service IAM role) and a [storage // configuration object](:method:Storage/Create) (which encapsulates an S3 // bucket). // // For full details, including the required IAM role policies and bucket // policies, see [Deliver and access billable usage logs] or [Configure // audit logging]. // // **Note**: There is a limit on the number of log delivery configurations // available per account (each limit applies separately to each log type // including billable usage and audit logs). You can create a maximum of two // enabled account-level delivery configurations (configurations without a // workspace filter) per type. Additionally, you can create two enabled // workspace-level delivery configurations per workspace for each log type, // which means that the same workspace ID can occur in the workspace filter // for no more than two delivery configurations per log type. // // You cannot delete a log delivery configuration, but you can disable it // (see [Enable or disable log delivery // configuration](:method:LogDelivery/PatchStatus)). // // [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html // [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html Create(ctx context.Context, request WrappedCreateLogDeliveryConfiguration) (*WrappedLogDeliveryConfiguration, error) // Get log delivery configuration. // // Gets a Databricks log delivery configuration object for an account, both // specified by ID. 
Get(ctx context.Context, request GetLogDeliveryRequest) (*WrappedLogDeliveryConfiguration, error) // Get all log delivery configurations. // // Gets all Databricks log delivery configurations associated with an // account specified by ID. // // Use ListAll() to get all LogDeliveryConfiguration instances List(ctx context.Context, request ListLogDeliveryRequest) (*WrappedLogDeliveryConfigurations, error) // Enable or disable log delivery configuration. // // Enables or disables a log delivery configuration. Deletion of delivery // configurations is not supported, so disable log delivery configurations // that are no longer needed. Note that you can't re-enable a delivery // configuration if this would violate the delivery configuration limits // described under [Create log delivery](:method:LogDelivery/Create). PatchStatus(ctx context.Context, request UpdateLogDeliveryConfigurationStatusRequest) error }
These APIs manage log delivery configurations for this account. The two supported log types for this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview and works with all account ID types.
Log delivery works with all account types. However, if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account, you can optionally configure different storage destinations for each workspace. Log delivery status is also provided to know the latest status of log delivery attempts. The high-level flow of billable usage delivery:
1. **Create storage**: In AWS, create a new AWS S3 bucket with a specific bucket policy. Using Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create) that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For full details, including the required IAM role policies and trust relationship, see Billable usage log delivery. Using Databricks APIs, call the Account API to create a [credential configuration object](:method:Credentials/Create) that uses the IAM role's ARN. 3. **Create log delivery configuration**: Using Databricks APIs, call the Account API to [create a log delivery configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from previous steps. You can specify if the logs should include all events of that log type in your account (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery). Account level log delivery applies to all current and future workspaces plus account level logs, while workspace level log delivery solely delivers logs related to the specified workspaces. You can create multiple types of delivery configurations per account.
For billable usage delivery: * For more information about billable usage logs, see Billable usage log delivery. For the CSV schema, see the Usage page. * The delivery location is `<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path prefix you set up during log delivery configuration. Files are named `workspaceId=<workspace-id>-usageMonth=<month>.csv`. * All billable usage logs apply to specific workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an _account level_ delivery configuration that delivers logs for all current and future workspaces in your account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
For audit log delivery: * For more information about audit log delivery, see Audit log delivery, which includes information about the used JSON schema. * The delivery location is `<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`. Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for those workspaces are delivered. If the log delivery configuration applies to the entire account (_account level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all workspaces in the account as well as account-level audit logs. See Audit log delivery for details. * Auditable events are typically available in logs within 15 minutes.
type LogDeliveryStatus ¶
type LogDeliveryStatus struct { // The UTC time for the latest log delivery attempt. LastAttemptTime string `json:"last_attempt_time,omitempty"` // The UTC time for the latest successful log delivery. LastSuccessfulAttemptTime string `json:"last_successful_attempt_time,omitempty"` // Informative message about the latest log delivery attempt. If the log // delivery fails with USER_FAILURE, error details will be provided for // fixing misconfigurations in cloud permissions. Message string `json:"message,omitempty"` // The status string for log delivery. Possible values are: * `CREATED`: // There were no log delivery attempts since the config was created. * // `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. // * `USER_FAILURE`: The latest attempt of log delivery failed because of // misconfiguration of customer provided permissions on role or storage. * // `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an // Databricks internal error. Contact support if it doesn't go away soon. * // `NOT_FOUND`: The log delivery status as the configuration has been // disabled since the release of this feature or there are no workspaces in // the account. Status DeliveryStatus `json:"status,omitempty"` }
Databricks log delivery status.
type LogType ¶
// LogType is the log delivery type. Supported values are:
//
// * `BILLABLE_USAGE` — Configure billable usage log delivery. For the CSV
// schema, see the View billable usage.
//
// * `AUDIT_LOGS` — Configure audit log delivery. For the JSON schema, see
// Configure audit logging.
type LogType string

const (
	LogTypeAuditLogs     LogType = `AUDIT_LOGS`
	LogTypeBillableUsage LogType = `BILLABLE_USAGE`
)
type OutputFormat ¶
// OutputFormat is the file type of log delivery.
//
// * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV
// (comma-separated values) format is supported; for the schema, see the View
// billable usage. * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`.
// Only the JSON (JavaScript Object Notation) format is supported; for the
// schema, see the Configuring audit logs.
type OutputFormat string

const (
	OutputFormatCsv  OutputFormat = `CSV`
	OutputFormatJson OutputFormat = `JSON`
)
func (*OutputFormat) Set ¶ added in v0.2.0
func (f *OutputFormat) Set(v string) error
Set raw string value and validate it against allowed values
func (*OutputFormat) String ¶ added in v0.2.0
func (f *OutputFormat) String() string
String representation for fmt.Print
func (*OutputFormat) Type ¶ added in v0.2.0
func (f *OutputFormat) Type() string
Type always returns OutputFormat to satisfy [pflag.Value] interface
type UpdateLogDeliveryConfigurationStatusRequest ¶
type UpdateLogDeliveryConfigurationStatusRequest struct { // Databricks log delivery configuration ID LogDeliveryConfigurationId string `json:"-" url:"-"` // Status of log delivery configuration. Set to `ENABLED` (enabled) or // `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable // the configuration](#operation/patch-log-delivery-config-status) later. // Deletion of a configuration is not supported, so disable a log delivery // configuration that is no longer needed. Status LogDeliveryConfigStatus `json:"status"` }
type WrappedBudget ¶
type WrappedBudgetWithStatus ¶
type WrappedBudgetWithStatus struct { // Budget configuration with daily status. Budget BudgetWithStatus `json:"budget"` }
type WrappedCreateLogDeliveryConfiguration ¶
// WrappedCreateLogDeliveryConfiguration is the request wrapper used when
// creating a log delivery configuration (see LogDeliveryService.Create).
type WrappedCreateLogDeliveryConfiguration struct {
// Parameters of the log delivery configuration to create.
LogDeliveryConfiguration *CreateLogDeliveryConfigurationParams `json:"log_delivery_configuration,omitempty"`
}
type WrappedLogDeliveryConfiguration ¶
// WrappedLogDeliveryConfiguration is the response wrapper carrying a single
// log delivery configuration (returned by LogDeliveryService.Create and Get).
type WrappedLogDeliveryConfiguration struct {
// The log delivery configuration returned by the API.
LogDeliveryConfiguration *LogDeliveryConfiguration `json:"log_delivery_configuration,omitempty"`
}
type WrappedLogDeliveryConfigurations ¶
// WrappedLogDeliveryConfigurations is the response wrapper carrying the list
// of log delivery configurations (returned by LogDeliveryService.List).
type WrappedLogDeliveryConfigurations struct {
// The log delivery configurations returned by the API.
LogDeliveryConfigurations []LogDeliveryConfiguration `json:"log_delivery_configurations,omitempty"`
}