Documentation ¶
Index ¶
- Constants
- Variables
- func AddContextToAllResources(p *schema.Provider, prefix string)
- func CleanupEnvironment() func()
- func DataResource(sc any, read func(context.Context, any, *DatabricksClient) error) *schema.Resource
- func DataToReflectValue(d *schema.ResourceData, r *schema.Resource, rv reflect.Value) error
- func DataToStructPointer(d *schema.ResourceData, scm map[string]*schema.Schema, result any)
- func DiffToStructPointer(d attributeGetter, scm map[string]*schema.Schema, result any)
- func IsMissing(err error) bool
- func MustCompileKeyRE(name string) *regexp.Regexp
- func MustSchemaPath(s map[string]*schema.Schema, path ...string) *schema.Schema
- func ResetCommonEnvironmentClient()
- func SchemaPath(s map[string]*schema.Schema, path ...string) (*schema.Schema, error)
- func StructToData(result any, s map[string]*schema.Schema, d *schema.ResourceData) error
- func StructToSchema(v any, customize func(map[string]*schema.Schema) map[string]*schema.Schema) map[string]*schema.Schema
- func Version() string
- type APIError
- type APIErrorBody
- type ApiVersion
- type BindResource
- type CommandExecutor
- type CommandMock
- type CommandResults
- type ConfigAttribute
- type DatabricksClient
- func (c *DatabricksClient) Authenticate(ctx context.Context) error
- func (c *DatabricksClient) ClientForHost(ctx context.Context, url string) (*DatabricksClient, error)
- func (c *DatabricksClient) CommandExecutor(ctx context.Context) CommandExecutor
- func (c *DatabricksClient) Configure(attrsUsed ...string) error
- func (c *DatabricksClient) Delete(ctx context.Context, path string, request any) error
- func (c *DatabricksClient) FormatURL(strs ...string) string
- func (c *DatabricksClient) Get(ctx context.Context, path string, request any, response any) error
- func (aa *DatabricksClient) GetAzureJwtProperty(key string) (any, error)
- func (c *DatabricksClient) IsAws() bool
- func (c *DatabricksClient) IsAzure() bool
- func (aa *DatabricksClient) IsAzureClientSecretSet() bool
- func (c *DatabricksClient) IsGcp() bool
- func (c *DatabricksClient) Patch(ctx context.Context, path string, request any) error
- func (c *DatabricksClient) Post(ctx context.Context, path string, request any, response any) error
- func (c *DatabricksClient) Put(ctx context.Context, path string, request any) error
- func (c *DatabricksClient) Scim(ctx context.Context, method, path string, request any, response any) error
- func (c *DatabricksClient) WithCommandExecutor(cef func(context.Context, *DatabricksClient) CommandExecutor)
- func (c *DatabricksClient) WithCommandMock(mock CommandMock)
- type Pair
- type Resource
Constants ¶
const ( DefaultTruncateBytes = 96 DefaultRateLimitPerSecond = 15 DefaultHTTPTimeoutSeconds = 60 )
Default settings
Variables ¶
var ( // ResourceName is resource name without databricks_ prefix ResourceName contextKey = 1 // Provider is the current instance of provider Provider contextKey = 2 // Current is the current name of integration test Current contextKey = 3 // If current resource is data IsData contextKey = 4 // apiVersion Api contextKey = 5 )
Functions ¶
func AddContextToAllResources ¶
AddContextToAllResources ...
func CleanupEnvironment ¶
func CleanupEnvironment() func()
CleanupEnvironment backs up the environment, clears it, and restores it in the end — use as `defer CleanupEnvironment()()`. It's meant strictly for "unit" tests as a last resort, because it slows down parallel execution with a mutex.
func DataResource ¶
func DataToReflectValue ¶
DataToReflectValue reads reflect value from data
func DataToStructPointer ¶
DataToStructPointer reads resource data with given schema onto result pointer. Panics.
func DiffToStructPointer ¶
DiffToStructPointer reads resource diff with given schema onto result pointer. Panics.
func MustCompileKeyRE ¶
func MustSchemaPath ¶
func ResetCommonEnvironmentClient ¶
func ResetCommonEnvironmentClient()
ResetCommonEnvironmentClient resets the test dummy client
func SchemaPath ¶
SchemaPath navigates a nested schema map along the given path of keys
func StructToData ¶
StructToData reads result using schema onto resource data
Types ¶
type APIError ¶
APIError is a generic struct for an API error on Databricks
func (APIError) DocumentationURL ¶
DocumentationURL guesses doc link
func (APIError) IsRetriable ¶
IsRetriable returns true if error is retriable
func (APIError) IsTooManyRequests ¶
IsTooManyRequests indicates that API rate limits were exceeded
type APIErrorBody ¶
type APIErrorBody struct { ErrorCode string `json:"error_code,omitempty"` Message string `json:"message,omitempty"` // The following two are for scim api only // for RFC 7644 Section 3.7.3 https://tools.ietf.org/html/rfc7644#section-3.7.3 ScimDetail string `json:"detail,omitempty"` ScimStatus string `json:"status,omitempty"` ScimType string `json:"scimType,omitempty"` API12Error string `json:"error,omitempty"` }
APIErrorBody maps "proper" Databricks REST API errors to a struct
type ApiVersion ¶
type ApiVersion string
const ( API_1_2 ApiVersion = "1.2" API_2_0 ApiVersion = "2.0" API_2_1 ApiVersion = "2.1" )
type BindResource ¶
type BindResource struct { ReadContext func(ctx context.Context, left, right string, c *DatabricksClient) error CreateContext func(ctx context.Context, left, right string, c *DatabricksClient) error DeleteContext func(ctx context.Context, left, right string, c *DatabricksClient) error }
BindResource defines resource with simplified functions
type CommandExecutor ¶
type CommandExecutor interface {
Execute(clusterID, language, commandStr string) CommandResults
}
CommandExecutor creates a Spark context, executes a command, and then closes the context
type CommandMock ¶
type CommandMock func(commandStr string) CommandResults
CommandMock mocks the execution of command
type CommandResults ¶
type CommandResults struct { ResultType string `json:"resultType,omitempty"` Summary string `json:"summary,omitempty"` Cause string `json:"cause,omitempty"` Data any `json:"data,omitempty"` Schema any `json:"schema,omitempty"` Truncated bool `json:"truncated,omitempty"` IsJSONSchema bool `json:"isJsonSchema,omitempty"` // contains filtered or unexported fields }
CommandResults captures results of a command
func (*CommandResults) Error ¶
func (cr *CommandResults) Error() string
Error returns error in a bit more friendly way
func (*CommandResults) Failed ¶
func (cr *CommandResults) Failed() bool
Failed tells if command execution failed
func (*CommandResults) Scan ¶
func (cr *CommandResults) Scan(dest ...any) bool
Scan scans for results
func (*CommandResults) Text ¶
func (cr *CommandResults) Text() string
Text returns plain text results
type ConfigAttribute ¶
type ConfigAttribute struct { Name string Kind reflect.Kind EnvVars []string Auth string Sensitive bool Internal bool // contains filtered or unexported fields }
func ClientAttributes ¶
func ClientAttributes() (attrs []ConfigAttribute)
ClientAttributes returns meta-representation of DatabricksClient configuration options
func (*ConfigAttribute) GetString ¶
func (ca *ConfigAttribute) GetString(client *DatabricksClient) string
func (*ConfigAttribute) Set ¶
func (ca *ConfigAttribute) Set(client *DatabricksClient, i any) error
type DatabricksClient ¶
type DatabricksClient struct { Host string `name:"host" env:"DATABRICKS_HOST"` Token string `name:"token" env:"DATABRICKS_TOKEN" auth:"token,sensitive"` Username string `name:"username" env:"DATABRICKS_USERNAME" auth:"password"` Password string `name:"password" env:"DATABRICKS_PASSWORD" auth:"password,sensitive"` // Databricks Account ID for Accounts API. This field is used in dependencies. AccountID string `name:"account_id" env:"DATABRICKS_ACCOUNT_ID"` // Connection profile specified within ~/.databrickscfg. Profile string `name:"profile" env:"DATABRICKS_CONFIG_PROFILE" auth:"config profile"` // Location of the Databricks CLI credentials file, that is created // by `databricks configure --token` command. By default, it is located // in ~/.databrickscfg. ConfigFile string `name:"config_file" env:"DATABRICKS_CONFIG_FILE"` GoogleServiceAccount string `name:"google_service_account" env:"DATABRICKS_GOOGLE_SERVICE_ACCOUNT" auth:"google"` GoogleCredentials string `name:"google_credentials" env:"GOOGLE_CREDENTIALS" auth:"google,sensitive"` AzureResourceID string `name:"azure_workspace_resource_id" env:"DATABRICKS_AZURE_RESOURCE_ID" auth:"azure"` AzureUseMSI bool `name:"azure_use_msi" env:"ARM_USE_MSI" auth:"azure"` AzureClientSecret string `name:"azure_client_secret" env:"ARM_CLIENT_SECRET" auth:"azure,sensitive"` AzureClientID string `name:"azure_client_id" env:"ARM_CLIENT_ID" auth:"azure"` AzureTenantID string `name:"azure_tenant_id" env:"ARM_TENANT_ID" auth:"azure"` AzurermEnvironment string `name:"azure_environment" env:"ARM_ENVIRONMENT"` // When multiple auth attributes are available in the environment, use the auth type // specified by this argument. This argument also holds currently selected auth. AuthType string `name:"auth_type" auth:"-"` // Azure Environment endpoints AzureEnvironment *azure.Environment // Skip SSL certificate verification for HTTP calls. // Use at your own risk or for unit testing purposes. 
InsecureSkipVerify bool `name:"skip_verify" auth:"-"` HTTPTimeoutSeconds int `name:"http_timeout_seconds" auth:"-"` // Truncate JSON fields in JSON above this limit. Default is 96. DebugTruncateBytes int `name:"debug_truncate_bytes" env:"DATABRICKS_DEBUG_TRUNCATE_BYTES" auth:"-"` // Debug HTTP headers of requests made by the provider. Default is false. DebugHeaders bool `name:"debug_headers" env:"DATABRICKS_DEBUG_HEADERS" auth:"-"` // Maximum number of requests per second made to Databricks REST API. RateLimitPerSecond int `name:"rate_limit" env:"DATABRICKS_RATE_LIMIT" auth:"-"` // Terraform provider instance to include Terraform binary version in // User-Agent header Provider *schema.Provider // contains filtered or unexported fields }
DatabricksClient holds properties needed for authentication and HTTP client setup. Fields with `name` struct tags become Terraform provider attributes. The `env` struct tag can hold one or more comma-separated env variable names to find a value, if it is not specified directly. The `auth` struct tag describes the type of conflicting authentication used.
func CommonEnvironmentClient ¶
func CommonEnvironmentClient() *DatabricksClient
CommonEnvironmentClient returns a client configured once per run of the application
func NewClientFromEnvironment ¶
func NewClientFromEnvironment() *DatabricksClient
NewClientFromEnvironment creates a client configured from environment variables, intended for testing purposes
func (*DatabricksClient) Authenticate ¶
func (c *DatabricksClient) Authenticate(ctx context.Context) error
Authenticate lazily authenticates across authorizers or returns error
func (*DatabricksClient) ClientForHost ¶
func (c *DatabricksClient) ClientForHost(ctx context.Context, url string) (*DatabricksClient, error)
ClientForHost creates a new DatabricksClient instance with the same auth parameters, but for the given host. Authentication has to be reinitialized, as Google OIDC has different authorizers, depending if it's workspace or Accounts API we're talking to.
func (*DatabricksClient) CommandExecutor ¶
func (c *DatabricksClient) CommandExecutor(ctx context.Context) CommandExecutor
CommandExecutor service
func (*DatabricksClient) Configure ¶
func (c *DatabricksClient) Configure(attrsUsed ...string) error
Configure client to work, optionally specifying configuration attributes used
func (*DatabricksClient) FormatURL ¶
func (c *DatabricksClient) FormatURL(strs ...string) string
FormatURL creates URL from the client Host and additional strings
func (*DatabricksClient) GetAzureJwtProperty ¶
func (aa *DatabricksClient) GetAzureJwtProperty(key string) (any, error)
func (*DatabricksClient) IsAws ¶
func (c *DatabricksClient) IsAws() bool
IsAws returns true if client is configured for AWS
func (*DatabricksClient) IsAzure ¶
func (c *DatabricksClient) IsAzure() bool
IsAzure returns true if client is configured for Azure Databricks - either by using AAD auth or with host+token combination
func (*DatabricksClient) IsAzureClientSecretSet ¶
func (aa *DatabricksClient) IsAzureClientSecretSet() bool
IsAzureClientSecretSet returns true if the client id, client secret, and tenant id are supplied
func (*DatabricksClient) IsGcp ¶
func (c *DatabricksClient) IsGcp() bool
IsGcp returns true if client is configured for GCP
func (*DatabricksClient) Scim ¶
func (c *DatabricksClient) Scim(ctx context.Context, method, path string, request any, response any) error
Scim sets SCIM headers
func (*DatabricksClient) WithCommandExecutor ¶
func (c *DatabricksClient) WithCommandExecutor(cef func(context.Context, *DatabricksClient) CommandExecutor)
WithCommandExecutor sets command executor implementation to use
func (*DatabricksClient) WithCommandMock ¶
func (c *DatabricksClient) WithCommandMock(mock CommandMock)
WithCommandMock mocks all command executions for this client
type Pair ¶
type Pair struct {
// contains filtered or unexported fields
}
Pair defines an ID pair
func NewPairSeparatedID ¶
NewPairSeparatedID creates new ID pair with a custom separator
func (*Pair) BindResource ¶
func (p *Pair) BindResource(pr BindResource) *schema.Resource
BindResource creates resource that relies on binding ID pair with simple schema & importer
type Resource ¶
type Resource struct { Create func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error Read func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error Update func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error Delete func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error CustomizeDiff func(ctx context.Context, d *schema.ResourceDiff, c any) error StateUpgraders []schema.StateUpgrader Schema map[string]*schema.Schema SchemaVersion int Timeouts *schema.ResourceTimeout }
Resource aims to simplify things like error & deleted entities handling
func (Resource) ToResource ¶
ToResource converts to Terraform resource definition