package common

v1.6.2
Published: Nov 2, 2022 License: Apache-2.0 Imports: 41 Imported by: 0

Documentation


Constants

const (
	DefaultTruncateBytes      = 96
	DefaultRateLimitPerSecond = 15
	DefaultHTTPTimeoutSeconds = 60
)

Default settings

Variables

var (

	// ResourceName is the resource name without the databricks_ prefix
	ResourceName contextKey = 1
	// Provider is the current instance of the provider
	Provider contextKey = 2
	// Current is the name of the current integration test
	Current contextKey = 3
	// IsData indicates whether the current resource is a data source
	IsData contextKey = 4
	// Api is the context key for the API version override
	Api contextKey = 5
)

Functions

func AddContextToAllResources

func AddContextToAllResources(p *schema.Provider, prefix string)

AddContextToAllResources adds the resource name (without the given prefix) to the request context of every resource in the provider.

func CleanupEnvironment

func CleanupEnvironment() func()

CleanupEnvironment backs up the current environment, clears it, and restores it at the end - use as `defer CleanupEnvironment()()`. It is meant strictly as a last resort for unit tests, because the mutex it holds slows down parallel execution.
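
A minimal usage sketch in a test, assuming the module path github.com/databricks/terraform-provider-databricks (not stated on this page):

package common_test

import (
	"os"
	"testing"

	"github.com/databricks/terraform-provider-databricks/common"
)

func TestWithScratchEnvironment(t *testing.T) {
	// The outer call backs up and clears the environment;
	// the deferred inner call restores it when the test returns.
	defer common.CleanupEnvironment()()
	os.Setenv("DATABRICKS_HOST", "https://adb-1234.azuredatabricks.net")
	// ... code under test that reads environment variables ...
}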

func DataResource

func DataResource(sc any, read func(context.Context, any, *DatabricksClient) error) *schema.Resource

func DataToReflectValue

func DataToReflectValue(d *schema.ResourceData, r *schema.Resource, rv reflect.Value) error

DataToReflectValue reads resource data into the given reflect value.

func DataToStructPointer

func DataToStructPointer(d *schema.ResourceData, scm map[string]*schema.Schema, result any)

DataToStructPointer reads resource data with the given schema onto the result pointer. Panics on failure.

func DiffToStructPointer

func DiffToStructPointer(d attributeGetter, scm map[string]*schema.Schema, result any)

DiffToStructPointer reads a resource diff with the given schema onto the result pointer. Panics on failure.

func IsMissing

func IsMissing(err error) bool

IsMissing reports whether the error indicates a missing resource.
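
A common pattern is to treat a missing remote resource as a signal to drop it from state rather than fail. A sketch, with readThing as a hypothetical API call and import paths assumed as in the earlier sketch:

package example

import (
	"context"

	"github.com/databricks/terraform-provider-databricks/common"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// readThing is a hypothetical API call that may return common.NotFound(...).
func readThing(ctx context.Context, id string) error {
	return common.NotFound("thing " + id + " does not exist")
}

func read(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
	err := readThing(ctx, d.Id())
	if common.IsMissing(err) {
		d.SetId("") // gone remotely; remove from state instead of failing
		return nil
	}
	return err
}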

func MustCompileKeyRE

func MustCompileKeyRE(name string) *regexp.Regexp

func MustSchemaPath

func MustSchemaPath(s map[string]*schema.Schema, path ...string) *schema.Schema

func ResetCommonEnvironmentClient

func ResetCommonEnvironmentClient()

ResetCommonEnvironmentClient resets the cached client used as a test dummy.

func SchemaPath

func SchemaPath(s map[string]*schema.Schema, path ...string) (*schema.Schema, error)

SchemaPath navigates a schema map along the given path of nested attribute names.

func StructToData

func StructToData(result any, s map[string]*schema.Schema, d *schema.ResourceData) error

StructToData writes the result struct onto resource data, following the given schema.

func StructToSchema

func StructToSchema(v any, customize func(map[string]*schema.Schema) map[string]*schema.Schema) map[string]*schema.Schema

StructToSchema builds a schema from a struct type and applies customizations from the given callback.
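
Together with DataToStructPointer and StructToData, this enables a schema/struct round trip. A sketch, assuming json struct tags become attribute names; Job and the customization are hypothetical:

package example

import (
	"github.com/databricks/terraform-provider-databricks/common"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// Job is a hypothetical wire type; attribute names come from the json tags.
type Job struct {
	Name    string `json:"name"`
	Timeout int    `json:"timeout,omitempty"`
}

var jobSchema = common.StructToSchema(Job{},
	func(s map[string]*schema.Schema) map[string]*schema.Schema {
		s["name"].ForceNew = true // example customization
		return s
	})

func update(d *schema.ResourceData) error {
	var job Job
	common.DataToStructPointer(d, jobSchema, &job) // panics on failure
	// ... call the API with job, then write the result back ...
	return common.StructToData(job, jobSchema, d)
}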

func Version

func Version() string

Version returns the version of the provider.

Types

type APIError

type APIError struct {
	ErrorCode  string
	Message    string
	Resource   string
	StatusCode int
}

APIError is a generic struct for an API error returned by Databricks.

func NotFound

func NotFound(message string) APIError

NotFound returns a properly formatted Not Found error.

func (APIError) DocumentationURL

func (apiError APIError) DocumentationURL() string

DocumentationURL guesses the documentation link for the error.

func (APIError) Error

func (apiError APIError) Error() string

Error returns the error message string, implementing the error interface.

func (APIError) IsMissing

func (apiError APIError) IsMissing() bool

IsMissing reports whether the error refers to a missing resource.

func (APIError) IsRetriable

func (apiError APIError) IsRetriable() bool

IsRetriable reports whether the error is retriable.

func (APIError) IsTooManyRequests

func (apiError APIError) IsTooManyRequests() bool

IsTooManyRequests reports whether rate limits were exceeded.

type APIErrorBody

type APIErrorBody struct {
	ErrorCode string `json:"error_code,omitempty"`
	Message   string `json:"message,omitempty"`
	// The following fields are for the SCIM API only,
	// per RFC 7644 Section 3.7.3 https://tools.ietf.org/html/rfc7644#section-3.7.3
	ScimDetail string `json:"detail,omitempty"`
	ScimStatus string `json:"status,omitempty"`
	ScimType   string `json:"scimType,omitempty"`
	API12Error string `json:"error,omitempty"`
}

APIErrorBody maps "proper" Databricks REST API errors to a struct

type ApiVersion

type ApiVersion string

const (
	API_1_2 ApiVersion = "1.2"
	API_2_0 ApiVersion = "2.0"
	API_2_1 ApiVersion = "2.1"
)

type BindResource

type BindResource struct {
	ReadContext   func(ctx context.Context, left, right string, c *DatabricksClient) error
	CreateContext func(ctx context.Context, left, right string, c *DatabricksClient) error
	DeleteContext func(ctx context.Context, left, right string, c *DatabricksClient) error
}

BindResource defines resource with simplified functions

type CommandExecutor

type CommandExecutor interface {
	Execute(clusterID, language, commandStr string) CommandResults
}

CommandExecutor creates a Spark context, executes a command, and then closes the context.
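
A sketch of running a command on a cluster; clusterID is assumed to refer to a running cluster, and "python" follows the language values of the Commands API:

package example

import (
	"context"
	"log"

	"github.com/databricks/terraform-provider-databricks/common"
)

func runHello(ctx context.Context, c *common.DatabricksClient, clusterID string) error {
	executor := c.CommandExecutor(ctx)
	res := executor.Execute(clusterID, "python", `print("hello")`)
	if res.Failed() {
		return res.Err()
	}
	log.Println(res.Text())
	return nil
}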

type CommandMock

type CommandMock func(commandStr string) CommandResults

CommandMock mocks the execution of a command.

type CommandResults

type CommandResults struct {
	ResultType   string `json:"resultType,omitempty"`
	Summary      string `json:"summary,omitempty"`
	Cause        string `json:"cause,omitempty"`
	Data         any    `json:"data,omitempty"`
	Schema       any    `json:"schema,omitempty"`
	Truncated    bool   `json:"truncated,omitempty"`
	IsJSONSchema bool   `json:"isJsonSchema,omitempty"`
	// contains filtered or unexported fields
}

CommandResults captures results of a command

func (*CommandResults) Err

func (cr *CommandResults) Err() error

Err returns the command failure as an error, if any.

func (*CommandResults) Error

func (cr *CommandResults) Error() string

Error returns the error in a somewhat friendlier form.

func (*CommandResults) Failed

func (cr *CommandResults) Failed() bool

Failed reports whether command execution failed.

func (*CommandResults) Scan

func (cr *CommandResults) Scan(dest ...any) bool

Scan scans the command results into the given destination pointers and reports whether it succeeded.

func (*CommandResults) Text

func (cr *CommandResults) Text() string

Text returns plain text results

type ConfigAttribute

type ConfigAttribute struct {
	Name      string
	Kind      reflect.Kind
	EnvVars   []string
	Auth      string
	Sensitive bool
	Internal  bool
	// contains filtered or unexported fields
}

func ClientAttributes

func ClientAttributes() (attrs []ConfigAttribute)

ClientAttributes returns a meta-representation of the DatabricksClient configuration options.
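
This makes it possible to enumerate configuration options generically, e.g. to list which environment variables are honored. A sketch:

package example

import (
	"fmt"

	"github.com/databricks/terraform-provider-databricks/common"
)

func printEnvVars() {
	for _, attr := range common.ClientAttributes() {
		if len(attr.EnvVars) == 0 {
			continue
		}
		fmt.Printf("%s (auth=%q, sensitive=%v): %v\n",
			attr.Name, attr.Auth, attr.Sensitive, attr.EnvVars)
	}
}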

func (*ConfigAttribute) GetString

func (ca *ConfigAttribute) GetString(client *DatabricksClient) string

func (*ConfigAttribute) Set

func (ca *ConfigAttribute) Set(client *DatabricksClient, i any) error

type DatabricksClient

type DatabricksClient struct {
	Host     string `name:"host" env:"DATABRICKS_HOST"`
	Token    string `name:"token" env:"DATABRICKS_TOKEN" auth:"token,sensitive"`
	Username string `name:"username" env:"DATABRICKS_USERNAME" auth:"password"`
	Password string `name:"password" env:"DATABRICKS_PASSWORD" auth:"password,sensitive"`

	ClientID      string `name:"client_id" env:"DATABRICKS_CLIENT_ID" auth:"oauth"`
	ClientSecret  string `name:"client_secret" env:"DATABRICKS_CLIENT_SECRET" auth:"oauth,sensitive"`
	TokenEndpoint string `name:"token_endpoint" env:"DATABRICKS_TOKEN_ENDPOINT" auth:"oauth"`

	// Databricks Account ID for Accounts API. This field is used in dependencies.
	AccountID string `name:"account_id" env:"DATABRICKS_ACCOUNT_ID"`

	// Connection profile specified within ~/.databrickscfg.
	Profile string `name:"profile" env:"DATABRICKS_CONFIG_PROFILE" auth:"config profile"`

	// Location of the Databricks CLI credentials file created by the
	// `databricks configure --token` command. By default, it is located
	// in ~/.databrickscfg.
	ConfigFile string `name:"config_file" env:"DATABRICKS_CONFIG_FILE"`

	GoogleServiceAccount string `name:"google_service_account" env:"DATABRICKS_GOOGLE_SERVICE_ACCOUNT" auth:"google"`
	GoogleCredentials    string `name:"google_credentials" env:"GOOGLE_CREDENTIALS" auth:"google,sensitive"`

	AzureResourceID           string `name:"azure_workspace_resource_id" env:"DATABRICKS_AZURE_RESOURCE_ID" auth:"azure"`
	AzureUseMSI               bool   `name:"azure_use_msi" env:"ARM_USE_MSI" auth:"azure"`
	AzureClientSecret         string `name:"azure_client_secret" env:"ARM_CLIENT_SECRET" auth:"azure,sensitive"`
	AzureClientID             string `name:"azure_client_id" env:"ARM_CLIENT_ID" auth:"azure"`
	AzureTenantID             string `name:"azure_tenant_id" env:"ARM_TENANT_ID" auth:"azure"`
	AzurermEnvironment        string `name:"azure_environment" env:"ARM_ENVIRONMENT"`
	AzureDatabricksLoginAppId string `name:"azure_login_app_id" env:"DATABRICKS_AZURE_LOGIN_APP_ID" auth:"azure"`

	// When multiple auth attributes are available in the environment, use the auth type
	// specified by this argument. This argument also holds the currently selected auth.
	AuthType string `name:"auth_type" auth:"-"`

	// Azure Environment endpoints
	AzureEnvironment *azure.Environment

	// Skip SSL certificate verification for HTTP calls.
	// Use at your own risk or for unit testing purposes.
	InsecureSkipVerify bool `name:"skip_verify" auth:"-"`
	HTTPTimeoutSeconds int  `name:"http_timeout_seconds" auth:"-"`

	// Truncate JSON fields above this limit. Default is 96.
	DebugTruncateBytes int `name:"debug_truncate_bytes" env:"DATABRICKS_DEBUG_TRUNCATE_BYTES" auth:"-"`

	// Debug HTTP headers of requests made by the provider. Default is false.
	DebugHeaders bool `name:"debug_headers" env:"DATABRICKS_DEBUG_HEADERS" auth:"-"`

	// Maximum number of requests per second made to Databricks REST API.
	RateLimitPerSecond int `name:"rate_limit" env:"DATABRICKS_RATE_LIMIT" auth:"-"`

	// Terraform provider instance to include Terraform binary version in
	// User-Agent header
	Provider *schema.Provider
	// contains filtered or unexported fields
}

DatabricksClient holds the properties needed for authentication and HTTP client setup. Fields with `name` struct tags become Terraform provider attributes. The `env` struct tag can hold one or more comma-separated environment variable names used to look up a value when it is not specified directly. The `auth` struct tag describes the authentication type, used to detect conflicts between methods.
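
A sketch of configuring a client directly; the host is a placeholder, and passing attribute names to Configure is an assumption about what attrsUsed expects:

package example

import (
	"context"
	"os"

	"github.com/databricks/terraform-provider-databricks/common"
)

func newClient(ctx context.Context) (*common.DatabricksClient, error) {
	c := &common.DatabricksClient{
		Host:  "https://adb-1234.azuredatabricks.net", // placeholder
		Token: os.Getenv("DATABRICKS_TOKEN"),
	}
	if err := c.Configure("host", "token"); err != nil {
		return nil, err
	}
	if err := c.Authenticate(ctx); err != nil {
		return nil, err
	}
	return c, nil
}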

func CommonEnvironmentClient

func CommonEnvironmentClient() *DatabricksClient

CommonEnvironmentClient returns a client that is configured once per run of the application.

func NewClientFromEnvironment

func NewClientFromEnvironment() *DatabricksClient

NewClientFromEnvironment creates a client configured from environment variables, primarily for testing purposes.

func (*DatabricksClient) Authenticate

func (c *DatabricksClient) Authenticate(ctx context.Context) error

Authenticate lazily authenticates across the available authorizers, or returns an error.

func (*DatabricksClient) ClientForHost

func (c *DatabricksClient) ClientForHost(ctx context.Context, url string) (*DatabricksClient, error)

ClientForHost creates a new DatabricksClient instance with the same auth parameters, but for the given host. Authentication has to be reinitialized, as Google OIDC uses different authorizers depending on whether the target is a workspace or the Accounts API.

func (*DatabricksClient) CommandExecutor

func (c *DatabricksClient) CommandExecutor(ctx context.Context) CommandExecutor

CommandExecutor returns the command execution service for this client.

func (*DatabricksClient) Configure

func (c *DatabricksClient) Configure(attrsUsed ...string) error

Configure prepares the client for use, optionally specifying which configuration attributes were used.

func (*DatabricksClient) Delete

func (c *DatabricksClient) Delete(ctx context.Context, path string, request any) error

Delete issues a DELETE request to the given path.

func (*DatabricksClient) FormatURL

func (c *DatabricksClient) FormatURL(strs ...string) string

FormatURL creates a URL from the client host and the additional strings.

func (*DatabricksClient) Get

func (c *DatabricksClient) Get(ctx context.Context, path string, request any, response any) error

Get issues a GET request to the given path.
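
The verb helpers marshal the request and unmarshal the JSON response into the given value. A sketch, assuming paths are resolved relative to the REST API root; the endpoint and response shape are illustrative:

package example

import (
	"context"

	"github.com/databricks/terraform-provider-databricks/common"
)

// clustersList mirrors a fragment of the Clusters API response (illustrative).
type clustersList struct {
	Clusters []struct {
		ClusterID string `json:"cluster_id"`
	} `json:"clusters"`
}

func listClusterIDs(ctx context.Context, c *common.DatabricksClient) ([]string, error) {
	var resp clustersList
	if err := c.Get(ctx, "/clusters/list", nil, &resp); err != nil {
		return nil, err
	}
	ids := make([]string, 0, len(resp.Clusters))
	for _, cl := range resp.Clusters {
		ids = append(ids, cl.ClusterID)
	}
	return ids, nil
}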

func (*DatabricksClient) GetAzureDatabricksLoginAppId added in v1.3.1

func (aa *DatabricksClient) GetAzureDatabricksLoginAppId() string

func (*DatabricksClient) GetAzureJwtProperty

func (aa *DatabricksClient) GetAzureJwtProperty(key string) (any, error)

func (*DatabricksClient) IsAws

func (c *DatabricksClient) IsAws() bool

IsAws returns true if the client is configured for AWS.

func (*DatabricksClient) IsAzure

func (c *DatabricksClient) IsAzure() bool

IsAzure returns true if the client is configured for Azure Databricks - either through AAD auth or with a host+token combination.

func (*DatabricksClient) IsAzureClientSecretSet

func (aa *DatabricksClient) IsAzureClientSecretSet() bool

IsAzureClientSecretSet returns true if the client ID/secret and tenant ID are supplied.

func (*DatabricksClient) IsGcp

func (c *DatabricksClient) IsGcp() bool

IsGcp returns true if the client is configured for GCP.

func (*DatabricksClient) Patch

func (c *DatabricksClient) Patch(ctx context.Context, path string, request any) error

Patch issues a PATCH request to the given path.

func (*DatabricksClient) Post

func (c *DatabricksClient) Post(ctx context.Context, path string, request any, response any) error

Post issues a POST request to the given path.

func (*DatabricksClient) Put

func (c *DatabricksClient) Put(ctx context.Context, path string, request any) error

Put issues a PUT request to the given path.

func (*DatabricksClient) Scim

func (c *DatabricksClient) Scim(ctx context.Context, method, path string, request any, response any) error

Scim issues a request to the SCIM API with the appropriate SCIM headers set.

func (*DatabricksClient) WithCommandExecutor

func (c *DatabricksClient) WithCommandExecutor(cef func(context.Context, *DatabricksClient) CommandExecutor)

WithCommandExecutor sets the command executor implementation to use.

func (*DatabricksClient) WithCommandMock

func (c *DatabricksClient) WithCommandMock(mock CommandMock)

WithCommandMock mocks all command executions for this client
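
In tests this avoids talking to a real cluster. A sketch:

package example

import (
	"github.com/databricks/terraform-provider-databricks/common"
)

func mockCommands(c *common.DatabricksClient) {
	c.WithCommandMock(func(commandStr string) common.CommandResults {
		// Every command "succeeds" with a canned text result.
		return common.CommandResults{
			ResultType: "text",
			Data:       "ok",
		}
	})
}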

type Pair

type Pair struct {
	// contains filtered or unexported fields
}

Pair defines an ID pair

func NewPairID

func NewPairID(left, right string) *Pair

NewPairID creates a new ID pair.

func NewPairSeparatedID

func NewPairSeparatedID(left, right, separator string) *Pair

NewPairSeparatedID creates a new ID pair with a custom separator.

func (*Pair) BindResource

func (p *Pair) BindResource(pr BindResource) *schema.Resource

BindResource creates a resource that binds an ID pair, with a simple schema and importer.

func (*Pair) Pack

func (p *Pair) Pack(d *schema.ResourceData)

Pack packs the data attributes into the resource ID.

func (*Pair) Schema

func (p *Pair) Schema(do func(map[string]*schema.Schema) map[string]*schema.Schema) *Pair

Schema customizes the pair's schema with the given callback.

func (*Pair) Unpack

func (p *Pair) Unpack(d *schema.ResourceData) (string, string, error)

Unpack splits the ID into two strings and sets the corresponding data attributes.
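
A sketch of a pair-based ID in resource handlers; the attribute names are hypothetical, and the default separator is an assumption (use NewPairSeparatedID to set one explicitly):

package example

import (
	"github.com/databricks/terraform-provider-databricks/common"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

var clusterPolicyPair = common.NewPairID("cluster_id", "policy_id")

func afterCreate(d *schema.ResourceData) {
	// Combine both attributes into the single resource ID.
	clusterPolicyPair.Pack(d)
}

func onRead(d *schema.ResourceData) (cluster, policy string, err error) {
	// Split the ID back out and set both attributes on d.
	return clusterPolicyPair.Unpack(d)
}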

type Resource

type Resource struct {
	Create         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Read           func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Update         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Delete         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	CustomizeDiff  func(ctx context.Context, d *schema.ResourceDiff, c any) error
	StateUpgraders []schema.StateUpgrader
	Schema         map[string]*schema.Schema
	SchemaVersion  int
	Timeouts       *schema.ResourceTimeout
}

Resource simplifies the handling of errors and deleted entities.

func (Resource) ToResource

func (r Resource) ToResource() *schema.Resource

ToResource converts to a Terraform resource definition.
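
A sketch of wiring it all together; Thing and the handler bodies are hypothetical:

package example

import (
	"context"

	"github.com/databricks/terraform-provider-databricks/common"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

type Thing struct {
	Name string `json:"name"`
}

func ResourceThing() *schema.Resource {
	thingSchema := common.StructToSchema(Thing{},
		func(s map[string]*schema.Schema) map[string]*schema.Schema { return s })
	return common.Resource{
		Schema: thingSchema,
		Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			var t Thing
			common.DataToStructPointer(d, thingSchema, &t)
			// ... create t via the API, then set the ID ...
			d.SetId(t.Name)
			return nil
		},
		Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			// ... fetch remote state and write it back onto d ...
			return nil
		},
	}.ToResource()
}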
