common

package
v1.19.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jun 14, 2023 License: Apache-2.0 Imports: 19 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
var (

	// ResourceName is resource name without databricks_ prefix
	ResourceName contextKey = 1
	// Provider is the current instance of provider
	Provider contextKey = 2
	// Current is the current name of integration test
	Current contextKey = 3
	// If current resource is data
	IsData contextKey = 4
	// apiVersion
	Api contextKey = 5
)

Functions

func AddContextToAllResources

func AddContextToAllResources(p *schema.Provider, prefix string)

AddContextToAllResources adds the resource name (with the given prefix stripped) and the current provider instance to the context of every resource and data source in the given provider.

func DataResource deprecated

func DataResource(sc any, read func(context.Context, any, *DatabricksClient) error) *schema.Resource

Deprecated: migrate to WorkspaceData

func DataToReflectValue

func DataToReflectValue(d *schema.ResourceData, r *schema.Resource, rv reflect.Value) error

DataToReflectValue reads reflect value from data

func DataToStructPointer

func DataToStructPointer(d *schema.ResourceData, scm map[string]*schema.Schema, result any)

DataToStructPointer reads resource data with given schema onto result pointer. Panics.

func DiffToStructPointer

func DiffToStructPointer(d attributeGetter, scm map[string]*schema.Schema, result any)

DiffToStructPointer reads resource diff with given schema onto result pointer. Panics.

func EqualFoldDiffSuppress added in v1.14.3

func EqualFoldDiffSuppress(k, old, new string, d *schema.ResourceData) bool

func MustCompileKeyRE

func MustCompileKeyRE(name string) *regexp.Regexp

func MustSchemaPath

func MustSchemaPath(s map[string]*schema.Schema, path ...string) *schema.Schema

func SchemaPath

func SchemaPath(s map[string]*schema.Schema, path ...string) (*schema.Schema, error)

SchemaPath helps to navigate a nested schema map along the given path of attribute names, returning the schema found at that path or an error if any segment is missing.

func StructToData

func StructToData(result any, s map[string]*schema.Schema, d *schema.ResourceData) error

StructToData reads result using schema onto resource data

func StructToSchema

func StructToSchema(v any, customize func(map[string]*schema.Schema) map[string]*schema.Schema) map[string]*schema.Schema

StructToSchema makes schema from a struct type & applies customizations from callback given

func Version

func Version() string

Version returns the version of the provider.

func WorkspaceData added in v1.10.1

func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceClient) error) *schema.Resource

WorkspaceData is a generic way to define data resources in Terraform provider.

Example usage:

type catalogsData struct {
	Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
}
return common.WorkspaceData(func(ctx context.Context, data *catalogsData, w *databricks.WorkspaceClient) error {
	catalogs, err := w.Catalogs.ListAll(ctx)
	...
})

Types

type ApiVersion

type ApiVersion string
const (
	API_1_2 ApiVersion = "1.2"
	API_2_0 ApiVersion = "2.0"
	API_2_1 ApiVersion = "2.1"
)

type BindResource

type BindResource struct {
	ReadContext   func(ctx context.Context, left, right string, c *DatabricksClient) error
	CreateContext func(ctx context.Context, left, right string, c *DatabricksClient) error
	DeleteContext func(ctx context.Context, left, right string, c *DatabricksClient) error
}

BindResource defines resource with simplified functions

type CommandExecutor

type CommandExecutor interface {
	Execute(clusterID, language, commandStr string) CommandResults
}

CommandExecutor creates a spark context and executes a command and then closes context

type CommandMock

type CommandMock func(commandStr string) CommandResults

CommandMock mocks the execution of command

type CommandResults

type CommandResults struct {
	ResultType   string `json:"resultType,omitempty"`
	Summary      string `json:"summary,omitempty"`
	Cause        string `json:"cause,omitempty"`
	Data         any    `json:"data,omitempty"`
	Schema       any    `json:"schema,omitempty"`
	Truncated    bool   `json:"truncated,omitempty"`
	IsJSONSchema bool   `json:"isJsonSchema,omitempty"`
	// contains filtered or unexported fields
}

CommandResults captures results of a command

func (*CommandResults) Err

func (cr *CommandResults) Err() error

Err returns error type

func (*CommandResults) Error

func (cr *CommandResults) Error() string

Error returns error in a bit more friendly way

func (*CommandResults) Failed

func (cr *CommandResults) Failed() bool

Failed tells if command execution failed

func (*CommandResults) Scan

func (cr *CommandResults) Scan(dest ...any) bool

Scan scans for results

func (*CommandResults) Text

func (cr *CommandResults) Text() string

Text returns plain text results

type DatabricksClient

type DatabricksClient struct {
	*client.DatabricksClient
	// contains filtered or unexported fields
}

DatabricksClient holds properties needed for authentication and HTTP client setup. Fields with `name` struct tags become Terraform provider attributes. The `env` struct tag can hold one or more comma-separated env variable names used to find the value if it is not specified directly. The `auth` struct tag describes the type of conflicting authentication used.

func CommonEnvironmentClient

func CommonEnvironmentClient() *DatabricksClient

func (*DatabricksClient) ClientForHost

func (c *DatabricksClient) ClientForHost(ctx context.Context, url string) (*DatabricksClient, error)

ClientForHost creates a new DatabricksClient instance with the same auth parameters, but for the given host. Authentication has to be reinitialized, as Google OIDC has different authorizers, depending if it's workspace or Accounts API we're talking to.

func (*DatabricksClient) CommandExecutor

func (c *DatabricksClient) CommandExecutor(ctx context.Context) CommandExecutor

CommandExecutor service

func (*DatabricksClient) Delete

func (c *DatabricksClient) Delete(ctx context.Context, path string, request any) error

Delete on path

func (*DatabricksClient) FormatURL

func (c *DatabricksClient) FormatURL(strs ...string) string

FormatURL creates URL from the client Host and additional strings

func (*DatabricksClient) Get

func (c *DatabricksClient) Get(ctx context.Context, path string, request any, response any) error

Get on path

func (*DatabricksClient) GetAzureJwtProperty

func (aa *DatabricksClient) GetAzureJwtProperty(key string) (any, error)

func (*DatabricksClient) IsAws

func (c *DatabricksClient) IsAws() bool

IsAws returns true if client is configured for AWS

func (*DatabricksClient) IsAzure

func (c *DatabricksClient) IsAzure() bool

IsAzure returns true if client is configured for Azure Databricks - either by using AAD auth or with host+token combination

func (*DatabricksClient) IsGcp

func (c *DatabricksClient) IsGcp() bool

IsGcp returns true if client is configured for GCP

func (*DatabricksClient) Patch

func (c *DatabricksClient) Patch(ctx context.Context, path string, request any) error

Patch on path

func (*DatabricksClient) Post

func (c *DatabricksClient) Post(ctx context.Context, path string, request any, response any) error

Post on path

func (*DatabricksClient) Put

func (c *DatabricksClient) Put(ctx context.Context, path string, request any) error

Put on path

func (*DatabricksClient) Scim

func (c *DatabricksClient) Scim(ctx context.Context, method, path string, request any, response any) error

Scim sets SCIM headers

func (*DatabricksClient) WithCommandExecutor

func (c *DatabricksClient) WithCommandExecutor(cef func(context.Context, *DatabricksClient) CommandExecutor)

WithCommandExecutor sets command executor implementation to use

func (*DatabricksClient) WithCommandMock

func (c *DatabricksClient) WithCommandMock(mock CommandMock)

WithCommandMock mocks all command executions for this client

func (*DatabricksClient) WorkspaceClient added in v1.10.0

func (c *DatabricksClient) WorkspaceClient() (*databricks.WorkspaceClient, error)

type Pair

type Pair struct {
	// contains filtered or unexported fields
}

Pair defines an ID pair

func NewPairID

func NewPairID(left, right string) *Pair

NewPairID creates new ID pair

func NewPairSeparatedID

func NewPairSeparatedID(left, right, separator string) *Pair

NewPairSeparatedID creates new ID pair with a custom separator

func (*Pair) BindResource

func (p *Pair) BindResource(pr BindResource) *schema.Resource

BindResource creates resource that relies on binding ID pair with simple schema & importer

func (*Pair) Pack

func (p *Pair) Pack(d *schema.ResourceData)

Pack data attributes to ID

func (*Pair) Schema

func (p *Pair) Schema(do func(map[string]*schema.Schema) map[string]*schema.Schema) *Pair

Schema sets custom schema

func (*Pair) Unpack

func (p *Pair) Unpack(d *schema.ResourceData) (string, string, error)

Unpack ID into two strings and set data

type Resource

type Resource struct {
	Create         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Read           func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Update         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Delete         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	CustomizeDiff  func(ctx context.Context, d *schema.ResourceDiff) error
	StateUpgraders []schema.StateUpgrader
	Schema         map[string]*schema.Schema
	SchemaVersion  int
	Timeouts       *schema.ResourceTimeout
}

Resource aims to simplify things like error & deleted entities handling

func (Resource) ToResource

func (r Resource) ToResource() *schema.Resource

ToResource converts to Terraform resource definition

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL