package bigdatapools

v0.20240522.1080424
Published: May 22, 2024 License: MPL-2.0 Imports: 14 Imported by: 1

README

github.com/hashicorp/go-azure-sdk/resource-manager/synapse/2021-06-01/bigdatapools Documentation

The bigdatapools SDK allows for interaction with the Azure Resource Manager service synapse (API Version 2021-06-01).

This README covers example usages; further information on using this SDK can be found in the project root.

Import Path

import "github.com/hashicorp/go-azure-sdk/resource-manager/synapse/2021-06-01/bigdatapools"

Client Initialization

// the environment value passed here is an assumption - any sdkEnv.Api value works
client, err := bigdatapools.NewBigDataPoolsClientWithBaseURI(sdkEnv.AzurePublic().ResourceManager)
if err != nil {
	// handle the error
}
client.Client.Authorizer = authorizer

Example Usage: BigDataPoolsClient.CreateOrUpdate

ctx := context.TODO()
id := bigdatapools.NewBigDataPoolID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "bigDataPoolValue")

payload := bigdatapools.BigDataPoolResourceInfo{
	// ...
}


if err := client.CreateOrUpdateThenPoll(ctx, id, payload, bigdatapools.DefaultCreateOrUpdateOperationOptions()); err != nil {
	// handle the error
}
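
The CreateOrUpdate operation also accepts non-default options; a minimal sketch of setting the Force flag (the value shown is illustrative):

force := true
opts := bigdatapools.CreateOrUpdateOperationOptions{
	Force: &force,
}

if err := client.CreateOrUpdateThenPoll(ctx, id, payload, opts); err != nil {
	// handle the error
}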

Example Usage: BigDataPoolsClient.Delete

ctx := context.TODO()
id := bigdatapools.NewBigDataPoolID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "bigDataPoolValue")

if err := client.DeleteThenPoll(ctx, id); err != nil {
	// handle the error
}

Example Usage: BigDataPoolsClient.Get

ctx := context.TODO()
id := bigdatapools.NewBigDataPoolID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "bigDataPoolValue")

read, err := client.Get(ctx, id)
if err != nil {
	// handle the error
}
if model := read.Model; model != nil {
	// do something with the model/response object
}

Example Usage: BigDataPoolsClient.ListByWorkspace

ctx := context.TODO()
id := bigdatapools.NewWorkspaceID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue")

// alternatively `client.ListByWorkspace(ctx, id)` can be used to do batched pagination
items, err := client.ListByWorkspaceComplete(ctx, id)
if err != nil {
	// handle the error
}
for _, item := range items {
	// do something
}
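
Results can also be filtered while being retrieved by using a predicate; a minimal sketch matching on Name (the value is illustrative):

name := "bigDataPoolValue"
predicate := bigdatapools.BigDataPoolResourceInfoOperationPredicate{
	Name: &name,
}

filtered, err := client.ListByWorkspaceCompleteMatchingPredicate(ctx, id, predicate)
if err != nil {
	// handle the error
}
for _, item := range filtered.Items {
	// do something with each matching item
}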

Example Usage: BigDataPoolsClient.Update

ctx := context.TODO()
id := bigdatapools.NewBigDataPoolID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "bigDataPoolValue")

payload := bigdatapools.BigDataPoolPatchInfo{
	// ...
}


read, err := client.Update(ctx, id, payload)
if err != nil {
	// handle the error
}
if model := read.Model; model != nil {
	// do something with the model/response object
}
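
As documented under Types below, BigDataPoolPatchInfo currently exposes only Tags, so a fully-populated payload for the example above might look like the following (tag keys and values are illustrative):

tags := map[string]string{
	"environment": "example",
}
payload := bigdatapools.BigDataPoolPatchInfo{
	Tags: &tags,
}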

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func PossibleValuesForConfigurationType

func PossibleValuesForConfigurationType() []string

func PossibleValuesForNodeSize

func PossibleValuesForNodeSize() []string

func PossibleValuesForNodeSizeFamily

func PossibleValuesForNodeSizeFamily() []string
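
These helpers return the supported string values for the corresponding enum types, which can be useful for input validation or error messages; a brief sketch (assumes fmt is imported):

// prints the supported node sizes, e.g. "Small", "Medium", "Large", ...
for _, v := range bigdatapools.PossibleValuesForNodeSize() {
	fmt.Println(v)
}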

func ValidateBigDataPoolID

func ValidateBigDataPoolID(input interface{}, key string) (warnings []string, errors []error)

ValidateBigDataPoolID checks that 'input' can be parsed as a Big Data Pool ID

func ValidateWorkspaceID

func ValidateWorkspaceID(input interface{}, key string) (warnings []string, errors []error)

ValidateWorkspaceID checks that 'input' can be parsed as a Workspace ID
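
Either helper can be used to check a Resource ID string before use; a brief sketch for ValidateBigDataPoolID (the ID is built via NewBigDataPoolID so its format isn't hard-coded here):

input := bigdatapools.NewBigDataPoolID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "bigDataPoolValue").ID()

warnings, errs := bigdatapools.ValidateBigDataPoolID(input, "big_data_pool_id")
if len(errs) > 0 {
	// 'input' could not be parsed as a Big Data Pool ID
}
// 'warnings' carries any non-fatal issues
_ = warnings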

Types

type AutoPauseProperties

type AutoPauseProperties struct {
	DelayInMinutes *int64 `json:"delayInMinutes,omitempty"`
	Enabled        *bool  `json:"enabled,omitempty"`
}

type AutoScaleProperties

type AutoScaleProperties struct {
	Enabled      *bool  `json:"enabled,omitempty"`
	MaxNodeCount *int64 `json:"maxNodeCount,omitempty"`
	MinNodeCount *int64 `json:"minNodeCount,omitempty"`
}

type BigDataPoolId

type BigDataPoolId struct {
	SubscriptionId    string
	ResourceGroupName string
	WorkspaceName     string
	BigDataPoolName   string
}

BigDataPoolId is a struct representing the Resource ID for a Big Data Pool

func NewBigDataPoolID

func NewBigDataPoolID(subscriptionId string, resourceGroupName string, workspaceName string, bigDataPoolName string) BigDataPoolId

NewBigDataPoolID returns a new BigDataPoolId struct

func ParseBigDataPoolID

func ParseBigDataPoolID(input string) (*BigDataPoolId, error)

ParseBigDataPoolID parses 'input' into a BigDataPoolId

func ParseBigDataPoolIDInsensitively

func ParseBigDataPoolIDInsensitively(input string) (*BigDataPoolId, error)

ParseBigDataPoolIDInsensitively parses 'input' case-insensitively into a BigDataPoolId. Note: this method should only be used for API response data and not user input.

func (*BigDataPoolId) FromParseResult

func (id *BigDataPoolId) FromParseResult(input resourceids.ParseResult) error

func (BigDataPoolId) ID

func (id BigDataPoolId) ID() string

ID returns the formatted Big Data Pool ID

func (BigDataPoolId) Segments

func (id BigDataPoolId) Segments() []resourceids.Segment

Segments returns a slice of Resource ID Segments which comprise this Big Data Pool ID

func (BigDataPoolId) String

func (id BigDataPoolId) String() string

String returns a human-readable description of this Big Data Pool ID
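
Tying these together, an ID can be built, rendered and parsed again; a minimal sketch (assumes fmt is imported):

id := bigdatapools.NewBigDataPoolID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "bigDataPoolValue")

parsed, err := bigdatapools.ParseBigDataPoolID(id.ID())
if err != nil {
	// handle the error
}
fmt.Printf("workspace %q contains pool %q", parsed.WorkspaceName, parsed.BigDataPoolName)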

type BigDataPoolPatchInfo

type BigDataPoolPatchInfo struct {
	Tags *map[string]string `json:"tags,omitempty"`
}

type BigDataPoolResourceInfo

type BigDataPoolResourceInfo struct {
	Id         *string                        `json:"id,omitempty"`
	Location   string                         `json:"location"`
	Name       *string                        `json:"name,omitempty"`
	Properties *BigDataPoolResourceProperties `json:"properties,omitempty"`
	Tags       *map[string]string             `json:"tags,omitempty"`
	Type       *string                        `json:"type,omitempty"`
}

type BigDataPoolResourceInfoOperationPredicate

type BigDataPoolResourceInfoOperationPredicate struct {
	Id       *string
	Location *string
	Name     *string
	Type     *string
}

func (BigDataPoolResourceInfoOperationPredicate) Matches

func (p BigDataPoolResourceInfoOperationPredicate) Matches(input BigDataPoolResourceInfo) bool

type BigDataPoolResourceProperties

type BigDataPoolResourceProperties struct {
	AutoPause                   *AutoPauseProperties       `json:"autoPause,omitempty"`
	AutoScale                   *AutoScaleProperties       `json:"autoScale,omitempty"`
	CacheSize                   *int64                     `json:"cacheSize,omitempty"`
	CreationDate                *string                    `json:"creationDate,omitempty"`
	CustomLibraries             *[]LibraryInfo             `json:"customLibraries,omitempty"`
	DefaultSparkLogFolder       *string                    `json:"defaultSparkLogFolder,omitempty"`
	DynamicExecutorAllocation   *DynamicExecutorAllocation `json:"dynamicExecutorAllocation,omitempty"`
	IsAutotuneEnabled           *bool                      `json:"isAutotuneEnabled,omitempty"`
	IsComputeIsolationEnabled   *bool                      `json:"isComputeIsolationEnabled,omitempty"`
	LastSucceededTimestamp      *string                    `json:"lastSucceededTimestamp,omitempty"`
	LibraryRequirements         *LibraryRequirements       `json:"libraryRequirements,omitempty"`
	NodeCount                   *int64                     `json:"nodeCount,omitempty"`
	NodeSize                    *NodeSize                  `json:"nodeSize,omitempty"`
	NodeSizeFamily              *NodeSizeFamily            `json:"nodeSizeFamily,omitempty"`
	ProvisioningState           *string                    `json:"provisioningState,omitempty"`
	SessionLevelPackagesEnabled *bool                      `json:"sessionLevelPackagesEnabled,omitempty"`
	SparkConfigProperties       *SparkConfigProperties     `json:"sparkConfigProperties,omitempty"`
	SparkEventsFolder           *string                    `json:"sparkEventsFolder,omitempty"`
	SparkVersion                *string                    `json:"sparkVersion,omitempty"`
}

func (*BigDataPoolResourceProperties) GetCreationDateAsTime

func (o *BigDataPoolResourceProperties) GetCreationDateAsTime() (*time.Time, error)

func (*BigDataPoolResourceProperties) GetLastSucceededTimestampAsTime

func (o *BigDataPoolResourceProperties) GetLastSucceededTimestampAsTime() (*time.Time, error)

func (*BigDataPoolResourceProperties) SetCreationDateAsTime

func (o *BigDataPoolResourceProperties) SetCreationDateAsTime(input time.Time)

func (*BigDataPoolResourceProperties) SetLastSucceededTimestampAsTime

func (o *BigDataPoolResourceProperties) SetLastSucceededTimestampAsTime(input time.Time)
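
A hedged sketch of populating a subset of these properties for a CreateOrUpdate payload follows; all values (location, node counts, Spark version) are illustrative and no particular combination is implied to be valid:

enabled := true
delayInMinutes := int64(15)
minNodeCount := int64(3)
maxNodeCount := int64(10)
nodeSize := bigdatapools.NodeSizeSmall
nodeSizeFamily := bigdatapools.NodeSizeFamilyMemoryOptimized
sparkVersion := "3.3"

payload := bigdatapools.BigDataPoolResourceInfo{
	Location: "westeurope",
	Properties: &bigdatapools.BigDataPoolResourceProperties{
		AutoPause: &bigdatapools.AutoPauseProperties{
			DelayInMinutes: &delayInMinutes,
			Enabled:        &enabled,
		},
		AutoScale: &bigdatapools.AutoScaleProperties{
			Enabled:      &enabled,
			MinNodeCount: &minNodeCount,
			MaxNodeCount: &maxNodeCount,
		},
		NodeSize:       &nodeSize,
		NodeSizeFamily: &nodeSizeFamily,
		SparkVersion:   &sparkVersion,
	},
}

Fields such as CreationDate, LastSucceededTimestamp and ProvisioningState are typically populated by the service in responses; the Get...AsTime and Set...AsTime helpers convert those timestamp strings to and from time.Time.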

type BigDataPoolsClient

type BigDataPoolsClient struct {
	Client *resourcemanager.Client
}

func NewBigDataPoolsClientWithBaseURI

func NewBigDataPoolsClientWithBaseURI(sdkApi sdkEnv.Api) (*BigDataPoolsClient, error)

func (BigDataPoolsClient) CreateOrUpdate

func (c BigDataPoolsClient) CreateOrUpdate(ctx context.Context, id BigDataPoolId, input BigDataPoolResourceInfo, options CreateOrUpdateOperationOptions) (result CreateOrUpdateOperationResponse, err error)

CreateOrUpdate ...

func (BigDataPoolsClient) CreateOrUpdateThenPoll

func (c BigDataPoolsClient) CreateOrUpdateThenPoll(ctx context.Context, id BigDataPoolId, input BigDataPoolResourceInfo, options CreateOrUpdateOperationOptions) error

CreateOrUpdateThenPoll performs CreateOrUpdate then polls until it's completed

func (BigDataPoolsClient) Delete

func (c BigDataPoolsClient) Delete(ctx context.Context, id BigDataPoolId) (result DeleteOperationResponse, err error)

Delete ...

func (BigDataPoolsClient) DeleteThenPoll

func (c BigDataPoolsClient) DeleteThenPoll(ctx context.Context, id BigDataPoolId) error

DeleteThenPoll performs Delete then polls until it's completed

func (BigDataPoolsClient) Get

func (c BigDataPoolsClient) Get(ctx context.Context, id BigDataPoolId) (result GetOperationResponse, err error)

Get ...

func (BigDataPoolsClient) ListByWorkspace

func (c BigDataPoolsClient) ListByWorkspace(ctx context.Context, id WorkspaceId) (result ListByWorkspaceOperationResponse, err error)

ListByWorkspace ...

func (BigDataPoolsClient) ListByWorkspaceComplete

func (c BigDataPoolsClient) ListByWorkspaceComplete(ctx context.Context, id WorkspaceId) (ListByWorkspaceCompleteResult, error)

ListByWorkspaceComplete retrieves all the results into a single object

func (BigDataPoolsClient) ListByWorkspaceCompleteMatchingPredicate

func (c BigDataPoolsClient) ListByWorkspaceCompleteMatchingPredicate(ctx context.Context, id WorkspaceId, predicate BigDataPoolResourceInfoOperationPredicate) (result ListByWorkspaceCompleteResult, err error)

ListByWorkspaceCompleteMatchingPredicate retrieves all the results and then applies the predicate

func (BigDataPoolsClient) Update

func (c BigDataPoolsClient) Update(ctx context.Context, id BigDataPoolId, input BigDataPoolPatchInfo) (result UpdateOperationResponse, err error)

Update ...

type ConfigurationType

type ConfigurationType string
const (
	ConfigurationTypeArtifact ConfigurationType = "Artifact"
	ConfigurationTypeFile     ConfigurationType = "File"
)

func (*ConfigurationType) UnmarshalJSON

func (s *ConfigurationType) UnmarshalJSON(bytes []byte) error

type CreateOrUpdateOperationOptions

type CreateOrUpdateOperationOptions struct {
	Force *bool
}

func DefaultCreateOrUpdateOperationOptions

func DefaultCreateOrUpdateOperationOptions() CreateOrUpdateOperationOptions

func (CreateOrUpdateOperationOptions) ToHeaders

func (CreateOrUpdateOperationOptions) ToOData

func (CreateOrUpdateOperationOptions) ToQuery

type CreateOrUpdateOperationResponse

type CreateOrUpdateOperationResponse struct {
	Poller       pollers.Poller
	HttpResponse *http.Response
	OData        *odata.OData
	Model        *BigDataPoolResourceInfo
}
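
Where finer-grained control is needed than CreateOrUpdateThenPoll provides, the long-running operation can be started via CreateOrUpdate and the returned Poller used to wait for completion. The sketch below reuses the id and payload values from the README examples and assumes the Poller type exposes a PollUntilDone(ctx) method, which is not documented in this index:

resp, err := client.CreateOrUpdate(ctx, id, payload, bigdatapools.DefaultCreateOrUpdateOperationOptions())
if err != nil {
	// handle the error
}
// assumption: pollers.Poller provides PollUntilDone to block until the operation finishes
if err := resp.Poller.PollUntilDone(ctx); err != nil {
	// handle the error
}
if model := resp.Model; model != nil {
	// do something with the model/response object
}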

type DeleteOperationResponse

type DeleteOperationResponse struct {
	Poller       pollers.Poller
	HttpResponse *http.Response
	OData        *odata.OData
	Model        *BigDataPoolResourceInfo
}

type DynamicExecutorAllocation

type DynamicExecutorAllocation struct {
	Enabled      *bool  `json:"enabled,omitempty"`
	MaxExecutors *int64 `json:"maxExecutors,omitempty"`
	MinExecutors *int64 `json:"minExecutors,omitempty"`
}

type GetOperationResponse

type GetOperationResponse struct {
	HttpResponse *http.Response
	OData        *odata.OData
	Model        *BigDataPoolResourceInfo
}

type LibraryInfo

type LibraryInfo struct {
	ContainerName      *string `json:"containerName,omitempty"`
	CreatorId          *string `json:"creatorId,omitempty"`
	Name               *string `json:"name,omitempty"`
	Path               *string `json:"path,omitempty"`
	ProvisioningStatus *string `json:"provisioningStatus,omitempty"`
	Type               *string `json:"type,omitempty"`
	UploadedTimestamp  *string `json:"uploadedTimestamp,omitempty"`
}

func (*LibraryInfo) GetUploadedTimestampAsTime

func (o *LibraryInfo) GetUploadedTimestampAsTime() (*time.Time, error)

func (*LibraryInfo) SetUploadedTimestampAsTime

func (o *LibraryInfo) SetUploadedTimestampAsTime(input time.Time)

type LibraryRequirements

type LibraryRequirements struct {
	Content  *string `json:"content,omitempty"`
	Filename *string `json:"filename,omitempty"`
	Time     *string `json:"time,omitempty"`
}

func (*LibraryRequirements) GetTimeAsTime

func (o *LibraryRequirements) GetTimeAsTime() (*time.Time, error)

func (*LibraryRequirements) SetTimeAsTime

func (o *LibraryRequirements) SetTimeAsTime(input time.Time)

type ListByWorkspaceCompleteResult

type ListByWorkspaceCompleteResult struct {
	LatestHttpResponse *http.Response
	Items              []BigDataPoolResourceInfo
}

type ListByWorkspaceOperationResponse

type ListByWorkspaceOperationResponse struct {
	HttpResponse *http.Response
	OData        *odata.OData
	Model        *[]BigDataPoolResourceInfo
}

type NodeSize

type NodeSize string
const (
	NodeSizeLarge    NodeSize = "Large"
	NodeSizeMedium   NodeSize = "Medium"
	NodeSizeNone     NodeSize = "None"
	NodeSizeSmall    NodeSize = "Small"
	NodeSizeXLarge   NodeSize = "XLarge"
	NodeSizeXXLarge  NodeSize = "XXLarge"
	NodeSizeXXXLarge NodeSize = "XXXLarge"
)

func (*NodeSize) UnmarshalJSON

func (s *NodeSize) UnmarshalJSON(bytes []byte) error

type NodeSizeFamily

type NodeSizeFamily string
const (
	NodeSizeFamilyHardwareAcceleratedFPGA NodeSizeFamily = "HardwareAcceleratedFPGA"
	NodeSizeFamilyHardwareAcceleratedGPU  NodeSizeFamily = "HardwareAcceleratedGPU"
	NodeSizeFamilyMemoryOptimized         NodeSizeFamily = "MemoryOptimized"
	NodeSizeFamilyNone                    NodeSizeFamily = "None"
)

func (*NodeSizeFamily) UnmarshalJSON

func (s *NodeSizeFamily) UnmarshalJSON(bytes []byte) error

type SparkConfigProperties

type SparkConfigProperties struct {
	ConfigurationType *ConfigurationType `json:"configurationType,omitempty"`
	Content           *string            `json:"content,omitempty"`
	Filename          *string            `json:"filename,omitempty"`
	Time              *string            `json:"time,omitempty"`
}

func (*SparkConfigProperties) GetTimeAsTime

func (o *SparkConfigProperties) GetTimeAsTime() (*time.Time, error)

func (*SparkConfigProperties) SetTimeAsTime

func (o *SparkConfigProperties) SetTimeAsTime(input time.Time)
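
A hedged sketch of describing an inline Spark configuration file (the content and filename are illustrative):

configurationType := bigdatapools.ConfigurationTypeFile
content := "spark.sql.shuffle.partitions 200"
filename := "spark-defaults.conf"

sparkConfig := bigdatapools.SparkConfigProperties{
	ConfigurationType: &configurationType,
	Content:           &content,
	Filename:          &filename,
}
// this value can then be assigned to BigDataPoolResourceProperties.SparkConfigProperties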

type UpdateOperationResponse

type UpdateOperationResponse struct {
	HttpResponse *http.Response
	OData        *odata.OData
	Model        *BigDataPoolResourceInfo
}

type WorkspaceId

type WorkspaceId struct {
	SubscriptionId    string
	ResourceGroupName string
	WorkspaceName     string
}

WorkspaceId is a struct representing the Resource ID for a Workspace

func NewWorkspaceID

func NewWorkspaceID(subscriptionId string, resourceGroupName string, workspaceName string) WorkspaceId

NewWorkspaceID returns a new WorkspaceId struct

func ParseWorkspaceID

func ParseWorkspaceID(input string) (*WorkspaceId, error)

ParseWorkspaceID parses 'input' into a WorkspaceId

func ParseWorkspaceIDInsensitively

func ParseWorkspaceIDInsensitively(input string) (*WorkspaceId, error)

ParseWorkspaceIDInsensitively parses 'input' case-insensitively into a WorkspaceId. Note: this method should only be used for API response data and not user input.

func (*WorkspaceId) FromParseResult

func (id *WorkspaceId) FromParseResult(input resourceids.ParseResult) error

func (WorkspaceId) ID

func (id WorkspaceId) ID() string

ID returns the formatted Workspace ID

func (WorkspaceId) Segments

func (id WorkspaceId) Segments() []resourceids.Segment

Segments returns a slice of Resource ID Segments which comprise this Workspace ID

func (WorkspaceId) String

func (id WorkspaceId) String() string

String returns a human-readable description of this Workspace ID
