package workspace

v0.13.3
Published: Jul 20, 2023 License: Apache-2.0 Imports: 12 Imported by: 0

Documentation

Overview

These APIs allow you to manage Git Credentials, Repos, Secrets, Workspace, etc.

Index

Examples

Constants

This section is empty.

Variables

This section is empty.

Functions

func DownloadFormat

func DownloadFormat(f ExportFormat) func(q map[string]any)

Types

type AclItem

type AclItem struct {
	// The permission level applied to the principal.
	Permission AclPermission `json:"permission"`
	// The principal to which the permission is applied.
	Principal string `json:"principal"`
}

type AclPermission

type AclPermission string
const AclPermissionManage AclPermission = `MANAGE`
const AclPermissionRead AclPermission = `READ`
const AclPermissionWrite AclPermission = `WRITE`

func (*AclPermission) Set

func (f *AclPermission) Set(v string) error

Set raw string value and validate it against allowed values

func (*AclPermission) String

func (f *AclPermission) String() string

String representation for fmt.Print

func (*AclPermission) Type

func (f *AclPermission) Type() string

Type always returns AclPermission to satisfy [pflag.Value] interface

type AzureKeyVaultSecretScopeMetadata

type AzureKeyVaultSecretScopeMetadata struct {
	// The DNS name of the KeyVault.
	DnsName string `json:"dns_name"`
	// The resource ID of the Azure KeyVault that the user wants to associate
	// the scope with.
	ResourceId string `json:"resource_id"`
}

type CreateCredentials

type CreateCredentials struct {
	// Git provider. This field is case-insensitive. The available Git providers
	// are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
	// gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
	// awsCodeCommit.
	GitProvider string `json:"git_provider"`
	// Git username.
	GitUsername string `json:"git_username,omitempty"`
	// The personal access token used to authenticate to the corresponding Git
	// provider.
	PersonalAccessToken string `json:"personal_access_token,omitempty"`
}

type CreateCredentialsResponse

type CreateCredentialsResponse struct {
	// ID of the credential object in the workspace.
	CredentialId int64 `json:"credential_id,omitempty"`
	// Git provider. This field is case-insensitive. The available Git providers
	// are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
	// gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
	// awsCodeCommit.
	GitProvider string `json:"git_provider,omitempty"`
	// Git username.
	GitUsername string `json:"git_username,omitempty"`
}

type CreateRepo

type CreateRepo struct {
	// Desired path for the repo in the workspace. Must be in the format
	// /Repos/{folder}/{repo-name}.
	Path string `json:"path,omitempty"`
	// Git provider. This field is case-insensitive. The available Git providers
	// are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
	// gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
	// awsCodeCommit.
	Provider string `json:"provider"`
	// If specified, the repo will be created with sparse checkout enabled. You
	// cannot enable/disable sparse checkout after the repo is created.
	SparseCheckout *SparseCheckout `json:"sparse_checkout,omitempty"`
	// URL of the Git repository to be linked.
	Url string `json:"url"`
}

type CreateScope

type CreateScope struct {
	// The metadata for the secret scope if the type is `AZURE_KEYVAULT`
	BackendAzureKeyvault *AzureKeyVaultSecretScopeMetadata `json:"backend_azure_keyvault,omitempty"`
	// The principal that is initially granted `MANAGE` permission to the
	// created scope.
	InitialManagePrincipal string `json:"initial_manage_principal,omitempty"`
	// Scope name requested by the user. Scope names are unique.
	Scope string `json:"scope"`
	// The backend type the scope will be created with. If not specified, will
	// default to `DATABRICKS`
	ScopeBackendType ScopeBackendType `json:"scope_backend_type,omitempty"`
}

type CredentialInfo

type CredentialInfo struct {
	// ID of the credential object in the workspace.
	CredentialId int64 `json:"credential_id,omitempty"`
	// Git provider. This field is case-insensitive. The available Git providers
	// are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
	// gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
	// awsCodeCommit.
	GitProvider string `json:"git_provider,omitempty"`
	// Git username.
	GitUsername string `json:"git_username,omitempty"`
}

type Delete

type Delete struct {
	// The absolute path of the notebook or directory.
	Path string `json:"path"`
	// The flag that specifies whether to delete the object recursively. It is
	// `false` by default. Please note that deleting a directory is not atomic:
	// if the operation fails partway through, some objects under the directory
	// may already have been deleted and cannot be recovered.
	Recursive bool `json:"recursive,omitempty"`
}

type DeleteAcl

type DeleteAcl struct {
	// The principal to remove an existing ACL from.
	Principal string `json:"principal"`
	// The name of the scope to remove permissions from.
	Scope string `json:"scope"`
}

type DeleteGitCredentialRequest

type DeleteGitCredentialRequest struct {
	// The ID for the corresponding credential to access.
	CredentialId int64 `json:"-" url:"-"`
}

Delete a credential

type DeleteRepoRequest

type DeleteRepoRequest struct {
	// The ID for the corresponding repo to access.
	RepoId int64 `json:"-" url:"-"`
}

Delete a repo

type DeleteScope

type DeleteScope struct {
	// Name of the scope to delete.
	Scope string `json:"scope"`
}

type DeleteSecret

type DeleteSecret struct {
	// Name of the secret to delete.
	Key string `json:"key"`
	// The name of the scope that contains the secret to delete.
	Scope string `json:"scope"`
}

type DownloadOption

type DownloadOption = func(q map[string]any)

type ExportFormat

type ExportFormat string
const ExportFormatDbc ExportFormat = `DBC`
const ExportFormatHtml ExportFormat = `HTML`
const ExportFormatJupyter ExportFormat = `JUPYTER`
const ExportFormatRMarkdown ExportFormat = `R_MARKDOWN`
const ExportFormatSource ExportFormat = `SOURCE`

func (*ExportFormat) Set

func (f *ExportFormat) Set(v string) error

Set raw string value and validate it against allowed values

func (*ExportFormat) String

func (f *ExportFormat) String() string

String representation for fmt.Print

func (*ExportFormat) Type

func (f *ExportFormat) Type() string

Type always returns ExportFormat to satisfy [pflag.Value] interface
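
Because ExportFormat implements the [pflag.Value] interface, it can be bound directly to a command-line flag. A minimal sketch, assuming github.com/spf13/pflag is imported as pflag:

format := workspace.ExportFormatSource
flags := pflag.NewFlagSet("export", pflag.ContinueOnError)
flags.Var(&format, "format", "notebook export format (SOURCE, HTML, JUPYTER, DBC or R_MARKDOWN)")
// After flags.Parse(args), Set has validated any user-supplied value against the allowed constants.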

type ExportRequest

type ExportRequest struct {
	// This specifies the format of the exported file. By default, this is
	// `SOURCE`.
	//
	// The value is case sensitive.
	//
	// - `SOURCE`: The notebook is exported as source code. - `HTML`: The
	// notebook is exported as an HTML file. - `JUPYTER`: The notebook is
	// exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is
	// exported in Databricks archive format. - `R_MARKDOWN`: The notebook is
	// exported to R Markdown format.
	Format ExportFormat `json:"-" url:"format,omitempty"`
	// The absolute path of the object or directory. Exporting a directory is
	// only supported for the `DBC` and `SOURCE` formats.
	Path string `json:"-" url:"path"`
}

Export a workspace object

type ExportResponse

type ExportResponse struct {
	// The base64-encoded content. If the limit (10 MB) is exceeded, an
	// exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.
	Content string `json:"content,omitempty"`
}

func (*ExportResponse) Bytes

func (r *ExportResponse) Bytes() ([]byte, error)
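
Bytes is a convenience helper over the Content field; presumably it decodes the base64 payload returned by Export. A minimal sketch (the notebook path is illustrative):

exportResponse, err := w.Workspace.Export(ctx, workspace.ExportRequest{
	Format: workspace.ExportFormatSource,
	Path:   "/Users/someone@example.com/my-notebook",
})
if err != nil {
	panic(err)
}
// Bytes is assumed here to return the decoded notebook source.
raw, err := exportResponse.Bytes()
if err != nil {
	panic(err)
}
logger.Infof(ctx, "notebook is %d bytes", len(raw))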

type GetAclRequest

type GetAclRequest struct {
	// The principal to fetch ACL information for.
	Principal string `json:"-" url:"principal"`
	// The name of the scope to fetch ACL information from.
	Scope string `json:"-" url:"scope"`
}

Get secret ACL details

type GetCredentialsResponse

type GetCredentialsResponse struct {
	Credentials []CredentialInfo `json:"credentials,omitempty"`
}

type GetGitCredentialRequest

type GetGitCredentialRequest struct {
	// The ID for the corresponding credential to access.
	CredentialId int64 `json:"-" url:"-"`
}

Get a credential entry

type GetRepoRequest

type GetRepoRequest struct {
	// The ID for the corresponding repo to access.
	RepoId int64 `json:"-" url:"-"`
}

Get a repo

type GetStatusRequest

type GetStatusRequest struct {
	// The absolute path of the notebook or directory.
	Path string `json:"-" url:"path"`
}

Get status

type GitCredentialsAPI

type GitCredentialsAPI struct {
	// contains filtered or unexported fields
}

Registers a personal access token for Databricks to do operations on behalf of the user.

See more info.

func NewGitCredentials

func NewGitCredentials(client *client.DatabricksClient) *GitCredentialsAPI

func (*GitCredentialsAPI) Create

func (a *GitCredentialsAPI) Create(ctx context.Context, request CreateCredentials) (*CreateCredentialsResponse, error)

Create a credential entry.

Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials.

Example (GitCredentials)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

cr, err := w.GitCredentials.Create(ctx, workspace.CreateCredentials{
	GitProvider:         "gitHub",
	GitUsername:         "test",
	PersonalAccessToken: "test",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", cr)

// cleanup

err = w.GitCredentials.DeleteByCredentialId(ctx, cr.CredentialId)
if err != nil {
	panic(err)
}
Output:

func (*GitCredentialsAPI) CredentialInfoGitProviderToCredentialIdMap

func (a *GitCredentialsAPI) CredentialInfoGitProviderToCredentialIdMap(ctx context.Context) (map[string]int64, error)

CredentialInfoGitProviderToCredentialIdMap calls GitCredentialsAPI.ListAll and creates a map of results with CredentialInfo.GitProvider as key and CredentialInfo.CredentialId as value.

Returns an error if there's more than one CredentialInfo with the same .GitProvider.

Note: All CredentialInfo instances are loaded into memory before creating a map.

This method is generated by Databricks SDK Code Generator.
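
An illustrative sketch of resolving the credential ID registered for a given provider (the provider name is an example value):

byProvider, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx)
if err != nil {
	panic(err)
}
// Look up the credential registered for the gitHub provider, if any.
if id, ok := byProvider["gitHub"]; ok {
	logger.Infof(ctx, "gitHub credential id: %d", id)
}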

func (*GitCredentialsAPI) Delete

func (a *GitCredentialsAPI) Delete(ctx context.Context, request DeleteGitCredentialRequest) error

Delete a credential.

Deletes the specified Git credential.

func (*GitCredentialsAPI) DeleteByCredentialId

func (a *GitCredentialsAPI) DeleteByCredentialId(ctx context.Context, credentialId int64) error

Delete a credential.

Deletes the specified Git credential.

func (*GitCredentialsAPI) Get

func (a *GitCredentialsAPI) Get(ctx context.Context, request GetGitCredentialRequest) (*CredentialInfo, error)

Get a credential entry.

Gets the Git credential with the specified credential ID.

Example (GitCredentials)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

cr, err := w.GitCredentials.Create(ctx, workspace.CreateCredentials{
	GitProvider:         "gitHub",
	GitUsername:         "test",
	PersonalAccessToken: "test",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", cr)

byId, err := w.GitCredentials.GetByCredentialId(ctx, cr.CredentialId)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", byId)

// cleanup

err = w.GitCredentials.DeleteByCredentialId(ctx, cr.CredentialId)
if err != nil {
	panic(err)
}
Output:

func (*GitCredentialsAPI) GetByCredentialId

func (a *GitCredentialsAPI) GetByCredentialId(ctx context.Context, credentialId int64) (*CredentialInfo, error)

Get a credential entry.

Gets the Git credential with the specified credential ID.

func (*GitCredentialsAPI) GetByGitProvider

func (a *GitCredentialsAPI) GetByGitProvider(ctx context.Context, name string) (*CredentialInfo, error)

GetByGitProvider calls GitCredentialsAPI.CredentialInfoGitProviderToCredentialIdMap and returns a single CredentialInfo.

Returns an error if there's more than one CredentialInfo with the same .GitProvider.

Note: All CredentialInfo instances are loaded into memory before returning matching by name.

This method is generated by Databricks SDK Code Generator.

func (*GitCredentialsAPI) Impl

func (a *GitCredentialsAPI) Impl() GitCredentialsService

Impl returns low-level GitCredentials API implementation

func (*GitCredentialsAPI) ListAll

func (a *GitCredentialsAPI) ListAll(ctx context.Context) ([]CredentialInfo, error)

Get Git credentials.

Lists the calling user's Git credentials. One credential per user is supported.

This method is generated by Databricks SDK Code Generator.

Example (GitCredentials)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

list, err := w.GitCredentials.ListAll(ctx)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", list)
Output:

func (*GitCredentialsAPI) Update

func (a *GitCredentialsAPI) Update(ctx context.Context, request UpdateCredentials) error

Update a credential.

Updates the specified Git credential.

Example (GitCredentials)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

cr, err := w.GitCredentials.Create(ctx, workspace.CreateCredentials{
	GitProvider:         "gitHub",
	GitUsername:         "test",
	PersonalAccessToken: "test",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", cr)

err = w.GitCredentials.Update(ctx, workspace.UpdateCredentials{
	CredentialId:        cr.CredentialId,
	GitProvider:         "gitHub",
	GitUsername:         fmt.Sprintf("sdk-%x@example.com", time.Now().UnixNano()),
	PersonalAccessToken: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
})
if err != nil {
	panic(err)
}

// cleanup

err = w.GitCredentials.DeleteByCredentialId(ctx, cr.CredentialId)
if err != nil {
	panic(err)
}
Output:

func (*GitCredentialsAPI) WithImpl

func (a *GitCredentialsAPI) WithImpl(impl GitCredentialsService) *GitCredentialsAPI

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.

type GitCredentialsService

type GitCredentialsService interface {

	// Create a credential entry.
	//
	// Creates a Git credential entry for the user. Only one Git credential per
	// user is supported, so any attempts to create credentials if an entry
	// already exists will fail. Use the PATCH endpoint to update existing
	// credentials, or the DELETE endpoint to delete existing credentials.
	Create(ctx context.Context, request CreateCredentials) (*CreateCredentialsResponse, error)

	// Delete a credential.
	//
	// Deletes the specified Git credential.
	Delete(ctx context.Context, request DeleteGitCredentialRequest) error

	// Get a credential entry.
	//
	// Gets the Git credential with the specified credential ID.
	Get(ctx context.Context, request GetGitCredentialRequest) (*CredentialInfo, error)

	// Get Git credentials.
	//
	// Lists the calling user's Git credentials. One credential per user is
	// supported.
	//
	// Use ListAll() to get all CredentialInfo instances
	List(ctx context.Context) (*GetCredentialsResponse, error)

	// Update a credential.
	//
	// Updates the specified Git credential.
	Update(ctx context.Context, request UpdateCredentials) error
}

Registers a personal access token for Databricks to do operations on behalf of the user.

See more info.

type Import

type Import struct {
	// The base64-encoded content. This has a limit of 10 MB.
	//
	// If the limit (10 MB) is exceeded, an exception with error code
	// **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent,
	// and instead a posted file is used.
	Content string `json:"content,omitempty"`
	// This specifies the format of the file to be imported.
	//
	// The value is case sensitive.
	//
	// - `AUTO`: The item is imported depending on an analysis of the item's
	// extension and the header content provided in the request. If the item is
	// imported as a notebook, then the item's extension is automatically
	// removed. - `SOURCE`: The notebook is imported as source code. - `HTML`:
	// The notebook is imported as an HTML file. - `JUPYTER`: The notebook is
	// imported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is
	// imported in Databricks archive format. Required for directories. -
	// `R_MARKDOWN`: The notebook is imported from R Markdown format.
	Format ImportFormat `json:"format,omitempty"`
	// The language of the object. This value is set only if the object type is
	// `NOTEBOOK`.
	Language Language `json:"language,omitempty"`
	// The flag that specifies whether to overwrite an existing object. It is
	// `false` by default. For `DBC` format, `overwrite` is not supported since
	// it may contain a directory.
	Overwrite bool `json:"overwrite,omitempty"`
	// The absolute path of the object or directory. Importing a directory is
	// only supported for the `DBC` format.
	Path string `json:"path"`
}

func PythonNotebookOverwrite

func PythonNotebookOverwrite(path, content string) Import

PythonNotebookOverwrite crafts a Python notebook import request, also trimming the code specified in the second argument

func PythonNotebookOverwriteReader

func PythonNotebookOverwriteReader(path string, r io.Reader) (Import, error)
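
A minimal sketch of using PythonNotebookOverwrite to build an Import request and submit it via WorkspaceAPI.Import (the path and source are illustrative):

req := workspace.PythonNotebookOverwrite("/Users/someone@example.com/generated-notebook", `
print("hello from a generated notebook")
`)
if err := w.Workspace.Import(ctx, req); err != nil {
	panic(err)
}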

type ImportFormat

type ImportFormat string

This specifies the format of the file to be imported.

The value is case sensitive.

- `AUTO`: The item is imported depending on an analysis of the item's extension and the header content provided in the request. If the item is imported as a notebook, then the item's extension is automatically removed. - `SOURCE`: The notebook is imported as source code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.

const ImportFormatAuto ImportFormat = `AUTO`
const ImportFormatDbc ImportFormat = `DBC`
const ImportFormatHtml ImportFormat = `HTML`
const ImportFormatJupyter ImportFormat = `JUPYTER`
const ImportFormatRMarkdown ImportFormat = `R_MARKDOWN`
const ImportFormatSource ImportFormat = `SOURCE`

func (*ImportFormat) Set

func (f *ImportFormat) Set(v string) error

Set raw string value and validate it against allowed values

func (*ImportFormat) String

func (f *ImportFormat) String() string

String representation for fmt.Print

func (*ImportFormat) Type

func (f *ImportFormat) Type() string

Type always returns ImportFormat to satisfy [pflag.Value] interface

type Language

type Language string

The language of the object. This value is set only if the object type is `NOTEBOOK`.

const LanguagePython Language = `PYTHON`
const LanguageR Language = `R`
const LanguageScala Language = `SCALA`
const LanguageSql Language = `SQL`

func (*Language) Set

func (f *Language) Set(v string) error

Set raw string value and validate it against allowed values

func (*Language) String

func (f *Language) String() string

String representation for fmt.Print

func (*Language) Type

func (f *Language) Type() string

Type always returns Language to satisfy [pflag.Value] interface

type ListAclsRequest

type ListAclsRequest struct {
	// The name of the scope to fetch ACL information from.
	Scope string `json:"-" url:"scope"`
}

Lists ACLs

type ListAclsResponse

type ListAclsResponse struct {
	// The associated ACL rules applied to principals in the given scope.
	Items []AclItem `json:"items,omitempty"`
}

type ListReposRequest

type ListReposRequest struct {
	// Token used to get the next page of results. If not specified, returns the
	// first page of results as well as a next page token if there are more
	// results.
	NextPageToken string `json:"-" url:"next_page_token,omitempty"`
	// Filters repos that have paths starting with the given path prefix.
	PathPrefix string `json:"-" url:"path_prefix,omitempty"`
}

Get repos

type ListReposResponse

type ListReposResponse struct {
	// Token that can be specified as a query parameter to the GET /repos
	// endpoint to retrieve the next page of results.
	NextPageToken string `json:"next_page_token,omitempty"`

	Repos []RepoInfo `json:"repos,omitempty"`
}

type ListResponse

type ListResponse struct {
	// List of objects.
	Objects []ObjectInfo `json:"objects,omitempty"`
}

type ListScopesResponse

type ListScopesResponse struct {
	// The available secret scopes.
	Scopes []SecretScope `json:"scopes,omitempty"`
}

type ListSecretsRequest

type ListSecretsRequest struct {
	// The name of the scope to list secrets within.
	Scope string `json:"-" url:"scope"`
}

List secret keys

type ListSecretsResponse

type ListSecretsResponse struct {
	// Metadata information of all secrets contained within the given scope.
	Secrets []SecretMetadata `json:"secrets,omitempty"`
}

type ListWorkspaceRequest

type ListWorkspaceRequest struct {
	// UTC timestamp in milliseconds
	NotebooksModifiedAfter int `json:"-" url:"notebooks_modified_after,omitempty"`
	// The absolute path of the notebook or directory.
	Path string `json:"-" url:"path"`
}

List contents

type Mkdirs

type Mkdirs struct {
	// The absolute path of the directory. If the parent directories do not
	// exist, it will also create them. If the directory already exists, this
	// command will do nothing and succeed.
	Path string `json:"path"`
}

type ObjectInfo

type ObjectInfo struct {
	// Only applicable to files. The creation UTC timestamp.
	CreatedAt int64 `json:"created_at,omitempty"`
	// The language of the object. This value is set only if the object type is
	// `NOTEBOOK`.
	Language Language `json:"language,omitempty"`
	// Only applicable to files, the last modified UTC timestamp.
	ModifiedAt int64 `json:"modified_at,omitempty"`
	// Unique identifier for the object.
	ObjectId int64 `json:"object_id,omitempty"`
	// The type of the object in workspace.
	//
	// - `NOTEBOOK`: document that contains runnable code, visualizations, and
	// explanatory text. - `DIRECTORY`: directory - `LIBRARY`: library - `FILE`:
	// file - `REPO`: repository
	ObjectType ObjectType `json:"object_type,omitempty"`
	// The absolute path of the object.
	Path string `json:"path,omitempty"`
	// Only applicable to files. The file size in bytes can be returned.
	Size int64 `json:"size,omitempty"`
}

type ObjectType

type ObjectType string

The type of the object in workspace.

- `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. - `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository

const ObjectTypeDirectory ObjectType = `DIRECTORY`
const ObjectTypeFile ObjectType = `FILE`
const ObjectTypeLibrary ObjectType = `LIBRARY`
const ObjectTypeNotebook ObjectType = `NOTEBOOK`
const ObjectTypeRepo ObjectType = `REPO`

func (*ObjectType) Set

func (f *ObjectType) Set(v string) error

Set raw string value and validate it against allowed values

func (*ObjectType) String

func (f *ObjectType) String() string

String representation for fmt.Print

func (*ObjectType) Type

func (f *ObjectType) Type() string

Type always returns ObjectType to satisfy [pflag.Value] interface

type PutAcl

type PutAcl struct {
	// The permission level applied to the principal.
	Permission AclPermission `json:"permission"`
	// The principal to which the permission is applied.
	Principal string `json:"principal"`
	// The name of the scope to apply permissions to.
	Scope string `json:"scope"`
}

type PutSecret

type PutSecret struct {
	// If specified, value will be stored as bytes.
	BytesValue string `json:"bytes_value,omitempty"`
	// A unique name to identify the secret.
	Key string `json:"key"`
	// The name of the scope to which the secret will be associated.
	Scope string `json:"scope"`
	// If specified, note that the value will be stored in UTF-8 (MB4) form.
	StringValue string `json:"string_value,omitempty"`
}

type RepoInfo

type RepoInfo struct {
	// Branch that the local version of the repo is checked out to.
	Branch string `json:"branch,omitempty"`
	// SHA-1 hash representing the commit ID of the current HEAD of the repo.
	HeadCommitId string `json:"head_commit_id,omitempty"`
	// ID of the repo object in the workspace.
	Id int64 `json:"id,omitempty"`
	// Desired path for the repo in the workspace. Must be in the format
	// /Repos/{folder}/{repo-name}.
	Path string `json:"path,omitempty"`
	// Git provider. This field is case-insensitive. The available Git providers
	// are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
	// gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
	// awsCodeCommit.
	Provider string `json:"provider,omitempty"`

	SparseCheckout *SparseCheckout `json:"sparse_checkout,omitempty"`
	// URL of the Git repository to be linked.
	Url string `json:"url,omitempty"`
}

type ReposAPI

type ReposAPI struct {
	// contains filtered or unexported fields
}

The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on.

Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a repository, committing and pushing, pulling, branch management, and visual comparison of diffs when committing.

Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD.

func NewRepos

func NewRepos(client *client.DatabricksClient) *ReposAPI

func (*ReposAPI) Create

func (a *ReposAPI) Create(ctx context.Context, request CreateRepo) (*RepoInfo, error)

Create a repo.

Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser.

Example (Repos)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

root := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

ri, err := w.Repos.Create(ctx, workspace.CreateRepo{
	Path:     root,
	Url:      "https://github.com/shreyas-goenka/empty-repo.git",
	Provider: "github",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", ri)

// cleanup

err = w.Repos.DeleteByRepoId(ctx, ri.Id)
if err != nil {
	panic(err)
}
Output:

func (*ReposAPI) Delete

func (a *ReposAPI) Delete(ctx context.Context, request DeleteRepoRequest) error

Delete a repo.

Deletes the specified repo.

func (*ReposAPI) DeleteByRepoId

func (a *ReposAPI) DeleteByRepoId(ctx context.Context, repoId int64) error

Delete a repo.

Deletes the specified repo.

func (*ReposAPI) Get

func (a *ReposAPI) Get(ctx context.Context, request GetRepoRequest) (*RepoInfo, error)

Get a repo.

Returns the repo with the given repo ID.

Example (Repos)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

root := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

ri, err := w.Repos.Create(ctx, workspace.CreateRepo{
	Path:     root,
	Url:      "https://github.com/shreyas-goenka/empty-repo.git",
	Provider: "github",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", ri)

byId, err := w.Repos.GetByRepoId(ctx, ri.Id)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", byId)

// cleanup

err = w.Repos.DeleteByRepoId(ctx, ri.Id)
if err != nil {
	panic(err)
}
Output:

func (*ReposAPI) GetByPath

func (a *ReposAPI) GetByPath(ctx context.Context, name string) (*RepoInfo, error)

GetByPath calls ReposAPI.RepoInfoPathToIdMap and returns a single RepoInfo.

Returns an error if there's more than one RepoInfo with the same .Path.

Note: All RepoInfo instances are loaded into memory before returning matching by name.

This method is generated by Databricks SDK Code Generator.

Example (CheckoutBranchByPath)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}
// shortcut for getting RepoInfo by path
repo, err := w.Repos.GetByPath(ctx, "/Repos/path/to/prod")
if err != nil {
	panic(err)
}
// because you can update repo only by ID, not by path
err = w.Repos.Update(ctx, workspace.UpdateRepo{
	RepoId: repo.Id,
	Branch: "v1.4.18",
})
if err != nil {
	panic(err)
}
Output:

func (*ReposAPI) GetByRepoId

func (a *ReposAPI) GetByRepoId(ctx context.Context, repoId int64) (*RepoInfo, error)

Get a repo.

Returns the repo with the given repo ID.

func (*ReposAPI) Impl

func (a *ReposAPI) Impl() ReposService

Impl returns low-level Repos API implementation

func (*ReposAPI) ListAll

func (a *ReposAPI) ListAll(ctx context.Context, request ListReposRequest) ([]RepoInfo, error)

Get repos.

Returns repos that the calling user has Manage permissions on. Results are paginated with each page containing twenty repos.

This method is generated by Databricks SDK Code Generator.

Example (Repos)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

all, err := w.Repos.ListAll(ctx, workspace.ListReposRequest{})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", all)
Output:

func (*ReposAPI) RepoInfoPathToIdMap

func (a *ReposAPI) RepoInfoPathToIdMap(ctx context.Context, request ListReposRequest) (map[string]int64, error)

RepoInfoPathToIdMap calls ReposAPI.ListAll and creates a map of results with RepoInfo.Path as key and RepoInfo.Id as value.

Returns an error if there's more than one RepoInfo with the same .Path.

Note: All RepoInfo instances are loaded into memory before creating a map.

This method is generated by Databricks SDK Code Generator.

func (*ReposAPI) Update

func (a *ReposAPI) Update(ctx context.Context, request UpdateRepo) error

Update a repo.

Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch.

Example (Repos)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

root := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

ri, err := w.Repos.Create(ctx, workspace.CreateRepo{
	Path:     root,
	Url:      "https://github.com/shreyas-goenka/empty-repo.git",
	Provider: "github",
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", ri)

err = w.Repos.Update(ctx, workspace.UpdateRepo{
	RepoId: ri.Id,
	Branch: "foo",
})
if err != nil {
	panic(err)
}

// cleanup

err = w.Repos.DeleteByRepoId(ctx, ri.Id)
if err != nil {
	panic(err)
}
Output:

func (*ReposAPI) WithImpl

func (a *ReposAPI) WithImpl(impl ReposService) *ReposAPI

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.

type ReposService

type ReposService interface {

	// Create a repo.
	//
	// Creates a repo in the workspace and links it to the remote Git repo
	// specified. Note that repos created programmatically must be linked to a
	// remote Git repo, unlike repos created in the browser.
	Create(ctx context.Context, request CreateRepo) (*RepoInfo, error)

	// Delete a repo.
	//
	// Deletes the specified repo.
	Delete(ctx context.Context, request DeleteRepoRequest) error

	// Get a repo.
	//
	// Returns the repo with the given repo ID.
	Get(ctx context.Context, request GetRepoRequest) (*RepoInfo, error)

	// Get repos.
	//
	// Returns repos that the calling user has Manage permissions on. Results
	// are paginated with each page containing twenty repos.
	//
	// Use ListAll() to get all RepoInfo instances, which will iterate over every result page.
	List(ctx context.Context, request ListReposRequest) (*ListReposResponse, error)

	// Update a repo.
	//
	// Updates the repo to a different branch or tag, or updates the repo to the
	// latest commit on the same branch.
	Update(ctx context.Context, request UpdateRepo) error
}

The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on.

Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a repository, committing and pushing, pulling, branch management, and visual comparison of diffs when committing.

Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD.

type ScopeBackendType

type ScopeBackendType string
const ScopeBackendTypeAzureKeyvault ScopeBackendType = `AZURE_KEYVAULT`
const ScopeBackendTypeDatabricks ScopeBackendType = `DATABRICKS`

func (*ScopeBackendType) Set

func (f *ScopeBackendType) Set(v string) error

Set raw string value and validate it against allowed values

func (*ScopeBackendType) String

func (f *ScopeBackendType) String() string

String representation for fmt.Print

func (*ScopeBackendType) Type

func (f *ScopeBackendType) Type() string

Type always returns ScopeBackendType to satisfy [pflag.Value] interface

type SecretMetadata

type SecretMetadata struct {
	// A unique name to identify the secret.
	Key string `json:"key,omitempty"`
	// The last updated timestamp (in milliseconds) for the secret.
	LastUpdatedTimestamp int64 `json:"last_updated_timestamp,omitempty"`
}

type SecretScope

type SecretScope struct {
	// The type of secret scope backend.
	BackendType ScopeBackendType `json:"backend_type,omitempty"`
	// The metadata for the secret scope if the type is `AZURE_KEYVAULT`
	KeyvaultMetadata *AzureKeyVaultSecretScopeMetadata `json:"keyvault_metadata,omitempty"`
	// A unique name to identify the secret scope.
	Name string `json:"name,omitempty"`
}

type SecretsAPI

type SecretsAPI struct {
	// contains filtered or unexported fields
}

The Secrets API allows you to manage secrets, secret scopes, and access permissions.

Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of directly entering your credentials into a notebook, use Databricks secrets to store your credentials and reference them in notebooks and jobs.

Administrators, secret creators, and users granted permission can read Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets.

func NewSecrets

func NewSecrets(client *client.DatabricksClient) *SecretsAPI

func (*SecretsAPI) CreateScope

func (a *SecretsAPI) CreateScope(ctx context.Context, request CreateScope) error

Create a new secret scope.

The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. The maximum number of scopes in a workspace is 100.

Example (Secrets)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

keyName := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

scopeName := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

err = w.Secrets.CreateScope(ctx, workspace.CreateScope{
	Scope: scopeName,
})
if err != nil {
	panic(err)
}

// cleanup

err = w.Secrets.DeleteSecret(ctx, workspace.DeleteSecret{
	Scope: scopeName,
	Key:   keyName,
})
if err != nil {
	panic(err)
}
err = w.Secrets.DeleteScopeByScope(ctx, scopeName)
if err != nil {
	panic(err)
}
Output:

func (*SecretsAPI) DeleteAcl

func (a *SecretsAPI) DeleteAcl(ctx context.Context, request DeleteAcl) error

Delete an ACL.

Deletes the given ACL on the given scope.

Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) DeleteScope

func (a *SecretsAPI) DeleteScope(ctx context.Context, request DeleteScope) error

Delete a secret scope.

Deletes a secret scope.

Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) DeleteScopeByScope

func (a *SecretsAPI) DeleteScopeByScope(ctx context.Context, scope string) error

Delete a secret scope.

Deletes a secret scope.

Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) DeleteSecret

func (a *SecretsAPI) DeleteSecret(ctx context.Context, request DeleteSecret) error

Delete a secret.

Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope.

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) GetAcl

func (a *SecretsAPI) GetAcl(ctx context.Context, request GetAclRequest) (*AclItem, error)

Get secret ACL details.

Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API.

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) Impl

func (a *SecretsAPI) Impl() SecretsService

Impl returns low-level Secrets API implementation

func (*SecretsAPI) ListAclsAll

func (a *SecretsAPI) ListAclsAll(ctx context.Context, request ListAclsRequest) ([]AclItem, error)

Lists ACLs.

List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

This method is generated by Databricks SDK Code Generator.
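
An illustrative sketch (the scope name is an example value):

acls, err := w.Secrets.ListAclsAll(ctx, workspace.ListAclsRequest{
	Scope: "my-scope",
})
if err != nil {
	panic(err)
}
for _, acl := range acls {
	logger.Infof(ctx, "%s has %s on the scope", acl.Principal, acl.Permission)
}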

func (*SecretsAPI) ListAclsByScope

func (a *SecretsAPI) ListAclsByScope(ctx context.Context, scope string) (*ListAclsResponse, error)

Lists ACLs.

List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) ListScopesAll

func (a *SecretsAPI) ListScopesAll(ctx context.Context) ([]SecretScope, error)

List all scopes.

Lists all secret scopes available in the workspace.

Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

This method is generated by Databricks SDK Code Generator.

func (*SecretsAPI) ListSecretsAll

func (a *SecretsAPI) ListSecretsAll(ctx context.Context, request ListSecretsRequest) ([]SecretMetadata, error)

List secret keys.

Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call.

The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

This method is generated by Databricks SDK Code Generator.
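
An illustrative sketch (the scope name is an example value):

secrets, err := w.Secrets.ListSecretsAll(ctx, workspace.ListSecretsRequest{
	Scope: "my-scope",
})
if err != nil {
	panic(err)
}
for _, s := range secrets {
	logger.Infof(ctx, "key %s last updated at %d", s.Key, s.LastUpdatedTimestamp)
}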

func (*SecretsAPI) ListSecretsByScope

func (a *SecretsAPI) ListSecretsByScope(ctx context.Context, scope string) (*ListSecretsResponse, error)

List secret keys.

Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call.

The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

func (*SecretsAPI) PutAcl

func (a *SecretsAPI) PutAcl(ctx context.Context, request PutAcl) error

Create/update an ACL.

Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point.

In general, a user or group will use the most powerful permission available to them, and permissions are ordered as follows:

* `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what secrets are available.

Note that in general, secret values can only be read from within a command on a cluster (for example, through a notebook). There is no API to read the actual secret value material outside of a cluster. However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission.

Users must have the `MANAGE` permission to invoke this API.

The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access.

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

Example (Secrets)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

keyName := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

group, err := w.Groups.Create(ctx, iam.Group{
	DisplayName: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", group)

scopeName := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

err = w.Secrets.CreateScope(ctx, workspace.CreateScope{
	Scope: scopeName,
})
if err != nil {
	panic(err)
}

err = w.Secrets.PutAcl(ctx, workspace.PutAcl{
	Scope:      scopeName,
	Permission: workspace.AclPermissionManage,
	Principal:  group.DisplayName,
})
if err != nil {
	panic(err)
}

// cleanup

err = w.Groups.DeleteById(ctx, group.Id)
if err != nil {
	panic(err)
}
err = w.Secrets.DeleteSecret(ctx, workspace.DeleteSecret{
	Scope: scopeName,
	Key:   keyName,
})
if err != nil {
	panic(err)
}
err = w.Secrets.DeleteScopeByScope(ctx, scopeName)
if err != nil {
	panic(err)
}
Output:

func (*SecretsAPI) PutSecret

func (a *SecretsAPI) PutSecret(ctx context.Context, request PutSecret) error

Add a secret.

Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it.

You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.

The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified.

Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.

Example (Secrets)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

keyName := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

scopeName := fmt.Sprintf("sdk-%x", time.Now().UnixNano())

err = w.Secrets.CreateScope(ctx, workspace.CreateScope{
	Scope: scopeName,
})
if err != nil {
	panic(err)
}

err = w.Secrets.PutSecret(ctx, workspace.PutSecret{
	Scope:       scopeName,
	Key:         keyName,
	StringValue: fmt.Sprintf("sdk-%x", time.Now().UnixNano()),
})
if err != nil {
	panic(err)
}

// cleanup

err = w.Secrets.DeleteSecret(ctx, workspace.DeleteSecret{
	Scope: scopeName,
	Key:   keyName,
})
if err != nil {
	panic(err)
}
err = w.Secrets.DeleteScopeByScope(ctx, scopeName)
if err != nil {
	panic(err)
}
Output:

func (*SecretsAPI) WithImpl

func (a *SecretsAPI) WithImpl(impl SecretsService) *SecretsAPI

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.

type SecretsService

type SecretsService interface {

	// Create a new secret scope.
	//
	// The scope name must consist of alphanumeric characters, dashes,
	// underscores, and periods, and may not exceed 128 characters. The maximum
	// number of scopes in a workspace is 100.
	CreateScope(ctx context.Context, request CreateScope) error

	// Delete an ACL.
	//
	// Deletes the given ACL on the given scope.
	//
	// Users must have the `MANAGE` permission to invoke this API. Throws
	// `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL
	// exists. Throws `PERMISSION_DENIED` if the user does not have permission
	// to make this API call.
	DeleteAcl(ctx context.Context, request DeleteAcl) error

	// Delete a secret scope.
	//
	// Deletes a secret scope.
	//
	// Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws
	// `PERMISSION_DENIED` if the user does not have permission to make this API
	// call.
	DeleteScope(ctx context.Context, request DeleteScope) error

	// Delete a secret.
	//
	// Deletes the secret stored in this secret scope. You must have `WRITE` or
	// `MANAGE` permission on the secret scope.
	//
	// Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret
	// exists. Throws `PERMISSION_DENIED` if the user does not have permission
	// to make this API call.
	DeleteSecret(ctx context.Context, request DeleteSecret) error

	// Get secret ACL details.
	//
	// Gets the details about the given ACL, such as the group and permission.
	// Users must have the `MANAGE` permission to invoke this API.
	//
	// Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
	// `PERMISSION_DENIED` if the user does not have permission to make this API
	// call.
	GetAcl(ctx context.Context, request GetAclRequest) (*AclItem, error)

	// Lists ACLs.
	//
	// List the ACLs for a given secret scope. Users must have the `MANAGE`
	// permission to invoke this API.
	//
	// Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
	// `PERMISSION_DENIED` if the user does not have permission to make this API
	// call.
	//
	// Use ListAclsAll() to get all AclItem instances
	ListAcls(ctx context.Context, request ListAclsRequest) (*ListAclsResponse, error)

	// List all scopes.
	//
	// Lists all secret scopes available in the workspace.
	//
	// Throws `PERMISSION_DENIED` if the user does not have permission to make
	// this API call.
	//
	// Use ListScopesAll() to get all SecretScope instances
	ListScopes(ctx context.Context) (*ListScopesResponse, error)

	// List secret keys.
	//
	// Lists the secret keys that are stored at this scope. This is a
	// metadata-only operation; secret data cannot be retrieved using this API.
	// Users need the READ permission to make this call.
	//
	// The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws
	// `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
	// `PERMISSION_DENIED` if the user does not have permission to make this API
	// call.
	//
	// Use ListSecretsAll() to get all SecretMetadata instances
	ListSecrets(ctx context.Context, request ListSecretsRequest) (*ListSecretsResponse, error)

	// Create/update an ACL.
	//
	// Creates or overwrites the Access Control List (ACL) associated with the
	// given principal (user or group) on the specified scope point.
	//
	// In general, a user or group will use the most powerful permission
	// available to them, and permissions are ordered as follows:
	//
	// * `MANAGE` - Allowed to change ACLs, and read and write to this secret
	// scope. * `WRITE` - Allowed to read and write to this secret scope. *
	// `READ` - Allowed to read this secret scope and list what secrets are
	// available.
	//
	// Note that in general, secret values can only be read from within a
	// command on a cluster (for example, through a notebook). There is no API
	// to read the actual secret value material outside of a cluster. However,
	// the user's permission will be applied based on who is executing the
	// command, and they must have at least READ permission.
	//
	// Users must have the `MANAGE` permission to invoke this API.
	//
	// The principal is a user or group name corresponding to an existing
	// Databricks principal to be granted or revoked access.
	//
	// Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
	// `RESOURCE_ALREADY_EXISTS` if a permission for the principal already
	// exists. Throws `INVALID_PARAMETER_VALUE` if the permission or principal
	// is invalid. Throws `PERMISSION_DENIED` if the user does not have
	// permission to make this API call.
	PutAcl(ctx context.Context, request PutAcl) error

	// Add a secret.
	//
	// Inserts a secret under the provided scope with the given name. If a
	// secret already exists with the same name, this command overwrites the
	// existing secret's value. The server encrypts the secret using the secret
	// scope's encryption settings before storing it.
	//
	// You must have `WRITE` or `MANAGE` permission on the secret scope. The
	// secret key must consist of alphanumeric characters, dashes, underscores,
	// and periods, and cannot exceed 128 characters. The maximum allowed secret
	// value size is 128 KB. The maximum number of secrets in a given scope is
	// 1000.
	//
	// The input fields "string_value" or "bytes_value" specify the type of the
	// secret, which will determine the value returned when the secret value is
	// requested. Exactly one must be specified.
	//
	// Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws
	// `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is
	// exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or value
	// length is invalid. Throws `PERMISSION_DENIED` if the user does not have
	// permission to make this API call.
	PutSecret(ctx context.Context, request PutSecret) error
}

The Secrets API allows you to manage secrets, secret scopes, and access permissions.

Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of directly entering your credentials into a notebook, use Databricks secrets to store your credentials and reference them in notebooks and jobs.

Administrators, secret creators, and users granted permission can read Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets.

type SparseCheckout

type SparseCheckout struct {
	// List of patterns to include for sparse checkout.
	Patterns []string `json:"patterns,omitempty"`
}

type SparseCheckoutUpdate

type SparseCheckoutUpdate struct {
	// List of patterns to include for sparse checkout.
	Patterns []string `json:"patterns,omitempty"`
}

type UpdateCredentials

type UpdateCredentials struct {
	// The ID for the corresponding credential to access.
	CredentialId int64 `json:"-" url:"-"`
	// Git provider. This field is case-insensitive. The available Git providers
	// are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
	// gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
	// awsCodeCommit.
	GitProvider string `json:"git_provider,omitempty"`
	// Git username.
	GitUsername string `json:"git_username,omitempty"`
	// The personal access token used to authenticate to the corresponding Git
	// provider.
	PersonalAccessToken string `json:"personal_access_token,omitempty"`
}

type UpdateRepo

type UpdateRepo struct {
	// Branch that the local version of the repo is checked out to.
	Branch string `json:"branch,omitempty"`
	// The ID for the corresponding repo to access.
	RepoId int64 `json:"-" url:"-"`
	// If specified, update the sparse checkout settings. The update will fail
	// if sparse checkout is not enabled for the repo.
	SparseCheckout *SparseCheckoutUpdate `json:"sparse_checkout,omitempty"`
	// Tag that the local version of the repo is checked out to. Updating the
	// repo to a tag puts the repo in a detached HEAD state. Before committing
	// new changes, you must update the repo to a branch instead of the detached
	// HEAD.
	Tag string `json:"tag,omitempty"`
}

type UpdateWorkspaceRequest added in v0.13.3

type UpdateWorkspaceRequest struct {
	WorkspaceID string   `json:"-"`
	UserID      string   `json:"-"`
	Roles       []string `json:"roles"`
}

type UploadOption

type UploadOption = func(*Import)

func UploadFormat

func UploadFormat(f ImportFormat) UploadOption

func UploadLanguage

func UploadLanguage(l Language) UploadOption

func UploadOverwrite

func UploadOverwrite() UploadOption
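
Each UploadOption mutates an Import request. A minimal sketch that applies the options by hand and submits the result with WorkspaceAPI.Import (the path and content are illustrative; a dedicated upload helper accepting these options is not shown in this listing):

imp := workspace.Import{
	Path:    "/Users/someone@example.com/uploaded-notebook",
	Content: base64.StdEncoding.EncodeToString([]byte("print('uploaded')")),
}
// UploadOption is just func(*Import), so each option can be applied directly.
for _, opt := range []workspace.UploadOption{
	workspace.UploadFormat(workspace.ImportFormatSource),
	workspace.UploadLanguage(workspace.LanguagePython),
	workspace.UploadOverwrite(),
} {
	opt(&imp)
}
if err := w.Workspace.Import(ctx, imp); err != nil {
	panic(err)
}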

type WorkspaceAPI

type WorkspaceAPI struct {
	// contains filtered or unexported fields
}

The Workspace API allows you to list, import, export, and delete notebooks and folders.

A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text.

func NewWorkspace

func NewWorkspace(client *client.DatabricksClient) *WorkspaceAPI

func (*WorkspaceAPI) Delete

func (a *WorkspaceAPI) Delete(ctx context.Context, request Delete) error

Delete a workspace object.

Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`.

Object deletion cannot be undone and deleting a directory recursively is not atomic.

func (*WorkspaceAPI) Download

func (a *WorkspaceAPI) Download(ctx context.Context, path string, opts ...DownloadOption) (io.ReadCloser, error)

Download a notebook or file from the workspace by path.

By default, it acts as if the workspace.DownloadFormat(workspace.ExportFormatSource) option were supplied. When using workspace.ExportFormatAuto, the `path` is exported as either a workspace file or a notebook, depending on an analysis of the item's extension and the file content header provided in the request.

Returns an io.ReadCloser with the path contents.
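
A minimal sketch of downloading a notebook in Jupyter format (the path is illustrative; io.ReadAll from the standard io package is assumed to be available):

rc, err := w.Workspace.Download(ctx, "/Users/someone@example.com/my-notebook",
	workspace.DownloadFormat(workspace.ExportFormatJupyter))
if err != nil {
	panic(err)
}
defer rc.Close()
// Drain the reader to get the exported notebook contents.
data, err := io.ReadAll(rc)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "downloaded %d bytes", len(data))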

func (*WorkspaceAPI) Export

func (a *WorkspaceAPI) Export(ctx context.Context, request ExportRequest) (*ExportResponse, error)

Export a workspace object.

Exports an object or the contents of an entire directory.

If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.

If the exported data would exceed size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`. Currently, this API does not support exporting a library.

Example (WorkspaceIntegration)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebook := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

exportResponse, err := w.Workspace.Export(ctx, workspace.ExportRequest{
	Format: workspace.ExportFormatSource,
	Path:   notebook,
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", exportResponse)
Output:

func (*WorkspaceAPI) GetByPath

func (a *WorkspaceAPI) GetByPath(ctx context.Context, name string) (*ObjectInfo, error)

GetByPath calls WorkspaceAPI.ObjectInfoPathToObjectIdMap and returns a single ObjectInfo.

Returns an error if there's more than one ObjectInfo with the same .Path.

Note: All ObjectInfo instances are loaded into memory before returning matching by name.

This method is generated by Databricks SDK Code Generator.

func (*WorkspaceAPI) GetStatus

func (a *WorkspaceAPI) GetStatus(ctx context.Context, request GetStatusRequest) (*ObjectInfo, error)

Get status.

Gets the status of an object or a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.

Example (GenericPermissions)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebookPath := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

obj, err := w.Workspace.GetStatusByPath(ctx, notebookPath)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", obj)
Output:

Example (WorkspaceIntegration)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebook := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

getStatusResponse, err := w.Workspace.GetStatusByPath(ctx, notebook)
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", getStatusResponse)
Output:

func (*WorkspaceAPI) GetStatusByPath

func (a *WorkspaceAPI) GetStatusByPath(ctx context.Context, path string) (*ObjectInfo, error)

Get status.

Gets the status of an object or a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.

func (*WorkspaceAPI) Impl

func (a *WorkspaceAPI) Impl() WorkspaceService

Impl returns the low-level Workspace API implementation.

func (*WorkspaceAPI) Import

func (a *WorkspaceAPI) Import(ctx context.Context, request Import) error

Import a workspace object.

Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. One can only use `DBC` format to import a directory.

Example (GenericPermissions)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebookPath := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

err = w.Workspace.Import(ctx, workspace.Import{
	Path:      notebookPath,
	Overwrite: true,
	Format:    workspace.ImportFormatSource,
	Language:  workspace.LanguagePython,
	Content: base64.StdEncoding.EncodeToString([]byte((`print(1)
`))),
})
if err != nil {
	panic(err)
}
Output:

Example (JobsApiFullIntegration)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebookPath := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

err = w.Workspace.Import(ctx, workspace.Import{
	Path:      notebookPath,
	Overwrite: true,
	Format:    workspace.ImportFormatSource,
	Language:  workspace.LanguagePython,
	Content: base64.StdEncoding.EncodeToString([]byte((`import time
time.sleep(10)
dbutils.notebook.exit('hello')
`))),
})
if err != nil {
	panic(err)
}
Output:

Example (Pipelines)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebookPath := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

err = w.Workspace.Import(ctx, workspace.Import{
	Content:   base64.StdEncoding.EncodeToString([]byte(("CREATE LIVE TABLE dlt_sample AS SELECT 1"))),
	Format:    workspace.ImportFormatSource,
	Language:  workspace.LanguageSql,
	Overwrite: true,
	Path:      notebookPath,
})
if err != nil {
	panic(err)
}
Output:

Example (WorkspaceIntegration)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebook := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

err = w.Workspace.Import(ctx, workspace.Import{
	Path:      notebook,
	Format:    workspace.ImportFormatSource,
	Language:  workspace.LanguagePython,
	Content:   base64.StdEncoding.EncodeToString([]byte(("# Databricks notebook source\nprint('hello from job')"))),
	Overwrite: true,
})
if err != nil {
	panic(err)
}
Output:

func (*WorkspaceAPI) ListAll

func (a *WorkspaceAPI) ListAll(ctx context.Context, request ListWorkspaceRequest) ([]ObjectInfo, error)

List contents.

Lists the contents of a directory, or the object if it is not a directory. If the input path does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.

This method is generated by Databricks SDK Code Generator.

Example (WorkspaceIntegration)
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

notebook := func() string {
	me, err := w.CurrentUser.Me(ctx)
	if err != nil {
		panic(err)
	}
	return filepath.Join("/Users", me.UserName, fmt.Sprintf("sdk-%x", time.Now().UnixNano()))
}()

objects, err := w.Workspace.ListAll(ctx, workspace.ListWorkspaceRequest{
	Path: filepath.Dir(notebook),
})
if err != nil {
	panic(err)
}
logger.Infof(ctx, "found %v", objects)
Output:

func (*WorkspaceAPI) Mkdirs

func (a *WorkspaceAPI) Mkdirs(ctx context.Context, request Mkdirs) error

Create a directory.

Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`.

Note that if this operation fails it may have succeeded in creating some of the necessary parent directories.

func (*WorkspaceAPI) MkdirsByPath

func (a *WorkspaceAPI) MkdirsByPath(ctx context.Context, path string) error

Create a directory.

Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`.

Note that if this operation fails it may have succeeded in creating some of the necessary parent directories.
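
For illustration, a minimal directory-creation sketch (not one of the generated examples); it assumes the workspace.Mkdirs request carries the `path` described above and uses hypothetical target directories:

ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

// Hypothetical target directory; missing parent directories are created as well.
err = w.Workspace.Mkdirs(ctx, workspace.Mkdirs{
	Path: "/Users/someone@example.com/projects/etl",
})
if err != nil {
	panic(err)
}

// Convenience form that takes the path directly.
err = w.Workspace.MkdirsByPath(ctx, "/Users/someone@example.com/projects/reports")
if err != nil {
	panic(err)
}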

func (*WorkspaceAPI) ObjectInfoPathToObjectIdMap

func (a *WorkspaceAPI) ObjectInfoPathToObjectIdMap(ctx context.Context, request ListWorkspaceRequest) (map[string]int64, error)

ObjectInfoPathToObjectIdMap calls WorkspaceAPI.ListAll and creates a map of results with ObjectInfo.Path as key and ObjectInfo.ObjectId as value.

Returns an error if there's more than one ObjectInfo with the same .Path.

Note: All ObjectInfo instances are loaded into memory before creating a map.

This method is generated by Databricks SDK Code Generator.
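
For illustration, a minimal indexing sketch (not one of the generated examples), assuming a configured client and a hypothetical user folder:

ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

// Hypothetical directory to index; every listed object is held in memory.
byPath, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{
	Path: "/Users/someone@example.com",
})
if err != nil {
	panic(err)
}
for path, objectId := range byPath {
	logger.Infof(ctx, "%s -> %d", path, objectId)
}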

func (*WorkspaceAPI) ReadFile

func (a *WorkspaceAPI) ReadFile(ctx context.Context, name string) ([]byte, error)

ReadFile is identical to os.ReadFile but for workspace files.
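
For illustration, a minimal sketch (not one of the generated examples) that reads a hypothetical workspace file into memory:

ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

// Hypothetical workspace file path.
data, err := w.Workspace.ReadFile(ctx, "/Users/someone@example.com/config.json")
if err != nil {
	panic(err)
}
logger.Infof(ctx, "read %d bytes", len(data))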

func (*WorkspaceAPI) RecursiveList

func (a *WorkspaceAPI) RecursiveList(ctx context.Context, path string) ([]ObjectInfo, error)

RecursiveList traverses the workspace tree and returns all non-directory objects under the given path.
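
For illustration, a minimal traversal sketch (not one of the generated examples), assuming a configured client and a hypothetical user folder:

ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

// Walk everything under a hypothetical folder; only non-directory objects are returned.
objects, err := w.Workspace.RecursiveList(ctx, "/Users/someone@example.com")
if err != nil {
	panic(err)
}
for _, obj := range objects {
	logger.Infof(ctx, "found %s", obj.Path)
}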

func (*WorkspaceAPI) Update added in v0.13.3

func (a *WorkspaceAPI) Update(ctx context.Context, request UpdateWorkspaceRequest) error

func (*WorkspaceAPI) Upload

func (a *WorkspaceAPI) Upload(ctx context.Context, path string, r io.Reader, opts ...UploadOption) error

Upload a workspace object (for example, a notebook or file) or the contents of an entire directory (`DBC` format).

Errors:

  • RESOURCE_ALREADY_EXISTS: if `path` already exists and `overwrite` is not set to `true`.
  • INVALID_PARAMETER_VALUE: if `format` and `content` values are not compatible.

By default, it acts as if the workspace.UploadFormat(workspace.ImportFormatSource) option were supplied. When workspace.UploadFormat(workspace.ImportFormatAuto) is used, the `path` is imported as either a workspace file or a notebook, depending on an analysis of the `path`'s extension and the content header provided in the request. In addition, if the `path` is imported as a notebook, the `path`'s extension is automatically removed.

workspace.UploadLanguage(...) is required only when using the source format.
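
For illustration, a minimal upload sketch (not one of the generated examples); it assumes workspace.UploadLanguage accepts a Language value, as suggested above, and uses a hypothetical destination path:

ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

// Hypothetical destination path; a source-format upload also needs a language option.
err = w.Workspace.Upload(ctx,
	"/Users/someone@example.com/uploaded-notebook",
	strings.NewReader("print('hello from upload')"),
	workspace.UploadFormat(workspace.ImportFormatSource),
	workspace.UploadLanguage(workspace.LanguagePython),
)
if err != nil {
	panic(err)
}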

func (*WorkspaceAPI) WithImpl

func (a *WorkspaceAPI) WithImpl(impl WorkspaceService) *WorkspaceAPI

WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
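
For illustration, a rough unit-testing sketch (not one of the generated examples); instead of a generated mock it uses a hypothetical hand-written stub that implements the WorkspaceService interface shown below and is swapped in with WithImpl:

// fakeWorkspace is a hypothetical stub of WorkspaceService for tests.
type fakeWorkspace struct{}

func (f *fakeWorkspace) Delete(ctx context.Context, request workspace.Delete) error { return nil }

func (f *fakeWorkspace) Export(ctx context.Context, request workspace.ExportRequest) (*workspace.ExportResponse, error) {
	return &workspace.ExportResponse{}, nil
}

func (f *fakeWorkspace) GetStatus(ctx context.Context, request workspace.GetStatusRequest) (*workspace.ObjectInfo, error) {
	return &workspace.ObjectInfo{}, nil
}

func (f *fakeWorkspace) Import(ctx context.Context, request workspace.Import) error { return nil }

func (f *fakeWorkspace) List(ctx context.Context, request workspace.ListWorkspaceRequest) (*workspace.ListResponse, error) {
	return &workspace.ListResponse{}, nil
}

func (f *fakeWorkspace) Mkdirs(ctx context.Context, request workspace.Mkdirs) error { return nil }

func (f *fakeWorkspace) Update(ctx context.Context, request workspace.UpdateWorkspaceRequest) error {
	return nil
}

// In a test, swap the stub in for the real implementation, e.g.:
// api := w.Workspace.WithImpl(&fakeWorkspace{})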

func (*WorkspaceAPI) WriteFile

func (a *WorkspaceAPI) WriteFile(ctx context.Context, name string, data []byte) error

WriteFile is identical to os.WriteFile but for workspace files. Keep in mind: it uploads a workspace file, not a notebook, and it always overwrites any existing content at the path.
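
For illustration, a minimal sketch (not one of the generated examples) that writes a hypothetical workspace file, overwriting any existing content:

ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
if err != nil {
	panic(err)
}

// Hypothetical workspace file; existing content at this path is replaced.
err = w.Workspace.WriteFile(ctx, "/Users/someone@example.com/config.json", []byte(`{"retries": 3}`))
if err != nil {
	panic(err)
}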

type WorkspaceService

type WorkspaceService interface {

	// Delete a workspace object.
	//
	// Deletes an object or a directory (and optionally recursively deletes all
	// objects in the directory). * If `path` does not exist, this call returns
	// an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory
	// and `recursive` is set to `false`, this call returns an error
	// `DIRECTORY_NOT_EMPTY`.
	//
	// Object deletion cannot be undone and deleting a directory recursively is
	// not atomic.
	Delete(ctx context.Context, request Delete) error

	// Export a workspace object.
	//
	// Exports an object or the contents of an entire directory.
	//
	// If `path` does not exist, this call returns an error
	// `RESOURCE_DOES_NOT_EXIST`.
	//
	// If the exported data would exceed the size limit, this call returns
	// `MAX_NOTEBOOK_SIZE_EXCEEDED`. Currently, this API does not support
	// exporting a library.
	Export(ctx context.Context, request ExportRequest) (*ExportResponse, error)

	// Get status.
	//
	// Gets the status of an object or a directory. If `path` does not exist,
	// this call returns an error `RESOURCE_DOES_NOT_EXIST`.
	GetStatus(ctx context.Context, request GetStatusRequest) (*ObjectInfo, error)

	// Import a workspace object.
	//
	// Imports a workspace object (for example, a notebook or file) or the
	// contents of an entire directory. If `path` already exists and `overwrite`
	// is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`.
	// One can only use `DBC` format to import a directory.
	Import(ctx context.Context, request Import) error

	// List contents.
	//
	// Lists the contents of a directory, or the object if it is not a
	// directory. If the input path does not exist, this call returns an error
	// `RESOURCE_DOES_NOT_EXIST`.
	//
	// Use ListAll() to get all ObjectInfo instances
	List(ctx context.Context, request ListWorkspaceRequest) (*ListResponse, error)

	// Create a directory.
	//
	// Creates the specified directory (and necessary parent directories if they
	// do not exist). If there is an object (not a directory) at any prefix of
	// the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`.
	//
	// Note that if this operation fails it may have succeeded in creating some
	// of the necessary parent directories.
	Mkdirs(ctx context.Context, request Mkdirs) error
	Update(ctx context.Context, request UpdateWorkspaceRequest) error
}

The Workspace API allows you to list, import, export, and delete notebooks and folders.

A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text.
