Documentation ¶
Overview ¶
+kubebuilder:object:generate=true
+groupName=bigquery.gcp.upbound.io
+versionName=v1beta2
Index ¶
- Constants
- Variables
- type AccessDatasetInitParameters
- type AccessDatasetObservation
- type AccessDatasetParameters
- type AccessInitParameters
- type AccessObservation
- type AccessParameters
- type AccessRoleInitParameters
- type AccessRoleObservation
- type AccessRoleParameters
- type AnalyticsHubDataExchangeIAMMember
- func (in *AnalyticsHubDataExchangeIAMMember) DeepCopy() *AnalyticsHubDataExchangeIAMMember
- func (in *AnalyticsHubDataExchangeIAMMember) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMember)
- func (in *AnalyticsHubDataExchangeIAMMember) DeepCopyObject() runtime.Object
- func (mg *AnalyticsHubDataExchangeIAMMember) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *AnalyticsHubDataExchangeIAMMember) GetConnectionDetailsMapping() map[string]string
- func (mg *AnalyticsHubDataExchangeIAMMember) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *AnalyticsHubDataExchangeIAMMember) GetID() string
- func (tr *AnalyticsHubDataExchangeIAMMember) GetInitParameters() (map[string]any, error)
- func (mg *AnalyticsHubDataExchangeIAMMember) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *AnalyticsHubDataExchangeIAMMember) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *AnalyticsHubDataExchangeIAMMember) GetObservation() (map[string]any, error)
- func (tr *AnalyticsHubDataExchangeIAMMember) GetParameters() (map[string]any, error)
- func (mg *AnalyticsHubDataExchangeIAMMember) GetProviderConfigReference() *xpv1.Reference
- func (mg *AnalyticsHubDataExchangeIAMMember) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *AnalyticsHubDataExchangeIAMMember) GetTerraformResourceType() string
- func (tr *AnalyticsHubDataExchangeIAMMember) GetTerraformSchemaVersion() int
- func (mg *AnalyticsHubDataExchangeIAMMember) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *AnalyticsHubDataExchangeIAMMember) Hub()
- func (tr *AnalyticsHubDataExchangeIAMMember) LateInitialize(attrs []byte) (bool, error)
- func (mg *AnalyticsHubDataExchangeIAMMember) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *AnalyticsHubDataExchangeIAMMember) SetConditions(c ...xpv1.Condition)
- func (mg *AnalyticsHubDataExchangeIAMMember) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *AnalyticsHubDataExchangeIAMMember) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *AnalyticsHubDataExchangeIAMMember) SetObservation(obs map[string]any) error
- func (tr *AnalyticsHubDataExchangeIAMMember) SetParameters(params map[string]any) error
- func (mg *AnalyticsHubDataExchangeIAMMember) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *AnalyticsHubDataExchangeIAMMember) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *AnalyticsHubDataExchangeIAMMember) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type AnalyticsHubDataExchangeIAMMemberInitParameters
- type AnalyticsHubDataExchangeIAMMemberList
- func (in *AnalyticsHubDataExchangeIAMMemberList) DeepCopy() *AnalyticsHubDataExchangeIAMMemberList
- func (in *AnalyticsHubDataExchangeIAMMemberList) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberList)
- func (in *AnalyticsHubDataExchangeIAMMemberList) DeepCopyObject() runtime.Object
- func (l *AnalyticsHubDataExchangeIAMMemberList) GetItems() []resource.Managed
- type AnalyticsHubDataExchangeIAMMemberObservation
- type AnalyticsHubDataExchangeIAMMemberParameters
- type AnalyticsHubDataExchangeIAMMemberSpec
- type AnalyticsHubDataExchangeIAMMemberStatus
- type AnalyticsHubListing
- func (in *AnalyticsHubListing) DeepCopy() *AnalyticsHubListing
- func (in *AnalyticsHubListing) DeepCopyInto(out *AnalyticsHubListing)
- func (in *AnalyticsHubListing) DeepCopyObject() runtime.Object
- func (mg *AnalyticsHubListing) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *AnalyticsHubListing) GetConnectionDetailsMapping() map[string]string
- func (mg *AnalyticsHubListing) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *AnalyticsHubListing) GetID() string
- func (tr *AnalyticsHubListing) GetInitParameters() (map[string]any, error)
- func (mg *AnalyticsHubListing) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *AnalyticsHubListing) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *AnalyticsHubListing) GetObservation() (map[string]any, error)
- func (tr *AnalyticsHubListing) GetParameters() (map[string]any, error)
- func (mg *AnalyticsHubListing) GetProviderConfigReference() *xpv1.Reference
- func (mg *AnalyticsHubListing) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *AnalyticsHubListing) GetTerraformResourceType() string
- func (tr *AnalyticsHubListing) GetTerraformSchemaVersion() int
- func (mg *AnalyticsHubListing) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *AnalyticsHubListing) Hub()
- func (tr *AnalyticsHubListing) LateInitialize(attrs []byte) (bool, error)
- func (mg *AnalyticsHubListing) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *AnalyticsHubListing) SetConditions(c ...xpv1.Condition)
- func (mg *AnalyticsHubListing) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *AnalyticsHubListing) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *AnalyticsHubListing) SetObservation(obs map[string]any) error
- func (tr *AnalyticsHubListing) SetParameters(params map[string]any) error
- func (mg *AnalyticsHubListing) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *AnalyticsHubListing) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *AnalyticsHubListing) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type AnalyticsHubListingInitParameters
- type AnalyticsHubListingList
- type AnalyticsHubListingObservation
- type AnalyticsHubListingParameters
- type AnalyticsHubListingSpec
- type AnalyticsHubListingStatus
- type ArgumentsInitParameters
- type ArgumentsObservation
- type ArgumentsParameters
- type AutoscaleInitParameters
- type AutoscaleObservation
- type AutoscaleParameters
- type AvroOptionsInitParameters
- type AvroOptionsObservation
- type AvroOptionsParameters
- type AwsInitParameters
- type AwsObservation
- type AwsParameters
- type AzureInitParameters
- type AzureObservation
- type AzureParameters
- type BigqueryDatasetInitParameters
- type BigqueryDatasetObservation
- type BigqueryDatasetParameters
- type BigtableOptionsInitParameters
- type BigtableOptionsObservation
- type BigtableOptionsParameters
- type CloudResourceInitParameters
- type CloudResourceObservation
- type CloudResourceParameters
- type CloudSQLInitParameters
- type CloudSQLObservation
- type CloudSQLParameters
- type CloudSpannerInitParameters
- type CloudSpannerObservation
- type CloudSpannerParameters
- type ColumnFamilyInitParameters
- type ColumnFamilyObservation
- type ColumnFamilyParameters
- type ColumnInitParameters
- type ColumnObservation
- type ColumnParameters
- type ColumnReferencesInitParameters
- type ColumnReferencesObservation
- type ColumnReferencesParameters
- type ConditionInitParameters
- type ConditionObservation
- type ConditionParameters
- type Connection
- func (in *Connection) DeepCopy() *Connection
- func (in *Connection) DeepCopyInto(out *Connection)
- func (in *Connection) DeepCopyObject() runtime.Object
- func (mg *Connection) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Connection) GetConnectionDetailsMapping() map[string]string
- func (mg *Connection) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Connection) GetID() string
- func (tr *Connection) GetInitParameters() (map[string]any, error)
- func (mg *Connection) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Connection) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Connection) GetObservation() (map[string]any, error)
- func (tr *Connection) GetParameters() (map[string]any, error)
- func (mg *Connection) GetProviderConfigReference() *xpv1.Reference
- func (mg *Connection) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Connection) GetTerraformResourceType() string
- func (tr *Connection) GetTerraformSchemaVersion() int
- func (mg *Connection) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Connection) Hub()
- func (tr *Connection) LateInitialize(attrs []byte) (bool, error)
- func (mg *Connection) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Connection) SetConditions(c ...xpv1.Condition)
- func (mg *Connection) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Connection) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Connection) SetObservation(obs map[string]any) error
- func (tr *Connection) SetParameters(params map[string]any) error
- func (mg *Connection) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Connection) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Connection) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type ConnectionInitParameters
- type ConnectionList
- type ConnectionObservation
- type ConnectionParameters
- type ConnectionSpec
- type ConnectionStatus
- type CopyInitParameters
- type CopyObservation
- type CopyParameters
- type CredentialInitParameters
- type CredentialObservation
- type CredentialParameters
- type CsvOptionsInitParameters
- type CsvOptionsObservation
- type CsvOptionsParameters
- type DataProviderInitParameters
- type DataProviderObservation
- type DataProviderParameters
- type DataTransferConfig
- func (in *DataTransferConfig) DeepCopy() *DataTransferConfig
- func (in *DataTransferConfig) DeepCopyInto(out *DataTransferConfig)
- func (in *DataTransferConfig) DeepCopyObject() runtime.Object
- func (mg *DataTransferConfig) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *DataTransferConfig) GetConnectionDetailsMapping() map[string]string
- func (mg *DataTransferConfig) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *DataTransferConfig) GetID() string
- func (tr *DataTransferConfig) GetInitParameters() (map[string]any, error)
- func (mg *DataTransferConfig) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *DataTransferConfig) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *DataTransferConfig) GetObservation() (map[string]any, error)
- func (tr *DataTransferConfig) GetParameters() (map[string]any, error)
- func (mg *DataTransferConfig) GetProviderConfigReference() *xpv1.Reference
- func (mg *DataTransferConfig) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *DataTransferConfig) GetTerraformResourceType() string
- func (tr *DataTransferConfig) GetTerraformSchemaVersion() int
- func (mg *DataTransferConfig) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *DataTransferConfig) Hub()
- func (tr *DataTransferConfig) LateInitialize(attrs []byte) (bool, error)
- func (mg *DataTransferConfig) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *DataTransferConfig) SetConditions(c ...xpv1.Condition)
- func (mg *DataTransferConfig) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *DataTransferConfig) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *DataTransferConfig) SetObservation(obs map[string]any) error
- func (tr *DataTransferConfig) SetParameters(params map[string]any) error
- func (mg *DataTransferConfig) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *DataTransferConfig) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *DataTransferConfig) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type DataTransferConfigInitParameters
- type DataTransferConfigList
- type DataTransferConfigObservation
- type DataTransferConfigParameters
- type DataTransferConfigSpec
- type DataTransferConfigStatus
- type Dataset
- func (in *Dataset) DeepCopy() *Dataset
- func (in *Dataset) DeepCopyInto(out *Dataset)
- func (in *Dataset) DeepCopyObject() runtime.Object
- func (mg *Dataset) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Dataset) GetConnectionDetailsMapping() map[string]string
- func (mg *Dataset) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Dataset) GetID() string
- func (tr *Dataset) GetInitParameters() (map[string]any, error)
- func (mg *Dataset) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Dataset) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Dataset) GetObservation() (map[string]any, error)
- func (tr *Dataset) GetParameters() (map[string]any, error)
- func (mg *Dataset) GetProviderConfigReference() *xpv1.Reference
- func (mg *Dataset) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Dataset) GetTerraformResourceType() string
- func (tr *Dataset) GetTerraformSchemaVersion() int
- func (mg *Dataset) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Dataset) Hub()
- func (tr *Dataset) LateInitialize(attrs []byte) (bool, error)
- func (mg *Dataset) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Dataset) SetConditions(c ...xpv1.Condition)
- func (mg *Dataset) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Dataset) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Dataset) SetObservation(obs map[string]any) error
- func (tr *Dataset) SetParameters(params map[string]any) error
- func (mg *Dataset) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Dataset) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Dataset) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type DatasetAccess
- func (in *DatasetAccess) DeepCopy() *DatasetAccess
- func (in *DatasetAccess) DeepCopyInto(out *DatasetAccess)
- func (in *DatasetAccess) DeepCopyObject() runtime.Object
- func (mg *DatasetAccess) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *DatasetAccess) GetConnectionDetailsMapping() map[string]string
- func (mg *DatasetAccess) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *DatasetAccess) GetID() string
- func (tr *DatasetAccess) GetInitParameters() (map[string]any, error)
- func (mg *DatasetAccess) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *DatasetAccess) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *DatasetAccess) GetObservation() (map[string]any, error)
- func (tr *DatasetAccess) GetParameters() (map[string]any, error)
- func (mg *DatasetAccess) GetProviderConfigReference() *xpv1.Reference
- func (mg *DatasetAccess) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *DatasetAccess) GetTerraformResourceType() string
- func (tr *DatasetAccess) GetTerraformSchemaVersion() int
- func (mg *DatasetAccess) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *DatasetAccess) Hub()
- func (tr *DatasetAccess) LateInitialize(attrs []byte) (bool, error)
- func (mg *DatasetAccess) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *DatasetAccess) SetConditions(c ...xpv1.Condition)
- func (mg *DatasetAccess) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *DatasetAccess) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *DatasetAccess) SetObservation(obs map[string]any) error
- func (tr *DatasetAccess) SetParameters(params map[string]any) error
- func (mg *DatasetAccess) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *DatasetAccess) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *DatasetAccess) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type DatasetAccessDatasetDatasetInitParameters
- type DatasetAccessDatasetDatasetObservation
- type DatasetAccessDatasetDatasetParameters
- type DatasetAccessDatasetInitParameters
- type DatasetAccessDatasetObservation
- type DatasetAccessDatasetParameters
- type DatasetAccessInitParameters
- type DatasetAccessList
- type DatasetAccessObservation
- type DatasetAccessParameters
- type DatasetAccessRoutineInitParameters
- type DatasetAccessRoutineObservation
- type DatasetAccessRoutineParameters
- type DatasetAccessSpec
- type DatasetAccessStatus
- type DatasetAccessViewInitParameters
- type DatasetAccessViewObservation
- type DatasetAccessViewParameters
- type DatasetDatasetInitParameters
- type DatasetDatasetObservation
- type DatasetDatasetParameters
- type DatasetIAMBinding
- func (in *DatasetIAMBinding) DeepCopy() *DatasetIAMBinding
- func (in *DatasetIAMBinding) DeepCopyInto(out *DatasetIAMBinding)
- func (in *DatasetIAMBinding) DeepCopyObject() runtime.Object
- func (mg *DatasetIAMBinding) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *DatasetIAMBinding) GetConnectionDetailsMapping() map[string]string
- func (mg *DatasetIAMBinding) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *DatasetIAMBinding) GetID() string
- func (tr *DatasetIAMBinding) GetInitParameters() (map[string]any, error)
- func (mg *DatasetIAMBinding) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *DatasetIAMBinding) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *DatasetIAMBinding) GetObservation() (map[string]any, error)
- func (tr *DatasetIAMBinding) GetParameters() (map[string]any, error)
- func (mg *DatasetIAMBinding) GetProviderConfigReference() *xpv1.Reference
- func (mg *DatasetIAMBinding) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *DatasetIAMBinding) GetTerraformResourceType() string
- func (tr *DatasetIAMBinding) GetTerraformSchemaVersion() int
- func (mg *DatasetIAMBinding) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *DatasetIAMBinding) Hub()
- func (tr *DatasetIAMBinding) LateInitialize(attrs []byte) (bool, error)
- func (mg *DatasetIAMBinding) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *DatasetIAMBinding) SetConditions(c ...xpv1.Condition)
- func (mg *DatasetIAMBinding) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *DatasetIAMBinding) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *DatasetIAMBinding) SetObservation(obs map[string]any) error
- func (tr *DatasetIAMBinding) SetParameters(params map[string]any) error
- func (mg *DatasetIAMBinding) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *DatasetIAMBinding) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *DatasetIAMBinding) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type DatasetIAMBindingConditionInitParameters
- type DatasetIAMBindingConditionObservation
- type DatasetIAMBindingConditionParameters
- type DatasetIAMBindingInitParameters
- type DatasetIAMBindingList
- type DatasetIAMBindingObservation
- type DatasetIAMBindingParameters
- type DatasetIAMBindingSpec
- type DatasetIAMBindingStatus
- type DatasetIAMMember
- func (in *DatasetIAMMember) DeepCopy() *DatasetIAMMember
- func (in *DatasetIAMMember) DeepCopyInto(out *DatasetIAMMember)
- func (in *DatasetIAMMember) DeepCopyObject() runtime.Object
- func (mg *DatasetIAMMember) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *DatasetIAMMember) GetConnectionDetailsMapping() map[string]string
- func (mg *DatasetIAMMember) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *DatasetIAMMember) GetID() string
- func (tr *DatasetIAMMember) GetInitParameters() (map[string]any, error)
- func (mg *DatasetIAMMember) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *DatasetIAMMember) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *DatasetIAMMember) GetObservation() (map[string]any, error)
- func (tr *DatasetIAMMember) GetParameters() (map[string]any, error)
- func (mg *DatasetIAMMember) GetProviderConfigReference() *xpv1.Reference
- func (mg *DatasetIAMMember) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *DatasetIAMMember) GetTerraformResourceType() string
- func (tr *DatasetIAMMember) GetTerraformSchemaVersion() int
- func (mg *DatasetIAMMember) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *DatasetIAMMember) Hub()
- func (tr *DatasetIAMMember) LateInitialize(attrs []byte) (bool, error)
- func (mg *DatasetIAMMember) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *DatasetIAMMember) SetConditions(c ...xpv1.Condition)
- func (mg *DatasetIAMMember) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *DatasetIAMMember) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *DatasetIAMMember) SetObservation(obs map[string]any) error
- func (tr *DatasetIAMMember) SetParameters(params map[string]any) error
- func (mg *DatasetIAMMember) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *DatasetIAMMember) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *DatasetIAMMember) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type DatasetIAMMemberConditionInitParameters
- type DatasetIAMMemberConditionObservation
- type DatasetIAMMemberConditionParameters
- type DatasetIAMMemberInitParameters
- type DatasetIAMMemberList
- type DatasetIAMMemberObservation
- type DatasetIAMMemberParameters
- type DatasetIAMMemberSpec
- type DatasetIAMMemberStatus
- type DatasetInitParameters
- type DatasetList
- type DatasetObservation
- type DatasetParameters
- type DatasetSpec
- type DatasetStatus
- type DefaultDatasetInitParameters
- type DefaultDatasetObservation
- type DefaultDatasetParameters
- type DefaultEncryptionConfigurationInitParameters
- type DefaultEncryptionConfigurationObservation
- type DefaultEncryptionConfigurationParameters
- type DestinationEncryptionConfigurationInitParameters
- type DestinationEncryptionConfigurationObservation
- type DestinationEncryptionConfigurationParameters
- type DestinationTableInitParameters
- type DestinationTableObservation
- type DestinationTableParameters
- type EmailPreferencesInitParameters
- type EmailPreferencesObservation
- type EmailPreferencesParameters
- type EncryptionConfigurationInitParameters
- type EncryptionConfigurationObservation
- type EncryptionConfigurationParameters
- type ErrorResultInitParameters
- type ErrorResultObservation
- type ErrorResultParameters
- type ErrorsInitParameters
- type ErrorsObservation
- type ErrorsParameters
- type ExternalDataConfigurationInitParameters
- type ExternalDataConfigurationObservation
- type ExternalDataConfigurationParameters
- type ExternalDataConfigurationParquetOptionsInitParameters
- type ExternalDataConfigurationParquetOptionsObservation
- type ExternalDataConfigurationParquetOptionsParameters
- type ExternalDatasetReferenceInitParameters
- type ExternalDatasetReferenceObservation
- type ExternalDatasetReferenceParameters
- type ExtractInitParameters
- type ExtractObservation
- type ExtractParameters
- type ForeignKeysInitParameters
- type ForeignKeysObservation
- type ForeignKeysParameters
- type GoogleSheetsOptionsInitParameters
- type GoogleSheetsOptionsObservation
- type GoogleSheetsOptionsParameters
- type HivePartitioningOptionsInitParameters
- type HivePartitioningOptionsObservation
- type HivePartitioningOptionsParameters
- type JSONOptionsInitParameters
- type JSONOptionsObservation
- type JSONOptionsParameters
- type Job
- func (in *Job) DeepCopy() *Job
- func (in *Job) DeepCopyInto(out *Job)
- func (in *Job) DeepCopyObject() runtime.Object
- func (mg *Job) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Job) GetConnectionDetailsMapping() map[string]string
- func (mg *Job) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Job) GetID() string
- func (tr *Job) GetInitParameters() (map[string]any, error)
- func (mg *Job) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Job) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Job) GetObservation() (map[string]any, error)
- func (tr *Job) GetParameters() (map[string]any, error)
- func (mg *Job) GetProviderConfigReference() *xpv1.Reference
- func (mg *Job) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Job) GetTerraformResourceType() string
- func (tr *Job) GetTerraformSchemaVersion() int
- func (mg *Job) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Job) Hub()
- func (tr *Job) LateInitialize(attrs []byte) (bool, error)
- func (mg *Job) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Job) SetConditions(c ...xpv1.Condition)
- func (mg *Job) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Job) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Job) SetObservation(obs map[string]any) error
- func (tr *Job) SetParameters(params map[string]any) error
- func (mg *Job) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Job) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Job) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type JobInitParameters
- type JobList
- type JobObservation
- type JobParameters
- type JobSpec
- type JobStatus
- type LoadDestinationEncryptionConfigurationInitParameters
- type LoadDestinationEncryptionConfigurationObservation
- type LoadDestinationEncryptionConfigurationParameters
- type LoadDestinationTableInitParameters
- type LoadDestinationTableObservation
- type LoadDestinationTableParameters
- type LoadInitParameters
- type LoadObservation
- type LoadParameters
- type MaterializedViewInitParameters
- type MaterializedViewObservation
- type MaterializedViewParameters
- type MetastoreServiceConfigInitParameters
- type MetastoreServiceConfigObservation
- type MetastoreServiceConfigParameters
- type ParquetOptionsInitParameters
- type ParquetOptionsObservation
- type ParquetOptionsParameters
- type PrimaryKeyInitParameters
- type PrimaryKeyObservation
- type PrimaryKeyParameters
- type PublisherInitParameters
- type PublisherObservation
- type PublisherParameters
- type QueryDestinationEncryptionConfigurationInitParameters
- type QueryDestinationEncryptionConfigurationObservation
- type QueryDestinationEncryptionConfigurationParameters
- type QueryDestinationTableInitParameters
- type QueryDestinationTableObservation
- type QueryDestinationTableParameters
- type QueryInitParameters
- type QueryObservation
- type QueryParameters
- type RangeInitParameters
- type RangeObservation
- type RangeParameters
- type RangePartitioningInitParameters
- type RangePartitioningObservation
- type RangePartitioningParameters
- type ReferencedTableInitParameters
- type ReferencedTableObservation
- type ReferencedTableParameters
- type RemoteFunctionOptionsInitParameters
- type RemoteFunctionOptionsObservation
- type RemoteFunctionOptionsParameters
- type Reservation
- func (in *Reservation) DeepCopy() *Reservation
- func (in *Reservation) DeepCopyInto(out *Reservation)
- func (in *Reservation) DeepCopyObject() runtime.Object
- func (mg *Reservation) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Reservation) GetConnectionDetailsMapping() map[string]string
- func (mg *Reservation) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Reservation) GetID() string
- func (tr *Reservation) GetInitParameters() (map[string]any, error)
- func (mg *Reservation) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Reservation) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Reservation) GetObservation() (map[string]any, error)
- func (tr *Reservation) GetParameters() (map[string]any, error)
- func (mg *Reservation) GetProviderConfigReference() *xpv1.Reference
- func (mg *Reservation) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Reservation) GetTerraformResourceType() string
- func (tr *Reservation) GetTerraformSchemaVersion() int
- func (mg *Reservation) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Reservation) Hub()
- func (tr *Reservation) LateInitialize(attrs []byte) (bool, error)
- func (mg *Reservation) SetConditions(c ...xpv1.Condition)
- func (mg *Reservation) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Reservation) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Reservation) SetObservation(obs map[string]any) error
- func (tr *Reservation) SetParameters(params map[string]any) error
- func (mg *Reservation) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Reservation) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Reservation) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type ReservationInitParameters
- type ReservationList
- type ReservationObservation
- type ReservationParameters
- type ReservationSpec
- type ReservationStatus
- type RestrictedExportConfigInitParameters
- type RestrictedExportConfigObservation
- type RestrictedExportConfigParameters
- type Routine
- func (in *Routine) DeepCopy() *Routine
- func (in *Routine) DeepCopyInto(out *Routine)
- func (in *Routine) DeepCopyObject() runtime.Object
- func (mg *Routine) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Routine) GetConnectionDetailsMapping() map[string]string
- func (mg *Routine) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Routine) GetID() string
- func (tr *Routine) GetInitParameters() (map[string]any, error)
- func (mg *Routine) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Routine) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Routine) GetObservation() (map[string]any, error)
- func (tr *Routine) GetParameters() (map[string]any, error)
- func (mg *Routine) GetProviderConfigReference() *xpv1.Reference
- func (mg *Routine) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Routine) GetTerraformResourceType() string
- func (tr *Routine) GetTerraformSchemaVersion() int
- func (mg *Routine) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Routine) Hub()
- func (tr *Routine) LateInitialize(attrs []byte) (bool, error)
- func (mg *Routine) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Routine) SetConditions(c ...xpv1.Condition)
- func (mg *Routine) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Routine) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Routine) SetObservation(obs map[string]any) error
- func (tr *Routine) SetParameters(params map[string]any) error
- func (mg *Routine) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Routine) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Routine) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type RoutineInitParameters
- type RoutineInitParameters_2
- type RoutineList
- type RoutineObservation
- type RoutineObservation_2
- type RoutineParameters
- type RoutineParameters_2
- type RoutineSpec
- type RoutineStatus
- type ScheduleOptionsInitParameters
- type ScheduleOptionsObservation
- type ScheduleOptionsParameters
- type ScriptOptionsInitParameters
- type ScriptOptionsObservation
- type ScriptOptionsParameters
- type SensitiveParamsInitParameters
- type SensitiveParamsObservation
- type SensitiveParamsParameters
- type SourceModelInitParameters
- type SourceModelObservation
- type SourceModelParameters
- type SourceTableInitParameters
- type SourceTableObservation
- type SourceTableParameters
- type SourceTablesInitParameters
- type SourceTablesObservation
- type SourceTablesParameters
- type SparkHistoryServerConfigInitParameters
- type SparkHistoryServerConfigObservation
- type SparkHistoryServerConfigParameters
- type SparkInitParameters
- type SparkObservation
- type SparkOptionsInitParameters
- type SparkOptionsObservation
- type SparkOptionsParameters
- type SparkParameters
- type StatusInitParameters
- type StatusObservation
- type StatusParameters
- type Table
- func (in *Table) DeepCopy() *Table
- func (in *Table) DeepCopyInto(out *Table)
- func (in *Table) DeepCopyObject() runtime.Object
- func (mg *Table) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Table) GetConnectionDetailsMapping() map[string]string
- func (mg *Table) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Table) GetID() string
- func (tr *Table) GetInitParameters() (map[string]any, error)
- func (mg *Table) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Table) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Table) GetObservation() (map[string]any, error)
- func (tr *Table) GetParameters() (map[string]any, error)
- func (mg *Table) GetProviderConfigReference() *xpv1.Reference
- func (mg *Table) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Table) GetTerraformResourceType() string
- func (tr *Table) GetTerraformSchemaVersion() int
- func (mg *Table) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Table) Hub()
- func (tr *Table) LateInitialize(attrs []byte) (bool, error)
- func (mg *Table) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Table) SetConditions(c ...xpv1.Condition)
- func (mg *Table) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Table) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Table) SetObservation(obs map[string]any) error
- func (tr *Table) SetParameters(params map[string]any) error
- func (mg *Table) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Table) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Table) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type TableConstraintsInitParameters
- type TableConstraintsObservation
- type TableConstraintsParameters
- type TableIAMBinding
- func (in *TableIAMBinding) DeepCopy() *TableIAMBinding
- func (in *TableIAMBinding) DeepCopyInto(out *TableIAMBinding)
- func (in *TableIAMBinding) DeepCopyObject() runtime.Object
- func (mg *TableIAMBinding) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *TableIAMBinding) GetConnectionDetailsMapping() map[string]string
- func (mg *TableIAMBinding) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *TableIAMBinding) GetID() string
- func (tr *TableIAMBinding) GetInitParameters() (map[string]any, error)
- func (mg *TableIAMBinding) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *TableIAMBinding) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *TableIAMBinding) GetObservation() (map[string]any, error)
- func (tr *TableIAMBinding) GetParameters() (map[string]any, error)
- func (mg *TableIAMBinding) GetProviderConfigReference() *xpv1.Reference
- func (mg *TableIAMBinding) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *TableIAMBinding) GetTerraformResourceType() string
- func (tr *TableIAMBinding) GetTerraformSchemaVersion() int
- func (mg *TableIAMBinding) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *TableIAMBinding) Hub()
- func (tr *TableIAMBinding) LateInitialize(attrs []byte) (bool, error)
- func (mg *TableIAMBinding) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *TableIAMBinding) SetConditions(c ...xpv1.Condition)
- func (mg *TableIAMBinding) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *TableIAMBinding) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *TableIAMBinding) SetObservation(obs map[string]any) error
- func (tr *TableIAMBinding) SetParameters(params map[string]any) error
- func (mg *TableIAMBinding) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *TableIAMBinding) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *TableIAMBinding) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type TableIAMBindingConditionInitParameters
- type TableIAMBindingConditionObservation
- type TableIAMBindingConditionParameters
- type TableIAMBindingInitParameters
- type TableIAMBindingList
- type TableIAMBindingObservation
- type TableIAMBindingParameters
- type TableIAMBindingSpec
- type TableIAMBindingStatus
- type TableIAMMember
- func (in *TableIAMMember) DeepCopy() *TableIAMMember
- func (in *TableIAMMember) DeepCopyInto(out *TableIAMMember)
- func (in *TableIAMMember) DeepCopyObject() runtime.Object
- func (mg *TableIAMMember) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *TableIAMMember) GetConnectionDetailsMapping() map[string]string
- func (mg *TableIAMMember) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *TableIAMMember) GetID() string
- func (tr *TableIAMMember) GetInitParameters() (map[string]any, error)
- func (mg *TableIAMMember) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *TableIAMMember) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *TableIAMMember) GetObservation() (map[string]any, error)
- func (tr *TableIAMMember) GetParameters() (map[string]any, error)
- func (mg *TableIAMMember) GetProviderConfigReference() *xpv1.Reference
- func (mg *TableIAMMember) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *TableIAMMember) GetTerraformResourceType() string
- func (tr *TableIAMMember) GetTerraformSchemaVersion() int
- func (mg *TableIAMMember) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *TableIAMMember) Hub()
- func (tr *TableIAMMember) LateInitialize(attrs []byte) (bool, error)
- func (mg *TableIAMMember) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *TableIAMMember) SetConditions(c ...xpv1.Condition)
- func (mg *TableIAMMember) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *TableIAMMember) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *TableIAMMember) SetObservation(obs map[string]any) error
- func (tr *TableIAMMember) SetParameters(params map[string]any) error
- func (mg *TableIAMMember) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *TableIAMMember) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *TableIAMMember) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type TableIAMMemberConditionInitParameters
- type TableIAMMemberConditionObservation
- type TableIAMMemberConditionParameters
- type TableIAMMemberInitParameters
- type TableIAMMemberList
- type TableIAMMemberObservation
- type TableIAMMemberParameters
- type TableIAMMemberSpec
- type TableIAMMemberStatus
- type TableInitParameters
- type TableList
- type TableObservation
- type TableParameters
- type TableReplicationInfoInitParameters
- type TableReplicationInfoObservation
- type TableReplicationInfoParameters
- type TableSpec
- type TableStatus
- type TableTimePartitioningInitParameters
- type TableTimePartitioningObservation
- type TableTimePartitioningParameters
- type TableViewInitParameters
- type TableViewObservation
- type TableViewParameters
- type TimePartitioningInitParameters
- type TimePartitioningObservation
- type TimePartitioningParameters
- type UserDefinedFunctionResourcesInitParameters
- type UserDefinedFunctionResourcesObservation
- type UserDefinedFunctionResourcesParameters
- type ViewInitParameters
- type ViewObservation
- type ViewParameters
Constants ¶
const ( CRDGroup = "bigquery.gcp.upbound.io" CRDVersion = "v1beta2" )
Package type metadata.
Variables ¶
var ( AnalyticsHubDataExchangeIAMMember_Kind = "AnalyticsHubDataExchangeIAMMember" AnalyticsHubDataExchangeIAMMember_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: AnalyticsHubDataExchangeIAMMember_Kind}.String() AnalyticsHubDataExchangeIAMMember_KindAPIVersion = AnalyticsHubDataExchangeIAMMember_Kind + "." + CRDGroupVersion.String() AnalyticsHubDataExchangeIAMMember_GroupVersionKind = CRDGroupVersion.WithKind(AnalyticsHubDataExchangeIAMMember_Kind) )
Repository type metadata.
var ( AnalyticsHubListing_Kind = "AnalyticsHubListing" AnalyticsHubListing_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: AnalyticsHubListing_Kind}.String() AnalyticsHubListing_KindAPIVersion = AnalyticsHubListing_Kind + "." + CRDGroupVersion.String() AnalyticsHubListing_GroupVersionKind = CRDGroupVersion.WithKind(AnalyticsHubListing_Kind) )
Repository type metadata.
var ( Connection_Kind = "Connection" Connection_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: Connection_Kind}.String() Connection_KindAPIVersion = Connection_Kind + "." + CRDGroupVersion.String() Connection_GroupVersionKind = CRDGroupVersion.WithKind(Connection_Kind) )
Repository type metadata.
var ( Dataset_Kind = "Dataset" Dataset_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: Dataset_Kind}.String() Dataset_KindAPIVersion = Dataset_Kind + "." + CRDGroupVersion.String() Dataset_GroupVersionKind = CRDGroupVersion.WithKind(Dataset_Kind) )
Repository type metadata.
var ( DatasetAccess_Kind = "DatasetAccess" DatasetAccess_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: DatasetAccess_Kind}.String() DatasetAccess_KindAPIVersion = DatasetAccess_Kind + "." + CRDGroupVersion.String() DatasetAccess_GroupVersionKind = CRDGroupVersion.WithKind(DatasetAccess_Kind) )
Repository type metadata.
var ( DatasetIAMBinding_Kind = "DatasetIAMBinding" DatasetIAMBinding_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: DatasetIAMBinding_Kind}.String() DatasetIAMBinding_KindAPIVersion = DatasetIAMBinding_Kind + "." + CRDGroupVersion.String() DatasetIAMBinding_GroupVersionKind = CRDGroupVersion.WithKind(DatasetIAMBinding_Kind) )
Repository type metadata.
var ( DatasetIAMMember_Kind = "DatasetIAMMember" DatasetIAMMember_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: DatasetIAMMember_Kind}.String() DatasetIAMMember_KindAPIVersion = DatasetIAMMember_Kind + "." + CRDGroupVersion.String() DatasetIAMMember_GroupVersionKind = CRDGroupVersion.WithKind(DatasetIAMMember_Kind) )
Repository type metadata.
var ( DataTransferConfig_Kind = "DataTransferConfig" DataTransferConfig_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: DataTransferConfig_Kind}.String() DataTransferConfig_KindAPIVersion = DataTransferConfig_Kind + "." + CRDGroupVersion.String() DataTransferConfig_GroupVersionKind = CRDGroupVersion.WithKind(DataTransferConfig_Kind) )
Repository type metadata.
var ( // CRDGroupVersion is the API Group Version used to register the objects CRDGroupVersion = schema.GroupVersion{Group: CRDGroup, Version: CRDVersion} // SchemeBuilder is used to add go types to the GroupVersionKind scheme SchemeBuilder = &scheme.Builder{GroupVersion: CRDGroupVersion} // AddToScheme adds the types in this group-version to the given scheme. AddToScheme = SchemeBuilder.AddToScheme )
var ( Job_Kind = "Job" Job_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: Job_Kind}.String() Job_KindAPIVersion = Job_Kind + "." + CRDGroupVersion.String() Job_GroupVersionKind = CRDGroupVersion.WithKind(Job_Kind) )
Repository type metadata.
var ( Reservation_Kind = "Reservation" Reservation_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: Reservation_Kind}.String() Reservation_KindAPIVersion = Reservation_Kind + "." + CRDGroupVersion.String() Reservation_GroupVersionKind = CRDGroupVersion.WithKind(Reservation_Kind) )
Repository type metadata.
var ( Routine_Kind = "Routine" Routine_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: Routine_Kind}.String() Routine_KindAPIVersion = Routine_Kind + "." + CRDGroupVersion.String() Routine_GroupVersionKind = CRDGroupVersion.WithKind(Routine_Kind) )
Repository type metadata.
var ( Table_Kind = "Table" Table_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: Table_Kind}.String() Table_KindAPIVersion = Table_Kind + "." + CRDGroupVersion.String() Table_GroupVersionKind = CRDGroupVersion.WithKind(Table_Kind) )
Repository type metadata.
var ( TableIAMBinding_Kind = "TableIAMBinding" TableIAMBinding_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: TableIAMBinding_Kind}.String() TableIAMBinding_KindAPIVersion = TableIAMBinding_Kind + "." + CRDGroupVersion.String() TableIAMBinding_GroupVersionKind = CRDGroupVersion.WithKind(TableIAMBinding_Kind) )
Repository type metadata.
var ( TableIAMMember_Kind = "TableIAMMember" TableIAMMember_GroupKind = schema.GroupKind{Group: CRDGroup, Kind: TableIAMMember_Kind}.String() TableIAMMember_KindAPIVersion = TableIAMMember_Kind + "." + CRDGroupVersion.String() TableIAMMember_GroupVersionKind = CRDGroupVersion.WithKind(TableIAMMember_Kind) )
Repository type metadata.
Functions ¶
This section is empty.
Types ¶
type AccessDatasetInitParameters ¶
type AccessDatasetInitParameters struct { // The dataset this entry applies to // Structure is documented below. Dataset *DatasetDatasetInitParameters `json:"dataset,omitempty" tf:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only views are supported, // but additional target types may be added in the future. Possible values: VIEWS TargetTypes []*string `json:"targetTypes,omitempty" tf:"target_types,omitempty"` }
func (*AccessDatasetInitParameters) DeepCopy ¶
func (in *AccessDatasetInitParameters) DeepCopy() *AccessDatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessDatasetInitParameters.
func (*AccessDatasetInitParameters) DeepCopyInto ¶
func (in *AccessDatasetInitParameters) DeepCopyInto(out *AccessDatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessDatasetObservation ¶
type AccessDatasetObservation struct { // The dataset this entry applies to // Structure is documented below. Dataset *DatasetDatasetObservation `json:"dataset,omitempty" tf:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only views are supported, // but additional target types may be added in the future. Possible values: VIEWS TargetTypes []*string `json:"targetTypes,omitempty" tf:"target_types,omitempty"` }
func (*AccessDatasetObservation) DeepCopy ¶
func (in *AccessDatasetObservation) DeepCopy() *AccessDatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessDatasetObservation.
func (*AccessDatasetObservation) DeepCopyInto ¶
func (in *AccessDatasetObservation) DeepCopyInto(out *AccessDatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessDatasetParameters ¶
type AccessDatasetParameters struct { // The dataset this entry applies to // Structure is documented below. // +kubebuilder:validation:Optional Dataset *DatasetDatasetParameters `json:"dataset" tf:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only views are supported, // but additional target types may be added in the future. Possible values: VIEWS // +kubebuilder:validation:Optional TargetTypes []*string `json:"targetTypes" tf:"target_types,omitempty"` }
func (*AccessDatasetParameters) DeepCopy ¶
func (in *AccessDatasetParameters) DeepCopy() *AccessDatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessDatasetParameters.
func (*AccessDatasetParameters) DeepCopyInto ¶
func (in *AccessDatasetParameters) DeepCopyInto(out *AccessDatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessInitParameters ¶
type AccessInitParameters struct { // Grants all resources of particular types in a particular dataset read access to the current dataset. // Structure is documented below. Dataset *AccessDatasetInitParameters `json:"dataset,omitempty" tf:"dataset,omitempty"` // A domain to grant access to. Any users signed in with the // domain specified will be granted the specified access Domain *string `json:"domain,omitempty" tf:"domain,omitempty"` // An email address of a Google Group to grant access to. GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email,omitempty"` // Some other type of member that appears in the IAM Policy but isn't a user, // group, domain, or special group. For example: allUsers IAMMember *string `json:"iamMember,omitempty" tf:"iam_member,omitempty"` // Describes the rights granted to the user specified by the other // member of the access object. Basic, predefined, and custom roles // are supported. Predefined roles that have equivalent basic roles // are swapped by the API to their basic counterparts. See // official docs. Role *string `json:"role,omitempty" tf:"role,omitempty"` // A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to tables in // this dataset. The role field is not required when this field is // set. If that routine is updated by any user, access to the routine // needs to be granted again via an update operation. // Structure is documented below. Routine *RoutineInitParameters `json:"routine,omitempty" tf:"routine,omitempty"` // A special group to grant access to. Possible values include: SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group,omitempty"` // An email address of a user to grant access to. For example: // fred@example.com // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("email",true) UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailRef *v1.Reference `json:"userByEmailRef,omitempty" tf:"-"` // Selector for a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailSelector *v1.Selector `json:"userByEmailSelector,omitempty" tf:"-"` // A view from a different dataset to grant access to. Queries // executed against that view will have read access to tables in // this dataset. The role field is not required when this field is // set. If that view is updated by any user, access to the view // needs to be granted again via an update operation. // Structure is documented below. View *ViewInitParameters `json:"view,omitempty" tf:"view,omitempty"` }
func (*AccessInitParameters) DeepCopy ¶
func (in *AccessInitParameters) DeepCopy() *AccessInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessInitParameters.
func (*AccessInitParameters) DeepCopyInto ¶
func (in *AccessInitParameters) DeepCopyInto(out *AccessInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessObservation ¶
type AccessObservation struct { // Grants all resources of particular types in a particular dataset read access to the current dataset. // Structure is documented below. Dataset *AccessDatasetObservation `json:"dataset,omitempty" tf:"dataset,omitempty"` // A domain to grant access to. Any users signed in with the // domain specified will be granted the specified access Domain *string `json:"domain,omitempty" tf:"domain,omitempty"` // An email address of a Google Group to grant access to. GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email,omitempty"` // Some other type of member that appears in the IAM Policy but isn't a user, // group, domain, or special group. For example: allUsers IAMMember *string `json:"iamMember,omitempty" tf:"iam_member,omitempty"` // Describes the rights granted to the user specified by the other // member of the access object. Basic, predefined, and custom roles // are supported. Predefined roles that have equivalent basic roles // are swapped by the API to their basic counterparts. See // official docs. Role *string `json:"role,omitempty" tf:"role,omitempty"` // A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to tables in // this dataset. The role field is not required when this field is // set. If that routine is updated by any user, access to the routine // needs to be granted again via an update operation. // Structure is documented below. Routine *RoutineObservation `json:"routine,omitempty" tf:"routine,omitempty"` // A special group to grant access to. Possible values include: SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group,omitempty"` // An email address of a user to grant access to. For example: // fred@example.com UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email,omitempty"` // A view from a different dataset to grant access to. Queries // executed against that view will have read access to tables in // this dataset. The role field is not required when this field is // set. If that view is updated by any user, access to the view // needs to be granted again via an update operation. // Structure is documented below. View *ViewObservation `json:"view,omitempty" tf:"view,omitempty"` }
func (*AccessObservation) DeepCopy ¶
func (in *AccessObservation) DeepCopy() *AccessObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessObservation.
func (*AccessObservation) DeepCopyInto ¶
func (in *AccessObservation) DeepCopyInto(out *AccessObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessParameters ¶
type AccessParameters struct { // Grants all resources of particular types in a particular dataset read access to the current dataset. // Structure is documented below. // +kubebuilder:validation:Optional Dataset *AccessDatasetParameters `json:"dataset,omitempty" tf:"dataset,omitempty"` // A domain to grant access to. Any users signed in with the // domain specified will be granted the specified access // +kubebuilder:validation:Optional Domain *string `json:"domain,omitempty" tf:"domain,omitempty"` // An email address of a Google Group to grant access to. // +kubebuilder:validation:Optional GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email,omitempty"` // Some other type of member that appears in the IAM Policy but isn't a user, // group, domain, or special group. For example: allUsers // +kubebuilder:validation:Optional IAMMember *string `json:"iamMember,omitempty" tf:"iam_member,omitempty"` // Describes the rights granted to the user specified by the other // member of the access object. Basic, predefined, and custom roles // are supported. Predefined roles that have equivalent basic roles // are swapped by the API to their basic counterparts. See // official docs. // +kubebuilder:validation:Optional Role *string `json:"role,omitempty" tf:"role,omitempty"` // A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to tables in // this dataset. The role field is not required when this field is // set. If that routine is updated by any user, access to the routine // needs to be granted again via an update operation. // Structure is documented below. // +kubebuilder:validation:Optional Routine *RoutineParameters `json:"routine,omitempty" tf:"routine,omitempty"` // A special group to grant access to. Possible values include: // +kubebuilder:validation:Optional SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group,omitempty"` // An email address of a user to grant access to. For example: // fred@example.com // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("email",true) // +kubebuilder:validation:Optional UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailRef *v1.Reference `json:"userByEmailRef,omitempty" tf:"-"` // Selector for a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailSelector *v1.Selector `json:"userByEmailSelector,omitempty" tf:"-"` // A view from a different dataset to grant access to. Queries // executed against that view will have read access to tables in // this dataset. The role field is not required when this field is // set. If that view is updated by any user, access to the view // needs to be granted again via an update operation. // Structure is documented below. // +kubebuilder:validation:Optional View *ViewParameters `json:"view,omitempty" tf:"view,omitempty"` }
func (*AccessParameters) DeepCopy ¶
func (in *AccessParameters) DeepCopy() *AccessParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessParameters.
func (*AccessParameters) DeepCopyInto ¶
func (in *AccessParameters) DeepCopyInto(out *AccessParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessRoleInitParameters ¶
type AccessRoleInitParameters struct { // The user’s AWS IAM Role that trusts the Google-owned AWS IAM user Connection. IAMRoleID *string `json:"iamRoleId,omitempty" tf:"iam_role_id,omitempty"` }
func (*AccessRoleInitParameters) DeepCopy ¶
func (in *AccessRoleInitParameters) DeepCopy() *AccessRoleInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessRoleInitParameters.
func (*AccessRoleInitParameters) DeepCopyInto ¶
func (in *AccessRoleInitParameters) DeepCopyInto(out *AccessRoleInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessRoleObservation ¶
type AccessRoleObservation struct { // The user’s AWS IAM Role that trusts the Google-owned AWS IAM user Connection. IAMRoleID *string `json:"iamRoleId,omitempty" tf:"iam_role_id,omitempty"` // (Output) // A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role. Identity *string `json:"identity,omitempty" tf:"identity,omitempty"` }
func (*AccessRoleObservation) DeepCopy ¶
func (in *AccessRoleObservation) DeepCopy() *AccessRoleObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessRoleObservation.
func (*AccessRoleObservation) DeepCopyInto ¶
func (in *AccessRoleObservation) DeepCopyInto(out *AccessRoleObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AccessRoleParameters ¶
type AccessRoleParameters struct { // The user’s AWS IAM Role that trusts the Google-owned AWS IAM user Connection. // +kubebuilder:validation:Optional IAMRoleID *string `json:"iamRoleId" tf:"iam_role_id,omitempty"` }
func (*AccessRoleParameters) DeepCopy ¶
func (in *AccessRoleParameters) DeepCopy() *AccessRoleParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AccessRoleParameters.
func (*AccessRoleParameters) DeepCopyInto ¶
func (in *AccessRoleParameters) DeepCopyInto(out *AccessRoleParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubDataExchangeIAMMember ¶
type AnalyticsHubDataExchangeIAMMember struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.member) || (has(self.initProvider) && has(self.initProvider.member))",message="spec.forProvider.member is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.role) || (has(self.initProvider) && has(self.initProvider.role))",message="spec.forProvider.role is a required parameter" Spec AnalyticsHubDataExchangeIAMMemberSpec `json:"spec"` Status AnalyticsHubDataExchangeIAMMemberStatus `json:"status,omitempty"` }
AnalyticsHubDataExchangeIAMMember is the Schema for the AnalyticsHubDataExchangeIAMMembers API. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*AnalyticsHubDataExchangeIAMMember) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMember) DeepCopy() *AnalyticsHubDataExchangeIAMMember
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMember) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMember)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AnalyticsHubDataExchangeIAMMember) DeepCopyObject ¶
func (in *AnalyticsHubDataExchangeIAMMember) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AnalyticsHubDataExchangeIAMMember) GetCondition ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) GetConnectionDetailsMapping ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetDeletionPolicy ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) GetID ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetID() string
GetID returns ID of underlying Terraform resource of this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetInitParameters ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetInitParameters() (map[string]any, error)
GetInitParameters of this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetManagementPolicies ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) GetMergedParameters ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetObservation ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetObservation() (map[string]any, error)
GetObservation of this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetParameters ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetParameters() (map[string]any, error)
GetParameters of this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetProviderConfigReference ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) GetPublishConnectionDetailsTo ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) GetTerraformResourceType ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) GetTerraformSchemaVersion ¶
func (tr *AnalyticsHubDataExchangeIAMMember) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*AnalyticsHubDataExchangeIAMMember) GetWriteConnectionSecretToReference ¶
func (mg *AnalyticsHubDataExchangeIAMMember) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) Hub ¶
func (tr *AnalyticsHubDataExchangeIAMMember) Hub()
Hub marks this type as a conversion hub.
func (*AnalyticsHubDataExchangeIAMMember) LateInitialize ¶
func (tr *AnalyticsHubDataExchangeIAMMember) LateInitialize(attrs []byte) (bool, error)
LateInitialize this AnalyticsHubDataExchangeIAMMember using its observed tfState. Returns true if there are any spec changes for the resource.
func (*AnalyticsHubDataExchangeIAMMember) ResolveReferences ¶
ResolveReferences of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) SetConditions ¶
func (mg *AnalyticsHubDataExchangeIAMMember) SetConditions(c ...xpv1.Condition)
SetConditions of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) SetDeletionPolicy ¶
func (mg *AnalyticsHubDataExchangeIAMMember) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) SetManagementPolicies ¶
func (mg *AnalyticsHubDataExchangeIAMMember) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) SetObservation ¶
func (tr *AnalyticsHubDataExchangeIAMMember) SetObservation(obs map[string]any) error
SetObservation for this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) SetParameters ¶
func (tr *AnalyticsHubDataExchangeIAMMember) SetParameters(params map[string]any) error
SetParameters for this AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMember) SetProviderConfigReference ¶
func (mg *AnalyticsHubDataExchangeIAMMember) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) SetPublishConnectionDetailsTo ¶
func (mg *AnalyticsHubDataExchangeIAMMember) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMember) SetWriteConnectionSecretToReference ¶
func (mg *AnalyticsHubDataExchangeIAMMember) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this AnalyticsHubDataExchangeIAMMember.
type AnalyticsHubDataExchangeIAMMemberInitParameters ¶
type AnalyticsHubDataExchangeIAMMemberInitParameters struct { Condition *ConditionInitParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta1.AnalyticsHubDataExchange DataExchangeID *string `json:"dataExchangeId,omitempty" tf:"data_exchange_id,omitempty"` // Reference to a AnalyticsHubDataExchange in bigquery to populate dataExchangeId. // +kubebuilder:validation:Optional DataExchangeIDRef *v1.Reference `json:"dataExchangeIdRef,omitempty" tf:"-"` // Selector for a AnalyticsHubDataExchange in bigquery to populate dataExchangeId. // +kubebuilder:validation:Optional DataExchangeIDSelector *v1.Selector `json:"dataExchangeIdSelector,omitempty" tf:"-"` Location *string `json:"location,omitempty" tf:"location,omitempty"` Member *string `json:"member,omitempty" tf:"member,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` }
func (*AnalyticsHubDataExchangeIAMMemberInitParameters) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMemberInitParameters) DeepCopy() *AnalyticsHubDataExchangeIAMMemberInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMemberInitParameters.
func (*AnalyticsHubDataExchangeIAMMemberInitParameters) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMemberInitParameters) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubDataExchangeIAMMemberList ¶
type AnalyticsHubDataExchangeIAMMemberList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []AnalyticsHubDataExchangeIAMMember `json:"items"` }
AnalyticsHubDataExchangeIAMMemberList contains a list of AnalyticsHubDataExchangeIAMMembers
func (*AnalyticsHubDataExchangeIAMMemberList) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMemberList) DeepCopy() *AnalyticsHubDataExchangeIAMMemberList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMemberList.
func (*AnalyticsHubDataExchangeIAMMemberList) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMemberList) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AnalyticsHubDataExchangeIAMMemberList) DeepCopyObject ¶
func (in *AnalyticsHubDataExchangeIAMMemberList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AnalyticsHubDataExchangeIAMMemberList) GetItems ¶
func (l *AnalyticsHubDataExchangeIAMMemberList) GetItems() []resource.Managed
GetItems of this AnalyticsHubDataExchangeIAMMemberList.
type AnalyticsHubDataExchangeIAMMemberObservation ¶
type AnalyticsHubDataExchangeIAMMemberObservation struct { Condition *ConditionObservation `json:"condition,omitempty" tf:"condition,omitempty"` DataExchangeID *string `json:"dataExchangeId,omitempty" tf:"data_exchange_id,omitempty"` Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` Location *string `json:"location,omitempty" tf:"location,omitempty"` Member *string `json:"member,omitempty" tf:"member,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` }
func (*AnalyticsHubDataExchangeIAMMemberObservation) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMemberObservation) DeepCopy() *AnalyticsHubDataExchangeIAMMemberObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMemberObservation.
func (*AnalyticsHubDataExchangeIAMMemberObservation) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMemberObservation) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubDataExchangeIAMMemberParameters ¶
type AnalyticsHubDataExchangeIAMMemberParameters struct { // +kubebuilder:validation:Optional Condition *ConditionParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta1.AnalyticsHubDataExchange // +kubebuilder:validation:Optional DataExchangeID *string `json:"dataExchangeId,omitempty" tf:"data_exchange_id,omitempty"` // Reference to a AnalyticsHubDataExchange in bigquery to populate dataExchangeId. // +kubebuilder:validation:Optional DataExchangeIDRef *v1.Reference `json:"dataExchangeIdRef,omitempty" tf:"-"` // Selector for a AnalyticsHubDataExchange in bigquery to populate dataExchangeId. // +kubebuilder:validation:Optional DataExchangeIDSelector *v1.Selector `json:"dataExchangeIdSelector,omitempty" tf:"-"` // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // +kubebuilder:validation:Optional Member *string `json:"member,omitempty" tf:"member,omitempty"` // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Optional Role *string `json:"role,omitempty" tf:"role,omitempty"` }
func (*AnalyticsHubDataExchangeIAMMemberParameters) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMemberParameters) DeepCopy() *AnalyticsHubDataExchangeIAMMemberParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMemberParameters.
func (*AnalyticsHubDataExchangeIAMMemberParameters) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMemberParameters) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubDataExchangeIAMMemberSpec ¶
type AnalyticsHubDataExchangeIAMMemberSpec struct { v1.ResourceSpec `json:",inline"` ForProvider AnalyticsHubDataExchangeIAMMemberParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider AnalyticsHubDataExchangeIAMMemberInitParameters `json:"initProvider,omitempty"` }
AnalyticsHubDataExchangeIAMMemberSpec defines the desired state of AnalyticsHubDataExchangeIAMMember
func (*AnalyticsHubDataExchangeIAMMemberSpec) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMemberSpec) DeepCopy() *AnalyticsHubDataExchangeIAMMemberSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMemberSpec.
func (*AnalyticsHubDataExchangeIAMMemberSpec) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMemberSpec) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubDataExchangeIAMMemberStatus ¶
type AnalyticsHubDataExchangeIAMMemberStatus struct { v1.ResourceStatus `json:",inline"` AtProvider AnalyticsHubDataExchangeIAMMemberObservation `json:"atProvider,omitempty"` }
AnalyticsHubDataExchangeIAMMemberStatus defines the observed state of AnalyticsHubDataExchangeIAMMember.
func (*AnalyticsHubDataExchangeIAMMemberStatus) DeepCopy ¶
func (in *AnalyticsHubDataExchangeIAMMemberStatus) DeepCopy() *AnalyticsHubDataExchangeIAMMemberStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubDataExchangeIAMMemberStatus.
func (*AnalyticsHubDataExchangeIAMMemberStatus) DeepCopyInto ¶
func (in *AnalyticsHubDataExchangeIAMMemberStatus) DeepCopyInto(out *AnalyticsHubDataExchangeIAMMemberStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubListing ¶
type AnalyticsHubListing struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.bigqueryDataset) || (has(self.initProvider) && has(self.initProvider.bigqueryDataset))",message="spec.forProvider.bigqueryDataset is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.displayName) || (has(self.initProvider) && has(self.initProvider.displayName))",message="spec.forProvider.displayName is a required parameter" Spec AnalyticsHubListingSpec `json:"spec"` Status AnalyticsHubListingStatus `json:"status,omitempty"` }
AnalyticsHubListing is the Schema for the AnalyticsHubListings API. A Bigquery Analytics Hub data exchange listing +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*AnalyticsHubListing) DeepCopy ¶
func (in *AnalyticsHubListing) DeepCopy() *AnalyticsHubListing
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListing.
func (*AnalyticsHubListing) DeepCopyInto ¶
func (in *AnalyticsHubListing) DeepCopyInto(out *AnalyticsHubListing)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AnalyticsHubListing) DeepCopyObject ¶
func (in *AnalyticsHubListing) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AnalyticsHubListing) GetCondition ¶
func (mg *AnalyticsHubListing) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this AnalyticsHubListing.
func (*AnalyticsHubListing) GetConnectionDetailsMapping ¶
func (tr *AnalyticsHubListing) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this AnalyticsHubListing
func (*AnalyticsHubListing) GetDeletionPolicy ¶
func (mg *AnalyticsHubListing) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this AnalyticsHubListing.
func (*AnalyticsHubListing) GetID ¶
func (tr *AnalyticsHubListing) GetID() string
GetID returns ID of underlying Terraform resource of this AnalyticsHubListing
func (*AnalyticsHubListing) GetInitParameters ¶
func (tr *AnalyticsHubListing) GetInitParameters() (map[string]any, error)
GetInitParameters of this AnalyticsHubListing
func (*AnalyticsHubListing) GetManagementPolicies ¶
func (mg *AnalyticsHubListing) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this AnalyticsHubListing.
func (*AnalyticsHubListing) GetMergedParameters ¶
func (tr *AnalyticsHubListing) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this AnalyticsHubListing
func (*AnalyticsHubListing) GetObservation ¶
func (tr *AnalyticsHubListing) GetObservation() (map[string]any, error)
GetObservation of this AnalyticsHubListing
func (*AnalyticsHubListing) GetParameters ¶
func (tr *AnalyticsHubListing) GetParameters() (map[string]any, error)
GetParameters of this AnalyticsHubListing
func (*AnalyticsHubListing) GetProviderConfigReference ¶
func (mg *AnalyticsHubListing) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this AnalyticsHubListing.
func (*AnalyticsHubListing) GetPublishConnectionDetailsTo ¶
func (mg *AnalyticsHubListing) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this AnalyticsHubListing.
func (*AnalyticsHubListing) GetTerraformResourceType ¶
func (mg *AnalyticsHubListing) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this AnalyticsHubListing
func (*AnalyticsHubListing) GetTerraformSchemaVersion ¶
func (tr *AnalyticsHubListing) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*AnalyticsHubListing) GetWriteConnectionSecretToReference ¶
func (mg *AnalyticsHubListing) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this AnalyticsHubListing.
func (*AnalyticsHubListing) Hub ¶
func (tr *AnalyticsHubListing) Hub()
Hub marks this type as a conversion hub.
func (*AnalyticsHubListing) LateInitialize ¶
func (tr *AnalyticsHubListing) LateInitialize(attrs []byte) (bool, error)
LateInitialize this AnalyticsHubListing using its observed tfState. Returns true if there are any spec changes for the resource.
func (*AnalyticsHubListing) ResolveReferences ¶
ResolveReferences of this AnalyticsHubListing.
func (*AnalyticsHubListing) SetConditions ¶
func (mg *AnalyticsHubListing) SetConditions(c ...xpv1.Condition)
SetConditions of this AnalyticsHubListing.
func (*AnalyticsHubListing) SetDeletionPolicy ¶
func (mg *AnalyticsHubListing) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this AnalyticsHubListing.
func (*AnalyticsHubListing) SetManagementPolicies ¶
func (mg *AnalyticsHubListing) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this AnalyticsHubListing.
func (*AnalyticsHubListing) SetObservation ¶
func (tr *AnalyticsHubListing) SetObservation(obs map[string]any) error
SetObservation for this AnalyticsHubListing
func (*AnalyticsHubListing) SetParameters ¶
func (tr *AnalyticsHubListing) SetParameters(params map[string]any) error
SetParameters for this AnalyticsHubListing
func (*AnalyticsHubListing) SetProviderConfigReference ¶
func (mg *AnalyticsHubListing) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this AnalyticsHubListing.
func (*AnalyticsHubListing) SetPublishConnectionDetailsTo ¶
func (mg *AnalyticsHubListing) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this AnalyticsHubListing.
func (*AnalyticsHubListing) SetWriteConnectionSecretToReference ¶
func (mg *AnalyticsHubListing) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this AnalyticsHubListing.
type AnalyticsHubListingInitParameters ¶
type AnalyticsHubListingInitParameters struct { // Shared dataset i.e. BigQuery dataset source. // Structure is documented below. BigqueryDataset *BigqueryDatasetInitParameters `json:"bigqueryDataset,omitempty" tf:"bigquery_dataset,omitempty"` // Categories of the listing. Up to two categories are allowed. Categories []*string `json:"categories,omitempty" tf:"categories,omitempty"` // Details of the data provider who owns the source data. // Structure is documented below. DataProvider *DataProviderInitParameters `json:"dataProvider,omitempty" tf:"data_provider,omitempty"` // Short description of the listing. The description must not contain Unicode non-characters and C0 and C1 control codes except tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). Description *string `json:"description,omitempty" tf:"description,omitempty"` // Human-readable display name of the listing. The display name must contain only Unicode letters, numbers (0-9), underscores (_), dashes (-), spaces ( ), ampersands (&) and can't start or end with spaces. DisplayName *string `json:"displayName,omitempty" tf:"display_name,omitempty"` // Documentation describing the listing. Documentation *string `json:"documentation,omitempty" tf:"documentation,omitempty"` // Base64 encoded image representing the listing. Icon *string `json:"icon,omitempty" tf:"icon,omitempty"` // Email or URL of the listing publisher. PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Details of the publisher who owns the listing and who can share the source data. // Structure is documented below. Publisher *PublisherInitParameters `json:"publisher,omitempty" tf:"publisher,omitempty"` // Email or URL of the request access of the listing. Subscribers can use this reference to request access. 
RequestAccess *string `json:"requestAccess,omitempty" tf:"request_access,omitempty"` // If set, restricted export configuration will be propagated and enforced on the linked dataset. // Structure is documented below. RestrictedExportConfig *RestrictedExportConfigInitParameters `json:"restrictedExportConfig,omitempty" tf:"restricted_export_config,omitempty"` }
func (*AnalyticsHubListingInitParameters) DeepCopy ¶
func (in *AnalyticsHubListingInitParameters) DeepCopy() *AnalyticsHubListingInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListingInitParameters.
func (*AnalyticsHubListingInitParameters) DeepCopyInto ¶
func (in *AnalyticsHubListingInitParameters) DeepCopyInto(out *AnalyticsHubListingInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubListingList ¶
type AnalyticsHubListingList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []AnalyticsHubListing `json:"items"` }
AnalyticsHubListingList contains a list of AnalyticsHubListings
func (*AnalyticsHubListingList) DeepCopy ¶
func (in *AnalyticsHubListingList) DeepCopy() *AnalyticsHubListingList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListingList.
func (*AnalyticsHubListingList) DeepCopyInto ¶
func (in *AnalyticsHubListingList) DeepCopyInto(out *AnalyticsHubListingList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AnalyticsHubListingList) DeepCopyObject ¶
func (in *AnalyticsHubListingList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AnalyticsHubListingList) GetItems ¶
func (l *AnalyticsHubListingList) GetItems() []resource.Managed
GetItems of this AnalyticsHubListingList.
type AnalyticsHubListingObservation ¶
type AnalyticsHubListingObservation struct { // Shared dataset i.e. BigQuery dataset source. // Structure is documented below. BigqueryDataset *BigqueryDatasetObservation `json:"bigqueryDataset,omitempty" tf:"bigquery_dataset,omitempty"` // Categories of the listing. Up to two categories are allowed. Categories []*string `json:"categories,omitempty" tf:"categories,omitempty"` // The ID of the data exchange. Must contain only Unicode letters, numbers (0-9), underscores (_). Should not use characters that require URL-escaping, or characters outside of ASCII, spaces. DataExchangeID *string `json:"dataExchangeId,omitempty" tf:"data_exchange_id,omitempty"` // Details of the data provider who owns the source data. // Structure is documented below. DataProvider *DataProviderObservation `json:"dataProvider,omitempty" tf:"data_provider,omitempty"` // Short description of the listing. The description must not contain Unicode non-characters and C0 and C1 control codes except tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). Description *string `json:"description,omitempty" tf:"description,omitempty"` // Human-readable display name of the listing. The display name must contain only Unicode letters, numbers (0-9), underscores (_), dashes (-), spaces ( ), ampersands (&) and can't start or end with spaces. DisplayName *string `json:"displayName,omitempty" tf:"display_name,omitempty"` // Documentation describing the listing. Documentation *string `json:"documentation,omitempty" tf:"documentation,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/dataExchanges/{{data_exchange_id}}/listings/{{listing_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // Base64 encoded image representing the listing. Icon *string `json:"icon,omitempty" tf:"icon,omitempty"` // The name of the location this data exchange listing. 
Location *string `json:"location,omitempty" tf:"location,omitempty"` // The resource name of the listing. e.g. "projects/myproject/locations/US/dataExchanges/123/listings/456" Name *string `json:"name,omitempty" tf:"name,omitempty"` // Email or URL of the listing publisher. PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Details of the publisher who owns the listing and who can share the source data. // Structure is documented below. Publisher *PublisherObservation `json:"publisher,omitempty" tf:"publisher,omitempty"` // Email or URL of the request access of the listing. Subscribers can use this reference to request access. RequestAccess *string `json:"requestAccess,omitempty" tf:"request_access,omitempty"` // If set, restricted export configuration will be propagated and enforced on the linked dataset. // Structure is documented below. RestrictedExportConfig *RestrictedExportConfigObservation `json:"restrictedExportConfig,omitempty" tf:"restricted_export_config,omitempty"` }
func (*AnalyticsHubListingObservation) DeepCopy ¶
func (in *AnalyticsHubListingObservation) DeepCopy() *AnalyticsHubListingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListingObservation.
func (*AnalyticsHubListingObservation) DeepCopyInto ¶
func (in *AnalyticsHubListingObservation) DeepCopyInto(out *AnalyticsHubListingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubListingParameters ¶
type AnalyticsHubListingParameters struct { // Shared dataset i.e. BigQuery dataset source. // Structure is documented below. // +kubebuilder:validation:Optional BigqueryDataset *BigqueryDatasetParameters `json:"bigqueryDataset,omitempty" tf:"bigquery_dataset,omitempty"` // Categories of the listing. Up to two categories are allowed. // +kubebuilder:validation:Optional Categories []*string `json:"categories,omitempty" tf:"categories,omitempty"` // The ID of the data exchange. Must contain only Unicode letters, numbers (0-9), underscores (_). Should not use characters that require URL-escaping, or characters outside of ASCII, spaces. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta1.AnalyticsHubDataExchange // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("data_exchange_id",false) // +kubebuilder:validation:Optional DataExchangeID *string `json:"dataExchangeId,omitempty" tf:"data_exchange_id,omitempty"` // Reference to a AnalyticsHubDataExchange in bigquery to populate dataExchangeId. // +kubebuilder:validation:Optional DataExchangeIDRef *v1.Reference `json:"dataExchangeIdRef,omitempty" tf:"-"` // Selector for a AnalyticsHubDataExchange in bigquery to populate dataExchangeId. // +kubebuilder:validation:Optional DataExchangeIDSelector *v1.Selector `json:"dataExchangeIdSelector,omitempty" tf:"-"` // Details of the data provider who owns the source data. // Structure is documented below. // +kubebuilder:validation:Optional DataProvider *DataProviderParameters `json:"dataProvider,omitempty" tf:"data_provider,omitempty"` // Short description of the listing. The description must not contain Unicode non-characters and C0 and C1 control codes except tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // Human-readable display name of the listing. 
The display name must contain only Unicode letters, numbers (0-9), underscores (_), dashes (-), spaces ( ), ampersands (&) and can't start or end with spaces. // +kubebuilder:validation:Optional DisplayName *string `json:"displayName,omitempty" tf:"display_name,omitempty"` // Documentation describing the listing. // +kubebuilder:validation:Optional Documentation *string `json:"documentation,omitempty" tf:"documentation,omitempty"` // Base64 encoded image representing the listing. // +kubebuilder:validation:Optional Icon *string `json:"icon,omitempty" tf:"icon,omitempty"` // The name of the location this data exchange listing. // +kubebuilder:validation:Required Location *string `json:"location" tf:"location,omitempty"` // Email or URL of the listing publisher. // +kubebuilder:validation:Optional PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Details of the publisher who owns the listing and who can share the source data. // Structure is documented below. // +kubebuilder:validation:Optional Publisher *PublisherParameters `json:"publisher,omitempty" tf:"publisher,omitempty"` // Email or URL of the request access of the listing. Subscribers can use this reference to request access. // +kubebuilder:validation:Optional RequestAccess *string `json:"requestAccess,omitempty" tf:"request_access,omitempty"` // If set, restricted export configuration will be propagated and enforced on the linked dataset. // Structure is documented below. // +kubebuilder:validation:Optional RestrictedExportConfig *RestrictedExportConfigParameters `json:"restrictedExportConfig,omitempty" tf:"restricted_export_config,omitempty"` }
func (*AnalyticsHubListingParameters) DeepCopy ¶
func (in *AnalyticsHubListingParameters) DeepCopy() *AnalyticsHubListingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListingParameters.
func (*AnalyticsHubListingParameters) DeepCopyInto ¶
func (in *AnalyticsHubListingParameters) DeepCopyInto(out *AnalyticsHubListingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubListingSpec ¶
type AnalyticsHubListingSpec struct { v1.ResourceSpec `json:",inline"` ForProvider AnalyticsHubListingParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider AnalyticsHubListingInitParameters `json:"initProvider,omitempty"` }
AnalyticsHubListingSpec defines the desired state of AnalyticsHubListing
func (*AnalyticsHubListingSpec) DeepCopy ¶
func (in *AnalyticsHubListingSpec) DeepCopy() *AnalyticsHubListingSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListingSpec.
func (*AnalyticsHubListingSpec) DeepCopyInto ¶
func (in *AnalyticsHubListingSpec) DeepCopyInto(out *AnalyticsHubListingSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AnalyticsHubListingStatus ¶
type AnalyticsHubListingStatus struct { v1.ResourceStatus `json:",inline"` AtProvider AnalyticsHubListingObservation `json:"atProvider,omitempty"` }
AnalyticsHubListingStatus defines the observed state of AnalyticsHubListing.
func (*AnalyticsHubListingStatus) DeepCopy ¶
func (in *AnalyticsHubListingStatus) DeepCopy() *AnalyticsHubListingStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AnalyticsHubListingStatus.
func (*AnalyticsHubListingStatus) DeepCopyInto ¶
func (in *AnalyticsHubListingStatus) DeepCopyInto(out *AnalyticsHubListingStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ArgumentsInitParameters ¶
type ArgumentsInitParameters struct { // Defaults to FIXED_TYPE. // Default value is FIXED_TYPE. // Possible values are: FIXED_TYPE, ANY_TYPE. ArgumentKind *string `json:"argumentKind,omitempty" tf:"argument_kind,omitempty"` // A JSON schema for the data type. Required unless argumentKind = ANY_TYPE. // ~>NOTE: Because this field expects a JSON string, any changes to the string // will create a diff, even if the JSON itself hasn't changed. If the API returns // a different value for the same schema, e.g. it switched the order of values // or replaced STRUCT field type with RECORD field type, we currently cannot // suppress the recurring diff this causes. As a workaround, we recommend using // the schema as returned by the API. DataType *string `json:"dataType,omitempty" tf:"data_type,omitempty"` // Specifies whether the argument is input or output. Can be set for procedures only. // Possible values are: IN, OUT, INOUT. Mode *string `json:"mode,omitempty" tf:"mode,omitempty"` // The name of this argument. Can be absent for function return argument. Name *string `json:"name,omitempty" tf:"name,omitempty"` }
func (*ArgumentsInitParameters) DeepCopy ¶
func (in *ArgumentsInitParameters) DeepCopy() *ArgumentsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArgumentsInitParameters.
func (*ArgumentsInitParameters) DeepCopyInto ¶
func (in *ArgumentsInitParameters) DeepCopyInto(out *ArgumentsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ArgumentsObservation ¶
type ArgumentsObservation struct { // Defaults to FIXED_TYPE. // Default value is FIXED_TYPE. // Possible values are: FIXED_TYPE, ANY_TYPE. ArgumentKind *string `json:"argumentKind,omitempty" tf:"argument_kind,omitempty"` // A JSON schema for the data type. Required unless argumentKind = ANY_TYPE. // ~>NOTE: Because this field expects a JSON string, any changes to the string // will create a diff, even if the JSON itself hasn't changed. If the API returns // a different value for the same schema, e.g. it switched the order of values // or replaced STRUCT field type with RECORD field type, we currently cannot // suppress the recurring diff this causes. As a workaround, we recommend using // the schema as returned by the API. DataType *string `json:"dataType,omitempty" tf:"data_type,omitempty"` // Specifies whether the argument is input or output. Can be set for procedures only. // Possible values are: IN, OUT, INOUT. Mode *string `json:"mode,omitempty" tf:"mode,omitempty"` // The name of this argument. Can be absent for function return argument. Name *string `json:"name,omitempty" tf:"name,omitempty"` }
func (*ArgumentsObservation) DeepCopy ¶
func (in *ArgumentsObservation) DeepCopy() *ArgumentsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArgumentsObservation.
func (*ArgumentsObservation) DeepCopyInto ¶
func (in *ArgumentsObservation) DeepCopyInto(out *ArgumentsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ArgumentsParameters ¶
type ArgumentsParameters struct { // Defaults to FIXED_TYPE. // Default value is FIXED_TYPE. // Possible values are: FIXED_TYPE, ANY_TYPE. // +kubebuilder:validation:Optional ArgumentKind *string `json:"argumentKind,omitempty" tf:"argument_kind,omitempty"` // A JSON schema for the data type. Required unless argumentKind = ANY_TYPE. // ~>NOTE: Because this field expects a JSON string, any changes to the string // will create a diff, even if the JSON itself hasn't changed. If the API returns // a different value for the same schema, e.g. it switched the order of values // or replaced STRUCT field type with RECORD field type, we currently cannot // suppress the recurring diff this causes. As a workaround, we recommend using // the schema as returned by the API. // +kubebuilder:validation:Optional DataType *string `json:"dataType,omitempty" tf:"data_type,omitempty"` // Specifies whether the argument is input or output. Can be set for procedures only. // Possible values are: IN, OUT, INOUT. // +kubebuilder:validation:Optional Mode *string `json:"mode,omitempty" tf:"mode,omitempty"` // The name of this argument. Can be absent for function return argument. // +kubebuilder:validation:Optional Name *string `json:"name,omitempty" tf:"name,omitempty"` }
func (*ArgumentsParameters) DeepCopy ¶
func (in *ArgumentsParameters) DeepCopy() *ArgumentsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ArgumentsParameters.
func (*ArgumentsParameters) DeepCopyInto ¶
func (in *ArgumentsParameters) DeepCopyInto(out *ArgumentsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscaleInitParameters ¶
type AutoscaleInitParameters struct { // Number of slots to be scaled when needed. MaxSlots *float64 `json:"maxSlots,omitempty" tf:"max_slots,omitempty"` }
func (*AutoscaleInitParameters) DeepCopy ¶
func (in *AutoscaleInitParameters) DeepCopy() *AutoscaleInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscaleInitParameters.
func (*AutoscaleInitParameters) DeepCopyInto ¶
func (in *AutoscaleInitParameters) DeepCopyInto(out *AutoscaleInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscaleObservation ¶
type AutoscaleObservation struct { // (Output) // The slot capacity added to this reservation when autoscale happens. Will be between [0, max_slots]. CurrentSlots *float64 `json:"currentSlots,omitempty" tf:"current_slots,omitempty"` // Number of slots to be scaled when needed. MaxSlots *float64 `json:"maxSlots,omitempty" tf:"max_slots,omitempty"` }
func (*AutoscaleObservation) DeepCopy ¶
func (in *AutoscaleObservation) DeepCopy() *AutoscaleObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscaleObservation.
func (*AutoscaleObservation) DeepCopyInto ¶
func (in *AutoscaleObservation) DeepCopyInto(out *AutoscaleObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscaleParameters ¶
type AutoscaleParameters struct { // Number of slots to be scaled when needed. // +kubebuilder:validation:Optional MaxSlots *float64 `json:"maxSlots,omitempty" tf:"max_slots,omitempty"` }
func (*AutoscaleParameters) DeepCopy ¶
func (in *AutoscaleParameters) DeepCopy() *AutoscaleParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscaleParameters.
func (*AutoscaleParameters) DeepCopyInto ¶
func (in *AutoscaleParameters) DeepCopyInto(out *AutoscaleParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AvroOptionsInitParameters ¶
type AvroOptionsInitParameters struct { // If set to true, indicates whether // to interpret logical types as the corresponding BigQuery data type // (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER). UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty" tf:"use_avro_logical_types,omitempty"` }
func (*AvroOptionsInitParameters) DeepCopy ¶
func (in *AvroOptionsInitParameters) DeepCopy() *AvroOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AvroOptionsInitParameters.
func (*AvroOptionsInitParameters) DeepCopyInto ¶
func (in *AvroOptionsInitParameters) DeepCopyInto(out *AvroOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AvroOptionsObservation ¶
type AvroOptionsObservation struct { // If set to true, indicates whether // to interpret logical types as the corresponding BigQuery data type // (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER). UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty" tf:"use_avro_logical_types,omitempty"` }
func (*AvroOptionsObservation) DeepCopy ¶
func (in *AvroOptionsObservation) DeepCopy() *AvroOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AvroOptionsObservation.
func (*AvroOptionsObservation) DeepCopyInto ¶
func (in *AvroOptionsObservation) DeepCopyInto(out *AvroOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AvroOptionsParameters ¶
type AvroOptionsParameters struct { // If set to true, indicates whether // to interpret logical types as the corresponding BigQuery data type // (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER). // +kubebuilder:validation:Optional UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes" tf:"use_avro_logical_types,omitempty"` }
func (*AvroOptionsParameters) DeepCopy ¶
func (in *AvroOptionsParameters) DeepCopy() *AvroOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AvroOptionsParameters.
func (*AvroOptionsParameters) DeepCopyInto ¶
func (in *AvroOptionsParameters) DeepCopyInto(out *AvroOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AwsInitParameters ¶
type AwsInitParameters struct { // Authentication using Google owned service account to assume into customer's AWS IAM Role. // Structure is documented below. AccessRole *AccessRoleInitParameters `json:"accessRole,omitempty" tf:"access_role,omitempty"` }
func (*AwsInitParameters) DeepCopy ¶
func (in *AwsInitParameters) DeepCopy() *AwsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsInitParameters.
func (*AwsInitParameters) DeepCopyInto ¶
func (in *AwsInitParameters) DeepCopyInto(out *AwsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AwsObservation ¶
type AwsObservation struct { // Authentication using Google owned service account to assume into customer's AWS IAM Role. // Structure is documented below. AccessRole *AccessRoleObservation `json:"accessRole,omitempty" tf:"access_role,omitempty"` }
func (*AwsObservation) DeepCopy ¶
func (in *AwsObservation) DeepCopy() *AwsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsObservation.
func (*AwsObservation) DeepCopyInto ¶
func (in *AwsObservation) DeepCopyInto(out *AwsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AwsParameters ¶
type AwsParameters struct { // Authentication using Google owned service account to assume into customer's AWS IAM Role. // Structure is documented below. // +kubebuilder:validation:Optional AccessRole *AccessRoleParameters `json:"accessRole" tf:"access_role,omitempty"` }
func (*AwsParameters) DeepCopy ¶
func (in *AwsParameters) DeepCopy() *AwsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AwsParameters.
func (*AwsParameters) DeepCopyInto ¶
func (in *AwsParameters) DeepCopyInto(out *AwsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AzureInitParameters ¶
type AzureInitParameters struct { // The id of the customer's directory that hosts the data. CustomerTenantID *string `json:"customerTenantId,omitempty" tf:"customer_tenant_id,omitempty"` // The Azure Application (client) ID where the federated credentials will be hosted. FederatedApplicationClientID *string `json:"federatedApplicationClientId,omitempty" tf:"federated_application_client_id,omitempty"` }
func (*AzureInitParameters) DeepCopy ¶
func (in *AzureInitParameters) DeepCopy() *AzureInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AzureInitParameters.
func (*AzureInitParameters) DeepCopyInto ¶
func (in *AzureInitParameters) DeepCopyInto(out *AzureInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AzureObservation ¶
type AzureObservation struct { // (Output) // The name of the Azure Active Directory Application. Application *string `json:"application,omitempty" tf:"application,omitempty"` // (Output) // The client id of the Azure Active Directory Application. ClientID *string `json:"clientId,omitempty" tf:"client_id,omitempty"` // The id of the customer's directory that hosts the data. CustomerTenantID *string `json:"customerTenantId,omitempty" tf:"customer_tenant_id,omitempty"` // The Azure Application (client) ID where the federated credentials will be hosted. FederatedApplicationClientID *string `json:"federatedApplicationClientId,omitempty" tf:"federated_application_client_id,omitempty"` // (Output) // A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application. Identity *string `json:"identity,omitempty" tf:"identity,omitempty"` // (Output) // The object id of the Azure Active Directory Application. ObjectID *string `json:"objectId,omitempty" tf:"object_id,omitempty"` // (Output) // The URL the user will be redirected to after granting consent during connection setup. RedirectURI *string `json:"redirectUri,omitempty" tf:"redirect_uri,omitempty"` }
func (*AzureObservation) DeepCopy ¶
func (in *AzureObservation) DeepCopy() *AzureObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AzureObservation.
func (*AzureObservation) DeepCopyInto ¶
func (in *AzureObservation) DeepCopyInto(out *AzureObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AzureParameters ¶
type AzureParameters struct { // The id of the customer's directory that hosts the data. // +kubebuilder:validation:Optional CustomerTenantID *string `json:"customerTenantId" tf:"customer_tenant_id,omitempty"` // The Azure Application (client) ID where the federated credentials will be hosted. // +kubebuilder:validation:Optional FederatedApplicationClientID *string `json:"federatedApplicationClientId,omitempty" tf:"federated_application_client_id,omitempty"` }
func (*AzureParameters) DeepCopy ¶
func (in *AzureParameters) DeepCopy() *AzureParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AzureParameters.
func (*AzureParameters) DeepCopyInto ¶
func (in *AzureParameters) DeepCopyInto(out *AzureParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigqueryDatasetInitParameters ¶
type BigqueryDatasetInitParameters struct { // Resource name of the dataset source for this listing. e.g. projects/myproject/datasets/123 // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() Dataset *string `json:"dataset,omitempty" tf:"dataset,omitempty"` // Reference to a Dataset in bigquery to populate dataset. // +kubebuilder:validation:Optional DatasetRef *v1.Reference `json:"datasetRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate dataset. // +kubebuilder:validation:Optional DatasetSelector *v1.Selector `json:"datasetSelector,omitempty" tf:"-"` }
func (*BigqueryDatasetInitParameters) DeepCopy ¶
func (in *BigqueryDatasetInitParameters) DeepCopy() *BigqueryDatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigqueryDatasetInitParameters.
func (*BigqueryDatasetInitParameters) DeepCopyInto ¶
func (in *BigqueryDatasetInitParameters) DeepCopyInto(out *BigqueryDatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigqueryDatasetObservation ¶
type BigqueryDatasetObservation struct { // Resource name of the dataset source for this listing. e.g. projects/myproject/datasets/123 Dataset *string `json:"dataset,omitempty" tf:"dataset,omitempty"` }
func (*BigqueryDatasetObservation) DeepCopy ¶
func (in *BigqueryDatasetObservation) DeepCopy() *BigqueryDatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigqueryDatasetObservation.
func (*BigqueryDatasetObservation) DeepCopyInto ¶
func (in *BigqueryDatasetObservation) DeepCopyInto(out *BigqueryDatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigqueryDatasetParameters ¶
type BigqueryDatasetParameters struct { // Resource name of the dataset source for this listing. e.g. projects/myproject/datasets/123 // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional Dataset *string `json:"dataset,omitempty" tf:"dataset,omitempty"` // Reference to a Dataset in bigquery to populate dataset. // +kubebuilder:validation:Optional DatasetRef *v1.Reference `json:"datasetRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate dataset. // +kubebuilder:validation:Optional DatasetSelector *v1.Selector `json:"datasetSelector,omitempty" tf:"-"` }
func (*BigqueryDatasetParameters) DeepCopy ¶
func (in *BigqueryDatasetParameters) DeepCopy() *BigqueryDatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigqueryDatasetParameters.
func (*BigqueryDatasetParameters) DeepCopyInto ¶
func (in *BigqueryDatasetParameters) DeepCopyInto(out *BigqueryDatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigtableOptionsInitParameters ¶ added in v1.7.0
type BigtableOptionsInitParameters struct { // A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable. Structure is documented below. ColumnFamily []ColumnFamilyInitParameters `json:"columnFamily,omitempty" tf:"column_family,omitempty"` // If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false. IgnoreUnspecifiedColumnFamilies *bool `json:"ignoreUnspecifiedColumnFamilies,omitempty" tf:"ignore_unspecified_column_families,omitempty"` // If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false. OutputColumnFamiliesAsJSON *bool `json:"outputColumnFamiliesAsJson,omitempty" tf:"output_column_families_as_json,omitempty"` // If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false. ReadRowkeyAsString *bool `json:"readRowkeyAsString,omitempty" tf:"read_rowkey_as_string,omitempty"` }
func (*BigtableOptionsInitParameters) DeepCopy ¶ added in v1.7.0
func (in *BigtableOptionsInitParameters) DeepCopy() *BigtableOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableOptionsInitParameters.
func (*BigtableOptionsInitParameters) DeepCopyInto ¶ added in v1.7.0
func (in *BigtableOptionsInitParameters) DeepCopyInto(out *BigtableOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigtableOptionsObservation ¶ added in v1.7.0
type BigtableOptionsObservation struct { // A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable. Structure is documented below. ColumnFamily []ColumnFamilyObservation `json:"columnFamily,omitempty" tf:"column_family,omitempty"` // If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false. IgnoreUnspecifiedColumnFamilies *bool `json:"ignoreUnspecifiedColumnFamilies,omitempty" tf:"ignore_unspecified_column_families,omitempty"` // If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false. OutputColumnFamiliesAsJSON *bool `json:"outputColumnFamiliesAsJson,omitempty" tf:"output_column_families_as_json,omitempty"` // If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false. ReadRowkeyAsString *bool `json:"readRowkeyAsString,omitempty" tf:"read_rowkey_as_string,omitempty"` }
func (*BigtableOptionsObservation) DeepCopy ¶ added in v1.7.0
func (in *BigtableOptionsObservation) DeepCopy() *BigtableOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableOptionsObservation.
func (*BigtableOptionsObservation) DeepCopyInto ¶ added in v1.7.0
func (in *BigtableOptionsObservation) DeepCopyInto(out *BigtableOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigtableOptionsParameters ¶ added in v1.7.0
type BigtableOptionsParameters struct { // A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable. Structure is documented below. // +kubebuilder:validation:Optional ColumnFamily []ColumnFamilyParameters `json:"columnFamily,omitempty" tf:"column_family,omitempty"` // If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false. // +kubebuilder:validation:Optional IgnoreUnspecifiedColumnFamilies *bool `json:"ignoreUnspecifiedColumnFamilies,omitempty" tf:"ignore_unspecified_column_families,omitempty"` // If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false. // +kubebuilder:validation:Optional OutputColumnFamiliesAsJSON *bool `json:"outputColumnFamiliesAsJson,omitempty" tf:"output_column_families_as_json,omitempty"` // If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false. // +kubebuilder:validation:Optional ReadRowkeyAsString *bool `json:"readRowkeyAsString,omitempty" tf:"read_rowkey_as_string,omitempty"` }
func (*BigtableOptionsParameters) DeepCopy ¶ added in v1.7.0
func (in *BigtableOptionsParameters) DeepCopy() *BigtableOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableOptionsParameters.
func (*BigtableOptionsParameters) DeepCopyInto ¶ added in v1.7.0
func (in *BigtableOptionsParameters) DeepCopyInto(out *BigtableOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudResourceInitParameters ¶
type CloudResourceInitParameters struct { }
func (*CloudResourceInitParameters) DeepCopy ¶
func (in *CloudResourceInitParameters) DeepCopy() *CloudResourceInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudResourceInitParameters.
func (*CloudResourceInitParameters) DeepCopyInto ¶
func (in *CloudResourceInitParameters) DeepCopyInto(out *CloudResourceInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudResourceObservation ¶
type CloudResourceObservation struct { // (Output) // The account ID of the service created for the purpose of this connection. ServiceAccountID *string `json:"serviceAccountId,omitempty" tf:"service_account_id,omitempty"` }
func (*CloudResourceObservation) DeepCopy ¶
func (in *CloudResourceObservation) DeepCopy() *CloudResourceObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudResourceObservation.
func (*CloudResourceObservation) DeepCopyInto ¶
func (in *CloudResourceObservation) DeepCopyInto(out *CloudResourceObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudResourceParameters ¶
type CloudResourceParameters struct { }
func (*CloudResourceParameters) DeepCopy ¶
func (in *CloudResourceParameters) DeepCopy() *CloudResourceParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudResourceParameters.
func (*CloudResourceParameters) DeepCopyInto ¶
func (in *CloudResourceParameters) DeepCopyInto(out *CloudResourceParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudSQLInitParameters ¶
type CloudSQLInitParameters struct { // Cloud SQL properties. // Structure is documented below. Credential *CredentialInitParameters `json:"credential,omitempty" tf:"credential,omitempty"` // Database name. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/sql/v1beta1.Database Database *string `json:"database,omitempty" tf:"database,omitempty"` // Reference to a Database in sql to populate database. // +kubebuilder:validation:Optional DatabaseRef *v1.Reference `json:"databaseRef,omitempty" tf:"-"` // Selector for a Database in sql to populate database. // +kubebuilder:validation:Optional DatabaseSelector *v1.Selector `json:"databaseSelector,omitempty" tf:"-"` // Cloud SQL instance ID in the form project:location:instance. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/sql/v1beta2.DatabaseInstance // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("connection_name",true) InstanceID *string `json:"instanceId,omitempty" tf:"instance_id,omitempty"` // Reference to a DatabaseInstance in sql to populate instanceId. // +kubebuilder:validation:Optional InstanceIDRef *v1.Reference `json:"instanceIdRef,omitempty" tf:"-"` // Selector for a DatabaseInstance in sql to populate instanceId. // +kubebuilder:validation:Optional InstanceIDSelector *v1.Selector `json:"instanceIdSelector,omitempty" tf:"-"` // Type of the Cloud SQL database. // Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL. Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*CloudSQLInitParameters) DeepCopy ¶
func (in *CloudSQLInitParameters) DeepCopy() *CloudSQLInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudSQLInitParameters.
func (*CloudSQLInitParameters) DeepCopyInto ¶
func (in *CloudSQLInitParameters) DeepCopyInto(out *CloudSQLInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudSQLObservation ¶
type CloudSQLObservation struct { // Cloud SQL properties. // Structure is documented below. Credential *CredentialObservation `json:"credential,omitempty" tf:"credential,omitempty"` // Database name. Database *string `json:"database,omitempty" tf:"database,omitempty"` // Cloud SQL instance ID in the form project:location:instance. InstanceID *string `json:"instanceId,omitempty" tf:"instance_id,omitempty"` // (Output) // When the connection is used in the context of an operation in BigQuery, this service account will serve as the identity being used for connecting to the CloudSQL instance specified in this connection. ServiceAccountID *string `json:"serviceAccountId,omitempty" tf:"service_account_id,omitempty"` // Type of the Cloud SQL database. // Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL. Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*CloudSQLObservation) DeepCopy ¶
func (in *CloudSQLObservation) DeepCopy() *CloudSQLObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudSQLObservation.
func (*CloudSQLObservation) DeepCopyInto ¶
func (in *CloudSQLObservation) DeepCopyInto(out *CloudSQLObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudSQLParameters ¶
type CloudSQLParameters struct { // Cloud SQL properties. // Structure is documented below. // +kubebuilder:validation:Optional Credential *CredentialParameters `json:"credential" tf:"credential,omitempty"` // Database name. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/sql/v1beta1.Database // +kubebuilder:validation:Optional Database *string `json:"database,omitempty" tf:"database,omitempty"` // Reference to a Database in sql to populate database. // +kubebuilder:validation:Optional DatabaseRef *v1.Reference `json:"databaseRef,omitempty" tf:"-"` // Selector for a Database in sql to populate database. // +kubebuilder:validation:Optional DatabaseSelector *v1.Selector `json:"databaseSelector,omitempty" tf:"-"` // Cloud SQL instance ID in the form project:location:instance. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/sql/v1beta2.DatabaseInstance // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("connection_name",true) // +kubebuilder:validation:Optional InstanceID *string `json:"instanceId,omitempty" tf:"instance_id,omitempty"` // Reference to a DatabaseInstance in sql to populate instanceId. // +kubebuilder:validation:Optional InstanceIDRef *v1.Reference `json:"instanceIdRef,omitempty" tf:"-"` // Selector for a DatabaseInstance in sql to populate instanceId. // +kubebuilder:validation:Optional InstanceIDSelector *v1.Selector `json:"instanceIdSelector,omitempty" tf:"-"` // Type of the Cloud SQL database. // Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL. // +kubebuilder:validation:Optional Type *string `json:"type" tf:"type,omitempty"` }
func (*CloudSQLParameters) DeepCopy ¶
func (in *CloudSQLParameters) DeepCopy() *CloudSQLParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudSQLParameters.
func (*CloudSQLParameters) DeepCopyInto ¶
func (in *CloudSQLParameters) DeepCopyInto(out *CloudSQLParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudSpannerInitParameters ¶
type CloudSpannerInitParameters struct { // Cloud Spanner database in the form `project/instance/database'. Database *string `json:"database,omitempty" tf:"database,omitempty"` // Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about. DatabaseRole *string `json:"databaseRole,omitempty" tf:"database_role,omitempty"` // Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism. MaxParallelism *float64 `json:"maxParallelism,omitempty" tf:"max_parallelism,omitempty"` // If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost. UseDataBoost *bool `json:"useDataBoost,omitempty" tf:"use_data_boost,omitempty"` // If parallelism should be used when reading from Cloud Spanner. UseParallelism *bool `json:"useParallelism,omitempty" tf:"use_parallelism,omitempty"` // If the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics. UseServerlessAnalytics *bool `json:"useServerlessAnalytics,omitempty" tf:"use_serverless_analytics,omitempty"` }
func (*CloudSpannerInitParameters) DeepCopy ¶
func (in *CloudSpannerInitParameters) DeepCopy() *CloudSpannerInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudSpannerInitParameters.
func (*CloudSpannerInitParameters) DeepCopyInto ¶
func (in *CloudSpannerInitParameters) DeepCopyInto(out *CloudSpannerInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudSpannerObservation ¶
type CloudSpannerObservation struct { // Cloud Spanner database in the form `project/instance/database'. Database *string `json:"database,omitempty" tf:"database,omitempty"` // Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about. DatabaseRole *string `json:"databaseRole,omitempty" tf:"database_role,omitempty"` // Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism. MaxParallelism *float64 `json:"maxParallelism,omitempty" tf:"max_parallelism,omitempty"` // If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost. UseDataBoost *bool `json:"useDataBoost,omitempty" tf:"use_data_boost,omitempty"` // If parallelism should be used when reading from Cloud Spanner. UseParallelism *bool `json:"useParallelism,omitempty" tf:"use_parallelism,omitempty"` // If the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics. UseServerlessAnalytics *bool `json:"useServerlessAnalytics,omitempty" tf:"use_serverless_analytics,omitempty"` }
func (*CloudSpannerObservation) DeepCopy ¶
func (in *CloudSpannerObservation) DeepCopy() *CloudSpannerObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudSpannerObservation.
func (*CloudSpannerObservation) DeepCopyInto ¶
func (in *CloudSpannerObservation) DeepCopyInto(out *CloudSpannerObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CloudSpannerParameters ¶
type CloudSpannerParameters struct { // Cloud Spanner database in the form `project/instance/database'. // +kubebuilder:validation:Optional Database *string `json:"database" tf:"database,omitempty"` // Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about. // +kubebuilder:validation:Optional DatabaseRole *string `json:"databaseRole,omitempty" tf:"database_role,omitempty"` // Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism. // +kubebuilder:validation:Optional MaxParallelism *float64 `json:"maxParallelism,omitempty" tf:"max_parallelism,omitempty"` // If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost. // +kubebuilder:validation:Optional UseDataBoost *bool `json:"useDataBoost,omitempty" tf:"use_data_boost,omitempty"` // If parallelism should be used when reading from Cloud Spanner. // +kubebuilder:validation:Optional UseParallelism *bool `json:"useParallelism,omitempty" tf:"use_parallelism,omitempty"` // If the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics. // +kubebuilder:validation:Optional UseServerlessAnalytics *bool `json:"useServerlessAnalytics,omitempty" tf:"use_serverless_analytics,omitempty"` }
func (*CloudSpannerParameters) DeepCopy ¶
func (in *CloudSpannerParameters) DeepCopy() *CloudSpannerParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloudSpannerParameters.
func (*CloudSpannerParameters) DeepCopyInto ¶
func (in *CloudSpannerParameters) DeepCopyInto(out *CloudSpannerParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnFamilyInitParameters ¶ added in v1.7.0
type ColumnFamilyInitParameters struct { // A List of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as Other columns can be accessed as a list through column field. Structure is documented below. Column []ColumnInitParameters `json:"column,omitempty" tf:"column,omitempty"` // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // Identifier of the column family. FamilyID *string `json:"familyId,omitempty" tf:"family_id,omitempty"` // If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. OnlyReadLatest *bool `json:"onlyReadLatest,omitempty" tf:"only_read_latest,omitempty"` // Describes the table type. Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*ColumnFamilyInitParameters) DeepCopy ¶ added in v1.7.0
func (in *ColumnFamilyInitParameters) DeepCopy() *ColumnFamilyInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnFamilyInitParameters.
func (*ColumnFamilyInitParameters) DeepCopyInto ¶ added in v1.7.0
func (in *ColumnFamilyInitParameters) DeepCopyInto(out *ColumnFamilyInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnFamilyObservation ¶ added in v1.7.0
type ColumnFamilyObservation struct { // A List of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as Other columns can be accessed as a list through column field. Structure is documented below. Column []ColumnObservation `json:"column,omitempty" tf:"column,omitempty"` // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // Identifier of the column family. FamilyID *string `json:"familyId,omitempty" tf:"family_id,omitempty"` // If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. OnlyReadLatest *bool `json:"onlyReadLatest,omitempty" tf:"only_read_latest,omitempty"` // Describes the table type. Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*ColumnFamilyObservation) DeepCopy ¶ added in v1.7.0
func (in *ColumnFamilyObservation) DeepCopy() *ColumnFamilyObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnFamilyObservation.
func (*ColumnFamilyObservation) DeepCopyInto ¶ added in v1.7.0
func (in *ColumnFamilyObservation) DeepCopyInto(out *ColumnFamilyObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnFamilyParameters ¶ added in v1.7.0
type ColumnFamilyParameters struct { // A List of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as Other columns can be accessed as a list through column field. Structure is documented below. // +kubebuilder:validation:Optional Column []ColumnParameters `json:"column,omitempty" tf:"column,omitempty"` // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. // +kubebuilder:validation:Optional Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // Identifier of the column family. // +kubebuilder:validation:Optional FamilyID *string `json:"familyId,omitempty" tf:"family_id,omitempty"` // If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. // +kubebuilder:validation:Optional OnlyReadLatest *bool `json:"onlyReadLatest,omitempty" tf:"only_read_latest,omitempty"` // Describes the table type. // +kubebuilder:validation:Optional Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*ColumnFamilyParameters) DeepCopy ¶ added in v1.7.0
func (in *ColumnFamilyParameters) DeepCopy() *ColumnFamilyParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnFamilyParameters.
func (*ColumnFamilyParameters) DeepCopyInto ¶ added in v1.7.0
func (in *ColumnFamilyParameters) DeepCopyInto(out *ColumnFamilyParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnInitParameters ¶ added in v1.7.0
type ColumnInitParameters struct { // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries. FieldName *string `json:"fieldName,omitempty" tf:"field_name,omitempty"` // If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. OnlyReadLatest *bool `json:"onlyReadLatest,omitempty" tf:"only_read_latest,omitempty"` // Qualifier of the column. Columns in the parent column family that has this exact qualifier are exposed as . field. If the qualifier is valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName. QualifierEncoded *string `json:"qualifierEncoded,omitempty" tf:"qualifier_encoded,omitempty"` // Qualifier string. QualifierString *string `json:"qualifierString,omitempty" tf:"qualifier_string,omitempty"` // Describes the table type. Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*ColumnInitParameters) DeepCopy ¶ added in v1.7.0
func (in *ColumnInitParameters) DeepCopy() *ColumnInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnInitParameters.
func (*ColumnInitParameters) DeepCopyInto ¶ added in v1.7.0
func (in *ColumnInitParameters) DeepCopyInto(out *ColumnInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnObservation ¶ added in v1.7.0
type ColumnObservation struct { // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries. FieldName *string `json:"fieldName,omitempty" tf:"field_name,omitempty"` // If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. OnlyReadLatest *bool `json:"onlyReadLatest,omitempty" tf:"only_read_latest,omitempty"` // Qualifier of the column. Columns in the parent column family that has this exact qualifier are exposed as . field. If the qualifier is valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName. QualifierEncoded *string `json:"qualifierEncoded,omitempty" tf:"qualifier_encoded,omitempty"` // Qualifier string. QualifierString *string `json:"qualifierString,omitempty" tf:"qualifier_string,omitempty"` // Describes the table type. Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*ColumnObservation) DeepCopy ¶ added in v1.7.0
func (in *ColumnObservation) DeepCopy() *ColumnObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnObservation.
func (*ColumnObservation) DeepCopyInto ¶ added in v1.7.0
func (in *ColumnObservation) DeepCopyInto(out *ColumnObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnParameters ¶ added in v1.7.0
type ColumnParameters struct { // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. // +kubebuilder:validation:Optional Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries. // +kubebuilder:validation:Optional FieldName *string `json:"fieldName,omitempty" tf:"field_name,omitempty"` // If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column. // +kubebuilder:validation:Optional OnlyReadLatest *bool `json:"onlyReadLatest,omitempty" tf:"only_read_latest,omitempty"` // Qualifier of the column. Columns in the parent column family that has this exact qualifier are exposed as . field. If the qualifier is valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName. // +kubebuilder:validation:Optional QualifierEncoded *string `json:"qualifierEncoded,omitempty" tf:"qualifier_encoded,omitempty"` // Qualifier string. // +kubebuilder:validation:Optional QualifierString *string `json:"qualifierString,omitempty" tf:"qualifier_string,omitempty"` // Describes the table type. // +kubebuilder:validation:Optional Type *string `json:"type,omitempty" tf:"type,omitempty"` }
func (*ColumnParameters) DeepCopy ¶ added in v1.7.0
func (in *ColumnParameters) DeepCopy() *ColumnParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnParameters.
func (*ColumnParameters) DeepCopyInto ¶ added in v1.7.0
func (in *ColumnParameters) DeepCopyInto(out *ColumnParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnReferencesInitParameters ¶
type ColumnReferencesInitParameters struct { // The column in the primary key that is // referenced by the referencingColumn. ReferencedColumn *string `json:"referencedColumn,omitempty" tf:"referenced_column,omitempty"` // The column that composes the foreign key. ReferencingColumn *string `json:"referencingColumn,omitempty" tf:"referencing_column,omitempty"` }
func (*ColumnReferencesInitParameters) DeepCopy ¶
func (in *ColumnReferencesInitParameters) DeepCopy() *ColumnReferencesInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnReferencesInitParameters.
func (*ColumnReferencesInitParameters) DeepCopyInto ¶
func (in *ColumnReferencesInitParameters) DeepCopyInto(out *ColumnReferencesInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnReferencesObservation ¶
type ColumnReferencesObservation struct { // The column in the primary key that is // referenced by the referencingColumn. ReferencedColumn *string `json:"referencedColumn,omitempty" tf:"referenced_column,omitempty"` // The column that composes the foreign key. ReferencingColumn *string `json:"referencingColumn,omitempty" tf:"referencing_column,omitempty"` }
func (*ColumnReferencesObservation) DeepCopy ¶
func (in *ColumnReferencesObservation) DeepCopy() *ColumnReferencesObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnReferencesObservation.
func (*ColumnReferencesObservation) DeepCopyInto ¶
func (in *ColumnReferencesObservation) DeepCopyInto(out *ColumnReferencesObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ColumnReferencesParameters ¶
type ColumnReferencesParameters struct { // The column in the primary key that is // referenced by the referencingColumn. // +kubebuilder:validation:Optional ReferencedColumn *string `json:"referencedColumn" tf:"referenced_column,omitempty"` // The column that composes the foreign key. // +kubebuilder:validation:Optional ReferencingColumn *string `json:"referencingColumn" tf:"referencing_column,omitempty"` }
func (*ColumnReferencesParameters) DeepCopy ¶
func (in *ColumnReferencesParameters) DeepCopy() *ColumnReferencesParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnReferencesParameters.
func (*ColumnReferencesParameters) DeepCopyInto ¶
func (in *ColumnReferencesParameters) DeepCopyInto(out *ColumnReferencesParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConditionInitParameters ¶
type ConditionInitParameters struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*ConditionInitParameters) DeepCopy ¶
func (in *ConditionInitParameters) DeepCopy() *ConditionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConditionInitParameters.
func (*ConditionInitParameters) DeepCopyInto ¶
func (in *ConditionInitParameters) DeepCopyInto(out *ConditionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConditionObservation ¶
type ConditionObservation struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*ConditionObservation) DeepCopy ¶
func (in *ConditionObservation) DeepCopy() *ConditionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConditionObservation.
func (*ConditionObservation) DeepCopyInto ¶
func (in *ConditionObservation) DeepCopyInto(out *ConditionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConditionParameters ¶
type ConditionParameters struct { // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // +kubebuilder:validation:Optional Expression *string `json:"expression" tf:"expression,omitempty"` // +kubebuilder:validation:Optional Title *string `json:"title" tf:"title,omitempty"` }
func (*ConditionParameters) DeepCopy ¶
func (in *ConditionParameters) DeepCopy() *ConditionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConditionParameters.
func (*ConditionParameters) DeepCopyInto ¶
func (in *ConditionParameters) DeepCopyInto(out *ConditionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Connection ¶
type Connection struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec ConnectionSpec `json:"spec"` Status ConnectionStatus `json:"status,omitempty"` }
Connection is the Schema for the Connections API. A connection allows BigQuery connections to external data sources. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Connection) DeepCopy ¶
func (in *Connection) DeepCopy() *Connection
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Connection.
func (*Connection) DeepCopyInto ¶
func (in *Connection) DeepCopyInto(out *Connection)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Connection) DeepCopyObject ¶
func (in *Connection) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Connection) GetCondition ¶
func (mg *Connection) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Connection.
func (*Connection) GetConnectionDetailsMapping ¶
func (tr *Connection) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this Connection
func (*Connection) GetDeletionPolicy ¶
func (mg *Connection) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Connection.
func (*Connection) GetID ¶
func (tr *Connection) GetID() string
GetID returns ID of underlying Terraform resource of this Connection
func (*Connection) GetInitParameters ¶
func (tr *Connection) GetInitParameters() (map[string]any, error)
GetInitParameters of this Connection
func (*Connection) GetManagementPolicies ¶
func (mg *Connection) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Connection.
func (*Connection) GetMergedParameters ¶
func (tr *Connection) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this Connection
func (*Connection) GetObservation ¶
func (tr *Connection) GetObservation() (map[string]any, error)
GetObservation of this Connection
func (*Connection) GetParameters ¶
func (tr *Connection) GetParameters() (map[string]any, error)
GetParameters of this Connection
func (*Connection) GetProviderConfigReference ¶
func (mg *Connection) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this Connection.
func (*Connection) GetPublishConnectionDetailsTo ¶
func (mg *Connection) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Connection.
func (*Connection) GetTerraformResourceType ¶
func (mg *Connection) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this Connection
func (*Connection) GetTerraformSchemaVersion ¶
func (tr *Connection) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Connection) GetWriteConnectionSecretToReference ¶
func (mg *Connection) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Connection.
func (*Connection) LateInitialize ¶
func (tr *Connection) LateInitialize(attrs []byte) (bool, error)
LateInitialize this Connection using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Connection) ResolveReferences ¶
ResolveReferences of this Connection.
func (*Connection) SetConditions ¶
func (mg *Connection) SetConditions(c ...xpv1.Condition)
SetConditions of this Connection.
func (*Connection) SetDeletionPolicy ¶
func (mg *Connection) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Connection.
func (*Connection) SetManagementPolicies ¶
func (mg *Connection) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Connection.
func (*Connection) SetObservation ¶
func (tr *Connection) SetObservation(obs map[string]any) error
SetObservation for this Connection
func (*Connection) SetParameters ¶
func (tr *Connection) SetParameters(params map[string]any) error
SetParameters for this Connection
func (*Connection) SetProviderConfigReference ¶
func (mg *Connection) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this Connection.
func (*Connection) SetPublishConnectionDetailsTo ¶
func (mg *Connection) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Connection.
func (*Connection) SetWriteConnectionSecretToReference ¶
func (mg *Connection) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Connection.
type ConnectionInitParameters ¶
type ConnectionInitParameters struct { // Connection properties specific to Amazon Web Services. // Structure is documented below. Aws *AwsInitParameters `json:"aws,omitempty" tf:"aws,omitempty"` // Container for connection properties specific to Azure. // Structure is documented below. Azure *AzureInitParameters `json:"azure,omitempty" tf:"azure,omitempty"` // Container for connection properties for delegation of access to GCP resources. // Structure is documented below. CloudResource *CloudResourceInitParameters `json:"cloudResource,omitempty" tf:"cloud_resource,omitempty"` // Connection properties specific to the Cloud SQL. // Structure is documented below. CloudSQL *CloudSQLInitParameters `json:"cloudSql,omitempty" tf:"cloud_sql,omitempty"` // Connection properties specific to Cloud Spanner // Structure is documented below. CloudSpanner *CloudSpannerInitParameters `json:"cloudSpanner,omitempty" tf:"cloud_spanner,omitempty"` // Optional connection id that should be assigned to the created connection. ConnectionID *string `json:"connectionId,omitempty" tf:"connection_id,omitempty"` // A descriptive description for the connection Description *string `json:"description,omitempty" tf:"description,omitempty"` // A descriptive name for the connection FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // Optional. The Cloud KMS key that is used for encryption. // Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key] KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // The geographic location where the connection should reside. // Cloud SQL instance must be in the same location as the connection // with following exceptions: Cloud SQL us-central1 maps to BigQuery US, Cloud SQL europe-west1 maps to BigQuery EU. // Examples: US, EU, asia-northeast1, us-central1, europe-west1. 
// Spanner Connections same as spanner region // AWS allowed regions are aws-us-east-1 // Azure allowed regions are azure-eastus2 Location *string `json:"location,omitempty" tf:"location,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Container for connection properties to execute stored procedures for Apache Spark. resources. // Structure is documented below. Spark *SparkInitParameters `json:"spark,omitempty" tf:"spark,omitempty"` }
func (*ConnectionInitParameters) DeepCopy ¶
func (in *ConnectionInitParameters) DeepCopy() *ConnectionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionInitParameters.
func (*ConnectionInitParameters) DeepCopyInto ¶
func (in *ConnectionInitParameters) DeepCopyInto(out *ConnectionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConnectionList ¶
type ConnectionList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Connection `json:"items"` }
ConnectionList contains a list of Connections
func (*ConnectionList) DeepCopy ¶
func (in *ConnectionList) DeepCopy() *ConnectionList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionList.
func (*ConnectionList) DeepCopyInto ¶
func (in *ConnectionList) DeepCopyInto(out *ConnectionList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ConnectionList) DeepCopyObject ¶
func (in *ConnectionList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*ConnectionList) GetItems ¶
func (l *ConnectionList) GetItems() []resource.Managed
GetItems of this ConnectionList.
type ConnectionObservation ¶
type ConnectionObservation struct { // Connection properties specific to Amazon Web Services. // Structure is documented below. Aws *AwsObservation `json:"aws,omitempty" tf:"aws,omitempty"` // Container for connection properties specific to Azure. // Structure is documented below. Azure *AzureObservation `json:"azure,omitempty" tf:"azure,omitempty"` // Container for connection properties for delegation of access to GCP resources. // Structure is documented below. CloudResource *CloudResourceObservation `json:"cloudResource,omitempty" tf:"cloud_resource,omitempty"` // Connection properties specific to the Cloud SQL. // Structure is documented below. CloudSQL *CloudSQLObservation `json:"cloudSql,omitempty" tf:"cloud_sql,omitempty"` // Connection properties specific to Cloud Spanner // Structure is documented below. CloudSpanner *CloudSpannerObservation `json:"cloudSpanner,omitempty" tf:"cloud_spanner,omitempty"` // Optional connection id that should be assigned to the created connection. ConnectionID *string `json:"connectionId,omitempty" tf:"connection_id,omitempty"` // A descriptive description for the connection Description *string `json:"description,omitempty" tf:"description,omitempty"` // A descriptive name for the connection FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // True if the connection has credential assigned. HasCredential *bool `json:"hasCredential,omitempty" tf:"has_credential,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/connections/{{connection_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // Optional. The Cloud KMS key that is used for encryption. // Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key] KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // The geographic location where the connection should reside. 
// Cloud SQL instance must be in the same location as the connection // with following exceptions: Cloud SQL us-central1 maps to BigQuery US, Cloud SQL europe-west1 maps to BigQuery EU. // Examples: US, EU, asia-northeast1, us-central1, europe-west1. // Spanner Connections same as spanner region // AWS allowed regions are aws-us-east-1 // Azure allowed regions are azure-eastus2 Location *string `json:"location,omitempty" tf:"location,omitempty"` // The resource name of the connection in the form of: // "projects/{project_id}/locations/{location_id}/connections/{connectionId}" Name *string `json:"name,omitempty" tf:"name,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Container for connection properties to execute stored procedures for Apache Spark resources. // Structure is documented below. Spark *SparkObservation `json:"spark,omitempty" tf:"spark,omitempty"` }
func (*ConnectionObservation) DeepCopy ¶
func (in *ConnectionObservation) DeepCopy() *ConnectionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionObservation.
func (*ConnectionObservation) DeepCopyInto ¶
func (in *ConnectionObservation) DeepCopyInto(out *ConnectionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConnectionParameters ¶
type ConnectionParameters struct { // Connection properties specific to Amazon Web Services. // Structure is documented below. // +kubebuilder:validation:Optional Aws *AwsParameters `json:"aws,omitempty" tf:"aws,omitempty"` // Container for connection properties specific to Azure. // Structure is documented below. // +kubebuilder:validation:Optional Azure *AzureParameters `json:"azure,omitempty" tf:"azure,omitempty"` // Container for connection properties for delegation of access to GCP resources. // Structure is documented below. // +kubebuilder:validation:Optional CloudResource *CloudResourceParameters `json:"cloudResource,omitempty" tf:"cloud_resource,omitempty"` // Connection properties specific to the Cloud SQL. // Structure is documented below. // +kubebuilder:validation:Optional CloudSQL *CloudSQLParameters `json:"cloudSql,omitempty" tf:"cloud_sql,omitempty"` // Connection properties specific to Cloud Spanner // Structure is documented below. // +kubebuilder:validation:Optional CloudSpanner *CloudSpannerParameters `json:"cloudSpanner,omitempty" tf:"cloud_spanner,omitempty"` // Optional connection id that should be assigned to the created connection. // +kubebuilder:validation:Optional ConnectionID *string `json:"connectionId,omitempty" tf:"connection_id,omitempty"` // A descriptive description for the connection // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // A descriptive name for the connection // +kubebuilder:validation:Optional FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // Optional. The Cloud KMS key that is used for encryption. // Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key] // +kubebuilder:validation:Optional KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // The geographic location where the connection should reside. 
// Cloud SQL instance must be in the same location as the connection // with following exceptions: Cloud SQL us-central1 maps to BigQuery US, Cloud SQL europe-west1 maps to BigQuery EU. // Examples: US, EU, asia-northeast1, us-central1, europe-west1. // Spanner Connections same as spanner region // AWS allowed regions are aws-us-east-1 // Azure allowed regions are azure-eastus2 // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Container for connection properties to execute stored procedures for Apache Spark resources. // Structure is documented below. // +kubebuilder:validation:Optional Spark *SparkParameters `json:"spark,omitempty" tf:"spark,omitempty"` }
func (*ConnectionParameters) DeepCopy ¶
func (in *ConnectionParameters) DeepCopy() *ConnectionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionParameters.
func (*ConnectionParameters) DeepCopyInto ¶
func (in *ConnectionParameters) DeepCopyInto(out *ConnectionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConnectionSpec ¶
type ConnectionSpec struct { v1.ResourceSpec `json:",inline"` ForProvider ConnectionParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider ConnectionInitParameters `json:"initProvider,omitempty"` }
ConnectionSpec defines the desired state of Connection
func (*ConnectionSpec) DeepCopy ¶
func (in *ConnectionSpec) DeepCopy() *ConnectionSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionSpec.
func (*ConnectionSpec) DeepCopyInto ¶
func (in *ConnectionSpec) DeepCopyInto(out *ConnectionSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConnectionStatus ¶
type ConnectionStatus struct { v1.ResourceStatus `json:",inline"` AtProvider ConnectionObservation `json:"atProvider,omitempty"` }
ConnectionStatus defines the observed state of Connection.
func (*ConnectionStatus) DeepCopy ¶
func (in *ConnectionStatus) DeepCopy() *ConnectionStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionStatus.
func (*ConnectionStatus) DeepCopyInto ¶
func (in *ConnectionStatus) DeepCopyInto(out *ConnectionStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CopyInitParameters ¶
type CopyInitParameters struct { // Specifies whether the job is allowed to create new tables. The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion // Default value is CREATE_IF_NEEDED. // Possible values are: CREATE_IF_NEEDED, CREATE_NEVER. CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"` // Custom encryption configuration (e.g., Cloud KMS keys) // Structure is documented below. DestinationEncryptionConfiguration *DestinationEncryptionConfigurationInitParameters `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"` // The destination table. // Structure is documented below. DestinationTable *DestinationTableInitParameters `json:"destinationTable,omitempty" tf:"destination_table,omitempty"` // Source tables to copy. // Structure is documented below. SourceTables []SourceTablesInitParameters `json:"sourceTables,omitempty" tf:"source_tables,omitempty"` // Specifies the action that occurs if the destination table already exists. The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. // Default value is WRITE_EMPTY. // Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY. 
WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"` }
func (*CopyInitParameters) DeepCopy ¶
func (in *CopyInitParameters) DeepCopy() *CopyInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CopyInitParameters.
func (*CopyInitParameters) DeepCopyInto ¶
func (in *CopyInitParameters) DeepCopyInto(out *CopyInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CopyObservation ¶
type CopyObservation struct { // Specifies whether the job is allowed to create new tables. The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion // Default value is CREATE_IF_NEEDED. // Possible values are: CREATE_IF_NEEDED, CREATE_NEVER. CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"` // Custom encryption configuration (e.g., Cloud KMS keys) // Structure is documented below. DestinationEncryptionConfiguration *DestinationEncryptionConfigurationObservation `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"` // The destination table. // Structure is documented below. DestinationTable *DestinationTableObservation `json:"destinationTable,omitempty" tf:"destination_table,omitempty"` // Source tables to copy. // Structure is documented below. SourceTables []SourceTablesObservation `json:"sourceTables,omitempty" tf:"source_tables,omitempty"` // Specifies the action that occurs if the destination table already exists. The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. // Default value is WRITE_EMPTY. // Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY. 
WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"` }
func (*CopyObservation) DeepCopy ¶
func (in *CopyObservation) DeepCopy() *CopyObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CopyObservation.
func (*CopyObservation) DeepCopyInto ¶
func (in *CopyObservation) DeepCopyInto(out *CopyObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CopyParameters ¶
type CopyParameters struct { // Specifies whether the job is allowed to create new tables. The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion // Default value is CREATE_IF_NEEDED. // Possible values are: CREATE_IF_NEEDED, CREATE_NEVER. // +kubebuilder:validation:Optional CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"` // Custom encryption configuration (e.g., Cloud KMS keys) // Structure is documented below. // +kubebuilder:validation:Optional DestinationEncryptionConfiguration *DestinationEncryptionConfigurationParameters `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"` // The destination table. // Structure is documented below. // +kubebuilder:validation:Optional DestinationTable *DestinationTableParameters `json:"destinationTable,omitempty" tf:"destination_table,omitempty"` // Source tables to copy. // Structure is documented below. // +kubebuilder:validation:Optional SourceTables []SourceTablesParameters `json:"sourceTables" tf:"source_tables,omitempty"` // Specifies the action that occurs if the destination table already exists. The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. 
// Default value is WRITE_EMPTY. // Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY. // +kubebuilder:validation:Optional WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"` }
func (*CopyParameters) DeepCopy ¶
func (in *CopyParameters) DeepCopy() *CopyParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CopyParameters.
func (*CopyParameters) DeepCopyInto ¶
func (in *CopyParameters) DeepCopyInto(out *CopyParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CredentialInitParameters ¶
type CredentialInitParameters struct { // Password for database. // Note: This property is sensitive and will not be displayed in the plan. PasswordSecretRef v1.SecretKeySelector `json:"passwordSecretRef" tf:"-"` // Username for database. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/sql/v1beta2.User Username *string `json:"username,omitempty" tf:"username,omitempty"` // Reference to a User in sql to populate username. // +kubebuilder:validation:Optional UsernameRef *v1.Reference `json:"usernameRef,omitempty" tf:"-"` // Selector for a User in sql to populate username. // +kubebuilder:validation:Optional UsernameSelector *v1.Selector `json:"usernameSelector,omitempty" tf:"-"` }
func (*CredentialInitParameters) DeepCopy ¶
func (in *CredentialInitParameters) DeepCopy() *CredentialInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CredentialInitParameters.
func (*CredentialInitParameters) DeepCopyInto ¶
func (in *CredentialInitParameters) DeepCopyInto(out *CredentialInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CredentialObservation ¶
type CredentialObservation struct { // Username for database. Username *string `json:"username,omitempty" tf:"username,omitempty"` }
func (*CredentialObservation) DeepCopy ¶
func (in *CredentialObservation) DeepCopy() *CredentialObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CredentialObservation.
func (*CredentialObservation) DeepCopyInto ¶
func (in *CredentialObservation) DeepCopyInto(out *CredentialObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CredentialParameters ¶
type CredentialParameters struct { // Password for database. // Note: This property is sensitive and will not be displayed in the plan. // +kubebuilder:validation:Optional PasswordSecretRef v1.SecretKeySelector `json:"passwordSecretRef" tf:"-"` // Username for database. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/sql/v1beta2.User // +kubebuilder:validation:Optional Username *string `json:"username,omitempty" tf:"username,omitempty"` // Reference to a User in sql to populate username. // +kubebuilder:validation:Optional UsernameRef *v1.Reference `json:"usernameRef,omitempty" tf:"-"` // Selector for a User in sql to populate username. // +kubebuilder:validation:Optional UsernameSelector *v1.Selector `json:"usernameSelector,omitempty" tf:"-"` }
func (*CredentialParameters) DeepCopy ¶
func (in *CredentialParameters) DeepCopy() *CredentialParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CredentialParameters.
func (*CredentialParameters) DeepCopyInto ¶
func (in *CredentialParameters) DeepCopyInto(out *CredentialParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CsvOptionsInitParameters ¶
type CsvOptionsInitParameters struct { // Indicates if BigQuery should accept rows // that are missing trailing optional columns. AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows,omitempty"` // Indicates if BigQuery should allow // quoted data sections that contain newline characters in a CSV file. // The default value is false. AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines,omitempty"` // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // The separator for fields in a CSV file. FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // The value that is used to quote data sections in a // CSV file. If your data does not contain quoted sections, set the // property value to an empty string. If your data contains quoted newline // characters, you must also set the allow_quoted_newlines property to true. Quote *string `json:"quote,omitempty" tf:"quote,omitempty"` // The number of rows at the top of the sheet // that BigQuery will skip when reading the data. At least one of range or // skip_leading_rows must be set. SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` }
func (*CsvOptionsInitParameters) DeepCopy ¶
func (in *CsvOptionsInitParameters) DeepCopy() *CsvOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvOptionsInitParameters.
func (*CsvOptionsInitParameters) DeepCopyInto ¶
func (in *CsvOptionsInitParameters) DeepCopyInto(out *CsvOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CsvOptionsObservation ¶
type CsvOptionsObservation struct { // Indicates if BigQuery should accept rows // that are missing trailing optional columns. AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows,omitempty"` // Indicates if BigQuery should allow // quoted data sections that contain newline characters in a CSV file. // The default value is false. AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines,omitempty"` // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // The separator for fields in a CSV file. FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // The value that is used to quote data sections in a // CSV file. If your data does not contain quoted sections, set the // property value to an empty string. If your data contains quoted newline // characters, you must also set the allow_quoted_newlines property to true. Quote *string `json:"quote,omitempty" tf:"quote,omitempty"` // The number of rows at the top of the sheet // that BigQuery will skip when reading the data. At least one of range or // skip_leading_rows must be set. SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` }
func (*CsvOptionsObservation) DeepCopy ¶
func (in *CsvOptionsObservation) DeepCopy() *CsvOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvOptionsObservation.
func (*CsvOptionsObservation) DeepCopyInto ¶
func (in *CsvOptionsObservation) DeepCopyInto(out *CsvOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type CsvOptionsParameters ¶
type CsvOptionsParameters struct { // Indicates if BigQuery should accept rows // that are missing trailing optional columns. // +kubebuilder:validation:Optional AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows,omitempty"` // Indicates if BigQuery should allow // quoted data sections that contain newline characters in a CSV file. // The default value is false. // +kubebuilder:validation:Optional AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines,omitempty"` // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. // +kubebuilder:validation:Optional Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // The separator for fields in a CSV file. // +kubebuilder:validation:Optional FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // The value that is used to quote data sections in a // CSV file. If your data does not contain quoted sections, set the // property value to an empty string. If your data contains quoted newline // characters, you must also set the allow_quoted_newlines property to true. // +kubebuilder:validation:Optional Quote *string `json:"quote" tf:"quote,omitempty"` // The number of rows at the top of the sheet // that BigQuery will skip when reading the data. At least one of range or // skip_leading_rows must be set. // +kubebuilder:validation:Optional SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` }
func (*CsvOptionsParameters) DeepCopy ¶
func (in *CsvOptionsParameters) DeepCopy() *CsvOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvOptionsParameters.
func (*CsvOptionsParameters) DeepCopyInto ¶
func (in *CsvOptionsParameters) DeepCopyInto(out *CsvOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataProviderInitParameters ¶
type DataProviderInitParameters struct { // Name of the data provider. Name *string `json:"name,omitempty" tf:"name,omitempty"` // Email or URL of the data provider. PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"` }
func (*DataProviderInitParameters) DeepCopy ¶
func (in *DataProviderInitParameters) DeepCopy() *DataProviderInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProviderInitParameters.
func (*DataProviderInitParameters) DeepCopyInto ¶
func (in *DataProviderInitParameters) DeepCopyInto(out *DataProviderInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataProviderObservation ¶
type DataProviderObservation struct { // Name of the data provider. Name *string `json:"name,omitempty" tf:"name,omitempty"` // Email or URL of the data provider. PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"` }
func (*DataProviderObservation) DeepCopy ¶
func (in *DataProviderObservation) DeepCopy() *DataProviderObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProviderObservation.
func (*DataProviderObservation) DeepCopyInto ¶
func (in *DataProviderObservation) DeepCopyInto(out *DataProviderObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataProviderParameters ¶
type DataProviderParameters struct { // Name of the data provider. // +kubebuilder:validation:Optional Name *string `json:"name" tf:"name,omitempty"` // Email or URL of the data provider. // +kubebuilder:validation:Optional PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"` }
func (*DataProviderParameters) DeepCopy ¶
func (in *DataProviderParameters) DeepCopy() *DataProviderParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataProviderParameters.
func (*DataProviderParameters) DeepCopyInto ¶
func (in *DataProviderParameters) DeepCopyInto(out *DataProviderParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataTransferConfig ¶
type DataTransferConfig struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.dataSourceId) || (has(self.initProvider) && has(self.initProvider.dataSourceId))",message="spec.forProvider.dataSourceId is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.displayName) || (has(self.initProvider) && has(self.initProvider.displayName))",message="spec.forProvider.displayName is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.params) || (has(self.initProvider) && has(self.initProvider.params))",message="spec.forProvider.params is a required parameter" Spec DataTransferConfigSpec `json:"spec"` Status DataTransferConfigStatus `json:"status,omitempty"` }
DataTransferConfig is the Schema for the DataTransferConfigs API. Represents a data transfer configuration. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*DataTransferConfig) DeepCopy ¶
func (in *DataTransferConfig) DeepCopy() *DataTransferConfig
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfig.
func (*DataTransferConfig) DeepCopyInto ¶
func (in *DataTransferConfig) DeepCopyInto(out *DataTransferConfig)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataTransferConfig) DeepCopyObject ¶
func (in *DataTransferConfig) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataTransferConfig) GetCondition ¶
func (mg *DataTransferConfig) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this DataTransferConfig.
func (*DataTransferConfig) GetConnectionDetailsMapping ¶
func (tr *DataTransferConfig) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this DataTransferConfig
func (*DataTransferConfig) GetDeletionPolicy ¶
func (mg *DataTransferConfig) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this DataTransferConfig.
func (*DataTransferConfig) GetID ¶
func (tr *DataTransferConfig) GetID() string
GetID returns ID of underlying Terraform resource of this DataTransferConfig
func (*DataTransferConfig) GetInitParameters ¶
func (tr *DataTransferConfig) GetInitParameters() (map[string]any, error)
GetInitParameters of this DataTransferConfig
func (*DataTransferConfig) GetManagementPolicies ¶
func (mg *DataTransferConfig) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this DataTransferConfig.
func (*DataTransferConfig) GetMergedParameters ¶
func (tr *DataTransferConfig) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this DataTransferConfig, merging InitProvider parameters into ForProvider when shouldMergeInitProvider is true
func (*DataTransferConfig) GetObservation ¶
func (tr *DataTransferConfig) GetObservation() (map[string]any, error)
GetObservation of this DataTransferConfig
func (*DataTransferConfig) GetParameters ¶
func (tr *DataTransferConfig) GetParameters() (map[string]any, error)
GetParameters of this DataTransferConfig
func (*DataTransferConfig) GetProviderConfigReference ¶
func (mg *DataTransferConfig) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this DataTransferConfig.
func (*DataTransferConfig) GetPublishConnectionDetailsTo ¶
func (mg *DataTransferConfig) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this DataTransferConfig.
func (*DataTransferConfig) GetTerraformResourceType ¶
func (mg *DataTransferConfig) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this DataTransferConfig
func (*DataTransferConfig) GetTerraformSchemaVersion ¶
func (tr *DataTransferConfig) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*DataTransferConfig) GetWriteConnectionSecretToReference ¶
func (mg *DataTransferConfig) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this DataTransferConfig.
func (*DataTransferConfig) Hub ¶
func (tr *DataTransferConfig) Hub()
Hub marks this type as a conversion hub.
func (*DataTransferConfig) LateInitialize ¶
func (tr *DataTransferConfig) LateInitialize(attrs []byte) (bool, error)
LateInitialize this DataTransferConfig using its observed tfState. Returns true if there are any spec changes for the resource.
func (*DataTransferConfig) ResolveReferences ¶
ResolveReferences of this DataTransferConfig.
func (*DataTransferConfig) SetConditions ¶
func (mg *DataTransferConfig) SetConditions(c ...xpv1.Condition)
SetConditions of this DataTransferConfig.
func (*DataTransferConfig) SetDeletionPolicy ¶
func (mg *DataTransferConfig) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this DataTransferConfig.
func (*DataTransferConfig) SetManagementPolicies ¶
func (mg *DataTransferConfig) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this DataTransferConfig.
func (*DataTransferConfig) SetObservation ¶
func (tr *DataTransferConfig) SetObservation(obs map[string]any) error
SetObservation for this DataTransferConfig
func (*DataTransferConfig) SetParameters ¶
func (tr *DataTransferConfig) SetParameters(params map[string]any) error
SetParameters for this DataTransferConfig
func (*DataTransferConfig) SetProviderConfigReference ¶
func (mg *DataTransferConfig) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this DataTransferConfig.
func (*DataTransferConfig) SetPublishConnectionDetailsTo ¶
func (mg *DataTransferConfig) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this DataTransferConfig.
func (*DataTransferConfig) SetWriteConnectionSecretToReference ¶
func (mg *DataTransferConfig) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this DataTransferConfig.
type DataTransferConfigInitParameters ¶
type DataTransferConfigInitParameters struct { // The number of days to look back to automatically refresh the data. // For example, if dataRefreshWindowDays = 10, then every day BigQuery // reingests data for [today-10, today-1], rather than ingesting data for // just [today-1]. Only valid if the data source supports the feature. // Set the value to 0 to use the default value. DataRefreshWindowDays *float64 `json:"dataRefreshWindowDays,omitempty" tf:"data_refresh_window_days,omitempty"` // The data source id. Cannot be changed once the transfer config is created. DataSourceID *string `json:"dataSourceId,omitempty" tf:"data_source_id,omitempty"` // The BigQuery target dataset id. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DestinationDatasetID *string `json:"destinationDatasetId,omitempty" tf:"destination_dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate destinationDatasetId. // +kubebuilder:validation:Optional DestinationDatasetIDRef *v1.Reference `json:"destinationDatasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate destinationDatasetId. // +kubebuilder:validation:Optional DestinationDatasetIDSelector *v1.Selector `json:"destinationDatasetIdSelector,omitempty" tf:"-"` // When set to true, no runs are scheduled for a given transfer. Disabled *bool `json:"disabled,omitempty" tf:"disabled,omitempty"` // The user specified display name for the transfer config. DisplayName *string `json:"displayName,omitempty" tf:"display_name,omitempty"` // Email notifications will be sent according to these preferences to the // email address of the user who owns this transfer config. // Structure is documented below. EmailPreferences *EmailPreferencesInitParameters `json:"emailPreferences,omitempty" tf:"email_preferences,omitempty"` // The geographic location where the transfer config should reside. // Examples: US, EU, asia-northeast1. The default value is US. 
Location *string `json:"location,omitempty" tf:"location,omitempty"` // Pub/Sub topic where notifications will be sent after transfer runs // associated with this transfer config finish. NotificationPubsubTopic *string `json:"notificationPubsubTopic,omitempty" tf:"notification_pubsub_topic,omitempty"` // Parameters specific to each data source. For more information see the bq tab in the 'Setting up a data transfer' // section for each data source. For example the parameters for Cloud Storage transfers are listed here: // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq // NOTE : If you are attempting to update a parameter that cannot be updated (due to api limitations) please force recreation of the resource. // +mapType=granular Params map[string]*string `json:"params,omitempty" tf:"params,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Data transfer schedule. If the data source does not support a custom // schedule, this should be empty. If it is empty, the default value for // the data source will be used. The specified times are in UTC. Examples // of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, // jun 13:15, and first sunday of quarter 00:00. See more explanation // about the format here: // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format // NOTE: The minimum interval time between recurring transfers depends // on the data source; refer to the documentation for your data source. Schedule *string `json:"schedule,omitempty" tf:"schedule,omitempty"` // Options customizing the data transfer schedule. // Structure is documented below. 
ScheduleOptions *ScheduleOptionsInitParameters `json:"scheduleOptions,omitempty" tf:"schedule_options,omitempty"` // Different parameters are configured primarily using the the params field on this // resource. This block contains the parameters which contain secrets or passwords so that they can be marked // sensitive and hidden from plan output. The name of the field, eg: secret_access_key, will be the key // in the params map in the api request. // Credentials may not be specified in both locations and will cause an error. Changing from one location // to a different credential configuration in the config will require an apply to update state. // Structure is documented below. SensitiveParams *SensitiveParamsInitParameters `json:"sensitiveParams,omitempty" tf:"sensitive_params,omitempty"` // Service account email. If this field is set, transfer config will // be created with this service account credentials. It requires that // requesting user calling this API has permissions to act as this service account. ServiceAccountName *string `json:"serviceAccountName,omitempty" tf:"service_account_name,omitempty"` }
func (*DataTransferConfigInitParameters) DeepCopy ¶
func (in *DataTransferConfigInitParameters) DeepCopy() *DataTransferConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigInitParameters.
func (*DataTransferConfigInitParameters) DeepCopyInto ¶
func (in *DataTransferConfigInitParameters) DeepCopyInto(out *DataTransferConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataTransferConfigList ¶
type DataTransferConfigList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []DataTransferConfig `json:"items"` }
DataTransferConfigList contains a list of DataTransferConfigs
func (*DataTransferConfigList) DeepCopy ¶
func (in *DataTransferConfigList) DeepCopy() *DataTransferConfigList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigList.
func (*DataTransferConfigList) DeepCopyInto ¶
func (in *DataTransferConfigList) DeepCopyInto(out *DataTransferConfigList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DataTransferConfigList) DeepCopyObject ¶
func (in *DataTransferConfigList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DataTransferConfigList) GetItems ¶
func (l *DataTransferConfigList) GetItems() []resource.Managed
GetItems of this DataTransferConfigList.
type DataTransferConfigObservation ¶
type DataTransferConfigObservation struct { // The number of days to look back to automatically refresh the data. // For example, if dataRefreshWindowDays = 10, then every day BigQuery // reingests data for [today-10, today-1], rather than ingesting data for // just [today-1]. Only valid if the data source supports the feature. // Set the value to 0 to use the default value. DataRefreshWindowDays *float64 `json:"dataRefreshWindowDays,omitempty" tf:"data_refresh_window_days,omitempty"` // The data source id. Cannot be changed once the transfer config is created. DataSourceID *string `json:"dataSourceId,omitempty" tf:"data_source_id,omitempty"` // The BigQuery target dataset id. DestinationDatasetID *string `json:"destinationDatasetId,omitempty" tf:"destination_dataset_id,omitempty"` // When set to true, no runs are scheduled for a given transfer. Disabled *bool `json:"disabled,omitempty" tf:"disabled,omitempty"` // The user specified display name for the transfer config. DisplayName *string `json:"displayName,omitempty" tf:"display_name,omitempty"` // Email notifications will be sent according to these preferences to the // email address of the user who owns this transfer config. // Structure is documented below. EmailPreferences *EmailPreferencesObservation `json:"emailPreferences,omitempty" tf:"email_preferences,omitempty"` // an identifier for the resource with format {{name}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // The geographic location where the transfer config should reside. // Examples: US, EU, asia-northeast1. The default value is US. Location *string `json:"location,omitempty" tf:"location,omitempty"` // The resource name of the transfer config. Transfer config names have the // form projects/{projectId}/locations/{location}/transferConfigs/{configId} // or projects/{projectId}/transferConfigs/{configId}, // where configId is usually a uuid, but this is not required. // The name is ignored when creating a transfer config. 
Name *string `json:"name,omitempty" tf:"name,omitempty"` // Pub/Sub topic where notifications will be sent after transfer runs // associated with this transfer config finish. NotificationPubsubTopic *string `json:"notificationPubsubTopic,omitempty" tf:"notification_pubsub_topic,omitempty"` // Parameters specific to each data source. For more information see the bq tab in the 'Setting up a data transfer' // section for each data source. For example the parameters for Cloud Storage transfers are listed here: // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq // NOTE : If you are attempting to update a parameter that cannot be updated (due to api limitations) please force recreation of the resource. // +mapType=granular Params map[string]*string `json:"params,omitempty" tf:"params,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Data transfer schedule. If the data source does not support a custom // schedule, this should be empty. If it is empty, the default value for // the data source will be used. The specified times are in UTC. Examples // of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, // jun 13:15, and first sunday of quarter 00:00. See more explanation // about the format here: // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format // NOTE: The minimum interval time between recurring transfers depends // on the data source; refer to the documentation for your data source. Schedule *string `json:"schedule,omitempty" tf:"schedule,omitempty"` // Options customizing the data transfer schedule. // Structure is documented below. 
ScheduleOptions *ScheduleOptionsObservation `json:"scheduleOptions,omitempty" tf:"schedule_options,omitempty"` // Different parameters are configured primarily using the the params field on this // resource. This block contains the parameters which contain secrets or passwords so that they can be marked // sensitive and hidden from plan output. The name of the field, eg: secret_access_key, will be the key // in the params map in the api request. // Credentials may not be specified in both locations and will cause an error. Changing from one location // to a different credential configuration in the config will require an apply to update state. // Structure is documented below. SensitiveParams *SensitiveParamsParameters `json:"sensitiveParams,omitempty" tf:"sensitive_params,omitempty"` // Service account email. If this field is set, transfer config will // be created with this service account credentials. It requires that // requesting user calling this API has permissions to act as this service account. ServiceAccountName *string `json:"serviceAccountName,omitempty" tf:"service_account_name,omitempty"` }
func (*DataTransferConfigObservation) DeepCopy ¶
func (in *DataTransferConfigObservation) DeepCopy() *DataTransferConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigObservation.
func (*DataTransferConfigObservation) DeepCopyInto ¶
func (in *DataTransferConfigObservation) DeepCopyInto(out *DataTransferConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataTransferConfigParameters ¶
type DataTransferConfigParameters struct { // The number of days to look back to automatically refresh the data. // For example, if dataRefreshWindowDays = 10, then every day BigQuery // reingests data for [today-10, today-1], rather than ingesting data for // just [today-1]. Only valid if the data source supports the feature. // Set the value to 0 to use the default value. // +kubebuilder:validation:Optional DataRefreshWindowDays *float64 `json:"dataRefreshWindowDays,omitempty" tf:"data_refresh_window_days,omitempty"` // The data source id. Cannot be changed once the transfer config is created. // +kubebuilder:validation:Optional DataSourceID *string `json:"dataSourceId,omitempty" tf:"data_source_id,omitempty"` // The BigQuery target dataset id. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DestinationDatasetID *string `json:"destinationDatasetId,omitempty" tf:"destination_dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate destinationDatasetId. // +kubebuilder:validation:Optional DestinationDatasetIDRef *v1.Reference `json:"destinationDatasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate destinationDatasetId. // +kubebuilder:validation:Optional DestinationDatasetIDSelector *v1.Selector `json:"destinationDatasetIdSelector,omitempty" tf:"-"` // When set to true, no runs are scheduled for a given transfer. // +kubebuilder:validation:Optional Disabled *bool `json:"disabled,omitempty" tf:"disabled,omitempty"` // The user specified display name for the transfer config. // +kubebuilder:validation:Optional DisplayName *string `json:"displayName,omitempty" tf:"display_name,omitempty"` // Email notifications will be sent according to these preferences to the // email address of the user who owns this transfer config. // Structure is documented below. 
// +kubebuilder:validation:Optional EmailPreferences *EmailPreferencesParameters `json:"emailPreferences,omitempty" tf:"email_preferences,omitempty"` // The geographic location where the transfer config should reside. // Examples: US, EU, asia-northeast1. The default value is US. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // Pub/Sub topic where notifications will be sent after transfer runs // associated with this transfer config finish. // +kubebuilder:validation:Optional NotificationPubsubTopic *string `json:"notificationPubsubTopic,omitempty" tf:"notification_pubsub_topic,omitempty"` // Parameters specific to each data source. For more information see the bq tab in the 'Setting up a data transfer' // section for each data source. For example the parameters for Cloud Storage transfers are listed here: // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq // NOTE : If you are attempting to update a parameter that cannot be updated (due to api limitations) please force recreation of the resource. // +kubebuilder:validation:Optional // +mapType=granular Params map[string]*string `json:"params,omitempty" tf:"params,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Data transfer schedule. If the data source does not support a custom // schedule, this should be empty. If it is empty, the default value for // the data source will be used. The specified times are in UTC. Examples // of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, // jun 13:15, and first sunday of quarter 00:00. 
See more explanation // about the format here: // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format // NOTE: The minimum interval time between recurring transfers depends // on the data source; refer to the documentation for your data source. // +kubebuilder:validation:Optional Schedule *string `json:"schedule,omitempty" tf:"schedule,omitempty"` // Options customizing the data transfer schedule. // Structure is documented below. // +kubebuilder:validation:Optional ScheduleOptions *ScheduleOptionsParameters `json:"scheduleOptions,omitempty" tf:"schedule_options,omitempty"` // Different parameters are configured primarily using the the params field on this // resource. This block contains the parameters which contain secrets or passwords so that they can be marked // sensitive and hidden from plan output. The name of the field, eg: secret_access_key, will be the key // in the params map in the api request. // Credentials may not be specified in both locations and will cause an error. Changing from one location // to a different credential configuration in the config will require an apply to update state. // Structure is documented below. // +kubebuilder:validation:Optional SensitiveParams *SensitiveParamsParameters `json:"sensitiveParams,omitempty" tf:"sensitive_params,omitempty"` // Service account email. If this field is set, transfer config will // be created with this service account credentials. It requires that // requesting user calling this API has permissions to act as this service account. // +kubebuilder:validation:Optional ServiceAccountName *string `json:"serviceAccountName,omitempty" tf:"service_account_name,omitempty"` }
func (*DataTransferConfigParameters) DeepCopy ¶
func (in *DataTransferConfigParameters) DeepCopy() *DataTransferConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigParameters.
func (*DataTransferConfigParameters) DeepCopyInto ¶
func (in *DataTransferConfigParameters) DeepCopyInto(out *DataTransferConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataTransferConfigSpec ¶
type DataTransferConfigSpec struct { v1.ResourceSpec `json:",inline"` ForProvider DataTransferConfigParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider DataTransferConfigInitParameters `json:"initProvider,omitempty"` }
DataTransferConfigSpec defines the desired state of DataTransferConfig
func (*DataTransferConfigSpec) DeepCopy ¶
func (in *DataTransferConfigSpec) DeepCopy() *DataTransferConfigSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigSpec.
func (*DataTransferConfigSpec) DeepCopyInto ¶
func (in *DataTransferConfigSpec) DeepCopyInto(out *DataTransferConfigSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataTransferConfigStatus ¶
type DataTransferConfigStatus struct { v1.ResourceStatus `json:",inline"` AtProvider DataTransferConfigObservation `json:"atProvider,omitempty"` }
DataTransferConfigStatus defines the observed state of DataTransferConfig.
func (*DataTransferConfigStatus) DeepCopy ¶
func (in *DataTransferConfigStatus) DeepCopy() *DataTransferConfigStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataTransferConfigStatus.
func (*DataTransferConfigStatus) DeepCopyInto ¶
func (in *DataTransferConfigStatus) DeepCopyInto(out *DataTransferConfigStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Dataset ¶
type Dataset struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec DatasetSpec `json:"spec"` Status DatasetStatus `json:"status,omitempty"` }
Dataset is the Schema for the Datasets API. Datasets allow you to organize and control access to your tables. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Dataset) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset.
func (*Dataset) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Dataset) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Dataset) GetCondition ¶
func (mg *Dataset) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Dataset.
func (*Dataset) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Dataset
func (*Dataset) GetDeletionPolicy ¶
func (mg *Dataset) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Dataset.
func (*Dataset) GetInitParameters ¶
GetInitParameters of this Dataset
func (*Dataset) GetManagementPolicies ¶
func (mg *Dataset) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Dataset.
func (*Dataset) GetMergedParameters ¶
GetMergedParameters of this Dataset, merging InitProvider parameters into ForProvider when shouldMergeInitProvider is true
func (*Dataset) GetObservation ¶
GetObservation of this Dataset
func (*Dataset) GetParameters ¶
GetParameters of this Dataset
func (*Dataset) GetProviderConfigReference ¶
GetProviderConfigReference of this Dataset.
func (*Dataset) GetPublishConnectionDetailsTo ¶
func (mg *Dataset) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Dataset.
func (*Dataset) GetTerraformResourceType ¶
GetTerraformResourceType returns Terraform resource type for this Dataset
func (*Dataset) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Dataset) GetWriteConnectionSecretToReference ¶
func (mg *Dataset) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Dataset.
func (*Dataset) LateInitialize ¶
LateInitialize this Dataset using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Dataset) ResolveReferences ¶
ResolveReferences of this Dataset.
func (*Dataset) SetConditions ¶
SetConditions of this Dataset.
func (*Dataset) SetDeletionPolicy ¶
func (mg *Dataset) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Dataset.
func (*Dataset) SetManagementPolicies ¶
func (mg *Dataset) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Dataset.
func (*Dataset) SetObservation ¶
SetObservation for this Dataset
func (*Dataset) SetParameters ¶
SetParameters for this Dataset
func (*Dataset) SetProviderConfigReference ¶
SetProviderConfigReference of this Dataset.
func (*Dataset) SetPublishConnectionDetailsTo ¶
func (mg *Dataset) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Dataset.
func (*Dataset) SetWriteConnectionSecretToReference ¶
func (mg *Dataset) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Dataset.
type DatasetAccess ¶
type DatasetAccess struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec DatasetAccessSpec `json:"spec"` Status DatasetAccessStatus `json:"status,omitempty"` }
DatasetAccess is the Schema for the DatasetAccesses API. Gives dataset access for a single entity. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*DatasetAccess) DeepCopy ¶
func (in *DatasetAccess) DeepCopy() *DatasetAccess
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccess.
func (*DatasetAccess) DeepCopyInto ¶
func (in *DatasetAccess) DeepCopyInto(out *DatasetAccess)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetAccess) DeepCopyObject ¶
func (in *DatasetAccess) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetAccess) GetCondition ¶
func (mg *DatasetAccess) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this DatasetAccess.
func (*DatasetAccess) GetConnectionDetailsMapping ¶
func (tr *DatasetAccess) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this DatasetAccess
func (*DatasetAccess) GetDeletionPolicy ¶
func (mg *DatasetAccess) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this DatasetAccess.
func (*DatasetAccess) GetID ¶
func (tr *DatasetAccess) GetID() string
GetID returns ID of underlying Terraform resource of this DatasetAccess
func (*DatasetAccess) GetInitParameters ¶
func (tr *DatasetAccess) GetInitParameters() (map[string]any, error)
GetInitParameters of this DatasetAccess
func (*DatasetAccess) GetManagementPolicies ¶
func (mg *DatasetAccess) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this DatasetAccess.
func (*DatasetAccess) GetMergedParameters ¶
func (tr *DatasetAccess) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this DatasetAccess, merging InitProvider parameters into ForProvider when shouldMergeInitProvider is true
func (*DatasetAccess) GetObservation ¶
func (tr *DatasetAccess) GetObservation() (map[string]any, error)
GetObservation of this DatasetAccess
func (*DatasetAccess) GetParameters ¶
func (tr *DatasetAccess) GetParameters() (map[string]any, error)
GetParameters of this DatasetAccess
func (*DatasetAccess) GetProviderConfigReference ¶
func (mg *DatasetAccess) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this DatasetAccess.
func (*DatasetAccess) GetPublishConnectionDetailsTo ¶
func (mg *DatasetAccess) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this DatasetAccess.
func (*DatasetAccess) GetTerraformResourceType ¶
func (mg *DatasetAccess) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this DatasetAccess
func (*DatasetAccess) GetTerraformSchemaVersion ¶
func (tr *DatasetAccess) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*DatasetAccess) GetWriteConnectionSecretToReference ¶
func (mg *DatasetAccess) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this DatasetAccess.
func (*DatasetAccess) LateInitialize ¶
func (tr *DatasetAccess) LateInitialize(attrs []byte) (bool, error)
LateInitialize this DatasetAccess using its observed tfState. Returns true if there are any spec changes for the resource.
func (*DatasetAccess) ResolveReferences ¶
ResolveReferences of this DatasetAccess.
func (*DatasetAccess) SetConditions ¶
func (mg *DatasetAccess) SetConditions(c ...xpv1.Condition)
SetConditions of this DatasetAccess.
func (*DatasetAccess) SetDeletionPolicy ¶
func (mg *DatasetAccess) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this DatasetAccess.
func (*DatasetAccess) SetManagementPolicies ¶
func (mg *DatasetAccess) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this DatasetAccess.
func (*DatasetAccess) SetObservation ¶
func (tr *DatasetAccess) SetObservation(obs map[string]any) error
SetObservation for this DatasetAccess.
func (*DatasetAccess) SetParameters ¶
func (tr *DatasetAccess) SetParameters(params map[string]any) error
SetParameters for this DatasetAccess.
func (*DatasetAccess) SetProviderConfigReference ¶
func (mg *DatasetAccess) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this DatasetAccess.
func (*DatasetAccess) SetPublishConnectionDetailsTo ¶
func (mg *DatasetAccess) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this DatasetAccess.
func (*DatasetAccess) SetWriteConnectionSecretToReference ¶
func (mg *DatasetAccess) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this DatasetAccess.
type DatasetAccessDatasetDatasetInitParameters ¶
type DatasetAccessDatasetDatasetInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DatasetAccessDatasetDatasetInitParameters) DeepCopy ¶
func (in *DatasetAccessDatasetDatasetInitParameters) DeepCopy() *DatasetAccessDatasetDatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessDatasetDatasetInitParameters.
func (*DatasetAccessDatasetDatasetInitParameters) DeepCopyInto ¶
func (in *DatasetAccessDatasetDatasetInitParameters) DeepCopyInto(out *DatasetAccessDatasetDatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessDatasetDatasetObservation ¶
type DatasetAccessDatasetDatasetObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DatasetAccessDatasetDatasetObservation) DeepCopy ¶
func (in *DatasetAccessDatasetDatasetObservation) DeepCopy() *DatasetAccessDatasetDatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessDatasetDatasetObservation.
func (*DatasetAccessDatasetDatasetObservation) DeepCopyInto ¶
func (in *DatasetAccessDatasetDatasetObservation) DeepCopyInto(out *DatasetAccessDatasetDatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessDatasetDatasetParameters ¶
type DatasetAccessDatasetDatasetParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId" tf:"project_id,omitempty"` }
func (*DatasetAccessDatasetDatasetParameters) DeepCopy ¶
func (in *DatasetAccessDatasetDatasetParameters) DeepCopy() *DatasetAccessDatasetDatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessDatasetDatasetParameters.
func (*DatasetAccessDatasetDatasetParameters) DeepCopyInto ¶
func (in *DatasetAccessDatasetDatasetParameters) DeepCopyInto(out *DatasetAccessDatasetDatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessDatasetInitParameters ¶
type DatasetAccessDatasetInitParameters struct { // The dataset this entry applies to // Structure is documented below. Dataset *DatasetAccessDatasetDatasetInitParameters `json:"dataset,omitempty" tf:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only views are supported, // but additional target types may be added in the future. Possible values: VIEWS TargetTypes []*string `json:"targetTypes,omitempty" tf:"target_types,omitempty"` }
func (*DatasetAccessDatasetInitParameters) DeepCopy ¶
func (in *DatasetAccessDatasetInitParameters) DeepCopy() *DatasetAccessDatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessDatasetInitParameters.
func (*DatasetAccessDatasetInitParameters) DeepCopyInto ¶
func (in *DatasetAccessDatasetInitParameters) DeepCopyInto(out *DatasetAccessDatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessDatasetObservation ¶
type DatasetAccessDatasetObservation struct { // The dataset this entry applies to // Structure is documented below. Dataset *DatasetAccessDatasetDatasetObservation `json:"dataset,omitempty" tf:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only views are supported, // but additional target types may be added in the future. Possible values: VIEWS TargetTypes []*string `json:"targetTypes,omitempty" tf:"target_types,omitempty"` }
func (*DatasetAccessDatasetObservation) DeepCopy ¶
func (in *DatasetAccessDatasetObservation) DeepCopy() *DatasetAccessDatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessDatasetObservation.
func (*DatasetAccessDatasetObservation) DeepCopyInto ¶
func (in *DatasetAccessDatasetObservation) DeepCopyInto(out *DatasetAccessDatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessDatasetParameters ¶
type DatasetAccessDatasetParameters struct { // The dataset this entry applies to // Structure is documented below. // +kubebuilder:validation:Optional Dataset *DatasetAccessDatasetDatasetParameters `json:"dataset" tf:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only views are supported, // but additional target types may be added in the future. Possible values: VIEWS // +kubebuilder:validation:Optional TargetTypes []*string `json:"targetTypes" tf:"target_types,omitempty"` }
func (*DatasetAccessDatasetParameters) DeepCopy ¶
func (in *DatasetAccessDatasetParameters) DeepCopy() *DatasetAccessDatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessDatasetParameters.
func (*DatasetAccessDatasetParameters) DeepCopyInto ¶
func (in *DatasetAccessDatasetParameters) DeepCopyInto(out *DatasetAccessDatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessInitParameters ¶
type DatasetAccessInitParameters struct { // Grants all resources of particular types in a particular dataset read access to the current dataset. // Structure is documented below. Dataset *DatasetAccessDatasetInitParameters `json:"dataset,omitempty" tf:"dataset,omitempty"` // A unique ID for this dataset, without the project name. The ID // must contain only letters (a-z, A-Z), numbers (0-9), or // underscores (_). The maximum length is 1,024 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // A domain to grant access to. Any users signed in with the // domain specified will be granted the specified access Domain *string `json:"domain,omitempty" tf:"domain,omitempty"` // An email address of a Google Group to grant access to. GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email,omitempty"` // Some other type of member that appears in the IAM Policy but isn't a user, // group, domain, or special group. For example: allUsers IAMMember *string `json:"iamMember,omitempty" tf:"iam_member,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Describes the rights granted to the user specified by the other // member of the access object. Basic, predefined, and custom roles are // supported. Predefined roles that have equivalent basic roles are // swapped by the API to their basic counterparts, and will show a diff // post-create. See // official docs. 
Role *string `json:"role,omitempty" tf:"role,omitempty"` // A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to tables in // this dataset. The role field is not required when this field is // set. If that routine is updated by any user, access to the routine // needs to be granted again via an update operation. // Structure is documented below. Routine *DatasetAccessRoutineInitParameters `json:"routine,omitempty" tf:"routine,omitempty"` // A special group to grant access to. Possible values include: SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group,omitempty"` // An email address of a user to grant access to. For example: // fred@example.com // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("email",true) UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailRef *v1.Reference `json:"userByEmailRef,omitempty" tf:"-"` // Selector for a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailSelector *v1.Selector `json:"userByEmailSelector,omitempty" tf:"-"` // A view from a different dataset to grant access to. Queries // executed against that view will have read access to tables in // this dataset. The role field is not required when this field is // set. If that view is updated by any user, access to the view // needs to be granted again via an update operation. // Structure is documented below. View *DatasetAccessViewInitParameters `json:"view,omitempty" tf:"view,omitempty"` }
func (*DatasetAccessInitParameters) DeepCopy ¶
func (in *DatasetAccessInitParameters) DeepCopy() *DatasetAccessInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessInitParameters.
func (*DatasetAccessInitParameters) DeepCopyInto ¶
func (in *DatasetAccessInitParameters) DeepCopyInto(out *DatasetAccessInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessList ¶
type DatasetAccessList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []DatasetAccess `json:"items"` }
DatasetAccessList contains a list of DatasetAccess items.
func (*DatasetAccessList) DeepCopy ¶
func (in *DatasetAccessList) DeepCopy() *DatasetAccessList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessList.
func (*DatasetAccessList) DeepCopyInto ¶
func (in *DatasetAccessList) DeepCopyInto(out *DatasetAccessList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetAccessList) DeepCopyObject ¶
func (in *DatasetAccessList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetAccessList) GetItems ¶
func (l *DatasetAccessList) GetItems() []resource.Managed
GetItems of this DatasetAccessList.
type DatasetAccessObservation ¶
type DatasetAccessObservation struct { APIUpdatedMember *bool `json:"apiUpdatedMember,omitempty" tf:"api_updated_member,omitempty"` // Grants all resources of particular types in a particular dataset read access to the current dataset. // Structure is documented below. Dataset *DatasetAccessDatasetObservation `json:"dataset,omitempty" tf:"dataset,omitempty"` // A unique ID for this dataset, without the project name. The ID // must contain only letters (a-z, A-Z), numbers (0-9), or // underscores (_). The maximum length is 1,024 characters. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // A domain to grant access to. Any users signed in with the // domain specified will be granted the specified access Domain *string `json:"domain,omitempty" tf:"domain,omitempty"` // An email address of a Google Group to grant access to. GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email,omitempty"` // Some other type of member that appears in the IAM Policy but isn't a user, // group, domain, or special group. For example: allUsers IAMMember *string `json:"iamMember,omitempty" tf:"iam_member,omitempty"` // an identifier for the resource with format projects/{{project}}/datasets/{{dataset_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Describes the rights granted to the user specified by the other // member of the access object. Basic, predefined, and custom roles are // supported. Predefined roles that have equivalent basic roles are // swapped by the API to their basic counterparts, and will show a diff // post-create. See // official docs. Role *string `json:"role,omitempty" tf:"role,omitempty"` // A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to tables in // this dataset. 
The role field is not required when this field is // set. If that routine is updated by any user, access to the routine // needs to be granted again via an update operation. // Structure is documented below. Routine *DatasetAccessRoutineObservation `json:"routine,omitempty" tf:"routine,omitempty"` // A special group to grant access to. Possible values include: SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group,omitempty"` // An email address of a user to grant access to. For example: // fred@example.com UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email,omitempty"` // A view from a different dataset to grant access to. Queries // executed against that view will have read access to tables in // this dataset. The role field is not required when this field is // set. If that view is updated by any user, access to the view // needs to be granted again via an update operation. // Structure is documented below. View *DatasetAccessViewObservation `json:"view,omitempty" tf:"view,omitempty"` }
func (*DatasetAccessObservation) DeepCopy ¶
func (in *DatasetAccessObservation) DeepCopy() *DatasetAccessObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessObservation.
func (*DatasetAccessObservation) DeepCopyInto ¶
func (in *DatasetAccessObservation) DeepCopyInto(out *DatasetAccessObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessParameters ¶
type DatasetAccessParameters struct { // Grants all resources of particular types in a particular dataset read access to the current dataset. // Structure is documented below. // +kubebuilder:validation:Optional Dataset *DatasetAccessDatasetParameters `json:"dataset,omitempty" tf:"dataset,omitempty"` // A unique ID for this dataset, without the project name. The ID // must contain only letters (a-z, A-Z), numbers (0-9), or // underscores (_). The maximum length is 1,024 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // A domain to grant access to. Any users signed in with the // domain specified will be granted the specified access // +kubebuilder:validation:Optional Domain *string `json:"domain,omitempty" tf:"domain,omitempty"` // An email address of a Google Group to grant access to. // +kubebuilder:validation:Optional GroupByEmail *string `json:"groupByEmail,omitempty" tf:"group_by_email,omitempty"` // Some other type of member that appears in the IAM Policy but isn't a user, // group, domain, or special group. For example: allUsers // +kubebuilder:validation:Optional IAMMember *string `json:"iamMember,omitempty" tf:"iam_member,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Describes the rights granted to the user specified by the other // member of the access object. 
Basic, predefined, and custom roles are // supported. Predefined roles that have equivalent basic roles are // swapped by the API to their basic counterparts, and will show a diff // post-create. See // official docs. // +kubebuilder:validation:Optional Role *string `json:"role,omitempty" tf:"role,omitempty"` // A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to tables in // this dataset. The role field is not required when this field is // set. If that routine is updated by any user, access to the routine // needs to be granted again via an update operation. // Structure is documented below. // +kubebuilder:validation:Optional Routine *DatasetAccessRoutineParameters `json:"routine,omitempty" tf:"routine,omitempty"` // A special group to grant access to. Possible values include: // +kubebuilder:validation:Optional SpecialGroup *string `json:"specialGroup,omitempty" tf:"special_group,omitempty"` // An email address of a user to grant access to. For example: // fred@example.com // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("email",true) // +kubebuilder:validation:Optional UserByEmail *string `json:"userByEmail,omitempty" tf:"user_by_email,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailRef *v1.Reference `json:"userByEmailRef,omitempty" tf:"-"` // Selector for a ServiceAccount in cloudplatform to populate userByEmail. // +kubebuilder:validation:Optional UserByEmailSelector *v1.Selector `json:"userByEmailSelector,omitempty" tf:"-"` // A view from a different dataset to grant access to. Queries // executed against that view will have read access to tables in // this dataset. The role field is not required when this field is // set. 
If that view is updated by any user, access to the view // needs to be granted again via an update operation. // Structure is documented below. // +kubebuilder:validation:Optional View *DatasetAccessViewParameters `json:"view,omitempty" tf:"view,omitempty"` }
func (*DatasetAccessParameters) DeepCopy ¶
func (in *DatasetAccessParameters) DeepCopy() *DatasetAccessParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessParameters.
func (*DatasetAccessParameters) DeepCopyInto ¶
func (in *DatasetAccessParameters) DeepCopyInto(out *DatasetAccessParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessRoutineInitParameters ¶
type DatasetAccessRoutineInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("dataset_id",false) DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("project",false) ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // Reference to a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDRef *v1.Reference `json:"projectIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDSelector *v1.Selector `json:"projectIdSelector,omitempty" tf:"-"` // The ID of the routine. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 256 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine RoutineID *string `json:"routineId,omitempty" tf:"routine_id,omitempty"` // Reference to a Routine in bigquery to populate routineId. // +kubebuilder:validation:Optional RoutineIDRef *v1.Reference `json:"routineIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate routineId. 
// +kubebuilder:validation:Optional RoutineIDSelector *v1.Selector `json:"routineIdSelector,omitempty" tf:"-"` }
func (*DatasetAccessRoutineInitParameters) DeepCopy ¶
func (in *DatasetAccessRoutineInitParameters) DeepCopy() *DatasetAccessRoutineInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessRoutineInitParameters.
func (*DatasetAccessRoutineInitParameters) DeepCopyInto ¶
func (in *DatasetAccessRoutineInitParameters) DeepCopyInto(out *DatasetAccessRoutineInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessRoutineObservation ¶
type DatasetAccessRoutineObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The ID of the routine. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 256 characters. RoutineID *string `json:"routineId,omitempty" tf:"routine_id,omitempty"` }
func (*DatasetAccessRoutineObservation) DeepCopy ¶
func (in *DatasetAccessRoutineObservation) DeepCopy() *DatasetAccessRoutineObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessRoutineObservation.
func (*DatasetAccessRoutineObservation) DeepCopyInto ¶
func (in *DatasetAccessRoutineObservation) DeepCopyInto(out *DatasetAccessRoutineObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessRoutineParameters ¶
type DatasetAccessRoutineParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("dataset_id",false) // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("project",false) // +kubebuilder:validation:Optional ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // Reference to a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDRef *v1.Reference `json:"projectIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDSelector *v1.Selector `json:"projectIdSelector,omitempty" tf:"-"` // The ID of the routine. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 256 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +kubebuilder:validation:Optional RoutineID *string `json:"routineId,omitempty" tf:"routine_id,omitempty"` // Reference to a Routine in bigquery to populate routineId. 
// +kubebuilder:validation:Optional RoutineIDRef *v1.Reference `json:"routineIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate routineId. // +kubebuilder:validation:Optional RoutineIDSelector *v1.Selector `json:"routineIdSelector,omitempty" tf:"-"` }
func (*DatasetAccessRoutineParameters) DeepCopy ¶
func (in *DatasetAccessRoutineParameters) DeepCopy() *DatasetAccessRoutineParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessRoutineParameters.
func (*DatasetAccessRoutineParameters) DeepCopyInto ¶
func (in *DatasetAccessRoutineParameters) DeepCopyInto(out *DatasetAccessRoutineParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessSpec ¶
type DatasetAccessSpec struct { v1.ResourceSpec `json:",inline"` ForProvider DatasetAccessParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider DatasetAccessInitParameters `json:"initProvider,omitempty"` }
DatasetAccessSpec defines the desired state of DatasetAccess
func (*DatasetAccessSpec) DeepCopy ¶
func (in *DatasetAccessSpec) DeepCopy() *DatasetAccessSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessSpec.
func (*DatasetAccessSpec) DeepCopyInto ¶
func (in *DatasetAccessSpec) DeepCopyInto(out *DatasetAccessSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessStatus ¶
type DatasetAccessStatus struct { v1.ResourceStatus `json:",inline"` AtProvider DatasetAccessObservation `json:"atProvider,omitempty"` }
DatasetAccessStatus defines the observed state of DatasetAccess.
func (*DatasetAccessStatus) DeepCopy ¶
func (in *DatasetAccessStatus) DeepCopy() *DatasetAccessStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessStatus.
func (*DatasetAccessStatus) DeepCopyInto ¶
func (in *DatasetAccessStatus) DeepCopyInto(out *DatasetAccessStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessViewInitParameters ¶
type DatasetAccessViewInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The ID of the table. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 1,024 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*DatasetAccessViewInitParameters) DeepCopy ¶
func (in *DatasetAccessViewInitParameters) DeepCopy() *DatasetAccessViewInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessViewInitParameters.
func (*DatasetAccessViewInitParameters) DeepCopyInto ¶
func (in *DatasetAccessViewInitParameters) DeepCopyInto(out *DatasetAccessViewInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessViewObservation ¶
type DatasetAccessViewObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The ID of the table. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 1,024 characters. TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*DatasetAccessViewObservation) DeepCopy ¶
func (in *DatasetAccessViewObservation) DeepCopy() *DatasetAccessViewObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessViewObservation.
func (*DatasetAccessViewObservation) DeepCopyInto ¶
func (in *DatasetAccessViewObservation) DeepCopyInto(out *DatasetAccessViewObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessViewParameters ¶
type DatasetAccessViewParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId" tf:"project_id,omitempty"` // The ID of the table. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 1,024 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +kubebuilder:validation:Optional TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*DatasetAccessViewParameters) DeepCopy ¶
func (in *DatasetAccessViewParameters) DeepCopy() *DatasetAccessViewParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessViewParameters.
func (*DatasetAccessViewParameters) DeepCopyInto ¶
func (in *DatasetAccessViewParameters) DeepCopyInto(out *DatasetAccessViewParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetDatasetInitParameters ¶
type DatasetDatasetInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DatasetDatasetInitParameters) DeepCopy ¶
func (in *DatasetDatasetInitParameters) DeepCopy() *DatasetDatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetDatasetInitParameters.
func (*DatasetDatasetInitParameters) DeepCopyInto ¶
func (in *DatasetDatasetInitParameters) DeepCopyInto(out *DatasetDatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetDatasetObservation ¶
type DatasetDatasetObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DatasetDatasetObservation) DeepCopy ¶
func (in *DatasetDatasetObservation) DeepCopy() *DatasetDatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetDatasetObservation.
func (*DatasetDatasetObservation) DeepCopyInto ¶
func (in *DatasetDatasetObservation) DeepCopyInto(out *DatasetDatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetDatasetParameters ¶
type DatasetDatasetParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId" tf:"project_id,omitempty"` }
func (*DatasetDatasetParameters) DeepCopy ¶
func (in *DatasetDatasetParameters) DeepCopy() *DatasetDatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetDatasetParameters.
func (*DatasetDatasetParameters) DeepCopyInto ¶
func (in *DatasetDatasetParameters) DeepCopyInto(out *DatasetDatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBinding ¶
type DatasetIAMBinding struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.members) || (has(self.initProvider) && has(self.initProvider.members))",message="spec.forProvider.members is a required parameter" Spec DatasetIAMBindingSpec `json:"spec"` Status DatasetIAMBindingStatus `json:"status,omitempty"` }
DatasetIAMBinding is the Schema for the DatasetIAMBindings API. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*DatasetIAMBinding) DeepCopy ¶
func (in *DatasetIAMBinding) DeepCopy() *DatasetIAMBinding
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBinding.
func (*DatasetIAMBinding) DeepCopyInto ¶
func (in *DatasetIAMBinding) DeepCopyInto(out *DatasetIAMBinding)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetIAMBinding) DeepCopyObject ¶
func (in *DatasetIAMBinding) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetIAMBinding) GetCondition ¶
func (mg *DatasetIAMBinding) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this DatasetIAMBinding.
func (*DatasetIAMBinding) GetConnectionDetailsMapping ¶
func (tr *DatasetIAMBinding) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this DatasetIAMBinding
func (*DatasetIAMBinding) GetDeletionPolicy ¶
func (mg *DatasetIAMBinding) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this DatasetIAMBinding.
func (*DatasetIAMBinding) GetID ¶
func (tr *DatasetIAMBinding) GetID() string
GetID returns ID of underlying Terraform resource of this DatasetIAMBinding
func (*DatasetIAMBinding) GetInitParameters ¶
func (tr *DatasetIAMBinding) GetInitParameters() (map[string]any, error)
GetInitParameters of this DatasetIAMBinding
func (*DatasetIAMBinding) GetManagementPolicies ¶
func (mg *DatasetIAMBinding) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this DatasetIAMBinding.
func (*DatasetIAMBinding) GetMergedParameters ¶
func (tr *DatasetIAMBinding) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this DatasetIAMBinding
func (*DatasetIAMBinding) GetObservation ¶
func (tr *DatasetIAMBinding) GetObservation() (map[string]any, error)
GetObservation of this DatasetIAMBinding
func (*DatasetIAMBinding) GetParameters ¶
func (tr *DatasetIAMBinding) GetParameters() (map[string]any, error)
GetParameters of this DatasetIAMBinding
func (*DatasetIAMBinding) GetProviderConfigReference ¶
func (mg *DatasetIAMBinding) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this DatasetIAMBinding.
func (*DatasetIAMBinding) GetPublishConnectionDetailsTo ¶
func (mg *DatasetIAMBinding) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this DatasetIAMBinding.
func (*DatasetIAMBinding) GetTerraformResourceType ¶
func (mg *DatasetIAMBinding) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this DatasetIAMBinding
func (*DatasetIAMBinding) GetTerraformSchemaVersion ¶
func (tr *DatasetIAMBinding) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*DatasetIAMBinding) GetWriteConnectionSecretToReference ¶
func (mg *DatasetIAMBinding) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this DatasetIAMBinding.
func (*DatasetIAMBinding) Hub ¶
func (tr *DatasetIAMBinding) Hub()
Hub marks this type as a conversion hub.
func (*DatasetIAMBinding) LateInitialize ¶
func (tr *DatasetIAMBinding) LateInitialize(attrs []byte) (bool, error)
LateInitialize this DatasetIAMBinding using its observed tfState. Returns true if there are any spec changes for the resource.
func (*DatasetIAMBinding) ResolveReferences ¶
ResolveReferences of this DatasetIAMBinding.
func (*DatasetIAMBinding) SetConditions ¶
func (mg *DatasetIAMBinding) SetConditions(c ...xpv1.Condition)
SetConditions of this DatasetIAMBinding.
func (*DatasetIAMBinding) SetDeletionPolicy ¶
func (mg *DatasetIAMBinding) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this DatasetIAMBinding.
func (*DatasetIAMBinding) SetManagementPolicies ¶
func (mg *DatasetIAMBinding) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this DatasetIAMBinding.
func (*DatasetIAMBinding) SetObservation ¶
func (tr *DatasetIAMBinding) SetObservation(obs map[string]any) error
SetObservation for this DatasetIAMBinding
func (*DatasetIAMBinding) SetParameters ¶
func (tr *DatasetIAMBinding) SetParameters(params map[string]any) error
SetParameters for this DatasetIAMBinding
func (*DatasetIAMBinding) SetProviderConfigReference ¶
func (mg *DatasetIAMBinding) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this DatasetIAMBinding.
func (*DatasetIAMBinding) SetPublishConnectionDetailsTo ¶
func (mg *DatasetIAMBinding) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this DatasetIAMBinding.
func (*DatasetIAMBinding) SetWriteConnectionSecretToReference ¶
func (mg *DatasetIAMBinding) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this DatasetIAMBinding.
type DatasetIAMBindingConditionInitParameters ¶
type DatasetIAMBindingConditionInitParameters struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*DatasetIAMBindingConditionInitParameters) DeepCopy ¶
func (in *DatasetIAMBindingConditionInitParameters) DeepCopy() *DatasetIAMBindingConditionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingConditionInitParameters.
func (*DatasetIAMBindingConditionInitParameters) DeepCopyInto ¶
func (in *DatasetIAMBindingConditionInitParameters) DeepCopyInto(out *DatasetIAMBindingConditionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingConditionObservation ¶
type DatasetIAMBindingConditionObservation struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*DatasetIAMBindingConditionObservation) DeepCopy ¶
func (in *DatasetIAMBindingConditionObservation) DeepCopy() *DatasetIAMBindingConditionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingConditionObservation.
func (*DatasetIAMBindingConditionObservation) DeepCopyInto ¶
func (in *DatasetIAMBindingConditionObservation) DeepCopyInto(out *DatasetIAMBindingConditionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingConditionParameters ¶
type DatasetIAMBindingConditionParameters struct { // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // +kubebuilder:validation:Optional Expression *string `json:"expression" tf:"expression,omitempty"` // +kubebuilder:validation:Optional Title *string `json:"title" tf:"title,omitempty"` }
func (*DatasetIAMBindingConditionParameters) DeepCopy ¶
func (in *DatasetIAMBindingConditionParameters) DeepCopy() *DatasetIAMBindingConditionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingConditionParameters.
func (*DatasetIAMBindingConditionParameters) DeepCopyInto ¶
func (in *DatasetIAMBindingConditionParameters) DeepCopyInto(out *DatasetIAMBindingConditionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingInitParameters ¶
type DatasetIAMBindingInitParameters struct { Condition *DatasetIAMBindingConditionInitParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +listType=set Members []*string `json:"members,omitempty" tf:"members,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` }
func (*DatasetIAMBindingInitParameters) DeepCopy ¶
func (in *DatasetIAMBindingInitParameters) DeepCopy() *DatasetIAMBindingInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingInitParameters.
func (*DatasetIAMBindingInitParameters) DeepCopyInto ¶
func (in *DatasetIAMBindingInitParameters) DeepCopyInto(out *DatasetIAMBindingInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingList ¶
type DatasetIAMBindingList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []DatasetIAMBinding `json:"items"` }
DatasetIAMBindingList contains a list of DatasetIAMBindings
func (*DatasetIAMBindingList) DeepCopy ¶
func (in *DatasetIAMBindingList) DeepCopy() *DatasetIAMBindingList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingList.
func (*DatasetIAMBindingList) DeepCopyInto ¶
func (in *DatasetIAMBindingList) DeepCopyInto(out *DatasetIAMBindingList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetIAMBindingList) DeepCopyObject ¶
func (in *DatasetIAMBindingList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetIAMBindingList) GetItems ¶
func (l *DatasetIAMBindingList) GetItems() []resource.Managed
GetItems of this DatasetIAMBindingList.
type DatasetIAMBindingObservation ¶
type DatasetIAMBindingObservation struct { Condition *DatasetIAMBindingConditionObservation `json:"condition,omitempty" tf:"condition,omitempty"` DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` // +listType=set Members []*string `json:"members,omitempty" tf:"members,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` }
func (*DatasetIAMBindingObservation) DeepCopy ¶
func (in *DatasetIAMBindingObservation) DeepCopy() *DatasetIAMBindingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingObservation.
func (*DatasetIAMBindingObservation) DeepCopyInto ¶
func (in *DatasetIAMBindingObservation) DeepCopyInto(out *DatasetIAMBindingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingParameters ¶
type DatasetIAMBindingParameters struct { // +kubebuilder:validation:Optional Condition *DatasetIAMBindingConditionParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // +kubebuilder:validation:Optional // +listType=set Members []*string `json:"members,omitempty" tf:"members,omitempty"` // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Required Role *string `json:"role" tf:"role,omitempty"` }
func (*DatasetIAMBindingParameters) DeepCopy ¶
func (in *DatasetIAMBindingParameters) DeepCopy() *DatasetIAMBindingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingParameters.
func (*DatasetIAMBindingParameters) DeepCopyInto ¶
func (in *DatasetIAMBindingParameters) DeepCopyInto(out *DatasetIAMBindingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingSpec ¶
type DatasetIAMBindingSpec struct { v1.ResourceSpec `json:",inline"` ForProvider DatasetIAMBindingParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider DatasetIAMBindingInitParameters `json:"initProvider,omitempty"` }
DatasetIAMBindingSpec defines the desired state of DatasetIAMBinding
func (*DatasetIAMBindingSpec) DeepCopy ¶
func (in *DatasetIAMBindingSpec) DeepCopy() *DatasetIAMBindingSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingSpec.
func (*DatasetIAMBindingSpec) DeepCopyInto ¶
func (in *DatasetIAMBindingSpec) DeepCopyInto(out *DatasetIAMBindingSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMBindingStatus ¶
type DatasetIAMBindingStatus struct { v1.ResourceStatus `json:",inline"` AtProvider DatasetIAMBindingObservation `json:"atProvider,omitempty"` }
DatasetIAMBindingStatus defines the observed state of DatasetIAMBinding.
func (*DatasetIAMBindingStatus) DeepCopy ¶
func (in *DatasetIAMBindingStatus) DeepCopy() *DatasetIAMBindingStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMBindingStatus.
func (*DatasetIAMBindingStatus) DeepCopyInto ¶
func (in *DatasetIAMBindingStatus) DeepCopyInto(out *DatasetIAMBindingStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMember ¶
type DatasetIAMMember struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec DatasetIAMMemberSpec `json:"spec"` Status DatasetIAMMemberStatus `json:"status,omitempty"` }
DatasetIAMMember is the Schema for the DatasetIAMMembers API. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*DatasetIAMMember) DeepCopy ¶
func (in *DatasetIAMMember) DeepCopy() *DatasetIAMMember
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMember.
func (*DatasetIAMMember) DeepCopyInto ¶
func (in *DatasetIAMMember) DeepCopyInto(out *DatasetIAMMember)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetIAMMember) DeepCopyObject ¶
func (in *DatasetIAMMember) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetIAMMember) GetCondition ¶
func (mg *DatasetIAMMember) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this DatasetIAMMember.
func (*DatasetIAMMember) GetConnectionDetailsMapping ¶
func (tr *DatasetIAMMember) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this DatasetIAMMember
func (*DatasetIAMMember) GetDeletionPolicy ¶
func (mg *DatasetIAMMember) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this DatasetIAMMember.
func (*DatasetIAMMember) GetID ¶
func (tr *DatasetIAMMember) GetID() string
GetID returns ID of underlying Terraform resource of this DatasetIAMMember
func (*DatasetIAMMember) GetInitParameters ¶
func (tr *DatasetIAMMember) GetInitParameters() (map[string]any, error)
GetInitParameters of this DatasetIAMMember
func (*DatasetIAMMember) GetManagementPolicies ¶
func (mg *DatasetIAMMember) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this DatasetIAMMember.
func (*DatasetIAMMember) GetMergedParameters ¶
func (tr *DatasetIAMMember) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this DatasetIAMMember
func (*DatasetIAMMember) GetObservation ¶
func (tr *DatasetIAMMember) GetObservation() (map[string]any, error)
GetObservation of this DatasetIAMMember
func (*DatasetIAMMember) GetParameters ¶
func (tr *DatasetIAMMember) GetParameters() (map[string]any, error)
GetParameters of this DatasetIAMMember
func (*DatasetIAMMember) GetProviderConfigReference ¶
func (mg *DatasetIAMMember) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this DatasetIAMMember.
func (*DatasetIAMMember) GetPublishConnectionDetailsTo ¶
func (mg *DatasetIAMMember) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this DatasetIAMMember.
func (*DatasetIAMMember) GetTerraformResourceType ¶
func (mg *DatasetIAMMember) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this DatasetIAMMember
func (*DatasetIAMMember) GetTerraformSchemaVersion ¶
func (tr *DatasetIAMMember) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*DatasetIAMMember) GetWriteConnectionSecretToReference ¶
func (mg *DatasetIAMMember) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this DatasetIAMMember.
func (*DatasetIAMMember) Hub ¶
func (tr *DatasetIAMMember) Hub()
Hub marks this type as a conversion hub.
func (*DatasetIAMMember) LateInitialize ¶
func (tr *DatasetIAMMember) LateInitialize(attrs []byte) (bool, error)
LateInitialize this DatasetIAMMember using its observed tfState. Returns true if there are any spec changes for the resource.
func (*DatasetIAMMember) ResolveReferences ¶
ResolveReferences of this DatasetIAMMember.
func (*DatasetIAMMember) SetConditions ¶
func (mg *DatasetIAMMember) SetConditions(c ...xpv1.Condition)
SetConditions of this DatasetIAMMember.
func (*DatasetIAMMember) SetDeletionPolicy ¶
func (mg *DatasetIAMMember) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this DatasetIAMMember.
func (*DatasetIAMMember) SetManagementPolicies ¶
func (mg *DatasetIAMMember) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this DatasetIAMMember.
func (*DatasetIAMMember) SetObservation ¶
func (tr *DatasetIAMMember) SetObservation(obs map[string]any) error
SetObservation for this DatasetIAMMember
func (*DatasetIAMMember) SetParameters ¶
func (tr *DatasetIAMMember) SetParameters(params map[string]any) error
SetParameters for this DatasetIAMMember
func (*DatasetIAMMember) SetProviderConfigReference ¶
func (mg *DatasetIAMMember) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this DatasetIAMMember.
func (*DatasetIAMMember) SetPublishConnectionDetailsTo ¶
func (mg *DatasetIAMMember) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this DatasetIAMMember.
func (*DatasetIAMMember) SetWriteConnectionSecretToReference ¶
func (mg *DatasetIAMMember) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this DatasetIAMMember.
type DatasetIAMMemberConditionInitParameters ¶
type DatasetIAMMemberConditionInitParameters struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*DatasetIAMMemberConditionInitParameters) DeepCopy ¶
func (in *DatasetIAMMemberConditionInitParameters) DeepCopy() *DatasetIAMMemberConditionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberConditionInitParameters.
func (*DatasetIAMMemberConditionInitParameters) DeepCopyInto ¶
func (in *DatasetIAMMemberConditionInitParameters) DeepCopyInto(out *DatasetIAMMemberConditionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberConditionObservation ¶
type DatasetIAMMemberConditionObservation struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*DatasetIAMMemberConditionObservation) DeepCopy ¶
func (in *DatasetIAMMemberConditionObservation) DeepCopy() *DatasetIAMMemberConditionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberConditionObservation.
func (*DatasetIAMMemberConditionObservation) DeepCopyInto ¶
func (in *DatasetIAMMemberConditionObservation) DeepCopyInto(out *DatasetIAMMemberConditionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberConditionParameters ¶
type DatasetIAMMemberConditionParameters struct { // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // +kubebuilder:validation:Optional Expression *string `json:"expression" tf:"expression,omitempty"` // +kubebuilder:validation:Optional Title *string `json:"title" tf:"title,omitempty"` }
func (*DatasetIAMMemberConditionParameters) DeepCopy ¶
func (in *DatasetIAMMemberConditionParameters) DeepCopy() *DatasetIAMMemberConditionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberConditionParameters.
func (*DatasetIAMMemberConditionParameters) DeepCopyInto ¶
func (in *DatasetIAMMemberConditionParameters) DeepCopyInto(out *DatasetIAMMemberConditionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberInitParameters ¶
type DatasetIAMMemberInitParameters struct { Condition *DatasetIAMMemberConditionInitParameters `json:"condition,omitempty" tf:"condition,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` }
func (*DatasetIAMMemberInitParameters) DeepCopy ¶
func (in *DatasetIAMMemberInitParameters) DeepCopy() *DatasetIAMMemberInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberInitParameters.
func (*DatasetIAMMemberInitParameters) DeepCopyInto ¶
func (in *DatasetIAMMemberInitParameters) DeepCopyInto(out *DatasetIAMMemberInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberList ¶
type DatasetIAMMemberList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []DatasetIAMMember `json:"items"` }
DatasetIAMMemberList contains a list of DatasetIAMMembers
func (*DatasetIAMMemberList) DeepCopy ¶
func (in *DatasetIAMMemberList) DeepCopy() *DatasetIAMMemberList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberList.
func (*DatasetIAMMemberList) DeepCopyInto ¶
func (in *DatasetIAMMemberList) DeepCopyInto(out *DatasetIAMMemberList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetIAMMemberList) DeepCopyObject ¶
func (in *DatasetIAMMemberList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetIAMMemberList) GetItems ¶
func (l *DatasetIAMMemberList) GetItems() []resource.Managed
GetItems of this DatasetIAMMemberList.
type DatasetIAMMemberObservation ¶
type DatasetIAMMemberObservation struct { Condition *DatasetIAMMemberConditionObservation `json:"condition,omitempty" tf:"condition,omitempty"` DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` Member *string `json:"member,omitempty" tf:"member,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` }
func (*DatasetIAMMemberObservation) DeepCopy ¶
func (in *DatasetIAMMemberObservation) DeepCopy() *DatasetIAMMemberObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberObservation.
func (*DatasetIAMMemberObservation) DeepCopyInto ¶
func (in *DatasetIAMMemberObservation) DeepCopyInto(out *DatasetIAMMemberObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberParameters ¶
type DatasetIAMMemberParameters struct { // +kubebuilder:validation:Optional Condition *DatasetIAMMemberConditionParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // +kubebuilder:validation:Required Member *string `json:"member" tf:"member,omitempty"` // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Required Role *string `json:"role" tf:"role,omitempty"` }
func (*DatasetIAMMemberParameters) DeepCopy ¶
func (in *DatasetIAMMemberParameters) DeepCopy() *DatasetIAMMemberParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberParameters.
func (*DatasetIAMMemberParameters) DeepCopyInto ¶
func (in *DatasetIAMMemberParameters) DeepCopyInto(out *DatasetIAMMemberParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberSpec ¶
type DatasetIAMMemberSpec struct { v1.ResourceSpec `json:",inline"` ForProvider DatasetIAMMemberParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider DatasetIAMMemberInitParameters `json:"initProvider,omitempty"` }
DatasetIAMMemberSpec defines the desired state of DatasetIAMMember.
func (*DatasetIAMMemberSpec) DeepCopy ¶
func (in *DatasetIAMMemberSpec) DeepCopy() *DatasetIAMMemberSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberSpec.
func (*DatasetIAMMemberSpec) DeepCopyInto ¶
func (in *DatasetIAMMemberSpec) DeepCopyInto(out *DatasetIAMMemberSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIAMMemberStatus ¶
type DatasetIAMMemberStatus struct { v1.ResourceStatus `json:",inline"` AtProvider DatasetIAMMemberObservation `json:"atProvider,omitempty"` }
DatasetIAMMemberStatus defines the observed state of DatasetIAMMember.
func (*DatasetIAMMemberStatus) DeepCopy ¶
func (in *DatasetIAMMemberStatus) DeepCopy() *DatasetIAMMemberStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIAMMemberStatus.
func (*DatasetIAMMemberStatus) DeepCopyInto ¶
func (in *DatasetIAMMemberStatus) DeepCopyInto(out *DatasetIAMMemberStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetInitParameters ¶
type DatasetInitParameters struct { // An array of objects that define dataset access for one or more entities. // Structure is documented below. Access []AccessInitParameters `json:"access,omitempty" tf:"access,omitempty"` // Defines the default collation specification of future tables created // in the dataset. If a table is created in this dataset without table-level // default collation, then the table inherits the dataset default collation, // which is applied to the string fields that do not have explicit collation // specified. A change to this field affects only tables created afterwards, // and does not alter the existing tables. // The following values are supported: DefaultCollation *string `json:"defaultCollation,omitempty" tf:"default_collation,omitempty"` // The default encryption key for all tables in the dataset. Once this property is set, // all newly-created partitioned tables in the dataset will have encryption key set to // this value, unless table creation request (or query) overrides the key. // Structure is documented below. DefaultEncryptionConfiguration *DefaultEncryptionConfigurationInitParameters `json:"defaultEncryptionConfiguration,omitempty" tf:"default_encryption_configuration,omitempty"` // The default partition expiration for all partitioned tables in // the dataset, in milliseconds. // Once this property is set, all newly-created partitioned tables in // the dataset will have an expirationMs property in the timePartitioning // settings set to this value, and changing the value will only // affect new tables, not existing ones. The storage in a partition will // have an expiration time of its partition time plus this value. // Setting this property overrides the use of defaultTableExpirationMs // for partitioned tables: only one of defaultTableExpirationMs and // defaultPartitionExpirationMs will be used for any new partitioned // table. 
// If you provide an explicit timePartitioning.expirationMs when // creating or updating a partitioned table, that value takes precedence // over the default partition expiration time indicated by this property. DefaultPartitionExpirationMs *float64 `json:"defaultPartitionExpirationMs,omitempty" tf:"default_partition_expiration_ms,omitempty"` // The default lifetime of all tables in the dataset, in milliseconds. // The minimum value is 3600000 milliseconds (one hour). // Once this property is set, all newly-created tables in the dataset // will have an expirationTime property set to the creation time plus // the value in this property, and changing the value will only affect // new tables, not existing ones. When the expirationTime for a given // table is reached, that table will be deleted automatically. // If a table's expirationTime is modified or removed before the // table expires, or if you provide an explicit expirationTime when // creating a table, that value takes precedence over the default // expiration time indicated by this property. DefaultTableExpirationMs *float64 `json:"defaultTableExpirationMs,omitempty" tf:"default_table_expiration_ms,omitempty"` // If set to true, delete all the tables in the // dataset when destroying the resource; otherwise, // destroying the resource will fail if tables are present. DeleteContentsOnDestroy *bool `json:"deleteContentsOnDestroy,omitempty" tf:"delete_contents_on_destroy,omitempty"` // A user-friendly description of the dataset Description *string `json:"description,omitempty" tf:"description,omitempty"` // Information about the external metadata storage where the dataset is defined. // Structure is documented below. 
ExternalDatasetReference *ExternalDatasetReferenceInitParameters `json:"externalDatasetReference,omitempty" tf:"external_dataset_reference,omitempty"` // A descriptive name for the dataset FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. // By default, this is FALSE, which means the dataset and its table names are // case-sensitive. This field does not affect routine references. IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty" tf:"is_case_insensitive,omitempty"` // The labels associated with this dataset. You can use these to // organize and group your datasets. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The geographic location where the dataset should reside. // See official docs. // There are two types of locations, regional or multi-regional. A regional // location is a specific geographic place, such as Tokyo, and a multi-regional // location is a large geographic area, such as the United States, that // contains at least two geographic places. // The default value is multi-regional location US. // Changing this forces a new resource to be created. Location *string `json:"location,omitempty" tf:"location,omitempty"` // Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days). MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty" tf:"max_time_travel_hours,omitempty"` // The tags attached to this table. Tag keys are globally unique. Tag key is expected to be // in the namespaced format, for example "123456789012/environment" where 123456789012 is the // ID of the parent organization or project resource for this tag key. Tag value is expected // to be the short name, for example "Production". See Tag definitions // for more details. 
// +mapType=granular ResourceTags map[string]*string `json:"resourceTags,omitempty" tf:"resource_tags,omitempty"` // Specifies the storage billing model for the dataset. // Set this flag value to LOGICAL to use logical bytes for storage billing, // or to PHYSICAL to use physical bytes instead. // LOGICAL is the default if this flag isn't specified. StorageBillingModel *string `json:"storageBillingModel,omitempty" tf:"storage_billing_model,omitempty"` }
func (*DatasetInitParameters) DeepCopy ¶
func (in *DatasetInitParameters) DeepCopy() *DatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetInitParameters.
func (*DatasetInitParameters) DeepCopyInto ¶
func (in *DatasetInitParameters) DeepCopyInto(out *DatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetList ¶
type DatasetList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Dataset `json:"items"` }
DatasetList contains a list of Datasets.
func (*DatasetList) DeepCopy ¶
func (in *DatasetList) DeepCopy() *DatasetList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList.
func (*DatasetList) DeepCopyInto ¶
func (in *DatasetList) DeepCopyInto(out *DatasetList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetList) DeepCopyObject ¶
func (in *DatasetList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*DatasetList) GetItems ¶
func (l *DatasetList) GetItems() []resource.Managed
GetItems of this DatasetList.
type DatasetObservation ¶
type DatasetObservation struct { // An array of objects that define dataset access for one or more entities. // Structure is documented below. Access []AccessObservation `json:"access,omitempty" tf:"access,omitempty"` // The time when this dataset was created, in milliseconds since the // epoch. CreationTime *float64 `json:"creationTime,omitempty" tf:"creation_time,omitempty"` // Defines the default collation specification of future tables created // in the dataset. If a table is created in this dataset without table-level // default collation, then the table inherits the dataset default collation, // which is applied to the string fields that do not have explicit collation // specified. A change to this field affects only tables created afterwards, // and does not alter the existing tables. // The following values are supported: DefaultCollation *string `json:"defaultCollation,omitempty" tf:"default_collation,omitempty"` // The default encryption key for all tables in the dataset. Once this property is set, // all newly-created partitioned tables in the dataset will have encryption key set to // this value, unless table creation request (or query) overrides the key. // Structure is documented below. DefaultEncryptionConfiguration *DefaultEncryptionConfigurationObservation `json:"defaultEncryptionConfiguration,omitempty" tf:"default_encryption_configuration,omitempty"` // The default partition expiration for all partitioned tables in // the dataset, in milliseconds. // Once this property is set, all newly-created partitioned tables in // the dataset will have an expirationMs property in the timePartitioning // settings set to this value, and changing the value will only // affect new tables, not existing ones. The storage in a partition will // have an expiration time of its partition time plus this value. 
// Setting this property overrides the use of defaultTableExpirationMs // for partitioned tables: only one of defaultTableExpirationMs and // defaultPartitionExpirationMs will be used for any new partitioned // table. If you provide an explicit timePartitioning.expirationMs when // creating or updating a partitioned table, that value takes precedence // over the default partition expiration time indicated by this property. DefaultPartitionExpirationMs *float64 `json:"defaultPartitionExpirationMs,omitempty" tf:"default_partition_expiration_ms,omitempty"` // The default lifetime of all tables in the dataset, in milliseconds. // The minimum value is 3600000 milliseconds (one hour). // Once this property is set, all newly-created tables in the dataset // will have an expirationTime property set to the creation time plus // the value in this property, and changing the value will only affect // new tables, not existing ones. When the expirationTime for a given // table is reached, that table will be deleted automatically. // If a table's expirationTime is modified or removed before the // table expires, or if you provide an explicit expirationTime when // creating a table, that value takes precedence over the default // expiration time indicated by this property. DefaultTableExpirationMs *float64 `json:"defaultTableExpirationMs,omitempty" tf:"default_table_expiration_ms,omitempty"` // If set to true, delete all the tables in the // dataset when destroying the resource; otherwise, // destroying the resource will fail if tables are present. DeleteContentsOnDestroy *bool `json:"deleteContentsOnDestroy,omitempty" tf:"delete_contents_on_destroy,omitempty"` // A user-friendly description of the dataset Description *string `json:"description,omitempty" tf:"description,omitempty"` // for all of the labels present on the resource. // +mapType=granular EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"` // A hash of the resource. 
Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` // Information about the external metadata storage where the dataset is defined. // Structure is documented below. ExternalDatasetReference *ExternalDatasetReferenceObservation `json:"externalDatasetReference,omitempty" tf:"external_dataset_reference,omitempty"` // A descriptive name for the dataset FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // an identifier for the resource with format projects/{{project}}/datasets/{{dataset_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. // By default, this is FALSE, which means the dataset and its table names are // case-sensitive. This field does not affect routine references. IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty" tf:"is_case_insensitive,omitempty"` // The labels associated with this dataset. You can use these to // organize and group your datasets. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The date when this dataset or any of its tables was last modified, in // milliseconds since the epoch. LastModifiedTime *float64 `json:"lastModifiedTime,omitempty" tf:"last_modified_time,omitempty"` // The geographic location where the dataset should reside. // See official docs. // There are two types of locations, regional or multi-regional. A regional // location is a specific geographic place, such as Tokyo, and a multi-regional // location is a large geographic area, such as the United States, that // contains at least two geographic places. // The default value is multi-regional location US. // Changing this forces a new resource to be created. Location *string `json:"location,omitempty" tf:"location,omitempty"` // Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days). 
MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty" tf:"max_time_travel_hours,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // The tags attached to this table. Tag keys are globally unique. Tag key is expected to be // in the namespaced format, for example "123456789012/environment" where 123456789012 is the // ID of the parent organization or project resource for this tag key. Tag value is expected // to be the short name, for example "Production". See Tag definitions // for more details. // +mapType=granular ResourceTags map[string]*string `json:"resourceTags,omitempty" tf:"resource_tags,omitempty"` // The URI of the created resource. SelfLink *string `json:"selfLink,omitempty" tf:"self_link,omitempty"` // Specifies the storage billing model for the dataset. // Set this flag value to LOGICAL to use logical bytes for storage billing, // or to PHYSICAL to use physical bytes instead. // LOGICAL is the default if this flag isn't specified. StorageBillingModel *string `json:"storageBillingModel,omitempty" tf:"storage_billing_model,omitempty"` // The combination of labels configured directly on the resource // and default labels configured on the provider. // +mapType=granular TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"` }
func (*DatasetObservation) DeepCopy ¶
func (in *DatasetObservation) DeepCopy() *DatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetObservation.
func (*DatasetObservation) DeepCopyInto ¶
func (in *DatasetObservation) DeepCopyInto(out *DatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetParameters ¶
type DatasetParameters struct { // An array of objects that define dataset access for one or more entities. // Structure is documented below. // +kubebuilder:validation:Optional Access []AccessParameters `json:"access,omitempty" tf:"access,omitempty"` // Defines the default collation specification of future tables created // in the dataset. If a table is created in this dataset without table-level // default collation, then the table inherits the dataset default collation, // which is applied to the string fields that do not have explicit collation // specified. A change to this field affects only tables created afterwards, // and does not alter the existing tables. // The following values are supported: // +kubebuilder:validation:Optional DefaultCollation *string `json:"defaultCollation,omitempty" tf:"default_collation,omitempty"` // The default encryption key for all tables in the dataset. Once this property is set, // all newly-created partitioned tables in the dataset will have encryption key set to // this value, unless table creation request (or query) overrides the key. // Structure is documented below. // +kubebuilder:validation:Optional DefaultEncryptionConfiguration *DefaultEncryptionConfigurationParameters `json:"defaultEncryptionConfiguration,omitempty" tf:"default_encryption_configuration,omitempty"` // The default partition expiration for all partitioned tables in // the dataset, in milliseconds. // Once this property is set, all newly-created partitioned tables in // the dataset will have an expirationMs property in the timePartitioning // settings set to this value, and changing the value will only // affect new tables, not existing ones. The storage in a partition will // have an expiration time of its partition time plus this value. // Setting this property overrides the use of defaultTableExpirationMs // for partitioned tables: only one of defaultTableExpirationMs and // defaultPartitionExpirationMs will be used for any new partitioned // table. 
// If you provide an explicit timePartitioning.expirationMs when // creating or updating a partitioned table, that value takes precedence // over the default partition expiration time indicated by this property. // +kubebuilder:validation:Optional DefaultPartitionExpirationMs *float64 `json:"defaultPartitionExpirationMs,omitempty" tf:"default_partition_expiration_ms,omitempty"` // The default lifetime of all tables in the dataset, in milliseconds. // The minimum value is 3600000 milliseconds (one hour). // Once this property is set, all newly-created tables in the dataset // will have an expirationTime property set to the creation time plus // the value in this property, and changing the value will only affect // new tables, not existing ones. When the expirationTime for a given // table is reached, that table will be deleted automatically. // If a table's expirationTime is modified or removed before the // table expires, or if you provide an explicit expirationTime when // creating a table, that value takes precedence over the default // expiration time indicated by this property. // +kubebuilder:validation:Optional DefaultTableExpirationMs *float64 `json:"defaultTableExpirationMs,omitempty" tf:"default_table_expiration_ms,omitempty"` // If set to true, delete all the tables in the // dataset when destroying the resource; otherwise, // destroying the resource will fail if tables are present. // +kubebuilder:validation:Optional DeleteContentsOnDestroy *bool `json:"deleteContentsOnDestroy,omitempty" tf:"delete_contents_on_destroy,omitempty"` // A user-friendly description of the dataset // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // Information about the external metadata storage where the dataset is defined. // Structure is documented below. 
// +kubebuilder:validation:Optional ExternalDatasetReference *ExternalDatasetReferenceParameters `json:"externalDatasetReference,omitempty" tf:"external_dataset_reference,omitempty"` // A descriptive name for the dataset // +kubebuilder:validation:Optional FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. // By default, this is FALSE, which means the dataset and its table names are // case-sensitive. This field does not affect routine references. // +kubebuilder:validation:Optional IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty" tf:"is_case_insensitive,omitempty"` // The labels associated with this dataset. You can use these to // organize and group your datasets. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The geographic location where the dataset should reside. // See official docs. // There are two types of locations, regional or multi-regional. A regional // location is a specific geographic place, such as Tokyo, and a multi-regional // location is a large geographic area, such as the United States, that // contains at least two geographic places. // The default value is multi-regional location US. // Changing this forces a new resource to be created. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days). // +kubebuilder:validation:Optional MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty" tf:"max_time_travel_hours,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // The tags attached to this table. 
// Tag keys are globally unique. Tag key is expected to be // in the namespaced format, for example "123456789012/environment" where 123456789012 is the // ID of the parent organization or project resource for this tag key. Tag value is expected // to be the short name, for example "Production". See Tag definitions // for more details. // +kubebuilder:validation:Optional // +mapType=granular ResourceTags map[string]*string `json:"resourceTags,omitempty" tf:"resource_tags,omitempty"` // Specifies the storage billing model for the dataset. // Set this flag value to LOGICAL to use logical bytes for storage billing, // or to PHYSICAL to use physical bytes instead. // LOGICAL is the default if this flag isn't specified. // +kubebuilder:validation:Optional StorageBillingModel *string `json:"storageBillingModel,omitempty" tf:"storage_billing_model,omitempty"` }
func (*DatasetParameters) DeepCopy ¶
func (in *DatasetParameters) DeepCopy() *DatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetParameters.
func (*DatasetParameters) DeepCopyInto ¶
func (in *DatasetParameters) DeepCopyInto(out *DatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetSpec ¶
type DatasetSpec struct { v1.ResourceSpec `json:",inline"` ForProvider DatasetParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider DatasetInitParameters `json:"initProvider,omitempty"` }
DatasetSpec defines the desired state of Dataset.
func (*DatasetSpec) DeepCopy ¶
func (in *DatasetSpec) DeepCopy() *DatasetSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetSpec.
func (*DatasetSpec) DeepCopyInto ¶
func (in *DatasetSpec) DeepCopyInto(out *DatasetSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetStatus ¶
type DatasetStatus struct { v1.ResourceStatus `json:",inline"` AtProvider DatasetObservation `json:"atProvider,omitempty"` }
DatasetStatus defines the observed state of Dataset.
func (*DatasetStatus) DeepCopy ¶
func (in *DatasetStatus) DeepCopy() *DatasetStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetStatus.
func (*DatasetStatus) DeepCopyInto ¶
func (in *DatasetStatus) DeepCopyInto(out *DatasetStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DefaultDatasetInitParameters ¶
type DefaultDatasetInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DefaultDatasetInitParameters) DeepCopy ¶
func (in *DefaultDatasetInitParameters) DeepCopy() *DefaultDatasetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultDatasetInitParameters.
func (*DefaultDatasetInitParameters) DeepCopyInto ¶
func (in *DefaultDatasetInitParameters) DeepCopyInto(out *DefaultDatasetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DefaultDatasetObservation ¶
type DefaultDatasetObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DefaultDatasetObservation) DeepCopy ¶
func (in *DefaultDatasetObservation) DeepCopy() *DefaultDatasetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultDatasetObservation.
func (*DefaultDatasetObservation) DeepCopyInto ¶
func (in *DefaultDatasetObservation) DeepCopyInto(out *DefaultDatasetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DefaultDatasetParameters ¶
type DefaultDatasetParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` }
func (*DefaultDatasetParameters) DeepCopy ¶
func (in *DefaultDatasetParameters) DeepCopy() *DefaultDatasetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultDatasetParameters.
func (*DefaultDatasetParameters) DeepCopyInto ¶
func (in *DefaultDatasetParameters) DeepCopyInto(out *DefaultDatasetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DefaultEncryptionConfigurationInitParameters ¶
type DefaultEncryptionConfigurationInitParameters struct { // Describes the Cloud KMS encryption key that will be used to protect destination // BigQuery table. The BigQuery Service Account associated with your project requires // access to this encryption key. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta2.CryptoKey // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // Reference to a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameRef *v1.Reference `json:"kmsKeyNameRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameSelector *v1.Selector `json:"kmsKeyNameSelector,omitempty" tf:"-"` }
func (*DefaultEncryptionConfigurationInitParameters) DeepCopy ¶
func (in *DefaultEncryptionConfigurationInitParameters) DeepCopy() *DefaultEncryptionConfigurationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultEncryptionConfigurationInitParameters.
func (*DefaultEncryptionConfigurationInitParameters) DeepCopyInto ¶
func (in *DefaultEncryptionConfigurationInitParameters) DeepCopyInto(out *DefaultEncryptionConfigurationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DefaultEncryptionConfigurationObservation ¶
type DefaultEncryptionConfigurationObservation struct { // Describes the Cloud KMS encryption key that will be used to protect destination // BigQuery table. The BigQuery Service Account associated with your project requires // access to this encryption key. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` }
func (*DefaultEncryptionConfigurationObservation) DeepCopy ¶
func (in *DefaultEncryptionConfigurationObservation) DeepCopy() *DefaultEncryptionConfigurationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultEncryptionConfigurationObservation.
func (*DefaultEncryptionConfigurationObservation) DeepCopyInto ¶
func (in *DefaultEncryptionConfigurationObservation) DeepCopyInto(out *DefaultEncryptionConfigurationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DefaultEncryptionConfigurationParameters ¶
type DefaultEncryptionConfigurationParameters struct { // Describes the Cloud KMS encryption key that will be used to protect destination // BigQuery table. The BigQuery Service Account associated with your project requires // access to this encryption key. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta2.CryptoKey // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // Reference to a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameRef *v1.Reference `json:"kmsKeyNameRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameSelector *v1.Selector `json:"kmsKeyNameSelector,omitempty" tf:"-"` }
func (*DefaultEncryptionConfigurationParameters) DeepCopy ¶
func (in *DefaultEncryptionConfigurationParameters) DeepCopy() *DefaultEncryptionConfigurationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DefaultEncryptionConfigurationParameters.
func (*DefaultEncryptionConfigurationParameters) DeepCopyInto ¶
func (in *DefaultEncryptionConfigurationParameters) DeepCopyInto(out *DefaultEncryptionConfigurationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DestinationEncryptionConfigurationInitParameters ¶
type DestinationEncryptionConfigurationInitParameters struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // The BigQuery Service Account associated with your project requires access to this encryption key. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta2.CryptoKey // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // Reference to a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameRef *v1.Reference `json:"kmsKeyNameRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameSelector *v1.Selector `json:"kmsKeyNameSelector,omitempty" tf:"-"` }
func (*DestinationEncryptionConfigurationInitParameters) DeepCopy ¶
func (in *DestinationEncryptionConfigurationInitParameters) DeepCopy() *DestinationEncryptionConfigurationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationEncryptionConfigurationInitParameters.
func (*DestinationEncryptionConfigurationInitParameters) DeepCopyInto ¶
func (in *DestinationEncryptionConfigurationInitParameters) DeepCopyInto(out *DestinationEncryptionConfigurationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DestinationEncryptionConfigurationObservation ¶
type DestinationEncryptionConfigurationObservation struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // The BigQuery Service Account associated with your project requires access to this encryption key. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // (Output) // Describes the Cloud KMS encryption key version used to protect destination BigQuery table. KMSKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version,omitempty"` }
func (*DestinationEncryptionConfigurationObservation) DeepCopy ¶
func (in *DestinationEncryptionConfigurationObservation) DeepCopy() *DestinationEncryptionConfigurationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationEncryptionConfigurationObservation.
func (*DestinationEncryptionConfigurationObservation) DeepCopyInto ¶
func (in *DestinationEncryptionConfigurationObservation) DeepCopyInto(out *DestinationEncryptionConfigurationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DestinationEncryptionConfigurationParameters ¶
type DestinationEncryptionConfigurationParameters struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // The BigQuery Service Account associated with your project requires access to this encryption key. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta2.CryptoKey // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // Reference to a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameRef *v1.Reference `json:"kmsKeyNameRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKeyName. // +kubebuilder:validation:Optional KMSKeyNameSelector *v1.Selector `json:"kmsKeyNameSelector,omitempty" tf:"-"` }
func (*DestinationEncryptionConfigurationParameters) DeepCopy ¶
func (in *DestinationEncryptionConfigurationParameters) DeepCopy() *DestinationEncryptionConfigurationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationEncryptionConfigurationParameters.
func (*DestinationEncryptionConfigurationParameters) DeepCopyInto ¶
func (in *DestinationEncryptionConfigurationParameters) DeepCopyInto(out *DestinationEncryptionConfigurationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DestinationTableInitParameters ¶
type DestinationTableInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*DestinationTableInitParameters) DeepCopy ¶
func (in *DestinationTableInitParameters) DeepCopy() *DestinationTableInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationTableInitParameters.
func (*DestinationTableInitParameters) DeepCopyInto ¶
func (in *DestinationTableInitParameters) DeepCopyInto(out *DestinationTableInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DestinationTableObservation ¶
type DestinationTableObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*DestinationTableObservation) DeepCopy ¶
func (in *DestinationTableObservation) DeepCopy() *DestinationTableObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationTableObservation.
func (*DestinationTableObservation) DeepCopyInto ¶
func (in *DestinationTableObservation) DeepCopyInto(out *DestinationTableObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DestinationTableParameters ¶
type DestinationTableParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*DestinationTableParameters) DeepCopy ¶
func (in *DestinationTableParameters) DeepCopy() *DestinationTableParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationTableParameters.
func (*DestinationTableParameters) DeepCopyInto ¶
func (in *DestinationTableParameters) DeepCopyInto(out *DestinationTableParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EmailPreferencesInitParameters ¶
type EmailPreferencesInitParameters struct { // If true, email notifications will be sent on transfer run failures. EnableFailureEmail *bool `json:"enableFailureEmail,omitempty" tf:"enable_failure_email,omitempty"` }
func (*EmailPreferencesInitParameters) DeepCopy ¶
func (in *EmailPreferencesInitParameters) DeepCopy() *EmailPreferencesInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EmailPreferencesInitParameters.
func (*EmailPreferencesInitParameters) DeepCopyInto ¶
func (in *EmailPreferencesInitParameters) DeepCopyInto(out *EmailPreferencesInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EmailPreferencesObservation ¶
type EmailPreferencesObservation struct { // If true, email notifications will be sent on transfer run failures. EnableFailureEmail *bool `json:"enableFailureEmail,omitempty" tf:"enable_failure_email,omitempty"` }
func (*EmailPreferencesObservation) DeepCopy ¶
func (in *EmailPreferencesObservation) DeepCopy() *EmailPreferencesObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EmailPreferencesObservation.
func (*EmailPreferencesObservation) DeepCopyInto ¶
func (in *EmailPreferencesObservation) DeepCopyInto(out *EmailPreferencesObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EmailPreferencesParameters ¶
type EmailPreferencesParameters struct { // If true, email notifications will be sent on transfer run failures. // +kubebuilder:validation:Optional EnableFailureEmail *bool `json:"enableFailureEmail" tf:"enable_failure_email,omitempty"` }
func (*EmailPreferencesParameters) DeepCopy ¶
func (in *EmailPreferencesParameters) DeepCopy() *EmailPreferencesParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EmailPreferencesParameters.
func (*EmailPreferencesParameters) DeepCopyInto ¶
func (in *EmailPreferencesParameters) DeepCopyInto(out *EmailPreferencesParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigurationInitParameters ¶
type EncryptionConfigurationInitParameters struct { // The self link or full name of a key which should be used to // encrypt this table. Note that the default bigquery service account will need to have // encrypt/decrypt permissions on this key - you may want to see the // google_bigquery_default_service_account datasource and the // google_kms_crypto_key_iam_binding resource. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigurationInitParameters) DeepCopy ¶
func (in *EncryptionConfigurationInitParameters) DeepCopy() *EncryptionConfigurationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigurationInitParameters.
func (*EncryptionConfigurationInitParameters) DeepCopyInto ¶
func (in *EncryptionConfigurationInitParameters) DeepCopyInto(out *EncryptionConfigurationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigurationObservation ¶
type EncryptionConfigurationObservation struct { // The self link or full name of a key which should be used to // encrypt this table. Note that the default bigquery service account will need to have // encrypt/decrypt permissions on this key - you may want to see the // google_bigquery_default_service_account datasource and the // google_kms_crypto_key_iam_binding resource. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // The self link or full name of the kms key version used to encrypt this table. KMSKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version,omitempty"` }
func (*EncryptionConfigurationObservation) DeepCopy ¶
func (in *EncryptionConfigurationObservation) DeepCopy() *EncryptionConfigurationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigurationObservation.
func (*EncryptionConfigurationObservation) DeepCopyInto ¶
func (in *EncryptionConfigurationObservation) DeepCopyInto(out *EncryptionConfigurationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigurationParameters ¶
type EncryptionConfigurationParameters struct { // The self link or full name of a key which should be used to // encrypt this table. Note that the default bigquery service account will need to have // encrypt/decrypt permissions on this key - you may want to see the // google_bigquery_default_service_account datasource and the // google_kms_crypto_key_iam_binding resource. // +kubebuilder:validation:Optional KMSKeyName *string `json:"kmsKeyName" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigurationParameters) DeepCopy ¶
func (in *EncryptionConfigurationParameters) DeepCopy() *EncryptionConfigurationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigurationParameters.
func (*EncryptionConfigurationParameters) DeepCopyInto ¶
func (in *EncryptionConfigurationParameters) DeepCopyInto(out *EncryptionConfigurationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ErrorResultInitParameters ¶
type ErrorResultInitParameters struct { }
func (*ErrorResultInitParameters) DeepCopy ¶
func (in *ErrorResultInitParameters) DeepCopy() *ErrorResultInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorResultInitParameters.
func (*ErrorResultInitParameters) DeepCopyInto ¶
func (in *ErrorResultInitParameters) DeepCopyInto(out *ErrorResultInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ErrorResultObservation ¶
type ErrorResultObservation struct { // The geographic location of the job. The default value is US. Location *string `json:"location,omitempty" tf:"location,omitempty"` // A human-readable description of the error. Message *string `json:"message,omitempty" tf:"message,omitempty"` // A short error code that summarizes the error. Reason *string `json:"reason,omitempty" tf:"reason,omitempty"` }
func (*ErrorResultObservation) DeepCopy ¶
func (in *ErrorResultObservation) DeepCopy() *ErrorResultObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorResultObservation.
func (*ErrorResultObservation) DeepCopyInto ¶
func (in *ErrorResultObservation) DeepCopyInto(out *ErrorResultObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ErrorResultParameters ¶
type ErrorResultParameters struct { }
func (*ErrorResultParameters) DeepCopy ¶
func (in *ErrorResultParameters) DeepCopy() *ErrorResultParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorResultParameters.
func (*ErrorResultParameters) DeepCopyInto ¶
func (in *ErrorResultParameters) DeepCopyInto(out *ErrorResultParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ErrorsInitParameters ¶
type ErrorsInitParameters struct { }
func (*ErrorsInitParameters) DeepCopy ¶
func (in *ErrorsInitParameters) DeepCopy() *ErrorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorsInitParameters.
func (*ErrorsInitParameters) DeepCopyInto ¶
func (in *ErrorsInitParameters) DeepCopyInto(out *ErrorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ErrorsObservation ¶
type ErrorsObservation struct { // The geographic location of the job. The default value is US. Location *string `json:"location,omitempty" tf:"location,omitempty"` // A human-readable description of the error. Message *string `json:"message,omitempty" tf:"message,omitempty"` // A short error code that summarizes the error. Reason *string `json:"reason,omitempty" tf:"reason,omitempty"` }
func (*ErrorsObservation) DeepCopy ¶
func (in *ErrorsObservation) DeepCopy() *ErrorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorsObservation.
func (*ErrorsObservation) DeepCopyInto ¶
func (in *ErrorsObservation) DeepCopyInto(out *ErrorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ErrorsParameters ¶
type ErrorsParameters struct { }
func (*ErrorsParameters) DeepCopy ¶
func (in *ErrorsParameters) DeepCopy() *ErrorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorsParameters.
func (*ErrorsParameters) DeepCopyInto ¶
func (in *ErrorsParameters) DeepCopyInto(out *ErrorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDataConfigurationInitParameters ¶
type ExternalDataConfigurationInitParameters struct { // - Let BigQuery try to autodetect the schema // and format of the table. Autodetect *bool `json:"autodetect,omitempty" tf:"autodetect,omitempty"` // Additional options if source_format is set to // "AVRO". Structure is documented below. AvroOptions *AvroOptionsInitParameters `json:"avroOptions,omitempty" tf:"avro_options,omitempty"` // Additional properties to set if // source_format is set to "BIGTABLE". Structure is documented below. BigtableOptions *BigtableOptionsInitParameters `json:"bigtableOptions,omitempty" tf:"bigtable_options,omitempty"` // The compression type of the data source. // Valid values are "NONE" or "GZIP". Compression *string `json:"compression,omitempty" tf:"compression,omitempty"` // The connection specifying the credentials to be used to read // external storage, such as Azure Blob, Cloud Storage, or S3. The connection_id can have // the form {{project}}.{{location}}.{{connection_id}} // or projects/{{project}}/locations/{{location}}/connections/{{connection_id}}. ConnectionID *string `json:"connectionId,omitempty" tf:"connection_id,omitempty"` // Additional properties to set if // source_format is set to "CSV". Structure is documented below. CsvOptions *CsvOptionsInitParameters `json:"csvOptions,omitempty" tf:"csv_options,omitempty"` // Specifies how source URIs are interpreted for constructing the file set to load. // By default source URIs are expanded against the underlying storage. // Other options include specifying manifest files. Only applicable to object storage systems. Docs FileSetSpecType *string `json:"fileSetSpecType,omitempty" tf:"file_set_spec_type,omitempty"` // Additional options if // source_format is set to "GOOGLE_SHEETS". Structure is // documented below. GoogleSheetsOptions *GoogleSheetsOptionsInitParameters `json:"googleSheetsOptions,omitempty" tf:"google_sheets_options,omitempty"` // When set, configures hive partitioning // support. 
Not all storage formats support hive partitioning -- requesting hive // partitioning on an unsupported format will lead to an error, as will providing // an invalid specification. Structure is documented below. HivePartitioningOptions *HivePartitioningOptionsInitParameters `json:"hivePartitioningOptions,omitempty" tf:"hive_partitioning_options,omitempty"` // Indicates if BigQuery should // allow extra values that are not represented in the table schema. // If true, the extra values are ignored. If false, records with // extra columns are treated as bad records, and if there are too // many bad records, an invalid error is returned in the job result. // The default value is false. IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values,omitempty"` // Used to indicate that a JSON variant, rather than normal JSON, is being used as the sourceFormat. This should only be used in combination with the JSON source format. Valid values are: GEOJSON. JSONExtension *string `json:"jsonExtension,omitempty" tf:"json_extension,omitempty"` // Additional properties to set if // source_format is set to "JSON". Structure is documented below. JSONOptions *JSONOptionsInitParameters `json:"jsonOptions,omitempty" tf:"json_options,omitempty"` // The maximum number of bad records that // BigQuery can ignore when reading data. MaxBadRecords *float64 `json:"maxBadRecords,omitempty" tf:"max_bad_records,omitempty"` // Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source. Valid values are AUTOMATIC and MANUAL. MetadataCacheMode *string `json:"metadataCacheMode,omitempty" tf:"metadata_cache_mode,omitempty"` // Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If object_metadata is set, source_format should be omitted. 
ObjectMetadata *string `json:"objectMetadata,omitempty" tf:"object_metadata,omitempty"` // Additional properties to set if // source_format is set to "PARQUET". Structure is documented below. ParquetOptions *ExternalDataConfigurationParquetOptionsInitParameters `json:"parquetOptions,omitempty" tf:"parquet_options,omitempty"` // When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC. ReferenceFileSchemaURI *string `json:"referenceFileSchemaUri,omitempty" tf:"reference_file_schema_uri,omitempty"` // A JSON schema for the external table. Schema is required // for CSV and JSON formats if autodetect is not on. Schema is disallowed // for Google Cloud Bigtable, Cloud Datastore backups, Avro, Iceberg, ORC and Parquet formats. // ~>NOTE: Because this field expects a JSON string, any changes to the // string will create a diff, even if the JSON itself hasn't changed. // Furthermore, drift for this field cannot be detected because BigQuery // only uses this schema to compute the effective schema for the table, therefore // any changes on the configured value will force the table to be recreated. // This schema is effectively only applied when creating a table from an external // datasource, after creation the computed schema will be stored in // google_bigquery_table.schema Schema *string `json:"schema,omitempty" tf:"schema,omitempty"` // The data format. Please see sourceFormat under // ExternalDataConfiguration // in Bigquery's public API documentation for supported formats. To use "GOOGLE_SHEETS" // the scopes must include "https://www.googleapis.com/auth/drive.readonly". SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format,omitempty"` // A list of the fully-qualified URIs that point to // your data in Google Cloud. SourceUris []*string `json:"sourceUris,omitempty" tf:"source_uris,omitempty"` }
func (*ExternalDataConfigurationInitParameters) DeepCopy ¶
func (in *ExternalDataConfigurationInitParameters) DeepCopy() *ExternalDataConfigurationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfigurationInitParameters.
func (*ExternalDataConfigurationInitParameters) DeepCopyInto ¶
func (in *ExternalDataConfigurationInitParameters) DeepCopyInto(out *ExternalDataConfigurationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDataConfigurationObservation ¶
type ExternalDataConfigurationObservation struct { // - Let BigQuery try to autodetect the schema // and format of the table. Autodetect *bool `json:"autodetect,omitempty" tf:"autodetect,omitempty"` // Additional options if source_format is set to // "AVRO". Structure is documented below. AvroOptions *AvroOptionsObservation `json:"avroOptions,omitempty" tf:"avro_options,omitempty"` // Additional properties to set if // source_format is set to "BIGTABLE". Structure is documented below. BigtableOptions *BigtableOptionsObservation `json:"bigtableOptions,omitempty" tf:"bigtable_options,omitempty"` // The compression type of the data source. // Valid values are "NONE" or "GZIP". Compression *string `json:"compression,omitempty" tf:"compression,omitempty"` // The connection specifying the credentials to be used to read // external storage, such as Azure Blob, Cloud Storage, or S3. The connection_id can have // the form {{project}}.{{location}}.{{connection_id}} // or projects/{{project}}/locations/{{location}}/connections/{{connection_id}}. ConnectionID *string `json:"connectionId,omitempty" tf:"connection_id,omitempty"` // Additional properties to set if // source_format is set to "CSV". Structure is documented below. CsvOptions *CsvOptionsObservation `json:"csvOptions,omitempty" tf:"csv_options,omitempty"` // Specifies how source URIs are interpreted for constructing the file set to load. // By default source URIs are expanded against the underlying storage. // Other options include specifying manifest files. Only applicable to object storage systems. Docs FileSetSpecType *string `json:"fileSetSpecType,omitempty" tf:"file_set_spec_type,omitempty"` // Additional options if // source_format is set to "GOOGLE_SHEETS". Structure is // documented below. GoogleSheetsOptions *GoogleSheetsOptionsObservation `json:"googleSheetsOptions,omitempty" tf:"google_sheets_options,omitempty"` // When set, configures hive partitioning // support. 
Not all storage formats support hive partitioning -- requesting hive // partitioning on an unsupported format will lead to an error, as will providing // an invalid specification. Structure is documented below. HivePartitioningOptions *HivePartitioningOptionsObservation `json:"hivePartitioningOptions,omitempty" tf:"hive_partitioning_options,omitempty"` // Indicates if BigQuery should // allow extra values that are not represented in the table schema. // If true, the extra values are ignored. If false, records with // extra columns are treated as bad records, and if there are too // many bad records, an invalid error is returned in the job result. // The default value is false. IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values,omitempty"` // Used to indicate that a JSON variant, rather than normal JSON, is being used as the sourceFormat. This should only be used in combination with the JSON source format. Valid values are: GEOJSON. JSONExtension *string `json:"jsonExtension,omitempty" tf:"json_extension,omitempty"` // Additional properties to set if // source_format is set to "JSON". Structure is documented below. JSONOptions *JSONOptionsObservation `json:"jsonOptions,omitempty" tf:"json_options,omitempty"` // The maximum number of bad records that // BigQuery can ignore when reading data. MaxBadRecords *float64 `json:"maxBadRecords,omitempty" tf:"max_bad_records,omitempty"` // Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source. Valid values are AUTOMATIC and MANUAL. MetadataCacheMode *string `json:"metadataCacheMode,omitempty" tf:"metadata_cache_mode,omitempty"` // Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If object_metadata is set, source_format should be omitted. 
ObjectMetadata *string `json:"objectMetadata,omitempty" tf:"object_metadata,omitempty"` // Additional properties to set if // source_format is set to "PARQUET". Structure is documented below. ParquetOptions *ExternalDataConfigurationParquetOptionsObservation `json:"parquetOptions,omitempty" tf:"parquet_options,omitempty"` // When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC. ReferenceFileSchemaURI *string `json:"referenceFileSchemaUri,omitempty" tf:"reference_file_schema_uri,omitempty"` // A JSON schema for the external table. Schema is required // for CSV and JSON formats if autodetect is not on. Schema is disallowed // for Google Cloud Bigtable, Cloud Datastore backups, Avro, Iceberg, ORC and Parquet formats. // ~>NOTE: Because this field expects a JSON string, any changes to the // string will create a diff, even if the JSON itself hasn't changed. // Furthermore, drift for this field cannot be detected because BigQuery // only uses this schema to compute the effective schema for the table, therefore // any changes on the configured value will force the table to be recreated. // This schema is effectively only applied when creating a table from an external // datasource, after creation the computed schema will be stored in // google_bigquery_table.schema Schema *string `json:"schema,omitempty" tf:"schema,omitempty"` // The data format. Please see sourceFormat under // ExternalDataConfiguration // in Bigquery's public API documentation for supported formats. To use "GOOGLE_SHEETS" // the scopes must include "https://www.googleapis.com/auth/drive.readonly". SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format,omitempty"` // A list of the fully-qualified URIs that point to // your data in Google Cloud. SourceUris []*string `json:"sourceUris,omitempty" tf:"source_uris,omitempty"` }
func (*ExternalDataConfigurationObservation) DeepCopy ¶
func (in *ExternalDataConfigurationObservation) DeepCopy() *ExternalDataConfigurationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfigurationObservation.
func (*ExternalDataConfigurationObservation) DeepCopyInto ¶
func (in *ExternalDataConfigurationObservation) DeepCopyInto(out *ExternalDataConfigurationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDataConfigurationParameters ¶
type ExternalDataConfigurationParameters struct { // - Let BigQuery try to autodetect the schema // and format of the table. // +kubebuilder:validation:Optional Autodetect *bool `json:"autodetect" tf:"autodetect,omitempty"` // Additional options if source_format is set to // "AVRO". Structure is documented below. // +kubebuilder:validation:Optional AvroOptions *AvroOptionsParameters `json:"avroOptions,omitempty" tf:"avro_options,omitempty"` // Additional properties to set if // source_format is set to "BIGTABLE". Structure is documented below. // +kubebuilder:validation:Optional BigtableOptions *BigtableOptionsParameters `json:"bigtableOptions,omitempty" tf:"bigtable_options,omitempty"` // The compression type of the data source. // Valid values are "NONE" or "GZIP". // +kubebuilder:validation:Optional Compression *string `json:"compression,omitempty" tf:"compression,omitempty"` // The connection specifying the credentials to be used to read // external storage, such as Azure Blob, Cloud Storage, or S3. The connection_id can have // the form {{project}}.{{location}}.{{connection_id}} // or projects/{{project}}/locations/{{location}}/connections/{{connection_id}}. // +kubebuilder:validation:Optional ConnectionID *string `json:"connectionId,omitempty" tf:"connection_id,omitempty"` // Additional properties to set if // source_format is set to "CSV". Structure is documented below. // +kubebuilder:validation:Optional CsvOptions *CsvOptionsParameters `json:"csvOptions,omitempty" tf:"csv_options,omitempty"` // Specifies how source URIs are interpreted for constructing the file set to load. // By default source URIs are expanded against the underlying storage. // Other options include specifying manifest files. Only applicable to object storage systems. Docs // +kubebuilder:validation:Optional FileSetSpecType *string `json:"fileSetSpecType,omitempty" tf:"file_set_spec_type,omitempty"` // Additional options if // source_format is set to "GOOGLE_SHEETS". 
Structure is // documented below. // +kubebuilder:validation:Optional GoogleSheetsOptions *GoogleSheetsOptionsParameters `json:"googleSheetsOptions,omitempty" tf:"google_sheets_options,omitempty"` // When set, configures hive partitioning // support. Not all storage formats support hive partitioning -- requesting hive // partitioning on an unsupported format will lead to an error, as will providing // an invalid specification. Structure is documented below. // +kubebuilder:validation:Optional HivePartitioningOptions *HivePartitioningOptionsParameters `json:"hivePartitioningOptions,omitempty" tf:"hive_partitioning_options,omitempty"` // Indicates if BigQuery should // allow extra values that are not represented in the table schema. // If true, the extra values are ignored. If false, records with // extra columns are treated as bad records, and if there are too // many bad records, an invalid error is returned in the job result. // The default value is false. // +kubebuilder:validation:Optional IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values,omitempty"` // Used to indicate that a JSON variant, rather than normal JSON, is being used as the sourceFormat. This should only be used in combination with the JSON source format. Valid values are: GEOJSON. // +kubebuilder:validation:Optional JSONExtension *string `json:"jsonExtension,omitempty" tf:"json_extension,omitempty"` // Additional properties to set if // source_format is set to "JSON". Structure is documented below. // +kubebuilder:validation:Optional JSONOptions *JSONOptionsParameters `json:"jsonOptions,omitempty" tf:"json_options,omitempty"` // The maximum number of bad records that // BigQuery can ignore when reading data. // +kubebuilder:validation:Optional MaxBadRecords *float64 `json:"maxBadRecords,omitempty" tf:"max_bad_records,omitempty"` // Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source. 
Valid values are AUTOMATIC and MANUAL. // +kubebuilder:validation:Optional MetadataCacheMode *string `json:"metadataCacheMode,omitempty" tf:"metadata_cache_mode,omitempty"` // Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If object_metadata is set, source_format should be omitted. // +kubebuilder:validation:Optional ObjectMetadata *string `json:"objectMetadata,omitempty" tf:"object_metadata,omitempty"` // Additional properties to set if // source_format is set to "PARQUET". Structure is documented below. // +kubebuilder:validation:Optional ParquetOptions *ExternalDataConfigurationParquetOptionsParameters `json:"parquetOptions,omitempty" tf:"parquet_options,omitempty"` // When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC. // +kubebuilder:validation:Optional ReferenceFileSchemaURI *string `json:"referenceFileSchemaUri,omitempty" tf:"reference_file_schema_uri,omitempty"` // A JSON schema for the external table. Schema is required // for CSV and JSON formats if autodetect is not on. Schema is disallowed // for Google Cloud Bigtable, Cloud Datastore backups, Avro, Iceberg, ORC and Parquet formats. // ~>NOTE: Because this field expects a JSON string, any changes to the // string will create a diff, even if the JSON itself hasn't changed. // Furthermore, drift for this field cannot be detected because BigQuery // only uses this schema to compute the effective schema for the table, therefore // any changes on the configured value will force the table to be recreated. // This schema is effectively only applied when creating a table from an external // datasource, after creation the computed schema will be stored in // google_bigquery_table.schema // +kubebuilder:validation:Optional Schema *string `json:"schema,omitempty" tf:"schema,omitempty"` // The data format. 
Please see sourceFormat under // ExternalDataConfiguration // in Bigquery's public API documentation for supported formats. To use "GOOGLE_SHEETS" // the scopes must include "https://www.googleapis.com/auth/drive.readonly". // +kubebuilder:validation:Optional SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format,omitempty"` // A list of the fully-qualified URIs that point to // your data in Google Cloud. // +kubebuilder:validation:Optional SourceUris []*string `json:"sourceUris" tf:"source_uris,omitempty"` }
func (*ExternalDataConfigurationParameters) DeepCopy ¶
func (in *ExternalDataConfigurationParameters) DeepCopy() *ExternalDataConfigurationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfigurationParameters.
func (*ExternalDataConfigurationParameters) DeepCopyInto ¶
func (in *ExternalDataConfigurationParameters) DeepCopyInto(out *ExternalDataConfigurationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDataConfigurationParquetOptionsInitParameters ¶
type ExternalDataConfigurationParquetOptionsInitParameters struct { // Indicates whether to use schema inference specifically for Parquet LIST logical type. EnableListInference *bool `json:"enableListInference,omitempty" tf:"enable_list_inference,omitempty"` // Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. EnumAsString *bool `json:"enumAsString,omitempty" tf:"enum_as_string,omitempty"` }
func (*ExternalDataConfigurationParquetOptionsInitParameters) DeepCopy ¶
func (in *ExternalDataConfigurationParquetOptionsInitParameters) DeepCopy() *ExternalDataConfigurationParquetOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfigurationParquetOptionsInitParameters.
func (*ExternalDataConfigurationParquetOptionsInitParameters) DeepCopyInto ¶
func (in *ExternalDataConfigurationParquetOptionsInitParameters) DeepCopyInto(out *ExternalDataConfigurationParquetOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDataConfigurationParquetOptionsObservation ¶
type ExternalDataConfigurationParquetOptionsObservation struct { // Indicates whether to use schema inference specifically for Parquet LIST logical type. EnableListInference *bool `json:"enableListInference,omitempty" tf:"enable_list_inference,omitempty"` // Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. EnumAsString *bool `json:"enumAsString,omitempty" tf:"enum_as_string,omitempty"` }
func (*ExternalDataConfigurationParquetOptionsObservation) DeepCopy ¶
func (in *ExternalDataConfigurationParquetOptionsObservation) DeepCopy() *ExternalDataConfigurationParquetOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfigurationParquetOptionsObservation.
func (*ExternalDataConfigurationParquetOptionsObservation) DeepCopyInto ¶
func (in *ExternalDataConfigurationParquetOptionsObservation) DeepCopyInto(out *ExternalDataConfigurationParquetOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDataConfigurationParquetOptionsParameters ¶
type ExternalDataConfigurationParquetOptionsParameters struct { // Indicates whether to use schema inference specifically for Parquet LIST logical type. // +kubebuilder:validation:Optional EnableListInference *bool `json:"enableListInference,omitempty" tf:"enable_list_inference,omitempty"` // Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. // +kubebuilder:validation:Optional EnumAsString *bool `json:"enumAsString,omitempty" tf:"enum_as_string,omitempty"` }
func (*ExternalDataConfigurationParquetOptionsParameters) DeepCopy ¶
func (in *ExternalDataConfigurationParquetOptionsParameters) DeepCopy() *ExternalDataConfigurationParquetOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfigurationParquetOptionsParameters.
func (*ExternalDataConfigurationParquetOptionsParameters) DeepCopyInto ¶
func (in *ExternalDataConfigurationParquetOptionsParameters) DeepCopyInto(out *ExternalDataConfigurationParquetOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDatasetReferenceInitParameters ¶ added in v1.5.0
type ExternalDatasetReferenceInitParameters struct { // The connection id that is used to access the externalSource. // Format: projects/{projectId}/locations/{locationId}/connections/{connectionId} Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // External source that backs this dataset. ExternalSource *string `json:"externalSource,omitempty" tf:"external_source,omitempty"` }
func (*ExternalDatasetReferenceInitParameters) DeepCopy ¶ added in v1.5.0
func (in *ExternalDatasetReferenceInitParameters) DeepCopy() *ExternalDatasetReferenceInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReferenceInitParameters.
func (*ExternalDatasetReferenceInitParameters) DeepCopyInto ¶ added in v1.5.0
func (in *ExternalDatasetReferenceInitParameters) DeepCopyInto(out *ExternalDatasetReferenceInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDatasetReferenceObservation ¶ added in v1.5.0
type ExternalDatasetReferenceObservation struct { // The connection id that is used to access the externalSource. // Format: projects/{projectId}/locations/{locationId}/connections/{connectionId} Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // External source that backs this dataset. ExternalSource *string `json:"externalSource,omitempty" tf:"external_source,omitempty"` }
func (*ExternalDatasetReferenceObservation) DeepCopy ¶ added in v1.5.0
func (in *ExternalDatasetReferenceObservation) DeepCopy() *ExternalDatasetReferenceObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReferenceObservation.
func (*ExternalDatasetReferenceObservation) DeepCopyInto ¶ added in v1.5.0
func (in *ExternalDatasetReferenceObservation) DeepCopyInto(out *ExternalDatasetReferenceObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDatasetReferenceParameters ¶ added in v1.5.0
type ExternalDatasetReferenceParameters struct { // The connection id that is used to access the externalSource. // Format: projects/{projectId}/locations/{locationId}/connections/{connectionId} // +kubebuilder:validation:Optional Connection *string `json:"connection" tf:"connection,omitempty"` // External source that backs this dataset. // +kubebuilder:validation:Optional ExternalSource *string `json:"externalSource" tf:"external_source,omitempty"` }
func (*ExternalDatasetReferenceParameters) DeepCopy ¶ added in v1.5.0
func (in *ExternalDatasetReferenceParameters) DeepCopy() *ExternalDatasetReferenceParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReferenceParameters.
func (*ExternalDatasetReferenceParameters) DeepCopyInto ¶ added in v1.5.0
func (in *ExternalDatasetReferenceParameters) DeepCopyInto(out *ExternalDatasetReferenceParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExtractInitParameters ¶
type ExtractInitParameters struct { // The compression type to use for exported files. Possible values include GZIP, DEFLATE, SNAPPY, and NONE. // The default value is NONE. DEFLATE and SNAPPY are only supported for Avro. Compression *string `json:"compression,omitempty" tf:"compression,omitempty"` // The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models. // The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV. // The default value for models is SAVED_MODEL. DestinationFormat *string `json:"destinationFormat,omitempty" tf:"destination_format,omitempty"` // A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written. DestinationUris []*string `json:"destinationUris,omitempty" tf:"destination_uris,omitempty"` // When extracting data in CSV format, this defines the delimiter to use between fields in the exported data. // Default is ',' FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // Whether to print out a header row in the results. Default is true. PrintHeader *bool `json:"printHeader,omitempty" tf:"print_header,omitempty"` // A reference to the model being exported. // Structure is documented below. SourceModel *SourceModelInitParameters `json:"sourceModel,omitempty" tf:"source_model,omitempty"` // A reference to the table being exported. // Structure is documented below. SourceTable *SourceTableInitParameters `json:"sourceTable,omitempty" tf:"source_table,omitempty"` // Whether to use logical types when extracting to AVRO format. UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty" tf:"use_avro_logical_types,omitempty"` }
func (*ExtractInitParameters) DeepCopy ¶
func (in *ExtractInitParameters) DeepCopy() *ExtractInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExtractInitParameters.
func (*ExtractInitParameters) DeepCopyInto ¶
func (in *ExtractInitParameters) DeepCopyInto(out *ExtractInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExtractObservation ¶
type ExtractObservation struct { // The compression type to use for exported files. Possible values include GZIP, DEFLATE, SNAPPY, and NONE. // The default value is NONE. DEFLATE and SNAPPY are only supported for Avro. Compression *string `json:"compression,omitempty" tf:"compression,omitempty"` // The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models. // The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV. // The default value for models is SAVED_MODEL. DestinationFormat *string `json:"destinationFormat,omitempty" tf:"destination_format,omitempty"` // A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written. DestinationUris []*string `json:"destinationUris,omitempty" tf:"destination_uris,omitempty"` // When extracting data in CSV format, this defines the delimiter to use between fields in the exported data. // Default is ',' FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // Whether to print out a header row in the results. Default is true. PrintHeader *bool `json:"printHeader,omitempty" tf:"print_header,omitempty"` // A reference to the model being exported. // Structure is documented below. SourceModel *SourceModelObservation `json:"sourceModel,omitempty" tf:"source_model,omitempty"` // A reference to the table being exported. // Structure is documented below. SourceTable *SourceTableObservation `json:"sourceTable,omitempty" tf:"source_table,omitempty"` // Whether to use logical types when extracting to AVRO format. UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty" tf:"use_avro_logical_types,omitempty"` }
func (*ExtractObservation) DeepCopy ¶
func (in *ExtractObservation) DeepCopy() *ExtractObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExtractObservation.
func (*ExtractObservation) DeepCopyInto ¶
func (in *ExtractObservation) DeepCopyInto(out *ExtractObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExtractParameters ¶
type ExtractParameters struct { // The compression type to use for exported files. Possible values include GZIP, DEFLATE, SNAPPY, and NONE. // The default value is NONE. DEFLATE and SNAPPY are only supported for Avro. // +kubebuilder:validation:Optional Compression *string `json:"compression,omitempty" tf:"compression,omitempty"` // The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models. // The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV. // The default value for models is SAVED_MODEL. // +kubebuilder:validation:Optional DestinationFormat *string `json:"destinationFormat,omitempty" tf:"destination_format,omitempty"` // A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written. // +kubebuilder:validation:Optional DestinationUris []*string `json:"destinationUris" tf:"destination_uris,omitempty"` // When extracting data in CSV format, this defines the delimiter to use between fields in the exported data. // Default is ',' // +kubebuilder:validation:Optional FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // Whether to print out a header row in the results. Default is true. // +kubebuilder:validation:Optional PrintHeader *bool `json:"printHeader,omitempty" tf:"print_header,omitempty"` // A reference to the model being exported. // Structure is documented below. // +kubebuilder:validation:Optional SourceModel *SourceModelParameters `json:"sourceModel,omitempty" tf:"source_model,omitempty"` // A reference to the table being exported. // Structure is documented below. // +kubebuilder:validation:Optional SourceTable *SourceTableParameters `json:"sourceTable,omitempty" tf:"source_table,omitempty"` // Whether to use logical types when extracting to AVRO format. 
// +kubebuilder:validation:Optional UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty" tf:"use_avro_logical_types,omitempty"` }
func (*ExtractParameters) DeepCopy ¶
func (in *ExtractParameters) DeepCopy() *ExtractParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExtractParameters.
func (*ExtractParameters) DeepCopyInto ¶
func (in *ExtractParameters) DeepCopyInto(out *ExtractParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ForeignKeysInitParameters ¶
type ForeignKeysInitParameters struct { // : The pair of the foreign key column and primary key column. // Structure is documented below. ColumnReferences *ColumnReferencesInitParameters `json:"columnReferences,omitempty" tf:"column_references,omitempty"` // : Set only if the foreign key constraint is named. Name *string `json:"name,omitempty" tf:"name,omitempty"` // : The table that holds the primary key // and is referenced by this foreign key. // Structure is documented below. ReferencedTable *ReferencedTableInitParameters `json:"referencedTable,omitempty" tf:"referenced_table,omitempty"` }
func (*ForeignKeysInitParameters) DeepCopy ¶
func (in *ForeignKeysInitParameters) DeepCopy() *ForeignKeysInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignKeysInitParameters.
func (*ForeignKeysInitParameters) DeepCopyInto ¶
func (in *ForeignKeysInitParameters) DeepCopyInto(out *ForeignKeysInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ForeignKeysObservation ¶
type ForeignKeysObservation struct { // : The pair of the foreign key column and primary key column. // Structure is documented below. ColumnReferences *ColumnReferencesObservation `json:"columnReferences,omitempty" tf:"column_references,omitempty"` // : Set only if the foreign key constraint is named. Name *string `json:"name,omitempty" tf:"name,omitempty"` // : The table that holds the primary key // and is referenced by this foreign key. // Structure is documented below. ReferencedTable *ReferencedTableObservation `json:"referencedTable,omitempty" tf:"referenced_table,omitempty"` }
func (*ForeignKeysObservation) DeepCopy ¶
func (in *ForeignKeysObservation) DeepCopy() *ForeignKeysObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignKeysObservation.
func (*ForeignKeysObservation) DeepCopyInto ¶
func (in *ForeignKeysObservation) DeepCopyInto(out *ForeignKeysObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ForeignKeysParameters ¶
type ForeignKeysParameters struct { // : The pair of the foreign key column and primary key column. // Structure is documented below. // +kubebuilder:validation:Optional ColumnReferences *ColumnReferencesParameters `json:"columnReferences" tf:"column_references,omitempty"` // : Set only if the foreign key constraint is named. // +kubebuilder:validation:Optional Name *string `json:"name,omitempty" tf:"name,omitempty"` // : The table that holds the primary key // and is referenced by this foreign key. // Structure is documented below. // +kubebuilder:validation:Optional ReferencedTable *ReferencedTableParameters `json:"referencedTable" tf:"referenced_table,omitempty"` }
func (*ForeignKeysParameters) DeepCopy ¶
func (in *ForeignKeysParameters) DeepCopy() *ForeignKeysParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignKeysParameters.
func (*ForeignKeysParameters) DeepCopyInto ¶
func (in *ForeignKeysParameters) DeepCopyInto(out *ForeignKeysParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GoogleSheetsOptionsInitParameters ¶
type GoogleSheetsOptionsInitParameters struct { // Range of a sheet to query from. Only used when non-empty. // Typical format: sheet_name!top_left_cell_id:bottom_right_cell_id Range *string `json:"range,omitempty" tf:"range,omitempty"` // The number of rows at the top of the sheet // that BigQuery will skip when reading the data. At least one of range or // skip_leading_rows must be set. SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` }
func (*GoogleSheetsOptionsInitParameters) DeepCopy ¶
func (in *GoogleSheetsOptionsInitParameters) DeepCopy() *GoogleSheetsOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GoogleSheetsOptionsInitParameters.
func (*GoogleSheetsOptionsInitParameters) DeepCopyInto ¶
func (in *GoogleSheetsOptionsInitParameters) DeepCopyInto(out *GoogleSheetsOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GoogleSheetsOptionsObservation ¶
type GoogleSheetsOptionsObservation struct { // Range of a sheet to query from. Only used when non-empty. // Typical format: sheet_name!top_left_cell_id:bottom_right_cell_id Range *string `json:"range,omitempty" tf:"range,omitempty"` // The number of rows at the top of the sheet // that BigQuery will skip when reading the data. At least one of range or // skip_leading_rows must be set. SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` }
func (*GoogleSheetsOptionsObservation) DeepCopy ¶
func (in *GoogleSheetsOptionsObservation) DeepCopy() *GoogleSheetsOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GoogleSheetsOptionsObservation.
func (*GoogleSheetsOptionsObservation) DeepCopyInto ¶
func (in *GoogleSheetsOptionsObservation) DeepCopyInto(out *GoogleSheetsOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GoogleSheetsOptionsParameters ¶
type GoogleSheetsOptionsParameters struct { // Range of a sheet to query from. Only used when non-empty. // Typical format: sheet_name!top_left_cell_id:bottom_right_cell_id // +kubebuilder:validation:Optional Range *string `json:"range,omitempty" tf:"range,omitempty"` // The number of rows at the top of the sheet // that BigQuery will skip when reading the data. At least one of range or // skip_leading_rows must be set. // +kubebuilder:validation:Optional SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` }
func (*GoogleSheetsOptionsParameters) DeepCopy ¶
func (in *GoogleSheetsOptionsParameters) DeepCopy() *GoogleSheetsOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GoogleSheetsOptionsParameters.
func (*GoogleSheetsOptionsParameters) DeepCopyInto ¶
func (in *GoogleSheetsOptionsParameters) DeepCopyInto(out *GoogleSheetsOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HivePartitioningOptionsInitParameters ¶
type HivePartitioningOptionsInitParameters struct { // When set, what mode of hive partitioning to use when // reading data. The following modes are supported. Mode *string `json:"mode,omitempty" tf:"mode,omitempty"` // If set to true, queries over this table // require a partition filter that can be used for partition elimination to be // specified. require_partition_filter is deprecated and will be removed in // a future major release. Use the top level field with the same name instead. RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"` // When hive partition detection is requested, // a common prefix for all source uris must be required. The prefix must end immediately // before the partition key encoding begins. For example, consider files following // this data layout. gs://bucket/path_to_table/dt=2019-06-01/country=USA/id=7/file.avro // gs://bucket/path_to_table/dt=2019-05-31/country=CA/id=3/file.avro When hive // partitioning is requested with either AUTO or STRINGS detection, the common prefix // can be either of gs://bucket/path_to_table or gs://bucket/path_to_table/. // Note that when mode is set to CUSTOM, you must encode the partition key schema within the source_uri_prefix by setting source_uri_prefix to gs://bucket/path_to_table/{key1:TYPE1}/{key2:TYPE2}/{key3:TYPE3}. SourceURIPrefix *string `json:"sourceUriPrefix,omitempty" tf:"source_uri_prefix,omitempty"` }
func (*HivePartitioningOptionsInitParameters) DeepCopy ¶
func (in *HivePartitioningOptionsInitParameters) DeepCopy() *HivePartitioningOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HivePartitioningOptionsInitParameters.
func (*HivePartitioningOptionsInitParameters) DeepCopyInto ¶
func (in *HivePartitioningOptionsInitParameters) DeepCopyInto(out *HivePartitioningOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HivePartitioningOptionsObservation ¶
type HivePartitioningOptionsObservation struct { // When set, what mode of hive partitioning to use when // reading data. The following modes are supported. Mode *string `json:"mode,omitempty" tf:"mode,omitempty"` // If set to true, queries over this table // require a partition filter that can be used for partition elimination to be // specified. require_partition_filter is deprecated and will be removed in // a future major release. Use the top level field with the same name instead. RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"` // When hive partition detection is requested, // a common prefix for all source uris must be required. The prefix must end immediately // before the partition key encoding begins. For example, consider files following // this data layout. gs://bucket/path_to_table/dt=2019-06-01/country=USA/id=7/file.avro // gs://bucket/path_to_table/dt=2019-05-31/country=CA/id=3/file.avro When hive // partitioning is requested with either AUTO or STRINGS detection, the common prefix // can be either of gs://bucket/path_to_table or gs://bucket/path_to_table/. // Note that when mode is set to CUSTOM, you must encode the partition key schema within the source_uri_prefix by setting source_uri_prefix to gs://bucket/path_to_table/{key1:TYPE1}/{key2:TYPE2}/{key3:TYPE3}. SourceURIPrefix *string `json:"sourceUriPrefix,omitempty" tf:"source_uri_prefix,omitempty"` }
func (*HivePartitioningOptionsObservation) DeepCopy ¶
func (in *HivePartitioningOptionsObservation) DeepCopy() *HivePartitioningOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HivePartitioningOptionsObservation.
func (*HivePartitioningOptionsObservation) DeepCopyInto ¶
func (in *HivePartitioningOptionsObservation) DeepCopyInto(out *HivePartitioningOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HivePartitioningOptionsParameters ¶
type HivePartitioningOptionsParameters struct { // When set, what mode of hive partitioning to use when // reading data. The following modes are supported. // +kubebuilder:validation:Optional Mode *string `json:"mode,omitempty" tf:"mode,omitempty"` // If set to true, queries over this table // require a partition filter that can be used for partition elimination to be // specified. require_partition_filter is deprecated and will be removed in // a future major release. Use the top level field with the same name instead. // +kubebuilder:validation:Optional RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"` // When hive partition detection is requested, // a common prefix for all source uris must be required. The prefix must end immediately // before the partition key encoding begins. For example, consider files following // this data layout. gs://bucket/path_to_table/dt=2019-06-01/country=USA/id=7/file.avro // gs://bucket/path_to_table/dt=2019-05-31/country=CA/id=3/file.avro When hive // partitioning is requested with either AUTO or STRINGS detection, the common prefix // can be either of gs://bucket/path_to_table or gs://bucket/path_to_table/. // Note that when mode is set to CUSTOM, you must encode the partition key schema within the source_uri_prefix by setting source_uri_prefix to gs://bucket/path_to_table/{key1:TYPE1}/{key2:TYPE2}/{key3:TYPE3}. // +kubebuilder:validation:Optional SourceURIPrefix *string `json:"sourceUriPrefix,omitempty" tf:"source_uri_prefix,omitempty"` }
func (*HivePartitioningOptionsParameters) DeepCopy ¶
func (in *HivePartitioningOptionsParameters) DeepCopy() *HivePartitioningOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HivePartitioningOptionsParameters.
func (*HivePartitioningOptionsParameters) DeepCopyInto ¶
func (in *HivePartitioningOptionsParameters) DeepCopyInto(out *HivePartitioningOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JSONOptionsInitParameters ¶
type JSONOptionsInitParameters struct { // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` }
func (*JSONOptionsInitParameters) DeepCopy ¶
func (in *JSONOptionsInitParameters) DeepCopy() *JSONOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JSONOptionsInitParameters.
func (*JSONOptionsInitParameters) DeepCopyInto ¶
func (in *JSONOptionsInitParameters) DeepCopyInto(out *JSONOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JSONOptionsObservation ¶
type JSONOptionsObservation struct { // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` }
func (*JSONOptionsObservation) DeepCopy ¶
func (in *JSONOptionsObservation) DeepCopy() *JSONOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JSONOptionsObservation.
func (*JSONOptionsObservation) DeepCopyInto ¶
func (in *JSONOptionsObservation) DeepCopyInto(out *JSONOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JSONOptionsParameters ¶
type JSONOptionsParameters struct { // The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8. // +kubebuilder:validation:Optional Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` }
func (*JSONOptionsParameters) DeepCopy ¶
func (in *JSONOptionsParameters) DeepCopy() *JSONOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JSONOptionsParameters.
func (*JSONOptionsParameters) DeepCopyInto ¶
func (in *JSONOptionsParameters) DeepCopyInto(out *JSONOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Job ¶
type Job struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.jobId) || (has(self.initProvider) && has(self.initProvider.jobId))",message="spec.forProvider.jobId is a required parameter" Spec JobSpec `json:"spec"` Status JobStatus `json:"status,omitempty"` }
Job is the Schema for the Jobs API. Jobs are actions that BigQuery runs on your behalf to load data, export data, query data, or copy data. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Job) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job.
func (*Job) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Job) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Job) GetCondition ¶
func (mg *Job) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Job.
func (*Job) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Job
func (*Job) GetDeletionPolicy ¶
func (mg *Job) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Job.
func (*Job) GetInitParameters ¶
GetInitParameters of this Job
func (*Job) GetManagementPolicies ¶
func (mg *Job) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Job.
func (*Job) GetMergedParameters ¶
GetMergedParameters of this Job
func (*Job) GetObservation ¶
GetObservation of this Job
func (*Job) GetParameters ¶
GetParameters of this Job
func (*Job) GetProviderConfigReference ¶
GetProviderConfigReference of this Job.
func (*Job) GetPublishConnectionDetailsTo ¶
func (mg *Job) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Job.
func (*Job) GetTerraformResourceType ¶
GetTerraformResourceType returns Terraform resource type for this Job
func (*Job) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Job) GetWriteConnectionSecretToReference ¶
func (mg *Job) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Job.
func (*Job) LateInitialize ¶
LateInitialize this Job using its observed tfState. returns True if there are any spec changes for the resource.
func (*Job) ResolveReferences ¶
ResolveReferences of this Job.
func (*Job) SetConditions ¶
SetConditions of this Job.
func (*Job) SetDeletionPolicy ¶
func (mg *Job) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Job.
func (*Job) SetManagementPolicies ¶
func (mg *Job) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Job.
func (*Job) SetObservation ¶
SetObservation for this Job
func (*Job) SetParameters ¶
SetParameters for this Job
func (*Job) SetProviderConfigReference ¶
SetProviderConfigReference of this Job.
func (*Job) SetPublishConnectionDetailsTo ¶
func (mg *Job) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Job.
func (*Job) SetWriteConnectionSecretToReference ¶
func (mg *Job) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Job.
type JobInitParameters ¶
type JobInitParameters struct { // Copies a table. // Structure is documented below. Copy *CopyInitParameters `json:"copy,omitempty" tf:"copy,omitempty"` // Configures an extract job. // Structure is documented below. Extract *ExtractInitParameters `json:"extract,omitempty" tf:"extract,omitempty"` // The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters. JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"` // Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job. JobTimeoutMs *string `json:"jobTimeoutMs,omitempty" tf:"job_timeout_ms,omitempty"` // The labels associated with this job. You can use these to organize and group your jobs. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Configures a load job. // Structure is documented below. Load *LoadInitParameters `json:"load,omitempty" tf:"load,omitempty"` // The geographic location of the job. The default value is US. Location *string `json:"location,omitempty" tf:"location,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Configures a query job. // Structure is documented below. Query *QueryInitParameters `json:"query,omitempty" tf:"query,omitempty"` }
func (*JobInitParameters) DeepCopy ¶
func (in *JobInitParameters) DeepCopy() *JobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobInitParameters.
func (*JobInitParameters) DeepCopyInto ¶
func (in *JobInitParameters) DeepCopyInto(out *JobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobList ¶
type JobList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Job `json:"items"` }
JobList contains a list of Jobs
func (*JobList) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList.
func (*JobList) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*JobList) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type JobObservation ¶
type JobObservation struct { // Copies a table. // Structure is documented below. Copy *CopyObservation `json:"copy,omitempty" tf:"copy,omitempty"` // +mapType=granular EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"` // Configures an extract job. // Structure is documented below. Extract *ExtractObservation `json:"extract,omitempty" tf:"extract,omitempty"` // an identifier for the resource with format projects/{{project}}/jobs/{{job_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters. JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"` // Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job. JobTimeoutMs *string `json:"jobTimeoutMs,omitempty" tf:"job_timeout_ms,omitempty"` // (Output) // The type of the job. JobType *string `json:"jobType,omitempty" tf:"job_type,omitempty"` // The labels associated with this job. You can use these to organize and group your jobs. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Configures a load job. // Structure is documented below. Load *LoadObservation `json:"load,omitempty" tf:"load,omitempty"` // The geographic location of the job. The default value is US. Location *string `json:"location,omitempty" tf:"location,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Configures a query job. // Structure is documented below. Query *QueryObservation `json:"query,omitempty" tf:"query,omitempty"` // The status of this job. Examine this value when polling an asynchronous job to see if the job is complete. // Structure is documented below. 
Status []StatusObservation `json:"status,omitempty" tf:"status,omitempty"` // (Output) // The combination of labels configured directly on the resource // and default labels configured on the provider. // +mapType=granular TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"` // Email address of the user who ran the job. UserEmail *string `json:"userEmail,omitempty" tf:"user_email,omitempty"` }
func (*JobObservation) DeepCopy ¶
func (in *JobObservation) DeepCopy() *JobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobObservation.
func (*JobObservation) DeepCopyInto ¶
func (in *JobObservation) DeepCopyInto(out *JobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobParameters ¶
type JobParameters struct { // Copies a table. // Structure is documented below. // +kubebuilder:validation:Optional Copy *CopyParameters `json:"copy,omitempty" tf:"copy,omitempty"` // Configures an extract job. // Structure is documented below. // +kubebuilder:validation:Optional Extract *ExtractParameters `json:"extract,omitempty" tf:"extract,omitempty"` // The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters. // +kubebuilder:validation:Optional JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"` // Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job. // +kubebuilder:validation:Optional JobTimeoutMs *string `json:"jobTimeoutMs,omitempty" tf:"job_timeout_ms,omitempty"` // The labels associated with this job. You can use these to organize and group your jobs. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Configures a load job. // Structure is documented below. // +kubebuilder:validation:Optional Load *LoadParameters `json:"load,omitempty" tf:"load,omitempty"` // The geographic location of the job. The default value is US. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Configures a query job. // Structure is documented below. // +kubebuilder:validation:Optional Query *QueryParameters `json:"query,omitempty" tf:"query,omitempty"` }
func (*JobParameters) DeepCopy ¶
func (in *JobParameters) DeepCopy() *JobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobParameters.
func (*JobParameters) DeepCopyInto ¶
func (in *JobParameters) DeepCopyInto(out *JobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobSpec ¶
type JobSpec struct { v1.ResourceSpec `json:",inline"` ForProvider JobParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider JobInitParameters `json:"initProvider,omitempty"` }
JobSpec defines the desired state of Job
func (*JobSpec) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpec.
func (*JobSpec) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobStatus ¶
type JobStatus struct { v1.ResourceStatus `json:",inline"` AtProvider JobObservation `json:"atProvider,omitempty"` }
JobStatus defines the observed state of Job.
func (*JobStatus) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus.
func (*JobStatus) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadDestinationEncryptionConfigurationInitParameters ¶
type LoadDestinationEncryptionConfigurationInitParameters struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // The BigQuery Service Account associated with your project requires access to this encryption key. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` }
func (*LoadDestinationEncryptionConfigurationInitParameters) DeepCopy ¶
func (in *LoadDestinationEncryptionConfigurationInitParameters) DeepCopy() *LoadDestinationEncryptionConfigurationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadDestinationEncryptionConfigurationInitParameters.
func (*LoadDestinationEncryptionConfigurationInitParameters) DeepCopyInto ¶
func (in *LoadDestinationEncryptionConfigurationInitParameters) DeepCopyInto(out *LoadDestinationEncryptionConfigurationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadDestinationEncryptionConfigurationObservation ¶
type LoadDestinationEncryptionConfigurationObservation struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // The BigQuery Service Account associated with your project requires access to this encryption key. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` // (Output) // Describes the Cloud KMS encryption key version used to protect destination BigQuery table. KMSKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version,omitempty"` }
func (*LoadDestinationEncryptionConfigurationObservation) DeepCopy ¶
func (in *LoadDestinationEncryptionConfigurationObservation) DeepCopy() *LoadDestinationEncryptionConfigurationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadDestinationEncryptionConfigurationObservation.
func (*LoadDestinationEncryptionConfigurationObservation) DeepCopyInto ¶
func (in *LoadDestinationEncryptionConfigurationObservation) DeepCopyInto(out *LoadDestinationEncryptionConfigurationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadDestinationEncryptionConfigurationParameters ¶
type LoadDestinationEncryptionConfigurationParameters struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // The BigQuery Service Account associated with your project requires access to this encryption key. // +kubebuilder:validation:Optional KMSKeyName *string `json:"kmsKeyName" tf:"kms_key_name,omitempty"` }
func (*LoadDestinationEncryptionConfigurationParameters) DeepCopy ¶
func (in *LoadDestinationEncryptionConfigurationParameters) DeepCopy() *LoadDestinationEncryptionConfigurationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadDestinationEncryptionConfigurationParameters.
func (*LoadDestinationEncryptionConfigurationParameters) DeepCopyInto ¶
func (in *LoadDestinationEncryptionConfigurationParameters) DeepCopyInto(out *LoadDestinationEncryptionConfigurationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadDestinationTableInitParameters ¶
type LoadDestinationTableInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*LoadDestinationTableInitParameters) DeepCopy ¶
func (in *LoadDestinationTableInitParameters) DeepCopy() *LoadDestinationTableInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadDestinationTableInitParameters.
func (*LoadDestinationTableInitParameters) DeepCopyInto ¶
func (in *LoadDestinationTableInitParameters) DeepCopyInto(out *LoadDestinationTableInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadDestinationTableObservation ¶
type LoadDestinationTableObservation struct { // The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*LoadDestinationTableObservation) DeepCopy ¶
func (in *LoadDestinationTableObservation) DeepCopy() *LoadDestinationTableObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadDestinationTableObservation.
func (*LoadDestinationTableObservation) DeepCopyInto ¶
func (in *LoadDestinationTableObservation) DeepCopyInto(out *LoadDestinationTableObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadDestinationTableParameters ¶
type LoadDestinationTableParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*LoadDestinationTableParameters) DeepCopy ¶
func (in *LoadDestinationTableParameters) DeepCopy() *LoadDestinationTableParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadDestinationTableParameters.
func (*LoadDestinationTableParameters) DeepCopyInto ¶
func (in *LoadDestinationTableParameters) DeepCopyInto(out *LoadDestinationTableParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadInitParameters ¶
type LoadInitParameters struct { // Accept rows that are missing trailing optional columns. The missing values are treated as nulls. // If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, // an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats. AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows,omitempty"` // Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. // The default value is false. AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines,omitempty"` // Indicates if we should automatically infer the options and schema for CSV and JSON sources. Autodetect *bool `json:"autodetect,omitempty" tf:"autodetect,omitempty"` // Specifies whether the job is allowed to create new tables. The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion // Default value is CREATE_IF_NEEDED. // Possible values are: CREATE_IF_NEEDED, CREATE_NEVER. CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"` // Custom encryption configuration (e.g., Cloud KMS keys) // Structure is documented below. DestinationEncryptionConfiguration *LoadDestinationEncryptionConfigurationInitParameters `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"` // The destination table to load the data into. // Structure is documented below. DestinationTable *LoadDestinationTableInitParameters `json:"destinationTable,omitempty" tf:"destination_table,omitempty"` // The character encoding of the data. 
The supported values are UTF-8 or ISO-8859-1. // The default value is UTF-8. BigQuery decodes the data after the raw, binary data // has been split using the values of the quote and fieldDelimiter properties. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character. // To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts // the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the // data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. // The default value is a comma (','). FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // Indicates if BigQuery should allow extra values that are not represented in the table schema. // If true, the extra values are ignored. If false, records with extra columns are treated as bad records, // and if there are too many bad records, an invalid error is returned in the job result. // The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: // CSV: Trailing columns // JSON: Named values that don't match any column names IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values,omitempty"` // If sourceFormat is set to newline-delimited JSON, indicates whether it should be processed as a JSON variant such as GeoJSON. // For a sourceFormat other than JSON, omit this field. If the sourceFormat is newline-delimited JSON: - for newline-delimited // GeoJSON: set to GEOJSON. JSONExtension *string `json:"jsonExtension,omitempty" tf:"json_extension,omitempty"` // The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value, // an invalid error is returned in the job result. 
The default value is 0, which requires that all records are valid. MaxBadRecords *float64 `json:"maxBadRecords,omitempty" tf:"max_bad_records,omitempty"` // Specifies a string that represents a null value in a CSV file. For example, if you specify "\N", BigQuery interprets "\N" as a null value // when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an // empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as // an empty value. NullMarker *string `json:"nullMarker,omitempty" tf:"null_marker,omitempty"` // Parquet Options for load and make external tables. // Structure is documented below. ParquetOptions *ParquetOptionsInitParameters `json:"parquetOptions,omitempty" tf:"parquet_options,omitempty"` // If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. // Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. // If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result. ProjectionFields []*string `json:"projectionFields,omitempty" tf:"projection_fields,omitempty"` // The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, // and then uses the first byte of the encoded string to split the data in its raw, binary state. // The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string. // If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true. 
Quote *string `json:"quote,omitempty" tf:"quote,omitempty"` // Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or // supplied in the job configuration. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; // when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. // For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: // ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. // ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. SchemaUpdateOptions []*string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options,omitempty"` // The number of rows at the top of a CSV file that BigQuery will skip when loading the data. // The default value is 0. This property is useful if you have header rows in the file that should be skipped. // When autodetect is on, the behavior is the following: // skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected, // the row is read as data. Otherwise data is read starting from the second row. // skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. // skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected, // row N is just skipped. Otherwise row N is used to extract column names for the detected schema. SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` // The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". // For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET". // For orc, specify "ORC". 
[Beta] For Bigtable, specify "BIGTABLE". // The default value is CSV. SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format,omitempty"` // The fully-qualified URIs that point to your data in Google Cloud. // For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character // and it must come after the 'bucket' name. Size limits related to load jobs apply // to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be // specified and it has to be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. // For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '*' wildcard character is not allowed. SourceUris []*string `json:"sourceUris,omitempty" tf:"source_uris,omitempty"` // Time-based partitioning specification for the destination table. // Structure is documented below. TimePartitioning *TimePartitioningInitParameters `json:"timePartitioning,omitempty" tf:"time_partitioning,omitempty"` // Specifies the action that occurs if the destination table already exists. The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. // Default value is WRITE_EMPTY. // Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY. WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"` }
func (*LoadInitParameters) DeepCopy ¶
func (in *LoadInitParameters) DeepCopy() *LoadInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadInitParameters.
func (*LoadInitParameters) DeepCopyInto ¶
func (in *LoadInitParameters) DeepCopyInto(out *LoadInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadObservation ¶
type LoadObservation struct { // Accept rows that are missing trailing optional columns. The missing values are treated as nulls. // If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, // an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats. AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows,omitempty"` // Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. // The default value is false. AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines,omitempty"` // Indicates if we should automatically infer the options and schema for CSV and JSON sources. Autodetect *bool `json:"autodetect,omitempty" tf:"autodetect,omitempty"` // Specifies whether the job is allowed to create new tables. The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion // Default value is CREATE_IF_NEEDED. // Possible values are: CREATE_IF_NEEDED, CREATE_NEVER. CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"` // Custom encryption configuration (e.g., Cloud KMS keys) // Structure is documented below. DestinationEncryptionConfiguration *LoadDestinationEncryptionConfigurationObservation `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"` // The destination table to load the data into. // Structure is documented below. DestinationTable *LoadDestinationTableObservation `json:"destinationTable,omitempty" tf:"destination_table,omitempty"` // The character encoding of the data. 
The supported values are UTF-8 or ISO-8859-1. // The default value is UTF-8. BigQuery decodes the data after the raw, binary data // has been split using the values of the quote and fieldDelimiter properties. Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character. // To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts // the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the // data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. // The default value is a comma (','). FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // Indicates if BigQuery should allow extra values that are not represented in the table schema. // If true, the extra values are ignored. If false, records with extra columns are treated as bad records, // and if there are too many bad records, an invalid error is returned in the job result. // The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: // CSV: Trailing columns // JSON: Named values that don't match any column names IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values,omitempty"` // If sourceFormat is set to newline-delimited JSON, indicates whether it should be processed as a JSON variant such as GeoJSON. // For a sourceFormat other than JSON, omit this field. If the sourceFormat is newline-delimited JSON: - for newline-delimited // GeoJSON: set to GEOJSON. JSONExtension *string `json:"jsonExtension,omitempty" tf:"json_extension,omitempty"` // The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value, // an invalid error is returned in the job result. 
The default value is 0, which requires that all records are valid. MaxBadRecords *float64 `json:"maxBadRecords,omitempty" tf:"max_bad_records,omitempty"` // Specifies a string that represents a null value in a CSV file. For example, if you specify "\N", BigQuery interprets "\N" as a null value // when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an // empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as // an empty value. NullMarker *string `json:"nullMarker,omitempty" tf:"null_marker,omitempty"` // Parquet Options for load and make external tables. // Structure is documented below. ParquetOptions *ParquetOptionsObservation `json:"parquetOptions,omitempty" tf:"parquet_options,omitempty"` // If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. // Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. // If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result. ProjectionFields []*string `json:"projectionFields,omitempty" tf:"projection_fields,omitempty"` // The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, // and then uses the first byte of the encoded string to split the data in its raw, binary state. // The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string. // If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true. 
Quote *string `json:"quote,omitempty" tf:"quote,omitempty"` // Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or // supplied in the job configuration. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; // when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. // For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: // ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. // ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. SchemaUpdateOptions []*string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options,omitempty"` // The number of rows at the top of a CSV file that BigQuery will skip when loading the data. // The default value is 0. This property is useful if you have header rows in the file that should be skipped. // When autodetect is on, the behavior is the following: // skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected, // the row is read as data. Otherwise data is read starting from the second row. // skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. // skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected, // row N is just skipped. Otherwise row N is used to extract column names for the detected schema. SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` // The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". // For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET". // For orc, specify "ORC". 
[Beta] For Bigtable, specify "BIGTABLE". // The default value is CSV. SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format,omitempty"` // The fully-qualified URIs that point to your data in Google Cloud. // For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character // and it must come after the 'bucket' name. Size limits related to load jobs apply // to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be // specified and it has be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. // For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '*' wildcard character is not allowed. SourceUris []*string `json:"sourceUris,omitempty" tf:"source_uris,omitempty"` // Time-based partitioning specification for the destination table. // Structure is documented below. TimePartitioning *TimePartitioningObservation `json:"timePartitioning,omitempty" tf:"time_partitioning,omitempty"` // Specifies the action that occurs if the destination table already exists. The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. // Default value is WRITE_EMPTY. // Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY. WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"` }
func (*LoadObservation) DeepCopy ¶
func (in *LoadObservation) DeepCopy() *LoadObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadObservation.
func (*LoadObservation) DeepCopyInto ¶
func (in *LoadObservation) DeepCopyInto(out *LoadObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoadParameters ¶
type LoadParameters struct { // Accept rows that are missing trailing optional columns. The missing values are treated as nulls. // If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, // an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats. // +kubebuilder:validation:Optional AllowJaggedRows *bool `json:"allowJaggedRows,omitempty" tf:"allow_jagged_rows,omitempty"` // Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. // The default value is false. // +kubebuilder:validation:Optional AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty" tf:"allow_quoted_newlines,omitempty"` // Indicates if we should automatically infer the options and schema for CSV and JSON sources. // +kubebuilder:validation:Optional Autodetect *bool `json:"autodetect,omitempty" tf:"autodetect,omitempty"` // Specifies whether the job is allowed to create new tables. The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion // Default value is CREATE_IF_NEEDED. // Possible values are: CREATE_IF_NEEDED, CREATE_NEVER. // +kubebuilder:validation:Optional CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"` // Custom encryption configuration (e.g., Cloud KMS keys) // Structure is documented below. // +kubebuilder:validation:Optional DestinationEncryptionConfiguration *LoadDestinationEncryptionConfigurationParameters `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"` // The destination table to load the data into. // Structure is documented below. 
// +kubebuilder:validation:Optional DestinationTable *LoadDestinationTableParameters `json:"destinationTable" tf:"destination_table,omitempty"` // The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. // The default value is UTF-8. BigQuery decodes the data after the raw, binary data // has been split using the values of the quote and fieldDelimiter properties. // +kubebuilder:validation:Optional Encoding *string `json:"encoding,omitempty" tf:"encoding,omitempty"` // The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character. // To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts // the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the // data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. // The default value is a comma (','). // +kubebuilder:validation:Optional FieldDelimiter *string `json:"fieldDelimiter,omitempty" tf:"field_delimiter,omitempty"` // Indicates if BigQuery should allow extra values that are not represented in the table schema. // If true, the extra values are ignored. If false, records with extra columns are treated as bad records, // and if there are too many bad records, an invalid error is returned in the job result. // The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: // CSV: Trailing columns // JSON: Named values that don't match any column names // +kubebuilder:validation:Optional IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty" tf:"ignore_unknown_values,omitempty"` // If sourceFormat is set to newline-delimited JSON, indicates whether it should be processed as a JSON variant such as GeoJSON. // For a sourceFormat other than JSON, omit this field. If the sourceFormat is newline-delimited JSON: - for newline-delimited // GeoJSON: set to GEOJSON. 
// +kubebuilder:validation:Optional JSONExtension *string `json:"jsonExtension,omitempty" tf:"json_extension,omitempty"` // The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value, // an invalid error is returned in the job result. The default value is 0, which requires that all records are valid. // +kubebuilder:validation:Optional MaxBadRecords *float64 `json:"maxBadRecords,omitempty" tf:"max_bad_records,omitempty"` // Specifies a string that represents a null value in a CSV file. For example, if you specify "\N", BigQuery interprets "\N" as a null value // when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an // empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as // an empty value. // +kubebuilder:validation:Optional NullMarker *string `json:"nullMarker,omitempty" tf:"null_marker,omitempty"` // Parquet Options for load and make external tables. // Structure is documented below. // +kubebuilder:validation:Optional ParquetOptions *ParquetOptionsParameters `json:"parquetOptions,omitempty" tf:"parquet_options,omitempty"` // If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. // Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. // If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result. // +kubebuilder:validation:Optional ProjectionFields []*string `json:"projectionFields,omitempty" tf:"projection_fields,omitempty"` // The value that is used to quote data sections in a CSV file. 
BigQuery converts the string to ISO-8859-1 encoding, // and then uses the first byte of the encoded string to split the data in its raw, binary state. // The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string. // If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true. // +kubebuilder:validation:Optional Quote *string `json:"quote,omitempty" tf:"quote,omitempty"` // Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or // supplied in the job configuration. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; // when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. // For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: // ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. // ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. // +kubebuilder:validation:Optional SchemaUpdateOptions []*string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options,omitempty"` // The number of rows at the top of a CSV file that BigQuery will skip when loading the data. // The default value is 0. This property is useful if you have header rows in the file that should be skipped. // When autodetect is on, the behavior is the following: // skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected, // the row is read as data. Otherwise data is read starting from the second row. // skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. // skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. 
If headers are not detected, // row N is just skipped. Otherwise row N is used to extract column names for the detected schema. // +kubebuilder:validation:Optional SkipLeadingRows *float64 `json:"skipLeadingRows,omitempty" tf:"skip_leading_rows,omitempty"` // The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". // For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET". // For orc, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE". // The default value is CSV. // +kubebuilder:validation:Optional SourceFormat *string `json:"sourceFormat,omitempty" tf:"source_format,omitempty"` // The fully-qualified URIs that point to your data in Google Cloud. // For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character // and it must come after the 'bucket' name. Size limits related to load jobs apply // to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be // specified and it has be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. // For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '*' wildcard character is not allowed. // +kubebuilder:validation:Optional SourceUris []*string `json:"sourceUris" tf:"source_uris,omitempty"` // Time-based partitioning specification for the destination table. // Structure is documented below. // +kubebuilder:validation:Optional TimePartitioning *TimePartitioningParameters `json:"timePartitioning,omitempty" tf:"time_partitioning,omitempty"` // Specifies the action that occurs if the destination table already exists. The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. 
// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. // Default value is WRITE_EMPTY. // Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY. // +kubebuilder:validation:Optional WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"` }
func (*LoadParameters) DeepCopy ¶
func (in *LoadParameters) DeepCopy() *LoadParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadParameters.
func (*LoadParameters) DeepCopyInto ¶
func (in *LoadParameters) DeepCopyInto(out *LoadParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaterializedViewInitParameters ¶
type MaterializedViewInitParameters struct { // Allow non-incremental materialized view definition. // The default value is false. AllowNonIncrementalDefinition *bool `json:"allowNonIncrementalDefinition,omitempty" tf:"allow_non_incremental_definition,omitempty"` // Specifies whether to use BigQuery's automatic refresh for this materialized view when the base table is updated. // The default value is true. EnableRefresh *bool `json:"enableRefresh,omitempty" tf:"enable_refresh,omitempty"` // A query whose result is persisted. Query *string `json:"query,omitempty" tf:"query,omitempty"` // The maximum frequency at which this materialized view will be refreshed, in milliseconds. // The default value is 1800000. RefreshIntervalMs *float64 `json:"refreshIntervalMs,omitempty" tf:"refresh_interval_ms,omitempty"` }
func (*MaterializedViewInitParameters) DeepCopy ¶
func (in *MaterializedViewInitParameters) DeepCopy() *MaterializedViewInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewInitParameters.
func (*MaterializedViewInitParameters) DeepCopyInto ¶
func (in *MaterializedViewInitParameters) DeepCopyInto(out *MaterializedViewInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaterializedViewObservation ¶
type MaterializedViewObservation struct { // Allow non-incremental materialized view definition. // The default value is false. AllowNonIncrementalDefinition *bool `json:"allowNonIncrementalDefinition,omitempty" tf:"allow_non_incremental_definition,omitempty"` // Specifies whether to use BigQuery's automatic refresh for this materialized view when the base table is updated. // The default value is true. EnableRefresh *bool `json:"enableRefresh,omitempty" tf:"enable_refresh,omitempty"` // A query whose result is persisted. Query *string `json:"query,omitempty" tf:"query,omitempty"` // The maximum frequency at which this materialized view will be refreshed, in milliseconds. // The default value is 1800000. RefreshIntervalMs *float64 `json:"refreshIntervalMs,omitempty" tf:"refresh_interval_ms,omitempty"` }
func (*MaterializedViewObservation) DeepCopy ¶
func (in *MaterializedViewObservation) DeepCopy() *MaterializedViewObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewObservation.
func (*MaterializedViewObservation) DeepCopyInto ¶
func (in *MaterializedViewObservation) DeepCopyInto(out *MaterializedViewObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaterializedViewParameters ¶
type MaterializedViewParameters struct { // Allow non-incremental materialized view definition. // The default value is false. // +kubebuilder:validation:Optional AllowNonIncrementalDefinition *bool `json:"allowNonIncrementalDefinition,omitempty" tf:"allow_non_incremental_definition,omitempty"` // Specifies whether to use BigQuery's automatic refresh for this materialized view when the base table is updated. // The default value is true. // +kubebuilder:validation:Optional EnableRefresh *bool `json:"enableRefresh,omitempty" tf:"enable_refresh,omitempty"` // A query whose result is persisted. // +kubebuilder:validation:Optional Query *string `json:"query" tf:"query,omitempty"` // The maximum frequency at which this materialized view will be refreshed, in milliseconds. // The default value is 1800000. // +kubebuilder:validation:Optional RefreshIntervalMs *float64 `json:"refreshIntervalMs,omitempty" tf:"refresh_interval_ms,omitempty"` }
func (*MaterializedViewParameters) DeepCopy ¶
func (in *MaterializedViewParameters) DeepCopy() *MaterializedViewParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewParameters.
func (*MaterializedViewParameters) DeepCopyInto ¶
func (in *MaterializedViewParameters) DeepCopyInto(out *MaterializedViewParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceConfigInitParameters ¶
type MetastoreServiceConfigInitParameters struct { // Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId]. MetastoreService *string `json:"metastoreService,omitempty" tf:"metastore_service,omitempty"` }
func (*MetastoreServiceConfigInitParameters) DeepCopy ¶
func (in *MetastoreServiceConfigInitParameters) DeepCopy() *MetastoreServiceConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceConfigInitParameters.
func (*MetastoreServiceConfigInitParameters) DeepCopyInto ¶
func (in *MetastoreServiceConfigInitParameters) DeepCopyInto(out *MetastoreServiceConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceConfigObservation ¶
type MetastoreServiceConfigObservation struct { // Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId]. MetastoreService *string `json:"metastoreService,omitempty" tf:"metastore_service,omitempty"` }
func (*MetastoreServiceConfigObservation) DeepCopy ¶
func (in *MetastoreServiceConfigObservation) DeepCopy() *MetastoreServiceConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceConfigObservation.
func (*MetastoreServiceConfigObservation) DeepCopyInto ¶
func (in *MetastoreServiceConfigObservation) DeepCopyInto(out *MetastoreServiceConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceConfigParameters ¶
type MetastoreServiceConfigParameters struct { // Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId]. // +kubebuilder:validation:Optional MetastoreService *string `json:"metastoreService,omitempty" tf:"metastore_service,omitempty"` }
func (*MetastoreServiceConfigParameters) DeepCopy ¶
func (in *MetastoreServiceConfigParameters) DeepCopy() *MetastoreServiceConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceConfigParameters.
func (*MetastoreServiceConfigParameters) DeepCopyInto ¶
func (in *MetastoreServiceConfigParameters) DeepCopyInto(out *MetastoreServiceConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParquetOptionsInitParameters ¶
type ParquetOptionsInitParameters struct { // If sourceFormat is set to PARQUET, indicates whether to use schema inference specifically for Parquet LIST logical type. EnableListInference *bool `json:"enableListInference,omitempty" tf:"enable_list_inference,omitempty"` // If sourceFormat is set to PARQUET, indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. EnumAsString *bool `json:"enumAsString,omitempty" tf:"enum_as_string,omitempty"` }
func (*ParquetOptionsInitParameters) DeepCopy ¶
func (in *ParquetOptionsInitParameters) DeepCopy() *ParquetOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetOptionsInitParameters.
func (*ParquetOptionsInitParameters) DeepCopyInto ¶
func (in *ParquetOptionsInitParameters) DeepCopyInto(out *ParquetOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParquetOptionsObservation ¶
type ParquetOptionsObservation struct { // If sourceFormat is set to PARQUET, indicates whether to use schema inference specifically for Parquet LIST logical type. EnableListInference *bool `json:"enableListInference,omitempty" tf:"enable_list_inference,omitempty"` // If sourceFormat is set to PARQUET, indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. EnumAsString *bool `json:"enumAsString,omitempty" tf:"enum_as_string,omitempty"` }
func (*ParquetOptionsObservation) DeepCopy ¶
func (in *ParquetOptionsObservation) DeepCopy() *ParquetOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetOptionsObservation.
func (*ParquetOptionsObservation) DeepCopyInto ¶
func (in *ParquetOptionsObservation) DeepCopyInto(out *ParquetOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParquetOptionsParameters ¶
type ParquetOptionsParameters struct { // If sourceFormat is set to PARQUET, indicates whether to use schema inference specifically for Parquet LIST logical type. // +kubebuilder:validation:Optional EnableListInference *bool `json:"enableListInference,omitempty" tf:"enable_list_inference,omitempty"` // If sourceFormat is set to PARQUET, indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. // +kubebuilder:validation:Optional EnumAsString *bool `json:"enumAsString,omitempty" tf:"enum_as_string,omitempty"` }
func (*ParquetOptionsParameters) DeepCopy ¶
func (in *ParquetOptionsParameters) DeepCopy() *ParquetOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetOptionsParameters.
func (*ParquetOptionsParameters) DeepCopyInto ¶
func (in *ParquetOptionsParameters) DeepCopyInto(out *ParquetOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrimaryKeyInitParameters ¶
type PrimaryKeyInitParameters struct { // The columns that compose the primary key constraint. Columns []*string `json:"columns,omitempty" tf:"columns,omitempty"` }
func (*PrimaryKeyInitParameters) DeepCopy ¶
func (in *PrimaryKeyInitParameters) DeepCopy() *PrimaryKeyInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrimaryKeyInitParameters.
func (*PrimaryKeyInitParameters) DeepCopyInto ¶
func (in *PrimaryKeyInitParameters) DeepCopyInto(out *PrimaryKeyInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrimaryKeyObservation ¶
type PrimaryKeyObservation struct { // The columns that compose the primary key constraint. Columns []*string `json:"columns,omitempty" tf:"columns,omitempty"` }
func (*PrimaryKeyObservation) DeepCopy ¶
func (in *PrimaryKeyObservation) DeepCopy() *PrimaryKeyObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrimaryKeyObservation.
func (*PrimaryKeyObservation) DeepCopyInto ¶
func (in *PrimaryKeyObservation) DeepCopyInto(out *PrimaryKeyObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrimaryKeyParameters ¶
// PrimaryKeyParameters holds the configured primary-key constraint for a
// BigQuery table.
type PrimaryKeyParameters struct {
	// The columns that compose the primary key constraint.
	// NOTE: the json tag omits ",omitempty", so this field is required in the
	// serialized spec despite the Optional marker (standard upjet pattern).
	// +kubebuilder:validation:Optional
	Columns []*string `json:"columns" tf:"columns,omitempty"`
}
func (*PrimaryKeyParameters) DeepCopy ¶
func (in *PrimaryKeyParameters) DeepCopy() *PrimaryKeyParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrimaryKeyParameters.
func (*PrimaryKeyParameters) DeepCopyInto ¶
func (in *PrimaryKeyParameters) DeepCopyInto(out *PrimaryKeyParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PublisherInitParameters ¶
// PublisherInitParameters holds the desired publisher details of an Analytics
// Hub listing, as given at resource initialization.
type PublisherInitParameters struct {
	// Name of the listing publisher.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// Email or URL of the listing publisher.
	PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"`
}
func (*PublisherInitParameters) DeepCopy ¶
func (in *PublisherInitParameters) DeepCopy() *PublisherInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PublisherInitParameters.
func (*PublisherInitParameters) DeepCopyInto ¶
func (in *PublisherInitParameters) DeepCopyInto(out *PublisherInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PublisherObservation ¶
// PublisherObservation reports the observed publisher details of an Analytics
// Hub listing.
type PublisherObservation struct {
	// Name of the listing publisher.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// Email or URL of the listing publisher.
	PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"`
}
func (*PublisherObservation) DeepCopy ¶
func (in *PublisherObservation) DeepCopy() *PublisherObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PublisherObservation.
func (*PublisherObservation) DeepCopyInto ¶
func (in *PublisherObservation) DeepCopyInto(out *PublisherObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PublisherParameters ¶
// PublisherParameters holds the configured publisher details of an Analytics
// Hub listing.
type PublisherParameters struct {
	// Name of the listing publisher.
	// NOTE: the json tag omits ",omitempty", so this field is required in the
	// serialized spec despite the Optional marker (standard upjet pattern).
	// +kubebuilder:validation:Optional
	Name *string `json:"name" tf:"name,omitempty"`

	// Email or URL of the listing publisher.
	// +kubebuilder:validation:Optional
	PrimaryContact *string `json:"primaryContact,omitempty" tf:"primary_contact,omitempty"`
}
func (*PublisherParameters) DeepCopy ¶
func (in *PublisherParameters) DeepCopy() *PublisherParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PublisherParameters.
func (*PublisherParameters) DeepCopyInto ¶
func (in *PublisherParameters) DeepCopyInto(out *PublisherParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryDestinationEncryptionConfigurationInitParameters ¶
// QueryDestinationEncryptionConfigurationInitParameters holds the desired
// customer-managed encryption settings for a query job's destination table.
type QueryDestinationEncryptionConfigurationInitParameters struct {
	// Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"`
}
func (*QueryDestinationEncryptionConfigurationInitParameters) DeepCopy ¶
func (in *QueryDestinationEncryptionConfigurationInitParameters) DeepCopy() *QueryDestinationEncryptionConfigurationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryDestinationEncryptionConfigurationInitParameters.
func (*QueryDestinationEncryptionConfigurationInitParameters) DeepCopyInto ¶
func (in *QueryDestinationEncryptionConfigurationInitParameters) DeepCopyInto(out *QueryDestinationEncryptionConfigurationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryDestinationEncryptionConfigurationObservation ¶
// QueryDestinationEncryptionConfigurationObservation reports the observed
// customer-managed encryption settings of a query job's destination table.
type QueryDestinationEncryptionConfigurationObservation struct {
	// Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"`

	// (Output)
	// Describes the Cloud KMS encryption key version used to protect destination BigQuery table.
	KMSKeyVersion *string `json:"kmsKeyVersion,omitempty" tf:"kms_key_version,omitempty"`
}
func (*QueryDestinationEncryptionConfigurationObservation) DeepCopy ¶
func (in *QueryDestinationEncryptionConfigurationObservation) DeepCopy() *QueryDestinationEncryptionConfigurationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryDestinationEncryptionConfigurationObservation.
func (*QueryDestinationEncryptionConfigurationObservation) DeepCopyInto ¶
func (in *QueryDestinationEncryptionConfigurationObservation) DeepCopyInto(out *QueryDestinationEncryptionConfigurationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryDestinationEncryptionConfigurationParameters ¶
// QueryDestinationEncryptionConfigurationParameters holds the configured
// customer-managed encryption settings for a query job's destination table.
type QueryDestinationEncryptionConfigurationParameters struct {
	// Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	// NOTE: the json tag omits ",omitempty", so this field is required in the
	// serialized spec despite the Optional marker (standard upjet pattern).
	// +kubebuilder:validation:Optional
	KMSKeyName *string `json:"kmsKeyName" tf:"kms_key_name,omitempty"`
}
func (*QueryDestinationEncryptionConfigurationParameters) DeepCopy ¶
func (in *QueryDestinationEncryptionConfigurationParameters) DeepCopy() *QueryDestinationEncryptionConfigurationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryDestinationEncryptionConfigurationParameters.
func (*QueryDestinationEncryptionConfigurationParameters) DeepCopyInto ¶
func (in *QueryDestinationEncryptionConfigurationParameters) DeepCopyInto(out *QueryDestinationEncryptionConfigurationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryDestinationTableInitParameters ¶
// QueryDestinationTableInitParameters identifies the destination table for a
// query job at resource initialization. The dataset and table may be set
// directly or resolved through Crossplane references/selectors.
type QueryDestinationTableInitParameters struct {
	// The ID of the dataset containing this table.
	// +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// Reference to a Dataset in bigquery to populate datasetId.
	// +kubebuilder:validation:Optional
	DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"`

	// Selector for a Dataset in bigquery to populate datasetId.
	// +kubebuilder:validation:Optional
	DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	// +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table
	// +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID()
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`

	// Reference to a Table in bigquery to populate tableId.
	// +kubebuilder:validation:Optional
	TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"`

	// Selector for a Table in bigquery to populate tableId.
	// +kubebuilder:validation:Optional
	TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"`
}
func (*QueryDestinationTableInitParameters) DeepCopy ¶
func (in *QueryDestinationTableInitParameters) DeepCopy() *QueryDestinationTableInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryDestinationTableInitParameters.
func (*QueryDestinationTableInitParameters) DeepCopyInto ¶
func (in *QueryDestinationTableInitParameters) DeepCopyInto(out *QueryDestinationTableInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryDestinationTableObservation ¶
// QueryDestinationTableObservation reports the observed destination table of a
// query job.
type QueryDestinationTableObservation struct {
	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`
}
func (*QueryDestinationTableObservation) DeepCopy ¶
func (in *QueryDestinationTableObservation) DeepCopy() *QueryDestinationTableObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryDestinationTableObservation.
func (*QueryDestinationTableObservation) DeepCopyInto ¶
func (in *QueryDestinationTableObservation) DeepCopyInto(out *QueryDestinationTableObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryDestinationTableParameters ¶
// QueryDestinationTableParameters identifies the configured destination table
// for a query job. The dataset and table may be set directly or resolved
// through Crossplane references/selectors.
type QueryDestinationTableParameters struct {
	// The ID of the dataset containing this table.
	// +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset
	// +kubebuilder:validation:Optional
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// Reference to a Dataset in bigquery to populate datasetId.
	// +kubebuilder:validation:Optional
	DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"`

	// Selector for a Dataset in bigquery to populate datasetId.
	// +kubebuilder:validation:Optional
	DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"`

	// The ID of the project containing this table.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	// +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table
	// +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID()
	// +kubebuilder:validation:Optional
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`

	// Reference to a Table in bigquery to populate tableId.
	// +kubebuilder:validation:Optional
	TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"`

	// Selector for a Table in bigquery to populate tableId.
	// +kubebuilder:validation:Optional
	TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"`
}
func (*QueryDestinationTableParameters) DeepCopy ¶
func (in *QueryDestinationTableParameters) DeepCopy() *QueryDestinationTableParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryDestinationTableParameters.
func (*QueryDestinationTableParameters) DeepCopyInto ¶
func (in *QueryDestinationTableParameters) DeepCopyInto(out *QueryDestinationTableParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryInitParameters ¶
// QueryInitParameters holds the desired configuration of a BigQuery query job,
// as given at resource initialization. It mirrors the query section of the
// BigQuery Jobs API jobConfiguration.
type QueryInitParameters struct {
	// If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance.
	// Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed.
	// However, you must still set destinationTable when result size exceeds the allowed maximum response size.
	AllowLargeResults *bool `json:"allowLargeResults,omitempty" tf:"allow_large_results,omitempty"`

	// Specifies whether the job is allowed to create new tables. The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion
	// Default value is CREATE_IF_NEEDED.
	// Possible values are: CREATE_IF_NEEDED, CREATE_NEVER.
	CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"`

	// Specifies the default dataset to use for unqualified table names in the query. Note that this does not alter behavior of unqualified dataset names.
	// Structure is documented below.
	DefaultDataset *DefaultDatasetInitParameters `json:"defaultDataset,omitempty" tf:"default_dataset,omitempty"`

	// Custom encryption configuration (e.g., Cloud KMS keys)
	// Structure is documented below.
	DestinationEncryptionConfiguration *QueryDestinationEncryptionConfigurationInitParameters `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"`

	// Describes the table where the query results should be stored.
	// This property must be set for large results that exceed the maximum response size.
	// For queries that produce anonymous (cached) results, this field will be populated by BigQuery.
	// Structure is documented below.
	DestinationTable *QueryDestinationTableInitParameters `json:"destinationTable,omitempty" tf:"destination_table,omitempty"`

	// If true and query uses legacy SQL dialect, flattens all nested and repeated fields in the query results.
	// allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened.
	FlattenResults *bool `json:"flattenResults,omitempty" tf:"flatten_results,omitempty"`

	// Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	MaximumBillingTier *float64 `json:"maximumBillingTier,omitempty" tf:"maximum_billing_tier,omitempty"`

	// Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	MaximumBytesBilled *string `json:"maximumBytesBilled,omitempty" tf:"maximum_bytes_billed,omitempty"`

	// Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query.
	ParameterMode *string `json:"parameterMode,omitempty" tf:"parameter_mode,omitempty"`

	// Specifies a priority for the query.
	// Default value is INTERACTIVE.
	// Possible values are: INTERACTIVE, BATCH.
	Priority *string `json:"priority,omitempty" tf:"priority,omitempty"`

	// SQL query text to execute. The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL.
	// NOTE: queries containing DML language
	// (DELETE, UPDATE, MERGE, INSERT) must specify create_disposition = "" and write_disposition = "".
	Query *string `json:"query,omitempty" tf:"query,omitempty"`

	// Allows the schema of the destination table to be updated as a side effect of the query job.
	// Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
	// specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema.
	// One or more of the following values are specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	SchemaUpdateOptions []*string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options,omitempty"`

	// Options controlling the execution of scripts.
	// Structure is documented below.
	ScriptOptions *ScriptOptionsInitParameters `json:"scriptOptions,omitempty" tf:"script_options,omitempty"`

	// Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true.
	// If set to false, the query will use BigQuery's standard SQL.
	UseLegacySQL *bool `json:"useLegacySql,omitempty" tf:"use_legacy_sql,omitempty"`

	// Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever
	// tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified.
	// The default value is true.
	UseQueryCache *bool `json:"useQueryCache,omitempty" tf:"use_query_cache,omitempty"`

	// Describes user-defined function resources used in the query.
	// Structure is documented below.
	UserDefinedFunctionResources []UserDefinedFunctionResourcesInitParameters `json:"userDefinedFunctionResources,omitempty" tf:"user_defined_function_resources,omitempty"`

	// Specifies the action that occurs if the destination table already exists. The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion.
	// Default value is WRITE_EMPTY.
	// Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY.
	WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"`
}
func (*QueryInitParameters) DeepCopy ¶
func (in *QueryInitParameters) DeepCopy() *QueryInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryInitParameters.
func (*QueryInitParameters) DeepCopyInto ¶
func (in *QueryInitParameters) DeepCopyInto(out *QueryInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryObservation ¶
// QueryObservation reports the observed configuration of a BigQuery query job.
// It mirrors the query section of the BigQuery Jobs API jobConfiguration.
type QueryObservation struct {
	// If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance.
	// Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed.
	// However, you must still set destinationTable when result size exceeds the allowed maximum response size.
	AllowLargeResults *bool `json:"allowLargeResults,omitempty" tf:"allow_large_results,omitempty"`

	// Specifies whether the job is allowed to create new tables. The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion
	// Default value is CREATE_IF_NEEDED.
	// Possible values are: CREATE_IF_NEEDED, CREATE_NEVER.
	CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"`

	// Specifies the default dataset to use for unqualified table names in the query. Note that this does not alter behavior of unqualified dataset names.
	// Structure is documented below.
	DefaultDataset *DefaultDatasetObservation `json:"defaultDataset,omitempty" tf:"default_dataset,omitempty"`

	// Custom encryption configuration (e.g., Cloud KMS keys)
	// Structure is documented below.
	DestinationEncryptionConfiguration *QueryDestinationEncryptionConfigurationObservation `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"`

	// Describes the table where the query results should be stored.
	// This property must be set for large results that exceed the maximum response size.
	// For queries that produce anonymous (cached) results, this field will be populated by BigQuery.
	// Structure is documented below.
	DestinationTable *QueryDestinationTableObservation `json:"destinationTable,omitempty" tf:"destination_table,omitempty"`

	// If true and query uses legacy SQL dialect, flattens all nested and repeated fields in the query results.
	// allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened.
	FlattenResults *bool `json:"flattenResults,omitempty" tf:"flatten_results,omitempty"`

	// Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	MaximumBillingTier *float64 `json:"maximumBillingTier,omitempty" tf:"maximum_billing_tier,omitempty"`

	// Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	MaximumBytesBilled *string `json:"maximumBytesBilled,omitempty" tf:"maximum_bytes_billed,omitempty"`

	// Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query.
	ParameterMode *string `json:"parameterMode,omitempty" tf:"parameter_mode,omitempty"`

	// Specifies a priority for the query.
	// Default value is INTERACTIVE.
	// Possible values are: INTERACTIVE, BATCH.
	Priority *string `json:"priority,omitempty" tf:"priority,omitempty"`

	// SQL query text to execute. The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL.
	// NOTE: queries containing DML language
	// (DELETE, UPDATE, MERGE, INSERT) must specify create_disposition = "" and write_disposition = "".
	Query *string `json:"query,omitempty" tf:"query,omitempty"`

	// Allows the schema of the destination table to be updated as a side effect of the query job.
	// Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
	// specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema.
	// One or more of the following values are specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	SchemaUpdateOptions []*string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options,omitempty"`

	// Options controlling the execution of scripts.
	// Structure is documented below.
	ScriptOptions *ScriptOptionsObservation `json:"scriptOptions,omitempty" tf:"script_options,omitempty"`

	// Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true.
	// If set to false, the query will use BigQuery's standard SQL.
	UseLegacySQL *bool `json:"useLegacySql,omitempty" tf:"use_legacy_sql,omitempty"`

	// Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever
	// tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified.
	// The default value is true.
	UseQueryCache *bool `json:"useQueryCache,omitempty" tf:"use_query_cache,omitempty"`

	// Describes user-defined function resources used in the query.
	// Structure is documented below.
	UserDefinedFunctionResources []UserDefinedFunctionResourcesObservation `json:"userDefinedFunctionResources,omitempty" tf:"user_defined_function_resources,omitempty"`

	// Specifies the action that occurs if the destination table already exists. The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion.
	// Default value is WRITE_EMPTY.
	// Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY.
	WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"`
}
func (*QueryObservation) DeepCopy ¶
func (in *QueryObservation) DeepCopy() *QueryObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryObservation.
func (*QueryObservation) DeepCopyInto ¶
func (in *QueryObservation) DeepCopyInto(out *QueryObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryParameters ¶
// QueryParameters holds the configured settings of a BigQuery query job.
// It mirrors the query section of the BigQuery Jobs API jobConfiguration.
type QueryParameters struct {
	// If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance.
	// Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed.
	// However, you must still set destinationTable when result size exceeds the allowed maximum response size.
	// +kubebuilder:validation:Optional
	AllowLargeResults *bool `json:"allowLargeResults,omitempty" tf:"allow_large_results,omitempty"`

	// Specifies whether the job is allowed to create new tables. The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion
	// Default value is CREATE_IF_NEEDED.
	// Possible values are: CREATE_IF_NEEDED, CREATE_NEVER.
	// +kubebuilder:validation:Optional
	CreateDisposition *string `json:"createDisposition,omitempty" tf:"create_disposition,omitempty"`

	// Specifies the default dataset to use for unqualified table names in the query. Note that this does not alter behavior of unqualified dataset names.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	DefaultDataset *DefaultDatasetParameters `json:"defaultDataset,omitempty" tf:"default_dataset,omitempty"`

	// Custom encryption configuration (e.g., Cloud KMS keys)
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	DestinationEncryptionConfiguration *QueryDestinationEncryptionConfigurationParameters `json:"destinationEncryptionConfiguration,omitempty" tf:"destination_encryption_configuration,omitempty"`

	// Describes the table where the query results should be stored.
	// This property must be set for large results that exceed the maximum response size.
	// For queries that produce anonymous (cached) results, this field will be populated by BigQuery.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	DestinationTable *QueryDestinationTableParameters `json:"destinationTable,omitempty" tf:"destination_table,omitempty"`

	// If true and query uses legacy SQL dialect, flattens all nested and repeated fields in the query results.
	// allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened.
	// +kubebuilder:validation:Optional
	FlattenResults *bool `json:"flattenResults,omitempty" tf:"flatten_results,omitempty"`

	// Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	// +kubebuilder:validation:Optional
	MaximumBillingTier *float64 `json:"maximumBillingTier,omitempty" tf:"maximum_billing_tier,omitempty"`

	// Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	// +kubebuilder:validation:Optional
	MaximumBytesBilled *string `json:"maximumBytesBilled,omitempty" tf:"maximum_bytes_billed,omitempty"`

	// Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query.
	// +kubebuilder:validation:Optional
	ParameterMode *string `json:"parameterMode,omitempty" tf:"parameter_mode,omitempty"`

	// Specifies a priority for the query.
	// Default value is INTERACTIVE.
	// Possible values are: INTERACTIVE, BATCH.
	// +kubebuilder:validation:Optional
	Priority *string `json:"priority,omitempty" tf:"priority,omitempty"`

	// SQL query text to execute. The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL.
	// NOTE: queries containing DML language
	// (DELETE, UPDATE, MERGE, INSERT) must specify create_disposition = "" and write_disposition = "".
	// NOTE: the json tag omits ",omitempty", so this field is required in the
	// serialized spec despite the Optional marker (standard upjet pattern).
	// +kubebuilder:validation:Optional
	Query *string `json:"query" tf:"query,omitempty"`

	// Allows the schema of the destination table to be updated as a side effect of the query job.
	// Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
	// specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema.
	// One or more of the following values are specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	// +kubebuilder:validation:Optional
	SchemaUpdateOptions []*string `json:"schemaUpdateOptions,omitempty" tf:"schema_update_options,omitempty"`

	// Options controlling the execution of scripts.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	ScriptOptions *ScriptOptionsParameters `json:"scriptOptions,omitempty" tf:"script_options,omitempty"`

	// Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true.
	// If set to false, the query will use BigQuery's standard SQL.
	// +kubebuilder:validation:Optional
	UseLegacySQL *bool `json:"useLegacySql,omitempty" tf:"use_legacy_sql,omitempty"`

	// Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever
	// tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified.
	// The default value is true.
	// +kubebuilder:validation:Optional
	UseQueryCache *bool `json:"useQueryCache,omitempty" tf:"use_query_cache,omitempty"`

	// Describes user-defined function resources used in the query.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	UserDefinedFunctionResources []UserDefinedFunctionResourcesParameters `json:"userDefinedFunctionResources,omitempty" tf:"user_defined_function_resources,omitempty"`

	// Specifies the action that occurs if the destination table already exists. The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion.
	// Default value is WRITE_EMPTY.
	// Possible values are: WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY.
	// +kubebuilder:validation:Optional
	WriteDisposition *string `json:"writeDisposition,omitempty" tf:"write_disposition,omitempty"`
}
func (*QueryParameters) DeepCopy ¶
func (in *QueryParameters) DeepCopy() *QueryParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameters.
func (*QueryParameters) DeepCopyInto ¶
func (in *QueryParameters) DeepCopyInto(out *QueryParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RangeInitParameters ¶
type RangeInitParameters struct { // End of the range partitioning, exclusive. End *float64 `json:"end,omitempty" tf:"end,omitempty"` // The width of each range within the partition. Interval *float64 `json:"interval,omitempty" tf:"interval,omitempty"` // Start of the range partitioning, inclusive. Start *float64 `json:"start,omitempty" tf:"start,omitempty"` }
func (*RangeInitParameters) DeepCopy ¶
func (in *RangeInitParameters) DeepCopy() *RangeInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangeInitParameters.
func (*RangeInitParameters) DeepCopyInto ¶
func (in *RangeInitParameters) DeepCopyInto(out *RangeInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RangeObservation ¶
type RangeObservation struct { // End of the range partitioning, exclusive. End *float64 `json:"end,omitempty" tf:"end,omitempty"` // The width of each range within the partition. Interval *float64 `json:"interval,omitempty" tf:"interval,omitempty"` // Start of the range partitioning, inclusive. Start *float64 `json:"start,omitempty" tf:"start,omitempty"` }
func (*RangeObservation) DeepCopy ¶
func (in *RangeObservation) DeepCopy() *RangeObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangeObservation.
func (*RangeObservation) DeepCopyInto ¶
func (in *RangeObservation) DeepCopyInto(out *RangeObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RangeParameters ¶
type RangeParameters struct { // End of the range partitioning, exclusive. // +kubebuilder:validation:Optional End *float64 `json:"end" tf:"end,omitempty"` // The width of each range within the partition. // +kubebuilder:validation:Optional Interval *float64 `json:"interval" tf:"interval,omitempty"` // Start of the range partitioning, inclusive. // +kubebuilder:validation:Optional Start *float64 `json:"start" tf:"start,omitempty"` }
func (*RangeParameters) DeepCopy ¶
func (in *RangeParameters) DeepCopy() *RangeParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangeParameters.
func (*RangeParameters) DeepCopyInto ¶
func (in *RangeParameters) DeepCopyInto(out *RangeParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RangePartitioningInitParameters ¶
type RangePartitioningInitParameters struct { // The field used to determine how to create a range-based // partition. Field *string `json:"field,omitempty" tf:"field,omitempty"` // Information required to partition based on ranges. // Structure is documented below. Range *RangeInitParameters `json:"range,omitempty" tf:"range,omitempty"` }
func (*RangePartitioningInitParameters) DeepCopy ¶
func (in *RangePartitioningInitParameters) DeepCopy() *RangePartitioningInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioningInitParameters.
func (*RangePartitioningInitParameters) DeepCopyInto ¶
func (in *RangePartitioningInitParameters) DeepCopyInto(out *RangePartitioningInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RangePartitioningObservation ¶
type RangePartitioningObservation struct { // The field used to determine how to create a range-based // partition. Field *string `json:"field,omitempty" tf:"field,omitempty"` // Information required to partition based on ranges. // Structure is documented below. Range *RangeObservation `json:"range,omitempty" tf:"range,omitempty"` }
func (*RangePartitioningObservation) DeepCopy ¶
func (in *RangePartitioningObservation) DeepCopy() *RangePartitioningObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioningObservation.
func (*RangePartitioningObservation) DeepCopyInto ¶
func (in *RangePartitioningObservation) DeepCopyInto(out *RangePartitioningObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RangePartitioningParameters ¶
type RangePartitioningParameters struct { // The field used to determine how to create a range-based // partition. // +kubebuilder:validation:Optional Field *string `json:"field" tf:"field,omitempty"` // Information required to partition based on ranges. // Structure is documented below. // +kubebuilder:validation:Optional Range *RangeParameters `json:"range" tf:"range,omitempty"` }
func (*RangePartitioningParameters) DeepCopy ¶
func (in *RangePartitioningParameters) DeepCopy() *RangePartitioningParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioningParameters.
func (*RangePartitioningParameters) DeepCopyInto ¶
func (in *RangePartitioningParameters) DeepCopyInto(out *RangePartitioningParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferencedTableInitParameters ¶
type ReferencedTableInitParameters struct { // : The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // A unique ID for the resource. // Changing this forces a new resource to be created. TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*ReferencedTableInitParameters) DeepCopy ¶
func (in *ReferencedTableInitParameters) DeepCopy() *ReferencedTableInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferencedTableInitParameters.
func (*ReferencedTableInitParameters) DeepCopyInto ¶
func (in *ReferencedTableInitParameters) DeepCopyInto(out *ReferencedTableInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferencedTableObservation ¶
type ReferencedTableObservation struct { // : The ID of the dataset containing this table. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // : The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // A unique ID for the resource. // Changing this forces a new resource to be created. TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*ReferencedTableObservation) DeepCopy ¶
func (in *ReferencedTableObservation) DeepCopy() *ReferencedTableObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferencedTableObservation.
func (*ReferencedTableObservation) DeepCopyInto ¶
func (in *ReferencedTableObservation) DeepCopyInto(out *ReferencedTableObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferencedTableParameters ¶
type ReferencedTableParameters struct { // : The ID of the dataset containing this table. // +kubebuilder:validation:Required DatasetID *string `json:"datasetId" tf:"dataset_id,omitempty"` // : The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId" tf:"project_id,omitempty"` // A unique ID for the resource. // Changing this forces a new resource to be created. // +kubebuilder:validation:Optional TableID *string `json:"tableId" tf:"table_id,omitempty"` }
func (*ReferencedTableParameters) DeepCopy ¶
func (in *ReferencedTableParameters) DeepCopy() *ReferencedTableParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferencedTableParameters.
func (*ReferencedTableParameters) DeepCopyInto ¶
func (in *ReferencedTableParameters) DeepCopyInto(out *ReferencedTableParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RemoteFunctionOptionsInitParameters ¶
type RemoteFunctionOptionsInitParameters struct { // Fully qualified name of the user-provided connection object which holds // the authentication information to send requests to the remote service. // Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Connection // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("name",true) Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // Reference to a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionRef *v1.Reference `json:"connectionRef,omitempty" tf:"-"` // Selector for a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionSelector *v1.Selector `json:"connectionSelector,omitempty" tf:"-"` // Endpoint of the user-provided remote service, e.g. // https://us-east1-my_gcf_project.cloudfunctions.net/remote_add Endpoint *string `json:"endpoint,omitempty" tf:"endpoint,omitempty"` // Max number of rows in each batch sent to the remote service. If absent or if 0, // BigQuery dynamically decides the number of rows in a batch. MaxBatchingRows *string `json:"maxBatchingRows,omitempty" tf:"max_batching_rows,omitempty"` // User-defined context as a set of key/value pairs, which will be sent as function // invocation context together with batched arguments in the requests to the remote // service. The total number of bytes of keys and values must be less than 8KB. // An object containing a list of "key": value pairs. Example: // { "name": "wrench", "mass": "1.3kg", "count": "3" }. // +mapType=granular UserDefinedContext map[string]*string `json:"userDefinedContext,omitempty" tf:"user_defined_context,omitempty"` }
func (*RemoteFunctionOptionsInitParameters) DeepCopy ¶
func (in *RemoteFunctionOptionsInitParameters) DeepCopy() *RemoteFunctionOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RemoteFunctionOptionsInitParameters.
func (*RemoteFunctionOptionsInitParameters) DeepCopyInto ¶
func (in *RemoteFunctionOptionsInitParameters) DeepCopyInto(out *RemoteFunctionOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RemoteFunctionOptionsObservation ¶
type RemoteFunctionOptionsObservation struct { // Fully qualified name of the user-provided connection object which holds // the authentication information to send requests to the remote service. // Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // Endpoint of the user-provided remote service, e.g. // https://us-east1-my_gcf_project.cloudfunctions.net/remote_add Endpoint *string `json:"endpoint,omitempty" tf:"endpoint,omitempty"` // Max number of rows in each batch sent to the remote service. If absent or if 0, // BigQuery dynamically decides the number of rows in a batch. MaxBatchingRows *string `json:"maxBatchingRows,omitempty" tf:"max_batching_rows,omitempty"` // User-defined context as a set of key/value pairs, which will be sent as function // invocation context together with batched arguments in the requests to the remote // service. The total number of bytes of keys and values must be less than 8KB. // An object containing a list of "key": value pairs. Example: // { "name": "wrench", "mass": "1.3kg", "count": "3" }. // +mapType=granular UserDefinedContext map[string]*string `json:"userDefinedContext,omitempty" tf:"user_defined_context,omitempty"` }
func (*RemoteFunctionOptionsObservation) DeepCopy ¶
func (in *RemoteFunctionOptionsObservation) DeepCopy() *RemoteFunctionOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RemoteFunctionOptionsObservation.
func (*RemoteFunctionOptionsObservation) DeepCopyInto ¶
func (in *RemoteFunctionOptionsObservation) DeepCopyInto(out *RemoteFunctionOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RemoteFunctionOptionsParameters ¶
type RemoteFunctionOptionsParameters struct { // Fully qualified name of the user-provided connection object which holds // the authentication information to send requests to the remote service. // Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Connection // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("name",true) // +kubebuilder:validation:Optional Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // Reference to a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionRef *v1.Reference `json:"connectionRef,omitempty" tf:"-"` // Selector for a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionSelector *v1.Selector `json:"connectionSelector,omitempty" tf:"-"` // Endpoint of the user-provided remote service, e.g. // https://us-east1-my_gcf_project.cloudfunctions.net/remote_add // +kubebuilder:validation:Optional Endpoint *string `json:"endpoint,omitempty" tf:"endpoint,omitempty"` // Max number of rows in each batch sent to the remote service. If absent or if 0, // BigQuery dynamically decides the number of rows in a batch. // +kubebuilder:validation:Optional MaxBatchingRows *string `json:"maxBatchingRows,omitempty" tf:"max_batching_rows,omitempty"` // User-defined context as a set of key/value pairs, which will be sent as function // invocation context together with batched arguments in the requests to the remote // service. The total number of bytes of keys and values must be less than 8KB. // An object containing a list of "key": value pairs. Example: // { "name": "wrench", "mass": "1.3kg", "count": "3" }. // +kubebuilder:validation:Optional // +mapType=granular UserDefinedContext map[string]*string `json:"userDefinedContext,omitempty" tf:"user_defined_context,omitempty"` }
func (*RemoteFunctionOptionsParameters) DeepCopy ¶
func (in *RemoteFunctionOptionsParameters) DeepCopy() *RemoteFunctionOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RemoteFunctionOptionsParameters.
func (*RemoteFunctionOptionsParameters) DeepCopyInto ¶
func (in *RemoteFunctionOptionsParameters) DeepCopyInto(out *RemoteFunctionOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Reservation ¶
type Reservation struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.slotCapacity) || (has(self.initProvider) && has(self.initProvider.slotCapacity))",message="spec.forProvider.slotCapacity is a required parameter" Spec ReservationSpec `json:"spec"` Status ReservationStatus `json:"status,omitempty"` }
Reservation is the Schema for the Reservations API. A reservation is a mechanism used to guarantee BigQuery slots to users. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Reservation) DeepCopy ¶
func (in *Reservation) DeepCopy() *Reservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Reservation.
func (*Reservation) DeepCopyInto ¶
func (in *Reservation) DeepCopyInto(out *Reservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Reservation) DeepCopyObject ¶
func (in *Reservation) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Reservation) GetCondition ¶
func (mg *Reservation) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Reservation.
func (*Reservation) GetConnectionDetailsMapping ¶
func (tr *Reservation) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this Reservation
func (*Reservation) GetDeletionPolicy ¶
func (mg *Reservation) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Reservation.
func (*Reservation) GetID ¶
func (tr *Reservation) GetID() string
GetID returns ID of underlying Terraform resource of this Reservation
func (*Reservation) GetInitParameters ¶
func (tr *Reservation) GetInitParameters() (map[string]any, error)
GetInitParameters of this Reservation
func (*Reservation) GetManagementPolicies ¶
func (mg *Reservation) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Reservation.
func (*Reservation) GetMergedParameters ¶
func (tr *Reservation) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this Reservation, merging the init parameters into the spec parameters when shouldMergeInitProvider is true
func (*Reservation) GetObservation ¶
func (tr *Reservation) GetObservation() (map[string]any, error)
GetObservation of this Reservation
func (*Reservation) GetParameters ¶
func (tr *Reservation) GetParameters() (map[string]any, error)
GetParameters of this Reservation
func (*Reservation) GetProviderConfigReference ¶
func (mg *Reservation) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this Reservation.
func (*Reservation) GetPublishConnectionDetailsTo ¶
func (mg *Reservation) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Reservation.
func (*Reservation) GetTerraformResourceType ¶
func (mg *Reservation) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this Reservation
func (*Reservation) GetTerraformSchemaVersion ¶
func (tr *Reservation) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Reservation) GetWriteConnectionSecretToReference ¶
func (mg *Reservation) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Reservation.
func (*Reservation) LateInitialize ¶
func (tr *Reservation) LateInitialize(attrs []byte) (bool, error)
LateInitialize this Reservation using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Reservation) SetConditions ¶
func (mg *Reservation) SetConditions(c ...xpv1.Condition)
SetConditions of this Reservation.
func (*Reservation) SetDeletionPolicy ¶
func (mg *Reservation) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Reservation.
func (*Reservation) SetManagementPolicies ¶
func (mg *Reservation) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Reservation.
func (*Reservation) SetObservation ¶
func (tr *Reservation) SetObservation(obs map[string]any) error
SetObservation for this Reservation
func (*Reservation) SetParameters ¶
func (tr *Reservation) SetParameters(params map[string]any) error
SetParameters for this Reservation
func (*Reservation) SetProviderConfigReference ¶
func (mg *Reservation) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this Reservation.
func (*Reservation) SetPublishConnectionDetailsTo ¶
func (mg *Reservation) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Reservation.
func (*Reservation) SetWriteConnectionSecretToReference ¶
func (mg *Reservation) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Reservation.
type ReservationInitParameters ¶
type ReservationInitParameters struct { // The configuration parameters for the auto scaling feature. // Structure is documented below. Autoscale *AutoscaleInitParameters `json:"autoscale,omitempty" tf:"autoscale,omitempty"` // Maximum number of queries that are allowed to run concurrently in this reservation. This is a soft limit due to asynchronous nature of the system and various optimizations for small queries. Default value is 0 which means that concurrency will be automatically set based on the reservation size. Concurrency *float64 `json:"concurrency,omitempty" tf:"concurrency,omitempty"` // The edition type. Valid values are STANDARD, ENTERPRISE, ENTERPRISE_PLUS Edition *string `json:"edition,omitempty" tf:"edition,omitempty"` // If false, any query using this reservation will use idle slots from other reservations within // the same admin project. If true, a query using this reservation will execute with the slot // capacity specified above at most. IgnoreIdleSlots *bool `json:"ignoreIdleSlots,omitempty" tf:"ignore_idle_slots,omitempty"` // Applicable only for reservations located within one of the BigQuery multi-regions (US or EU). // If set to true, this reservation is placed in the organization's secondary region which is designated for disaster recovery purposes. If false, this reservation is placed in the organization's default region. MultiRegionAuxiliary *bool `json:"multiRegionAuxiliary,omitempty" tf:"multi_region_auxiliary,omitempty"` // Minimum slots available to this reservation. A slot is a unit of computational power in BigQuery, and serves as the // unit of parallelism. Queries using this reservation might use more slots during runtime if ignoreIdleSlots is set to false. SlotCapacity *float64 `json:"slotCapacity,omitempty" tf:"slot_capacity,omitempty"` }
func (*ReservationInitParameters) DeepCopy ¶
func (in *ReservationInitParameters) DeepCopy() *ReservationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationInitParameters.
func (*ReservationInitParameters) DeepCopyInto ¶
func (in *ReservationInitParameters) DeepCopyInto(out *ReservationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationList ¶
type ReservationList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Reservation `json:"items"` }
ReservationList contains a list of Reservations
func (*ReservationList) DeepCopy ¶
func (in *ReservationList) DeepCopy() *ReservationList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationList.
func (*ReservationList) DeepCopyInto ¶
func (in *ReservationList) DeepCopyInto(out *ReservationList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ReservationList) DeepCopyObject ¶
func (in *ReservationList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*ReservationList) GetItems ¶
func (l *ReservationList) GetItems() []resource.Managed
GetItems of this ReservationList.
type ReservationObservation ¶
type ReservationObservation struct { // The configuration parameters for the auto scaling feature. // Structure is documented below. Autoscale *AutoscaleObservation `json:"autoscale,omitempty" tf:"autoscale,omitempty"` // Maximum number of queries that are allowed to run concurrently in this reservation. This is a soft limit due to asynchronous nature of the system and various optimizations for small queries. Default value is 0 which means that concurrency will be automatically set based on the reservation size. Concurrency *float64 `json:"concurrency,omitempty" tf:"concurrency,omitempty"` // The edition type. Valid values are STANDARD, ENTERPRISE, ENTERPRISE_PLUS Edition *string `json:"edition,omitempty" tf:"edition,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/reservations/{{name}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // If false, any query using this reservation will use idle slots from other reservations within // the same admin project. If true, a query using this reservation will execute with the slot // capacity specified above at most. IgnoreIdleSlots *bool `json:"ignoreIdleSlots,omitempty" tf:"ignore_idle_slots,omitempty"` // The geographic location where the transfer config should reside. // Examples: US, EU, asia-northeast1. The default value is US. Location *string `json:"location,omitempty" tf:"location,omitempty"` // Applicable only for reservations located within one of the BigQuery multi-regions (US or EU). // If set to true, this reservation is placed in the organization's secondary region which is designated for disaster recovery purposes. If false, this reservation is placed in the organization's default region. MultiRegionAuxiliary *bool `json:"multiRegionAuxiliary,omitempty" tf:"multi_region_auxiliary,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. 
Project *string `json:"project,omitempty" tf:"project,omitempty"` // Minimum slots available to this reservation. A slot is a unit of computational power in BigQuery, and serves as the // unit of parallelism. Queries using this reservation might use more slots during runtime if ignoreIdleSlots is set to false. SlotCapacity *float64 `json:"slotCapacity,omitempty" tf:"slot_capacity,omitempty"` }
func (*ReservationObservation) DeepCopy ¶
func (in *ReservationObservation) DeepCopy() *ReservationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationObservation.
func (*ReservationObservation) DeepCopyInto ¶
func (in *ReservationObservation) DeepCopyInto(out *ReservationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationParameters ¶
type ReservationParameters struct { // The configuration parameters for the auto scaling feature. // Structure is documented below. // +kubebuilder:validation:Optional Autoscale *AutoscaleParameters `json:"autoscale,omitempty" tf:"autoscale,omitempty"` // Maximum number of queries that are allowed to run concurrently in this reservation. This is a soft limit due to asynchronous nature of the system and various optimizations for small queries. Default value is 0 which means that concurrency will be automatically set based on the reservation size. // +kubebuilder:validation:Optional Concurrency *float64 `json:"concurrency,omitempty" tf:"concurrency,omitempty"` // The edition type. Valid values are STANDARD, ENTERPRISE, ENTERPRISE_PLUS // +kubebuilder:validation:Optional Edition *string `json:"edition,omitempty" tf:"edition,omitempty"` // If false, any query using this reservation will use idle slots from other reservations within // the same admin project. If true, a query using this reservation will execute with the slot // capacity specified above at most. // +kubebuilder:validation:Optional IgnoreIdleSlots *bool `json:"ignoreIdleSlots,omitempty" tf:"ignore_idle_slots,omitempty"` // The geographic location where the transfer config should reside. // Examples: US, EU, asia-northeast1. The default value is US. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // Applicable only for reservations located within one of the BigQuery multi-regions (US or EU). // If set to true, this reservation is placed in the organization's secondary region which is designated for disaster recovery purposes. If false, this reservation is placed in the organization's default region. // +kubebuilder:validation:Optional MultiRegionAuxiliary *bool `json:"multiRegionAuxiliary,omitempty" tf:"multi_region_auxiliary,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. 
// +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Minimum slots available to this reservation. A slot is a unit of computational power in BigQuery, and serves as the // unit of parallelism. Queries using this reservation might use more slots during runtime if ignoreIdleSlots is set to false. // +kubebuilder:validation:Optional SlotCapacity *float64 `json:"slotCapacity,omitempty" tf:"slot_capacity,omitempty"` }
func (*ReservationParameters) DeepCopy ¶
func (in *ReservationParameters) DeepCopy() *ReservationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationParameters.
func (*ReservationParameters) DeepCopyInto ¶
func (in *ReservationParameters) DeepCopyInto(out *ReservationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationSpec ¶
type ReservationSpec struct { v1.ResourceSpec `json:",inline"` ForProvider ReservationParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider ReservationInitParameters `json:"initProvider,omitempty"` }
ReservationSpec defines the desired state of Reservation
func (*ReservationSpec) DeepCopy ¶
func (in *ReservationSpec) DeepCopy() *ReservationSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationSpec.
func (*ReservationSpec) DeepCopyInto ¶
func (in *ReservationSpec) DeepCopyInto(out *ReservationSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationStatus ¶
type ReservationStatus struct { v1.ResourceStatus `json:",inline"` AtProvider ReservationObservation `json:"atProvider,omitempty"` }
ReservationStatus defines the observed state of Reservation.
func (*ReservationStatus) DeepCopy ¶
func (in *ReservationStatus) DeepCopy() *ReservationStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationStatus.
func (*ReservationStatus) DeepCopyInto ¶
func (in *ReservationStatus) DeepCopyInto(out *ReservationStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RestrictedExportConfigInitParameters ¶
type RestrictedExportConfigInitParameters struct { // If true, enable restricted export. Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` // If true, restrict export of query result derived from restricted linked dataset table. RestrictQueryResult *bool `json:"restrictQueryResult,omitempty" tf:"restrict_query_result,omitempty"` }
func (*RestrictedExportConfigInitParameters) DeepCopy ¶
func (in *RestrictedExportConfigInitParameters) DeepCopy() *RestrictedExportConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictedExportConfigInitParameters.
func (*RestrictedExportConfigInitParameters) DeepCopyInto ¶
func (in *RestrictedExportConfigInitParameters) DeepCopyInto(out *RestrictedExportConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RestrictedExportConfigObservation ¶
// RestrictedExportConfigObservation records the observed restricted export
// configuration of a linked dataset.
type RestrictedExportConfigObservation struct {
	// If true, enable restricted export.
	Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"`

	// If true, restrict export of query result derived from restricted linked dataset table.
	RestrictQueryResult *bool `json:"restrictQueryResult,omitempty" tf:"restrict_query_result,omitempty"`
}
func (*RestrictedExportConfigObservation) DeepCopy ¶
func (in *RestrictedExportConfigObservation) DeepCopy() *RestrictedExportConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictedExportConfigObservation.
func (*RestrictedExportConfigObservation) DeepCopyInto ¶
func (in *RestrictedExportConfigObservation) DeepCopyInto(out *RestrictedExportConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RestrictedExportConfigParameters ¶
// RestrictedExportConfigParameters holds the desired restricted export
// configuration of a linked dataset.
type RestrictedExportConfigParameters struct {
	// If true, enable restricted export.
	// +kubebuilder:validation:Optional
	Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"`

	// If true, restrict export of query result derived from restricted linked dataset table.
	// +kubebuilder:validation:Optional
	RestrictQueryResult *bool `json:"restrictQueryResult,omitempty" tf:"restrict_query_result,omitempty"`
}
func (*RestrictedExportConfigParameters) DeepCopy ¶
func (in *RestrictedExportConfigParameters) DeepCopy() *RestrictedExportConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictedExportConfigParameters.
func (*RestrictedExportConfigParameters) DeepCopyInto ¶
func (in *RestrictedExportConfigParameters) DeepCopyInto(out *RestrictedExportConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Routine ¶
type Routine struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.definitionBody) || (has(self.initProvider) && has(self.initProvider.definitionBody))",message="spec.forProvider.definitionBody is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.routineType) || (has(self.initProvider) && has(self.initProvider.routineType))",message="spec.forProvider.routineType is a required parameter" Spec RoutineSpec `json:"spec"` Status RoutineStatus `json:"status,omitempty"` }
Routine is the Schema for the Routines API. A user-defined function or a stored procedure that belongs to a Dataset +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Routine) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine.
func (*Routine) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Routine) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Routine) GetCondition ¶
func (mg *Routine) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Routine.
func (*Routine) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Routine
func (*Routine) GetDeletionPolicy ¶
func (mg *Routine) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Routine.
func (*Routine) GetInitParameters ¶
GetInitParameters of this Routine
func (*Routine) GetManagementPolicies ¶
func (mg *Routine) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Routine.
func (*Routine) GetMergedParameters ¶
GetMergedParameters of this Routine
func (*Routine) GetObservation ¶
GetObservation of this Routine
func (*Routine) GetParameters ¶
GetParameters of this Routine
func (*Routine) GetProviderConfigReference ¶
GetProviderConfigReference of this Routine.
func (*Routine) GetPublishConnectionDetailsTo ¶
func (mg *Routine) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Routine.
func (*Routine) GetTerraformResourceType ¶
GetTerraformResourceType returns Terraform resource type for this Routine
func (*Routine) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Routine) GetWriteConnectionSecretToReference ¶
func (mg *Routine) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Routine.
func (*Routine) LateInitialize ¶
LateInitialize this Routine using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Routine) ResolveReferences ¶
ResolveReferences of this Routine.
func (*Routine) SetConditions ¶
SetConditions of this Routine.
func (*Routine) SetDeletionPolicy ¶
func (mg *Routine) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Routine.
func (*Routine) SetManagementPolicies ¶
func (mg *Routine) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Routine.
func (*Routine) SetObservation ¶
SetObservation for this Routine
func (*Routine) SetParameters ¶
SetParameters for this Routine
func (*Routine) SetProviderConfigReference ¶
SetProviderConfigReference of this Routine.
func (*Routine) SetPublishConnectionDetailsTo ¶
func (mg *Routine) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Routine.
func (*Routine) SetWriteConnectionSecretToReference ¶
func (mg *Routine) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Routine.
type RoutineInitParameters ¶
type RoutineInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("dataset_id",false) DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("project",false) ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // Reference to a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDRef *v1.Reference `json:"projectIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDSelector *v1.Selector `json:"projectIdSelector,omitempty" tf:"-"` // The ID of the routine. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 256 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine RoutineID *string `json:"routineId,omitempty" tf:"routine_id,omitempty"` // Reference to a Routine in bigquery to populate routineId. // +kubebuilder:validation:Optional RoutineIDRef *v1.Reference `json:"routineIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate routineId. 
// +kubebuilder:validation:Optional RoutineIDSelector *v1.Selector `json:"routineIdSelector,omitempty" tf:"-"` }
func (*RoutineInitParameters) DeepCopy ¶
func (in *RoutineInitParameters) DeepCopy() *RoutineInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineInitParameters.
func (*RoutineInitParameters) DeepCopyInto ¶
func (in *RoutineInitParameters) DeepCopyInto(out *RoutineInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineInitParameters_2 ¶
type RoutineInitParameters_2 struct { // Input/output argument of a function or a stored procedure. // Structure is documented below. Arguments []ArgumentsInitParameters `json:"arguments,omitempty" tf:"arguments,omitempty"` // If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask // Possible values are: DATA_MASKING. DataGovernanceType *string `json:"dataGovernanceType,omitempty" tf:"data_governance_type,omitempty"` // The body of the routine. For functions, this is the expression in the AS clause. // If language=SQL, it is the substring inside (but excluding) the parentheses. DefinitionBody *string `json:"definitionBody,omitempty" tf:"definition_body,omitempty"` // The description of the routine if defined. Description *string `json:"description,omitempty" tf:"description,omitempty"` // The determinism level of the JavaScript UDF if defined. // Possible values are: DETERMINISM_LEVEL_UNSPECIFIED, DETERMINISTIC, NOT_DETERMINISTIC. DeterminismLevel *string `json:"determinismLevel,omitempty" tf:"determinism_level,omitempty"` // Optional. If language = "JAVASCRIPT", this field stores the path of the // imported JAVASCRIPT libraries. ImportedLibraries []*string `json:"importedLibraries,omitempty" tf:"imported_libraries,omitempty"` // The language of the routine. // Possible values are: SQL, JAVASCRIPT, PYTHON, JAVA, SCALA. Language *string `json:"language,omitempty" tf:"language,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Remote function specific options. // Structure is documented below. RemoteFunctionOptions *RemoteFunctionOptionsInitParameters `json:"remoteFunctionOptions,omitempty" tf:"remote_function_options,omitempty"` // Optional. 
Can be set only if routineType = "TABLE_VALUED_FUNCTION". // If absent, the return table type is inferred from definitionBody at query time in each query // that references this routine. If present, then the columns in the evaluated table result will // be cast to match the column types specificed in return table type, at query time. ReturnTableType *string `json:"returnTableType,omitempty" tf:"return_table_type,omitempty"` // A JSON schema for the return type. Optional if language = "SQL"; required otherwise. // If absent, the return type is inferred from definitionBody at query time in each query // that references this routine. If present, then the evaluated result will be cast to // the specified returned type at query time. ~>NOTE: Because this field expects a JSON // string, any changes to the string will create a diff, even if the JSON itself hasn't // changed. If the API returns a different value for the same schema, e.g. it switche // d the order of values or replaced STRUCT field type with RECORD field type, we currently // cannot suppress the recurring diff this causes. As a workaround, we recommend using // the schema as returned by the API. ReturnType *string `json:"returnType,omitempty" tf:"return_type,omitempty"` // The type of routine. // Possible values are: SCALAR_FUNCTION, PROCEDURE, TABLE_VALUED_FUNCTION. RoutineType *string `json:"routineType,omitempty" tf:"routine_type,omitempty"` // Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for spark stored procedure. // Structure is documented below. SparkOptions *SparkOptionsInitParameters `json:"sparkOptions,omitempty" tf:"spark_options,omitempty"` }
func (*RoutineInitParameters_2) DeepCopy ¶
func (in *RoutineInitParameters_2) DeepCopy() *RoutineInitParameters_2
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineInitParameters_2.
func (*RoutineInitParameters_2) DeepCopyInto ¶
func (in *RoutineInitParameters_2) DeepCopyInto(out *RoutineInitParameters_2)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineList ¶
type RoutineList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Routine `json:"items"` }
RoutineList contains a list of Routines
func (*RoutineList) DeepCopy ¶
func (in *RoutineList) DeepCopy() *RoutineList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineList.
func (*RoutineList) DeepCopyInto ¶
func (in *RoutineList) DeepCopyInto(out *RoutineList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*RoutineList) DeepCopyObject ¶
func (in *RoutineList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*RoutineList) GetItems ¶
func (l *RoutineList) GetItems() []resource.Managed
GetItems of this RoutineList.
type RoutineObservation ¶
// RoutineObservation records the observed identifiers of a Routine.
type RoutineObservation struct {
	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The ID of the routine. The ID must contain only letters (a-z,
	// A-Z), numbers (0-9), or underscores (_). The maximum length
	// is 256 characters.
	RoutineID *string `json:"routineId,omitempty" tf:"routine_id,omitempty"`
}
func (*RoutineObservation) DeepCopy ¶
func (in *RoutineObservation) DeepCopy() *RoutineObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineObservation.
func (*RoutineObservation) DeepCopyInto ¶
func (in *RoutineObservation) DeepCopyInto(out *RoutineObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineObservation_2 ¶
type RoutineObservation_2 struct { // Input/output argument of a function or a stored procedure. // Structure is documented below. Arguments []ArgumentsObservation `json:"arguments,omitempty" tf:"arguments,omitempty"` // The time when this routine was created, in milliseconds since the // epoch. CreationTime *float64 `json:"creationTime,omitempty" tf:"creation_time,omitempty"` // If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask // Possible values are: DATA_MASKING. DataGovernanceType *string `json:"dataGovernanceType,omitempty" tf:"data_governance_type,omitempty"` // The ID of the dataset containing this routine DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // The body of the routine. For functions, this is the expression in the AS clause. // If language=SQL, it is the substring inside (but excluding) the parentheses. DefinitionBody *string `json:"definitionBody,omitempty" tf:"definition_body,omitempty"` // The description of the routine if defined. Description *string `json:"description,omitempty" tf:"description,omitempty"` // The determinism level of the JavaScript UDF if defined. // Possible values are: DETERMINISM_LEVEL_UNSPECIFIED, DETERMINISTIC, NOT_DETERMINISTIC. DeterminismLevel *string `json:"determinismLevel,omitempty" tf:"determinism_level,omitempty"` // an identifier for the resource with format projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // Optional. If language = "JAVASCRIPT", this field stores the path of the // imported JAVASCRIPT libraries. ImportedLibraries []*string `json:"importedLibraries,omitempty" tf:"imported_libraries,omitempty"` // The language of the routine. // Possible values are: SQL, JAVASCRIPT, PYTHON, JAVA, SCALA. 
Language *string `json:"language,omitempty" tf:"language,omitempty"` // The time when this routine was modified, in milliseconds since the // epoch. LastModifiedTime *float64 `json:"lastModifiedTime,omitempty" tf:"last_modified_time,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Remote function specific options. // Structure is documented below. RemoteFunctionOptions *RemoteFunctionOptionsObservation `json:"remoteFunctionOptions,omitempty" tf:"remote_function_options,omitempty"` // Optional. Can be set only if routineType = "TABLE_VALUED_FUNCTION". // If absent, the return table type is inferred from definitionBody at query time in each query // that references this routine. If present, then the columns in the evaluated table result will // be cast to match the column types specificed in return table type, at query time. ReturnTableType *string `json:"returnTableType,omitempty" tf:"return_table_type,omitempty"` // A JSON schema for the return type. Optional if language = "SQL"; required otherwise. // If absent, the return type is inferred from definitionBody at query time in each query // that references this routine. If present, then the evaluated result will be cast to // the specified returned type at query time. ~>NOTE: Because this field expects a JSON // string, any changes to the string will create a diff, even if the JSON itself hasn't // changed. If the API returns a different value for the same schema, e.g. it switche // d the order of values or replaced STRUCT field type with RECORD field type, we currently // cannot suppress the recurring diff this causes. As a workaround, we recommend using // the schema as returned by the API. ReturnType *string `json:"returnType,omitempty" tf:"return_type,omitempty"` // The type of routine. // Possible values are: SCALAR_FUNCTION, PROCEDURE, TABLE_VALUED_FUNCTION. 
RoutineType *string `json:"routineType,omitempty" tf:"routine_type,omitempty"` // Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for spark stored procedure. // Structure is documented below. SparkOptions *SparkOptionsObservation `json:"sparkOptions,omitempty" tf:"spark_options,omitempty"` }
func (*RoutineObservation_2) DeepCopy ¶
func (in *RoutineObservation_2) DeepCopy() *RoutineObservation_2
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineObservation_2.
func (*RoutineObservation_2) DeepCopyInto ¶
func (in *RoutineObservation_2) DeepCopyInto(out *RoutineObservation_2)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineParameters ¶
type RoutineParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("dataset_id",false) // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("project",false) // +kubebuilder:validation:Optional ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // Reference to a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDRef *v1.Reference `json:"projectIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate projectId. // +kubebuilder:validation:Optional ProjectIDSelector *v1.Selector `json:"projectIdSelector,omitempty" tf:"-"` // The ID of the routine. The ID must contain only letters (a-z, // A-Z), numbers (0-9), or underscores (_). The maximum length // is 256 characters. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Routine // +kubebuilder:validation:Optional RoutineID *string `json:"routineId,omitempty" tf:"routine_id,omitempty"` // Reference to a Routine in bigquery to populate routineId. 
// +kubebuilder:validation:Optional RoutineIDRef *v1.Reference `json:"routineIdRef,omitempty" tf:"-"` // Selector for a Routine in bigquery to populate routineId. // +kubebuilder:validation:Optional RoutineIDSelector *v1.Selector `json:"routineIdSelector,omitempty" tf:"-"` }
func (*RoutineParameters) DeepCopy ¶
func (in *RoutineParameters) DeepCopy() *RoutineParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineParameters.
func (*RoutineParameters) DeepCopyInto ¶
func (in *RoutineParameters) DeepCopyInto(out *RoutineParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineParameters_2 ¶
type RoutineParameters_2 struct { // Input/output argument of a function or a stored procedure. // Structure is documented below. // +kubebuilder:validation:Optional Arguments []ArgumentsParameters `json:"arguments,omitempty" tf:"arguments,omitempty"` // If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask // Possible values are: DATA_MASKING. // +kubebuilder:validation:Optional DataGovernanceType *string `json:"dataGovernanceType,omitempty" tf:"data_governance_type,omitempty"` // The ID of the dataset containing this routine // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The body of the routine. For functions, this is the expression in the AS clause. // If language=SQL, it is the substring inside (but excluding) the parentheses. // +kubebuilder:validation:Optional DefinitionBody *string `json:"definitionBody,omitempty" tf:"definition_body,omitempty"` // The description of the routine if defined. // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // The determinism level of the JavaScript UDF if defined. // Possible values are: DETERMINISM_LEVEL_UNSPECIFIED, DETERMINISTIC, NOT_DETERMINISTIC. // +kubebuilder:validation:Optional DeterminismLevel *string `json:"determinismLevel,omitempty" tf:"determinism_level,omitempty"` // Optional. 
If language = "JAVASCRIPT", this field stores the path of the // imported JAVASCRIPT libraries. // +kubebuilder:validation:Optional ImportedLibraries []*string `json:"importedLibraries,omitempty" tf:"imported_libraries,omitempty"` // The language of the routine. // Possible values are: SQL, JAVASCRIPT, PYTHON, JAVA, SCALA. // +kubebuilder:validation:Optional Language *string `json:"language,omitempty" tf:"language,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Remote function specific options. // Structure is documented below. // +kubebuilder:validation:Optional RemoteFunctionOptions *RemoteFunctionOptionsParameters `json:"remoteFunctionOptions,omitempty" tf:"remote_function_options,omitempty"` // Optional. Can be set only if routineType = "TABLE_VALUED_FUNCTION". // If absent, the return table type is inferred from definitionBody at query time in each query // that references this routine. If present, then the columns in the evaluated table result will // be cast to match the column types specificed in return table type, at query time. // +kubebuilder:validation:Optional ReturnTableType *string `json:"returnTableType,omitempty" tf:"return_table_type,omitempty"` // A JSON schema for the return type. Optional if language = "SQL"; required otherwise. // If absent, the return type is inferred from definitionBody at query time in each query // that references this routine. If present, then the evaluated result will be cast to // the specified returned type at query time. ~>NOTE: Because this field expects a JSON // string, any changes to the string will create a diff, even if the JSON itself hasn't // changed. If the API returns a different value for the same schema, e.g. 
it switche // d the order of values or replaced STRUCT field type with RECORD field type, we currently // cannot suppress the recurring diff this causes. As a workaround, we recommend using // the schema as returned by the API. // +kubebuilder:validation:Optional ReturnType *string `json:"returnType,omitempty" tf:"return_type,omitempty"` // The type of routine. // Possible values are: SCALAR_FUNCTION, PROCEDURE, TABLE_VALUED_FUNCTION. // +kubebuilder:validation:Optional RoutineType *string `json:"routineType,omitempty" tf:"routine_type,omitempty"` // Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for spark stored procedure. // Structure is documented below. // +kubebuilder:validation:Optional SparkOptions *SparkOptionsParameters `json:"sparkOptions,omitempty" tf:"spark_options,omitempty"` }
func (*RoutineParameters_2) DeepCopy ¶
func (in *RoutineParameters_2) DeepCopy() *RoutineParameters_2
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineParameters_2.
func (*RoutineParameters_2) DeepCopyInto ¶
func (in *RoutineParameters_2) DeepCopyInto(out *RoutineParameters_2)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineSpec ¶
type RoutineSpec struct { v1.ResourceSpec `json:",inline"` ForProvider RoutineParameters_2 `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider RoutineInitParameters_2 `json:"initProvider,omitempty"` }
RoutineSpec defines the desired state of Routine
func (*RoutineSpec) DeepCopy ¶
func (in *RoutineSpec) DeepCopy() *RoutineSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineSpec.
func (*RoutineSpec) DeepCopyInto ¶
func (in *RoutineSpec) DeepCopyInto(out *RoutineSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineStatus ¶
type RoutineStatus struct { v1.ResourceStatus `json:",inline"` AtProvider RoutineObservation_2 `json:"atProvider,omitempty"` }
RoutineStatus defines the observed state of Routine.
func (*RoutineStatus) DeepCopy ¶
func (in *RoutineStatus) DeepCopy() *RoutineStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineStatus.
func (*RoutineStatus) DeepCopyInto ¶
func (in *RoutineStatus) DeepCopyInto(out *RoutineStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScheduleOptionsInitParameters ¶
// ScheduleOptionsInitParameters holds the init-provider scheduling options
// of a data transfer configuration.
type ScheduleOptionsInitParameters struct {
	// If true, automatic scheduling of data transfer runs for this
	// configuration will be disabled. The runs can be started on ad-hoc
	// basis using transferConfigs.startManualRuns API. When automatic
	// scheduling is disabled, the TransferConfig.schedule field will
	// be ignored.
	DisableAutoScheduling *bool `json:"disableAutoScheduling,omitempty" tf:"disable_auto_scheduling,omitempty"`

	// Defines time to stop scheduling transfer runs. A transfer run cannot be
	// scheduled at or after the end time. The end time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	EndTime *string `json:"endTime,omitempty" tf:"end_time,omitempty"`

	// Specifies time to start scheduling transfer runs. The first run will be
	// scheduled at or after the start time according to a recurrence pattern
	// defined in the schedule string. The start time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	StartTime *string `json:"startTime,omitempty" tf:"start_time,omitempty"`
}
func (*ScheduleOptionsInitParameters) DeepCopy ¶
func (in *ScheduleOptionsInitParameters) DeepCopy() *ScheduleOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduleOptionsInitParameters.
func (*ScheduleOptionsInitParameters) DeepCopyInto ¶
func (in *ScheduleOptionsInitParameters) DeepCopyInto(out *ScheduleOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScheduleOptionsObservation ¶
// ScheduleOptionsObservation records the observed scheduling options of a
// data transfer configuration.
type ScheduleOptionsObservation struct {
	// If true, automatic scheduling of data transfer runs for this
	// configuration will be disabled. The runs can be started on ad-hoc
	// basis using transferConfigs.startManualRuns API. When automatic
	// scheduling is disabled, the TransferConfig.schedule field will
	// be ignored.
	DisableAutoScheduling *bool `json:"disableAutoScheduling,omitempty" tf:"disable_auto_scheduling,omitempty"`

	// Defines time to stop scheduling transfer runs. A transfer run cannot be
	// scheduled at or after the end time. The end time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	EndTime *string `json:"endTime,omitempty" tf:"end_time,omitempty"`

	// Specifies time to start scheduling transfer runs. The first run will be
	// scheduled at or after the start time according to a recurrence pattern
	// defined in the schedule string. The start time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	StartTime *string `json:"startTime,omitempty" tf:"start_time,omitempty"`
}
func (*ScheduleOptionsObservation) DeepCopy ¶
func (in *ScheduleOptionsObservation) DeepCopy() *ScheduleOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduleOptionsObservation.
func (*ScheduleOptionsObservation) DeepCopyInto ¶
func (in *ScheduleOptionsObservation) DeepCopyInto(out *ScheduleOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScheduleOptionsParameters ¶
// ScheduleOptionsParameters defines the configurable scheduling options of a
// BigQuery data transfer configuration.
type ScheduleOptionsParameters struct {

	// If true, automatic scheduling of data transfer runs for this
	// configuration will be disabled. The runs can be started on ad-hoc
	// basis using transferConfigs.startManualRuns API. When automatic
	// scheduling is disabled, the TransferConfig.schedule field will
	// be ignored.
	// +kubebuilder:validation:Optional
	DisableAutoScheduling *bool `json:"disableAutoScheduling,omitempty" tf:"disable_auto_scheduling,omitempty"`

	// Defines time to stop scheduling transfer runs. A transfer run cannot be
	// scheduled at or after the end time. The end time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	// +kubebuilder:validation:Optional
	EndTime *string `json:"endTime,omitempty" tf:"end_time,omitempty"`

	// Specifies time to start scheduling transfer runs. The first run will be
	// scheduled at or after the start time according to a recurrence pattern
	// defined in the schedule string. The start time can be changed at any
	// moment. The time when a data transfer can be triggered manually is not
	// limited by this option.
	// +kubebuilder:validation:Optional
	StartTime *string `json:"startTime,omitempty" tf:"start_time,omitempty"`
}
func (*ScheduleOptionsParameters) DeepCopy ¶
func (in *ScheduleOptionsParameters) DeepCopy() *ScheduleOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduleOptionsParameters.
func (*ScheduleOptionsParameters) DeepCopyInto ¶
func (in *ScheduleOptionsParameters) DeepCopyInto(out *ScheduleOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScriptOptionsInitParameters ¶
// ScriptOptionsInitParameters defines the desired options for a BigQuery
// script job.
type ScriptOptionsInitParameters struct {

	// Determines which statement in the script represents the "key result",
	// used to populate the schema and query results of the script job.
	// Possible values are: LAST, FIRST_SELECT.
	KeyResultStatement *string `json:"keyResultStatement,omitempty" tf:"key_result_statement,omitempty"`

	// Limit on the number of bytes billed per statement. Exceeding this budget results in an error.
	StatementByteBudget *string `json:"statementByteBudget,omitempty" tf:"statement_byte_budget,omitempty"`

	// Timeout period for each statement in a script.
	StatementTimeoutMs *string `json:"statementTimeoutMs,omitempty" tf:"statement_timeout_ms,omitempty"`
}
func (*ScriptOptionsInitParameters) DeepCopy ¶
func (in *ScriptOptionsInitParameters) DeepCopy() *ScriptOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptOptionsInitParameters.
func (*ScriptOptionsInitParameters) DeepCopyInto ¶
func (in *ScriptOptionsInitParameters) DeepCopyInto(out *ScriptOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScriptOptionsObservation ¶
// ScriptOptionsObservation records the observed options of a BigQuery script
// job.
type ScriptOptionsObservation struct {

	// Determines which statement in the script represents the "key result",
	// used to populate the schema and query results of the script job.
	// Possible values are: LAST, FIRST_SELECT.
	KeyResultStatement *string `json:"keyResultStatement,omitempty" tf:"key_result_statement,omitempty"`

	// Limit on the number of bytes billed per statement. Exceeding this budget results in an error.
	StatementByteBudget *string `json:"statementByteBudget,omitempty" tf:"statement_byte_budget,omitempty"`

	// Timeout period for each statement in a script.
	StatementTimeoutMs *string `json:"statementTimeoutMs,omitempty" tf:"statement_timeout_ms,omitempty"`
}
func (*ScriptOptionsObservation) DeepCopy ¶
func (in *ScriptOptionsObservation) DeepCopy() *ScriptOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptOptionsObservation.
func (*ScriptOptionsObservation) DeepCopyInto ¶
func (in *ScriptOptionsObservation) DeepCopyInto(out *ScriptOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScriptOptionsParameters ¶
// ScriptOptionsParameters defines the configurable options of a BigQuery
// script job.
type ScriptOptionsParameters struct {

	// Determines which statement in the script represents the "key result",
	// used to populate the schema and query results of the script job.
	// Possible values are: LAST, FIRST_SELECT.
	// +kubebuilder:validation:Optional
	KeyResultStatement *string `json:"keyResultStatement,omitempty" tf:"key_result_statement,omitempty"`

	// Limit on the number of bytes billed per statement. Exceeding this budget results in an error.
	// +kubebuilder:validation:Optional
	StatementByteBudget *string `json:"statementByteBudget,omitempty" tf:"statement_byte_budget,omitempty"`

	// Timeout period for each statement in a script.
	// +kubebuilder:validation:Optional
	StatementTimeoutMs *string `json:"statementTimeoutMs,omitempty" tf:"statement_timeout_ms,omitempty"`
}
func (*ScriptOptionsParameters) DeepCopy ¶
func (in *ScriptOptionsParameters) DeepCopy() *ScriptOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptOptionsParameters.
func (*ScriptOptionsParameters) DeepCopyInto ¶
func (in *ScriptOptionsParameters) DeepCopyInto(out *ScriptOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SensitiveParamsInitParameters ¶
type SensitiveParamsInitParameters struct { // The Secret Access Key of the AWS account transferring data from. // Note: This property is sensitive and will not be displayed in the plan. SecretAccessKeySecretRef v1.SecretKeySelector `json:"secretAccessKeySecretRef" tf:"-"` }
func (*SensitiveParamsInitParameters) DeepCopy ¶
func (in *SensitiveParamsInitParameters) DeepCopy() *SensitiveParamsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SensitiveParamsInitParameters.
func (*SensitiveParamsInitParameters) DeepCopyInto ¶
func (in *SensitiveParamsInitParameters) DeepCopyInto(out *SensitiveParamsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SensitiveParamsObservation ¶
// SensitiveParamsObservation is intentionally empty: sensitive parameters are
// never echoed back into observed state.
type SensitiveParamsObservation struct {
}
func (*SensitiveParamsObservation) DeepCopy ¶
func (in *SensitiveParamsObservation) DeepCopy() *SensitiveParamsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SensitiveParamsObservation.
func (*SensitiveParamsObservation) DeepCopyInto ¶
func (in *SensitiveParamsObservation) DeepCopyInto(out *SensitiveParamsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SensitiveParamsParameters ¶
type SensitiveParamsParameters struct { // The Secret Access Key of the AWS account transferring data from. // Note: This property is sensitive and will not be displayed in the plan. // +kubebuilder:validation:Optional SecretAccessKeySecretRef v1.SecretKeySelector `json:"secretAccessKeySecretRef" tf:"-"` }
func (*SensitiveParamsParameters) DeepCopy ¶
func (in *SensitiveParamsParameters) DeepCopy() *SensitiveParamsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SensitiveParamsParameters.
func (*SensitiveParamsParameters) DeepCopyInto ¶
func (in *SensitiveParamsParameters) DeepCopyInto(out *SensitiveParamsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceModelInitParameters ¶
// SourceModelInitParameters identifies the BigQuery ML model a job reads
// from.
type SourceModelInitParameters struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the model.
	ModelID *string `json:"modelId,omitempty" tf:"model_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`
}
func (*SourceModelInitParameters) DeepCopy ¶
func (in *SourceModelInitParameters) DeepCopy() *SourceModelInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceModelInitParameters.
func (*SourceModelInitParameters) DeepCopyInto ¶
func (in *SourceModelInitParameters) DeepCopyInto(out *SourceModelInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceModelObservation ¶
// SourceModelObservation records the observed identity of the BigQuery ML
// model a job reads from.
type SourceModelObservation struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the model.
	ModelID *string `json:"modelId,omitempty" tf:"model_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`
}
func (*SourceModelObservation) DeepCopy ¶
func (in *SourceModelObservation) DeepCopy() *SourceModelObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceModelObservation.
func (*SourceModelObservation) DeepCopyInto ¶
func (in *SourceModelObservation) DeepCopyInto(out *SourceModelObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceModelParameters ¶
// SourceModelParameters identifies the BigQuery ML model a job reads from.
// Note: the JSON tags here deliberately lack omitempty — these fields are
// required at the Terraform level even though the CRD marks them Optional.
type SourceModelParameters struct {

	// The ID of the dataset containing this table.
	// +kubebuilder:validation:Optional
	DatasetID *string `json:"datasetId" tf:"dataset_id,omitempty"`

	// The ID of the model.
	// +kubebuilder:validation:Optional
	ModelID *string `json:"modelId" tf:"model_id,omitempty"`

	// The ID of the project containing this table.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId" tf:"project_id,omitempty"`
}
func (*SourceModelParameters) DeepCopy ¶
func (in *SourceModelParameters) DeepCopy() *SourceModelParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceModelParameters.
func (*SourceModelParameters) DeepCopyInto ¶
func (in *SourceModelParameters) DeepCopyInto(out *SourceModelParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceTableInitParameters ¶
type SourceTableInitParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*SourceTableInitParameters) DeepCopy ¶
func (in *SourceTableInitParameters) DeepCopy() *SourceTableInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceTableInitParameters.
func (*SourceTableInitParameters) DeepCopyInto ¶
func (in *SourceTableInitParameters) DeepCopyInto(out *SourceTableInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceTableObservation ¶
// SourceTableObservation records the observed identity of the BigQuery table
// a job reads from.
type SourceTableObservation struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`
}
func (*SourceTableObservation) DeepCopy ¶
func (in *SourceTableObservation) DeepCopy() *SourceTableObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceTableObservation.
func (*SourceTableObservation) DeepCopyInto ¶
func (in *SourceTableObservation) DeepCopyInto(out *SourceTableObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceTableParameters ¶
type SourceTableParameters struct { // The ID of the dataset containing this table. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // The ID of the project containing this table. // +kubebuilder:validation:Optional ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"` // The table. Can be specified {{table_id}} if project_id and dataset_id are also set, // or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*SourceTableParameters) DeepCopy ¶
func (in *SourceTableParameters) DeepCopy() *SourceTableParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceTableParameters.
func (*SourceTableParameters) DeepCopyInto ¶
func (in *SourceTableParameters) DeepCopyInto(out *SourceTableParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceTablesInitParameters ¶
// SourceTablesInitParameters identifies one of the BigQuery tables a copy job
// reads from.
type SourceTablesInitParameters struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`
}
func (*SourceTablesInitParameters) DeepCopy ¶
func (in *SourceTablesInitParameters) DeepCopy() *SourceTablesInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceTablesInitParameters.
func (*SourceTablesInitParameters) DeepCopyInto ¶
func (in *SourceTablesInitParameters) DeepCopyInto(out *SourceTablesInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceTablesObservation ¶
// SourceTablesObservation records the observed identity of one of the
// BigQuery tables a copy job reads from.
type SourceTablesObservation struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`
}
func (*SourceTablesObservation) DeepCopy ¶
func (in *SourceTablesObservation) DeepCopy() *SourceTablesObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceTablesObservation.
func (*SourceTablesObservation) DeepCopyInto ¶
func (in *SourceTablesObservation) DeepCopyInto(out *SourceTablesObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SourceTablesParameters ¶
// SourceTablesParameters identifies one of the BigQuery tables a copy job
// reads from. TableID's JSON tag deliberately lacks omitempty — it is
// required at the Terraform level even though the CRD marks it Optional.
type SourceTablesParameters struct {

	// The ID of the dataset containing this table.
	// +kubebuilder:validation:Optional
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The table. Can be specified {{table_id}} if project_id and dataset_id are also set,
	// or of the form projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}} if not.
	// +kubebuilder:validation:Optional
	TableID *string `json:"tableId" tf:"table_id,omitempty"`
}
func (*SourceTablesParameters) DeepCopy ¶
func (in *SourceTablesParameters) DeepCopy() *SourceTablesParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SourceTablesParameters.
func (*SourceTablesParameters) DeepCopyInto ¶
func (in *SourceTablesParameters) DeepCopyInto(out *SourceTablesParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigInitParameters ¶
type SparkHistoryServerConfigInitParameters struct { // Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection if the form of projects/[projectId]/regions/[region]/clusters/[cluster_name]. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta2.Cluster // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"` // Reference to a Cluster in dataproc to populate dataprocCluster. // +kubebuilder:validation:Optional DataprocClusterRef *v1.Reference `json:"dataprocClusterRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate dataprocCluster. // +kubebuilder:validation:Optional DataprocClusterSelector *v1.Selector `json:"dataprocClusterSelector,omitempty" tf:"-"` }
func (*SparkHistoryServerConfigInitParameters) DeepCopy ¶
func (in *SparkHistoryServerConfigInitParameters) DeepCopy() *SparkHistoryServerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigInitParameters.
func (*SparkHistoryServerConfigInitParameters) DeepCopyInto ¶
func (in *SparkHistoryServerConfigInitParameters) DeepCopyInto(out *SparkHistoryServerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigObservation ¶
// SparkHistoryServerConfigObservation records the observed Spark History
// Server configuration of a BigQuery Spark connection.
type SparkHistoryServerConfigObservation struct {

	// Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection if the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
	DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"`
}
func (*SparkHistoryServerConfigObservation) DeepCopy ¶
func (in *SparkHistoryServerConfigObservation) DeepCopy() *SparkHistoryServerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigObservation.
func (*SparkHistoryServerConfigObservation) DeepCopyInto ¶
func (in *SparkHistoryServerConfigObservation) DeepCopyInto(out *SparkHistoryServerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigParameters ¶
type SparkHistoryServerConfigParameters struct { // Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection if the form of projects/[projectId]/regions/[region]/clusters/[cluster_name]. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta2.Cluster // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"` // Reference to a Cluster in dataproc to populate dataprocCluster. // +kubebuilder:validation:Optional DataprocClusterRef *v1.Reference `json:"dataprocClusterRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate dataprocCluster. // +kubebuilder:validation:Optional DataprocClusterSelector *v1.Selector `json:"dataprocClusterSelector,omitempty" tf:"-"` }
func (*SparkHistoryServerConfigParameters) DeepCopy ¶
func (in *SparkHistoryServerConfigParameters) DeepCopy() *SparkHistoryServerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigParameters.
func (*SparkHistoryServerConfigParameters) DeepCopyInto ¶
func (in *SparkHistoryServerConfigParameters) DeepCopyInto(out *SparkHistoryServerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkInitParameters ¶
type SparkInitParameters struct { // Dataproc Metastore Service configuration for the connection. // Structure is documented below. MetastoreServiceConfig *MetastoreServiceConfigInitParameters `json:"metastoreServiceConfig,omitempty" tf:"metastore_service_config,omitempty"` // Spark History Server configuration for the connection. // Structure is documented below. SparkHistoryServerConfig *SparkHistoryServerConfigInitParameters `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"` }
func (*SparkInitParameters) DeepCopy ¶
func (in *SparkInitParameters) DeepCopy() *SparkInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkInitParameters.
func (*SparkInitParameters) DeepCopyInto ¶
func (in *SparkInitParameters) DeepCopyInto(out *SparkInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkObservation ¶
type SparkObservation struct { // Dataproc Metastore Service configuration for the connection. // Structure is documented below. MetastoreServiceConfig *MetastoreServiceConfigObservation `json:"metastoreServiceConfig,omitempty" tf:"metastore_service_config,omitempty"` // (Output) // The account ID of the service created for the purpose of this connection. ServiceAccountID *string `json:"serviceAccountId,omitempty" tf:"service_account_id,omitempty"` // Spark History Server configuration for the connection. // Structure is documented below. SparkHistoryServerConfig *SparkHistoryServerConfigObservation `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"` }
func (*SparkObservation) DeepCopy ¶
func (in *SparkObservation) DeepCopy() *SparkObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkObservation.
func (*SparkObservation) DeepCopyInto ¶
func (in *SparkObservation) DeepCopyInto(out *SparkObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkOptionsInitParameters ¶
type SparkOptionsInitParameters struct { // Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // Fully qualified name of the user-provided Spark connection object. // Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Connection // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("name",true) Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // Reference to a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionRef *v1.Reference `json:"connectionRef,omitempty" tf:"-"` // Selector for a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionSelector *v1.Selector `json:"connectionSelector,omitempty" tf:"-"` // Custom container image for the runtime environment. ContainerImage *string `json:"containerImage,omitempty" tf:"container_image,omitempty"` // Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark. JarUris []*string `json:"jarUris,omitempty" tf:"jar_uris,omitempty"` // The fully qualified name of a class in jarUris, for example, com.example.wordcount. // Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type. MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The main file/jar URI of the Spark application. // Exactly one of the definitionBody field and the mainFileUri field must be set for Python. 
// Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type. MainFileURI *string `json:"mainFileUri,omitempty" tf:"main_file_uri,omitempty"` // Configuration properties as a set of key/value pairs, which will be passed on to the Spark application. // For more information, see Apache Spark and the procedure option list. // An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark. PyFileUris []*string `json:"pyFileUris,omitempty" tf:"py_file_uris,omitempty"` // Runtime version. If not specified, the default runtime version is used. RuntimeVersion *string `json:"runtimeVersion,omitempty" tf:"runtime_version,omitempty"` }
func (*SparkOptionsInitParameters) DeepCopy ¶
func (in *SparkOptionsInitParameters) DeepCopy() *SparkOptionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkOptionsInitParameters.
func (*SparkOptionsInitParameters) DeepCopyInto ¶
func (in *SparkOptionsInitParameters) DeepCopyInto(out *SparkOptionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkOptionsObservation ¶
// SparkOptionsObservation records the observed Spark options of a BigQuery
// routine.
type SparkOptionsObservation struct {

	// Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// Fully qualified name of the user-provided Spark connection object.
	// Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
	Connection *string `json:"connection,omitempty" tf:"connection,omitempty"`

	// Custom container image for the runtime environment.
	ContainerImage *string `json:"containerImage,omitempty" tf:"container_image,omitempty"`

	// Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
	JarUris []*string `json:"jarUris,omitempty" tf:"jar_uris,omitempty"`

	// The fully qualified name of a class in jarUris, for example, com.example.wordcount.
	// Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.
	MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"`

	// The main file/jar URI of the Spark application.
	// Exactly one of the definitionBody field and the mainFileUri field must be set for Python.
	// Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type.
	MainFileURI *string `json:"mainFileUri,omitempty" tf:"main_file_uri,omitempty"`

	// Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
	// For more information, see Apache Spark and the procedure option list.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	// +mapType=granular
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`

	// Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
	PyFileUris []*string `json:"pyFileUris,omitempty" tf:"py_file_uris,omitempty"`

	// Runtime version. If not specified, the default runtime version is used.
	RuntimeVersion *string `json:"runtimeVersion,omitempty" tf:"runtime_version,omitempty"`
}
func (*SparkOptionsObservation) DeepCopy ¶
func (in *SparkOptionsObservation) DeepCopy() *SparkOptionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkOptionsObservation.
func (*SparkOptionsObservation) DeepCopyInto ¶
func (in *SparkOptionsObservation) DeepCopyInto(out *SparkOptionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkOptionsParameters ¶
type SparkOptionsParameters struct { // Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // Fully qualified name of the user-provided Spark connection object. // Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Connection // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("name",true) // +kubebuilder:validation:Optional Connection *string `json:"connection,omitempty" tf:"connection,omitempty"` // Reference to a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionRef *v1.Reference `json:"connectionRef,omitempty" tf:"-"` // Selector for a Connection in bigquery to populate connection. // +kubebuilder:validation:Optional ConnectionSelector *v1.Selector `json:"connectionSelector,omitempty" tf:"-"` // Custom container image for the runtime environment. // +kubebuilder:validation:Optional ContainerImage *string `json:"containerImage,omitempty" tf:"container_image,omitempty"` // Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark. // +kubebuilder:validation:Optional JarUris []*string `json:"jarUris,omitempty" tf:"jar_uris,omitempty"` // The fully qualified name of a class in jarUris, for example, com.example.wordcount. // Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type. 
// +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The main file/jar URI of the Spark application. // Exactly one of the definitionBody field and the mainFileUri field must be set for Python. // Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type. // +kubebuilder:validation:Optional MainFileURI *string `json:"mainFileUri,omitempty" tf:"main_file_uri,omitempty"` // Configuration properties as a set of key/value pairs, which will be passed on to the Spark application. // For more information, see Apache Spark and the procedure option list. // An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark. // +kubebuilder:validation:Optional PyFileUris []*string `json:"pyFileUris,omitempty" tf:"py_file_uris,omitempty"` // Runtime version. If not specified, the default runtime version is used. // +kubebuilder:validation:Optional RuntimeVersion *string `json:"runtimeVersion,omitempty" tf:"runtime_version,omitempty"` }
func (*SparkOptionsParameters) DeepCopy ¶
func (in *SparkOptionsParameters) DeepCopy() *SparkOptionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkOptionsParameters.
func (*SparkOptionsParameters) DeepCopyInto ¶
func (in *SparkOptionsParameters) DeepCopyInto(out *SparkOptionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkParameters ¶
type SparkParameters struct { // Dataproc Metastore Service configuration for the connection. // Structure is documented below. // +kubebuilder:validation:Optional MetastoreServiceConfig *MetastoreServiceConfigParameters `json:"metastoreServiceConfig,omitempty" tf:"metastore_service_config,omitempty"` // Spark History Server configuration for the connection. // Structure is documented below. // +kubebuilder:validation:Optional SparkHistoryServerConfig *SparkHistoryServerConfigParameters `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"` }
func (*SparkParameters) DeepCopy ¶
func (in *SparkParameters) DeepCopy() *SparkParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkParameters.
func (*SparkParameters) DeepCopyInto ¶
func (in *SparkParameters) DeepCopyInto(out *SparkParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusInitParameters ¶
type StatusInitParameters struct { }
func (*StatusInitParameters) DeepCopy ¶
func (in *StatusInitParameters) DeepCopy() *StatusInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusInitParameters.
func (*StatusInitParameters) DeepCopyInto ¶
func (in *StatusInitParameters) DeepCopyInto(out *StatusInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusObservation ¶
type StatusObservation struct { // (Output) // Final error result of the job. If present, indicates that the job has completed and was unsuccessful. // Structure is documented below. ErrorResult []ErrorResultObservation `json:"errorResult,omitempty" tf:"error_result,omitempty"` // (Output) // The first errors encountered during the running of the job. The final message // includes the number of errors that caused the process to stop. Errors here do // not necessarily mean that the job has not completed or was unsuccessful. // Structure is documented below. Errors []ErrorsObservation `json:"errors,omitempty" tf:"errors,omitempty"` // (Output) // Running state of the job. Valid states include 'PENDING', 'RUNNING', and 'DONE'. State *string `json:"state,omitempty" tf:"state,omitempty"` }
func (*StatusObservation) DeepCopy ¶
func (in *StatusObservation) DeepCopy() *StatusObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusObservation.
func (*StatusObservation) DeepCopyInto ¶
func (in *StatusObservation) DeepCopyInto(out *StatusObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusParameters ¶
type StatusParameters struct { }
func (*StatusParameters) DeepCopy ¶
func (in *StatusParameters) DeepCopy() *StatusParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusParameters.
func (*StatusParameters) DeepCopyInto ¶
func (in *StatusParameters) DeepCopyInto(out *StatusParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Table ¶
type Table struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec TableSpec `json:"spec"` Status TableStatus `json:"status,omitempty"` }
Table is the Schema for the Tables API. Creates a table resource in a dataset for Google BigQuery. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Table) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Table.
func (*Table) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Table) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Table) GetCondition ¶
func (mg *Table) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Table.
func (*Table) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Table
func (*Table) GetDeletionPolicy ¶
func (mg *Table) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Table.
func (*Table) GetInitParameters ¶
GetInitParameters of this Table
func (*Table) GetManagementPolicies ¶
func (mg *Table) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Table.
func (*Table) GetMergedParameters ¶
GetMergedParameters of this Table
func (*Table) GetObservation ¶
GetObservation of this Table
func (*Table) GetParameters ¶
GetParameters of this Table
func (*Table) GetProviderConfigReference ¶
GetProviderConfigReference of this Table.
func (*Table) GetPublishConnectionDetailsTo ¶
func (mg *Table) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Table.
func (*Table) GetTerraformResourceType ¶
GetTerraformResourceType returns Terraform resource type for this Table
func (*Table) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Table) GetWriteConnectionSecretToReference ¶
func (mg *Table) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Table.
func (*Table) LateInitialize ¶
LateInitialize this Table using its observed tfState. returns True if there are any spec changes for the resource.
func (*Table) ResolveReferences ¶
ResolveReferences of this Table.
func (*Table) SetConditions ¶
SetConditions of this Table.
func (*Table) SetDeletionPolicy ¶
func (mg *Table) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Table.
func (*Table) SetManagementPolicies ¶
func (mg *Table) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Table.
func (*Table) SetObservation ¶
SetObservation for this Table
func (*Table) SetParameters ¶
SetParameters for this Table
func (*Table) SetProviderConfigReference ¶
SetProviderConfigReference of this Table.
func (*Table) SetPublishConnectionDetailsTo ¶
func (mg *Table) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Table.
func (*Table) SetWriteConnectionSecretToReference ¶
func (mg *Table) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Table.
type TableConstraintsInitParameters ¶
type TableConstraintsInitParameters struct { // Present only if the table has a foreign key. // The foreign key is not enforced. // Structure is documented below. ForeignKeys []ForeignKeysInitParameters `json:"foreignKeys,omitempty" tf:"foreign_keys,omitempty"` // Represents the primary key constraint // on a table's columns. Present only if the table has a primary key. // The primary key is not enforced. // Structure is documented below. PrimaryKey *PrimaryKeyInitParameters `json:"primaryKey,omitempty" tf:"primary_key,omitempty"` }
func (*TableConstraintsInitParameters) DeepCopy ¶
func (in *TableConstraintsInitParameters) DeepCopy() *TableConstraintsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableConstraintsInitParameters.
func (*TableConstraintsInitParameters) DeepCopyInto ¶
func (in *TableConstraintsInitParameters) DeepCopyInto(out *TableConstraintsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableConstraintsObservation ¶
type TableConstraintsObservation struct { // Present only if the table has a foreign key. // The foreign key is not enforced. // Structure is documented below. ForeignKeys []ForeignKeysObservation `json:"foreignKeys,omitempty" tf:"foreign_keys,omitempty"` // Represents the primary key constraint // on a table's columns. Present only if the table has a primary key. // The primary key is not enforced. // Structure is documented below. PrimaryKey *PrimaryKeyObservation `json:"primaryKey,omitempty" tf:"primary_key,omitempty"` }
func (*TableConstraintsObservation) DeepCopy ¶
func (in *TableConstraintsObservation) DeepCopy() *TableConstraintsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableConstraintsObservation.
func (*TableConstraintsObservation) DeepCopyInto ¶
func (in *TableConstraintsObservation) DeepCopyInto(out *TableConstraintsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableConstraintsParameters ¶
type TableConstraintsParameters struct { // Present only if the table has a foreign key. // The foreign key is not enforced. // Structure is documented below. // +kubebuilder:validation:Optional ForeignKeys []ForeignKeysParameters `json:"foreignKeys,omitempty" tf:"foreign_keys,omitempty"` // Represents the primary key constraint // on a table's columns. Present only if the table has a primary key. // The primary key is not enforced. // Structure is documented below. // +kubebuilder:validation:Optional PrimaryKey *PrimaryKeyParameters `json:"primaryKey,omitempty" tf:"primary_key,omitempty"` }
func (*TableConstraintsParameters) DeepCopy ¶
func (in *TableConstraintsParameters) DeepCopy() *TableConstraintsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableConstraintsParameters.
func (*TableConstraintsParameters) DeepCopyInto ¶
func (in *TableConstraintsParameters) DeepCopyInto(out *TableConstraintsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBinding ¶
type TableIAMBinding struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.members) || (has(self.initProvider) && has(self.initProvider.members))",message="spec.forProvider.members is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.role) || (has(self.initProvider) && has(self.initProvider.role))",message="spec.forProvider.role is a required parameter" Spec TableIAMBindingSpec `json:"spec"` Status TableIAMBindingStatus `json:"status,omitempty"` }
TableIAMBinding is the Schema for the TableIAMBindings API. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*TableIAMBinding) DeepCopy ¶
func (in *TableIAMBinding) DeepCopy() *TableIAMBinding
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBinding.
func (*TableIAMBinding) DeepCopyInto ¶
func (in *TableIAMBinding) DeepCopyInto(out *TableIAMBinding)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TableIAMBinding) DeepCopyObject ¶
func (in *TableIAMBinding) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*TableIAMBinding) GetCondition ¶
func (mg *TableIAMBinding) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this TableIAMBinding.
func (*TableIAMBinding) GetConnectionDetailsMapping ¶
func (tr *TableIAMBinding) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this TableIAMBinding
func (*TableIAMBinding) GetDeletionPolicy ¶
func (mg *TableIAMBinding) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this TableIAMBinding.
func (*TableIAMBinding) GetID ¶
func (tr *TableIAMBinding) GetID() string
GetID returns ID of underlying Terraform resource of this TableIAMBinding
func (*TableIAMBinding) GetInitParameters ¶
func (tr *TableIAMBinding) GetInitParameters() (map[string]any, error)
GetInitParameters of this TableIAMBinding
func (*TableIAMBinding) GetManagementPolicies ¶
func (mg *TableIAMBinding) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this TableIAMBinding.
func (*TableIAMBinding) GetMergedParameters ¶
func (tr *TableIAMBinding) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this TableIAMBinding
func (*TableIAMBinding) GetObservation ¶
func (tr *TableIAMBinding) GetObservation() (map[string]any, error)
GetObservation of this TableIAMBinding
func (*TableIAMBinding) GetParameters ¶
func (tr *TableIAMBinding) GetParameters() (map[string]any, error)
GetParameters of this TableIAMBinding
func (*TableIAMBinding) GetProviderConfigReference ¶
func (mg *TableIAMBinding) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this TableIAMBinding.
func (*TableIAMBinding) GetPublishConnectionDetailsTo ¶
func (mg *TableIAMBinding) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this TableIAMBinding.
func (*TableIAMBinding) GetTerraformResourceType ¶
func (mg *TableIAMBinding) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this TableIAMBinding
func (*TableIAMBinding) GetTerraformSchemaVersion ¶
func (tr *TableIAMBinding) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*TableIAMBinding) GetWriteConnectionSecretToReference ¶
func (mg *TableIAMBinding) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this TableIAMBinding.
func (*TableIAMBinding) Hub ¶
func (tr *TableIAMBinding) Hub()
Hub marks this type as a conversion hub.
func (*TableIAMBinding) LateInitialize ¶
func (tr *TableIAMBinding) LateInitialize(attrs []byte) (bool, error)
LateInitialize this TableIAMBinding using its observed tfState. returns True if there are any spec changes for the resource.
func (*TableIAMBinding) ResolveReferences ¶
ResolveReferences of this TableIAMBinding.
func (*TableIAMBinding) SetConditions ¶
func (mg *TableIAMBinding) SetConditions(c ...xpv1.Condition)
SetConditions of this TableIAMBinding.
func (*TableIAMBinding) SetDeletionPolicy ¶
func (mg *TableIAMBinding) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this TableIAMBinding.
func (*TableIAMBinding) SetManagementPolicies ¶
func (mg *TableIAMBinding) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this TableIAMBinding.
func (*TableIAMBinding) SetObservation ¶
func (tr *TableIAMBinding) SetObservation(obs map[string]any) error
SetObservation for this TableIAMBinding
func (*TableIAMBinding) SetParameters ¶
func (tr *TableIAMBinding) SetParameters(params map[string]any) error
SetParameters for this TableIAMBinding
func (*TableIAMBinding) SetProviderConfigReference ¶
func (mg *TableIAMBinding) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this TableIAMBinding.
func (*TableIAMBinding) SetPublishConnectionDetailsTo ¶
func (mg *TableIAMBinding) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this TableIAMBinding.
func (*TableIAMBinding) SetWriteConnectionSecretToReference ¶
func (mg *TableIAMBinding) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this TableIAMBinding.
type TableIAMBindingConditionInitParameters ¶
type TableIAMBindingConditionInitParameters struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*TableIAMBindingConditionInitParameters) DeepCopy ¶
func (in *TableIAMBindingConditionInitParameters) DeepCopy() *TableIAMBindingConditionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingConditionInitParameters.
func (*TableIAMBindingConditionInitParameters) DeepCopyInto ¶
func (in *TableIAMBindingConditionInitParameters) DeepCopyInto(out *TableIAMBindingConditionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingConditionObservation ¶
type TableIAMBindingConditionObservation struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*TableIAMBindingConditionObservation) DeepCopy ¶
func (in *TableIAMBindingConditionObservation) DeepCopy() *TableIAMBindingConditionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingConditionObservation.
func (*TableIAMBindingConditionObservation) DeepCopyInto ¶
func (in *TableIAMBindingConditionObservation) DeepCopyInto(out *TableIAMBindingConditionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingConditionParameters ¶
type TableIAMBindingConditionParameters struct { // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // +kubebuilder:validation:Optional Expression *string `json:"expression" tf:"expression,omitempty"` // +kubebuilder:validation:Optional Title *string `json:"title" tf:"title,omitempty"` }
func (*TableIAMBindingConditionParameters) DeepCopy ¶
func (in *TableIAMBindingConditionParameters) DeepCopy() *TableIAMBindingConditionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingConditionParameters.
func (*TableIAMBindingConditionParameters) DeepCopyInto ¶
func (in *TableIAMBindingConditionParameters) DeepCopyInto(out *TableIAMBindingConditionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingInitParameters ¶
type TableIAMBindingInitParameters struct { Condition *TableIAMBindingConditionInitParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // +listType=set Members []*string `json:"members,omitempty" tf:"members,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*TableIAMBindingInitParameters) DeepCopy ¶
func (in *TableIAMBindingInitParameters) DeepCopy() *TableIAMBindingInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingInitParameters.
func (*TableIAMBindingInitParameters) DeepCopyInto ¶
func (in *TableIAMBindingInitParameters) DeepCopyInto(out *TableIAMBindingInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingList ¶
type TableIAMBindingList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []TableIAMBinding `json:"items"` }
TableIAMBindingList contains a list of TableIAMBindings
func (*TableIAMBindingList) DeepCopy ¶
func (in *TableIAMBindingList) DeepCopy() *TableIAMBindingList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingList.
func (*TableIAMBindingList) DeepCopyInto ¶
func (in *TableIAMBindingList) DeepCopyInto(out *TableIAMBindingList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TableIAMBindingList) DeepCopyObject ¶
func (in *TableIAMBindingList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*TableIAMBindingList) GetItems ¶
func (l *TableIAMBindingList) GetItems() []resource.Managed
GetItems of this TableIAMBindingList.
type TableIAMBindingObservation ¶
type TableIAMBindingObservation struct { Condition *TableIAMBindingConditionObservation `json:"condition,omitempty" tf:"condition,omitempty"` DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` // +listType=set Members []*string `json:"members,omitempty" tf:"members,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*TableIAMBindingObservation) DeepCopy ¶
func (in *TableIAMBindingObservation) DeepCopy() *TableIAMBindingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingObservation.
func (*TableIAMBindingObservation) DeepCopyInto ¶
func (in *TableIAMBindingObservation) DeepCopyInto(out *TableIAMBindingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingParameters ¶
type TableIAMBindingParameters struct { // +kubebuilder:validation:Optional Condition *TableIAMBindingConditionParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // +kubebuilder:validation:Optional // +listType=set Members []*string `json:"members,omitempty" tf:"members,omitempty"` // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Optional Role *string `json:"role,omitempty" tf:"role,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +kubebuilder:validation:Optional TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*TableIAMBindingParameters) DeepCopy ¶
func (in *TableIAMBindingParameters) DeepCopy() *TableIAMBindingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingParameters.
func (*TableIAMBindingParameters) DeepCopyInto ¶
func (in *TableIAMBindingParameters) DeepCopyInto(out *TableIAMBindingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingSpec ¶
type TableIAMBindingSpec struct { v1.ResourceSpec `json:",inline"` ForProvider TableIAMBindingParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider TableIAMBindingInitParameters `json:"initProvider,omitempty"` }
TableIAMBindingSpec defines the desired state of TableIAMBinding
func (*TableIAMBindingSpec) DeepCopy ¶
func (in *TableIAMBindingSpec) DeepCopy() *TableIAMBindingSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingSpec.
func (*TableIAMBindingSpec) DeepCopyInto ¶
func (in *TableIAMBindingSpec) DeepCopyInto(out *TableIAMBindingSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMBindingStatus ¶
type TableIAMBindingStatus struct { v1.ResourceStatus `json:",inline"` AtProvider TableIAMBindingObservation `json:"atProvider,omitempty"` }
TableIAMBindingStatus defines the observed state of TableIAMBinding.
func (*TableIAMBindingStatus) DeepCopy ¶
func (in *TableIAMBindingStatus) DeepCopy() *TableIAMBindingStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMBindingStatus.
func (*TableIAMBindingStatus) DeepCopyInto ¶
func (in *TableIAMBindingStatus) DeepCopyInto(out *TableIAMBindingStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMember ¶
type TableIAMMember struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.role) || (has(self.initProvider) && has(self.initProvider.role))",message="spec.forProvider.role is a required parameter" Spec TableIAMMemberSpec `json:"spec"` Status TableIAMMemberStatus `json:"status,omitempty"` }
TableIAMMember is the Schema for the TableIAMMembers API. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*TableIAMMember) DeepCopy ¶
func (in *TableIAMMember) DeepCopy() *TableIAMMember
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMember.
func (*TableIAMMember) DeepCopyInto ¶
func (in *TableIAMMember) DeepCopyInto(out *TableIAMMember)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TableIAMMember) DeepCopyObject ¶
func (in *TableIAMMember) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*TableIAMMember) GetCondition ¶
func (mg *TableIAMMember) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this TableIAMMember.
func (*TableIAMMember) GetConnectionDetailsMapping ¶
func (tr *TableIAMMember) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this TableIAMMember
func (*TableIAMMember) GetDeletionPolicy ¶
func (mg *TableIAMMember) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this TableIAMMember.
func (*TableIAMMember) GetID ¶
func (tr *TableIAMMember) GetID() string
GetID returns ID of underlying Terraform resource of this TableIAMMember
func (*TableIAMMember) GetInitParameters ¶
func (tr *TableIAMMember) GetInitParameters() (map[string]any, error)
GetInitParameters of this TableIAMMember
func (*TableIAMMember) GetManagementPolicies ¶
func (mg *TableIAMMember) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this TableIAMMember.
func (*TableIAMMember) GetMergedParameters ¶
func (tr *TableIAMMember) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this TableIAMMember
func (*TableIAMMember) GetObservation ¶
func (tr *TableIAMMember) GetObservation() (map[string]any, error)
GetObservation of this TableIAMMember
func (*TableIAMMember) GetParameters ¶
func (tr *TableIAMMember) GetParameters() (map[string]any, error)
GetParameters of this TableIAMMember
func (*TableIAMMember) GetProviderConfigReference ¶
func (mg *TableIAMMember) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this TableIAMMember.
func (*TableIAMMember) GetPublishConnectionDetailsTo ¶
func (mg *TableIAMMember) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this TableIAMMember.
func (*TableIAMMember) GetTerraformResourceType ¶
func (mg *TableIAMMember) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this TableIAMMember
func (*TableIAMMember) GetTerraformSchemaVersion ¶
func (tr *TableIAMMember) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*TableIAMMember) GetWriteConnectionSecretToReference ¶
func (mg *TableIAMMember) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this TableIAMMember.
func (*TableIAMMember) Hub ¶
func (tr *TableIAMMember) Hub()
Hub marks this type as a conversion hub.
func (*TableIAMMember) LateInitialize ¶
func (tr *TableIAMMember) LateInitialize(attrs []byte) (bool, error)
LateInitialize this TableIAMMember using its observed tfState. Returns true if there are any spec changes for the resource.
func (*TableIAMMember) ResolveReferences ¶
func (mg *TableIAMMember) ResolveReferences(ctx context.Context, c client.Reader) error
ResolveReferences of this TableIAMMember.
func (*TableIAMMember) SetConditions ¶
func (mg *TableIAMMember) SetConditions(c ...xpv1.Condition)
SetConditions of this TableIAMMember.
func (*TableIAMMember) SetDeletionPolicy ¶
func (mg *TableIAMMember) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this TableIAMMember.
func (*TableIAMMember) SetManagementPolicies ¶
func (mg *TableIAMMember) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this TableIAMMember.
func (*TableIAMMember) SetObservation ¶
func (tr *TableIAMMember) SetObservation(obs map[string]any) error
SetObservation for this TableIAMMember
func (*TableIAMMember) SetParameters ¶
func (tr *TableIAMMember) SetParameters(params map[string]any) error
SetParameters for this TableIAMMember
func (*TableIAMMember) SetProviderConfigReference ¶
func (mg *TableIAMMember) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this TableIAMMember.
func (*TableIAMMember) SetPublishConnectionDetailsTo ¶
func (mg *TableIAMMember) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this TableIAMMember.
func (*TableIAMMember) SetWriteConnectionSecretToReference ¶
func (mg *TableIAMMember) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this TableIAMMember.
type TableIAMMemberConditionInitParameters ¶
type TableIAMMemberConditionInitParameters struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*TableIAMMemberConditionInitParameters) DeepCopy ¶
func (in *TableIAMMemberConditionInitParameters) DeepCopy() *TableIAMMemberConditionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberConditionInitParameters.
func (*TableIAMMemberConditionInitParameters) DeepCopyInto ¶
func (in *TableIAMMemberConditionInitParameters) DeepCopyInto(out *TableIAMMemberConditionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberConditionObservation ¶
type TableIAMMemberConditionObservation struct { Description *string `json:"description,omitempty" tf:"description,omitempty"` Expression *string `json:"expression,omitempty" tf:"expression,omitempty"` Title *string `json:"title,omitempty" tf:"title,omitempty"` }
func (*TableIAMMemberConditionObservation) DeepCopy ¶
func (in *TableIAMMemberConditionObservation) DeepCopy() *TableIAMMemberConditionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberConditionObservation.
func (*TableIAMMemberConditionObservation) DeepCopyInto ¶
func (in *TableIAMMemberConditionObservation) DeepCopyInto(out *TableIAMMemberConditionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberConditionParameters ¶
type TableIAMMemberConditionParameters struct { // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // +kubebuilder:validation:Optional Expression *string `json:"expression" tf:"expression,omitempty"` // +kubebuilder:validation:Optional Title *string `json:"title" tf:"title,omitempty"` }
func (*TableIAMMemberConditionParameters) DeepCopy ¶
func (in *TableIAMMemberConditionParameters) DeepCopy() *TableIAMMemberConditionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberConditionParameters.
func (*TableIAMMemberConditionParameters) DeepCopyInto ¶
func (in *TableIAMMemberConditionParameters) DeepCopyInto(out *TableIAMMemberConditionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberInitParameters ¶
type TableIAMMemberInitParameters struct { Condition *TableIAMMemberConditionInitParameters `json:"condition,omitempty" tf:"condition,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` }
func (*TableIAMMemberInitParameters) DeepCopy ¶
func (in *TableIAMMemberInitParameters) DeepCopy() *TableIAMMemberInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberInitParameters.
func (*TableIAMMemberInitParameters) DeepCopyInto ¶
func (in *TableIAMMemberInitParameters) DeepCopyInto(out *TableIAMMemberInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberList ¶
type TableIAMMemberList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []TableIAMMember `json:"items"` }
TableIAMMemberList contains a list of TableIAMMembers
func (*TableIAMMemberList) DeepCopy ¶
func (in *TableIAMMemberList) DeepCopy() *TableIAMMemberList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberList.
func (*TableIAMMemberList) DeepCopyInto ¶
func (in *TableIAMMemberList) DeepCopyInto(out *TableIAMMemberList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TableIAMMemberList) DeepCopyObject ¶
func (in *TableIAMMemberList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*TableIAMMemberList) GetItems ¶
func (l *TableIAMMemberList) GetItems() []resource.Managed
GetItems of this TableIAMMemberList.
type TableIAMMemberObservation ¶
type TableIAMMemberObservation struct { Condition *TableIAMMemberConditionObservation `json:"condition,omitempty" tf:"condition,omitempty"` DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` Member *string `json:"member,omitempty" tf:"member,omitempty"` Project *string `json:"project,omitempty" tf:"project,omitempty"` Role *string `json:"role,omitempty" tf:"role,omitempty"` TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` }
func (*TableIAMMemberObservation) DeepCopy ¶
func (in *TableIAMMemberObservation) DeepCopy() *TableIAMMemberObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberObservation.
func (*TableIAMMemberObservation) DeepCopyInto ¶
func (in *TableIAMMemberObservation) DeepCopyInto(out *TableIAMMemberObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberParameters ¶
type TableIAMMemberParameters struct { // +kubebuilder:validation:Optional Condition *TableIAMMemberConditionParameters `json:"condition,omitempty" tf:"condition,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // +kubebuilder:validation:Required Member *string `json:"member" tf:"member,omitempty"` // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Optional Role *string `json:"role,omitempty" tf:"role,omitempty"` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Table // +kubebuilder:validation:Optional TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"` // Reference to a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDRef *v1.Reference `json:"tableIdRef,omitempty" tf:"-"` // Selector for a Table in bigquery to populate tableId. // +kubebuilder:validation:Optional TableIDSelector *v1.Selector `json:"tableIdSelector,omitempty" tf:"-"` }
func (*TableIAMMemberParameters) DeepCopy ¶
func (in *TableIAMMemberParameters) DeepCopy() *TableIAMMemberParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberParameters.
func (*TableIAMMemberParameters) DeepCopyInto ¶
func (in *TableIAMMemberParameters) DeepCopyInto(out *TableIAMMemberParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberSpec ¶
type TableIAMMemberSpec struct { v1.ResourceSpec `json:",inline"` ForProvider TableIAMMemberParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider TableIAMMemberInitParameters `json:"initProvider,omitempty"` }
TableIAMMemberSpec defines the desired state of TableIAMMember
func (*TableIAMMemberSpec) DeepCopy ¶
func (in *TableIAMMemberSpec) DeepCopy() *TableIAMMemberSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberSpec.
func (*TableIAMMemberSpec) DeepCopyInto ¶
func (in *TableIAMMemberSpec) DeepCopyInto(out *TableIAMMemberSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableIAMMemberStatus ¶
type TableIAMMemberStatus struct { v1.ResourceStatus `json:",inline"` AtProvider TableIAMMemberObservation `json:"atProvider,omitempty"` }
TableIAMMemberStatus defines the observed state of TableIAMMember.
func (*TableIAMMemberStatus) DeepCopy ¶
func (in *TableIAMMemberStatus) DeepCopy() *TableIAMMemberStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableIAMMemberStatus.
func (*TableIAMMemberStatus) DeepCopyInto ¶
func (in *TableIAMMemberStatus) DeepCopyInto(out *TableIAMMemberStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableInitParameters ¶
type TableInitParameters struct { // If set to true, it allows table // deletion when there are still resource tags attached. The default value is // false. AllowResourceTagsOnDeletion *bool `json:"allowResourceTagsOnDeletion,omitempty" tf:"allow_resource_tags_on_deletion,omitempty"` // Specifies column names to use for data clustering. // Up to four top-level columns are allowed, and should be specified in // descending priority order. Clustering []*string `json:"clustering,omitempty" tf:"clustering,omitempty"` // When the field is set to false, deleting the table is allowed.. DeletionProtection *bool `json:"deletionProtection,omitempty" tf:"deletion_protection,omitempty"` // The field description. Description *string `json:"description,omitempty" tf:"description,omitempty"` // Specifies how the table should be encrypted. // If left blank, the table will be encrypted with a Google-managed key; that process // is transparent to the user. Structure is documented below. EncryptionConfiguration *EncryptionConfigurationInitParameters `json:"encryptionConfiguration,omitempty" tf:"encryption_configuration,omitempty"` // The time when this table expires, in // milliseconds since the epoch. If not present, the table will persist // indefinitely. Expired tables will be deleted and their storage // reclaimed. ExpirationTime *float64 `json:"expirationTime,omitempty" tf:"expiration_time,omitempty"` // Describes the data format, // location, and other properties of a table stored outside of BigQuery. // By defining these properties, the data source can then be queried as // if it were a standard BigQuery table. Structure is documented below. ExternalDataConfiguration *ExternalDataConfigurationInitParameters `json:"externalDataConfiguration,omitempty" tf:"external_data_configuration,omitempty"` // A descriptive name for the table. FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // A mapping of labels to assign to the resource. 
// +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // If specified, configures this table as a materialized view. // Structure is documented below. MaterializedView *MaterializedViewInitParameters `json:"materializedView,omitempty" tf:"materialized_view,omitempty"` // : The maximum staleness of data that could be // returned when the table (or stale MV) is queried. Staleness encoded as a // string encoding of SQL IntervalValue // type. MaxStaleness *string `json:"maxStaleness,omitempty" tf:"max_staleness,omitempty"` // If specified, configures range-based // partitioning for this table. Structure is documented below. RangePartitioning *RangePartitioningInitParameters `json:"rangePartitioning,omitempty" tf:"range_partitioning,omitempty"` // If set to true, queries over this table // require a partition filter that can be used for partition elimination to be // specified. RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"` // The tags attached to this table. Tag keys are // globally unique. Tag key is expected to be in the namespaced format, for // example "123456789012/environment" where 123456789012 is the ID of the // parent organization or project resource for this tag key. Tag value is // expected to be the short name, for example "Production". // +mapType=granular ResourceTags map[string]*string `json:"resourceTags,omitempty" tf:"resource_tags,omitempty"` // A JSON schema for the table. Schema *string `json:"schema,omitempty" tf:"schema,omitempty"` // Defines the primary key and foreign keys. // Structure is documented below. TableConstraints *TableConstraintsInitParameters `json:"tableConstraints,omitempty" tf:"table_constraints,omitempty"` // Replication info of a table created // using "AS REPLICA" DDL like: // CREATE MATERIALIZED VIEW mv1 AS REPLICA OF src_mv. // Structure is documented below. 
TableReplicationInfo *TableReplicationInfoInitParameters `json:"tableReplicationInfo,omitempty" tf:"table_replication_info,omitempty"` // If specified, configures time-based // partitioning for this table. Structure is documented below. TimePartitioning *TableTimePartitioningInitParameters `json:"timePartitioning,omitempty" tf:"time_partitioning,omitempty"` // If specified, configures this table as a view. // Structure is documented below. View *TableViewInitParameters `json:"view,omitempty" tf:"view,omitempty"` }
func (*TableInitParameters) DeepCopy ¶
func (in *TableInitParameters) DeepCopy() *TableInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableInitParameters.
func (*TableInitParameters) DeepCopyInto ¶
func (in *TableInitParameters) DeepCopyInto(out *TableInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableList ¶
type TableList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Table `json:"items"` }
TableList contains a list of Tables
func (*TableList) DeepCopy ¶
func (in *TableList) DeepCopy() *TableList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableList.
func (*TableList) DeepCopyInto ¶
func (in *TableList) DeepCopyInto(out *TableList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*TableList) DeepCopyObject ¶
func (in *TableList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type TableObservation ¶
type TableObservation struct { // If set to true, it allows table // deletion when there are still resource tags attached. The default value is // false. AllowResourceTagsOnDeletion *bool `json:"allowResourceTagsOnDeletion,omitempty" tf:"allow_resource_tags_on_deletion,omitempty"` // Specifies column names to use for data clustering. // Up to four top-level columns are allowed, and should be specified in // descending priority order. Clustering []*string `json:"clustering,omitempty" tf:"clustering,omitempty"` // The time when this table was created, in milliseconds since the epoch. CreationTime *float64 `json:"creationTime,omitempty" tf:"creation_time,omitempty"` // The dataset ID to create the table in. // Changing this forces a new resource to be created. DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // When the field is set to false, deleting the table is allowed.. DeletionProtection *bool `json:"deletionProtection,omitempty" tf:"deletion_protection,omitempty"` // The field description. Description *string `json:"description,omitempty" tf:"description,omitempty"` // +mapType=granular EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"` // Specifies how the table should be encrypted. // If left blank, the table will be encrypted with a Google-managed key; that process // is transparent to the user. Structure is documented below. EncryptionConfiguration *EncryptionConfigurationObservation `json:"encryptionConfiguration,omitempty" tf:"encryption_configuration,omitempty"` // A hash of the resource. Etag *string `json:"etag,omitempty" tf:"etag,omitempty"` // The time when this table expires, in // milliseconds since the epoch. If not present, the table will persist // indefinitely. Expired tables will be deleted and their storage // reclaimed. 
ExpirationTime *float64 `json:"expirationTime,omitempty" tf:"expiration_time,omitempty"` // Describes the data format, // location, and other properties of a table stored outside of BigQuery. // By defining these properties, the data source can then be queried as // if it were a standard BigQuery table. Structure is documented below. ExternalDataConfiguration *ExternalDataConfigurationObservation `json:"externalDataConfiguration,omitempty" tf:"external_data_configuration,omitempty"` // A descriptive name for the table. FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // an identifier for the resource with format projects/{{project}}/datasets/{{dataset}}/tables/{{name}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // A mapping of labels to assign to the resource. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The time when this table was last modified, in milliseconds since the epoch. LastModifiedTime *float64 `json:"lastModifiedTime,omitempty" tf:"last_modified_time,omitempty"` // The geographic location where the table resides. This value is inherited from the dataset. Location *string `json:"location,omitempty" tf:"location,omitempty"` // If specified, configures this table as a materialized view. // Structure is documented below. MaterializedView *MaterializedViewObservation `json:"materializedView,omitempty" tf:"materialized_view,omitempty"` // : The maximum staleness of data that could be // returned when the table (or stale MV) is queried. Staleness encoded as a // string encoding of SQL IntervalValue // type. MaxStaleness *string `json:"maxStaleness,omitempty" tf:"max_staleness,omitempty"` // The size of this table in bytes, excluding any data in the streaming buffer. NumBytes *float64 `json:"numBytes,omitempty" tf:"num_bytes,omitempty"` // The number of bytes in the table that are considered "long-term storage". 
NumLongTermBytes *float64 `json:"numLongTermBytes,omitempty" tf:"num_long_term_bytes,omitempty"` // The number of rows of data in this table, excluding any data in the streaming buffer. NumRows *float64 `json:"numRows,omitempty" tf:"num_rows,omitempty"` // The ID of the project in which the resource belongs. If it // is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // If specified, configures range-based // partitioning for this table. Structure is documented below. RangePartitioning *RangePartitioningObservation `json:"rangePartitioning,omitempty" tf:"range_partitioning,omitempty"` // If set to true, queries over this table // require a partition filter that can be used for partition elimination to be // specified. RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"` // The tags attached to this table. Tag keys are // globally unique. Tag key is expected to be in the namespaced format, for // example "123456789012/environment" where 123456789012 is the ID of the // parent organization or project resource for this tag key. Tag value is // expected to be the short name, for example "Production". // +mapType=granular ResourceTags map[string]*string `json:"resourceTags,omitempty" tf:"resource_tags,omitempty"` // A JSON schema for the table. Schema *string `json:"schema,omitempty" tf:"schema,omitempty"` // The URI of the created resource. SelfLink *string `json:"selfLink,omitempty" tf:"self_link,omitempty"` // Defines the primary key and foreign keys. // Structure is documented below. TableConstraints *TableConstraintsObservation `json:"tableConstraints,omitempty" tf:"table_constraints,omitempty"` // Replication info of a table created // using "AS REPLICA" DDL like: // CREATE MATERIALIZED VIEW mv1 AS REPLICA OF src_mv. // Structure is documented below. 
TableReplicationInfo *TableReplicationInfoObservation `json:"tableReplicationInfo,omitempty" tf:"table_replication_info,omitempty"` // The combination of labels configured directly on the resource and default labels configured on the provider. // +mapType=granular TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"` // If specified, configures time-based // partitioning for this table. Structure is documented below. TimePartitioning *TableTimePartitioningObservation `json:"timePartitioning,omitempty" tf:"time_partitioning,omitempty"` // Describes the table type. Type *string `json:"type,omitempty" tf:"type,omitempty"` // If specified, configures this table as a view. // Structure is documented below. View *TableViewObservation `json:"view,omitempty" tf:"view,omitempty"` }
func (*TableObservation) DeepCopy ¶
func (in *TableObservation) DeepCopy() *TableObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableObservation.
func (*TableObservation) DeepCopyInto ¶
func (in *TableObservation) DeepCopyInto(out *TableObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableParameters ¶
type TableParameters struct { // If set to true, it allows table // deletion when there are still resource tags attached. The default value is // false. // +kubebuilder:validation:Optional AllowResourceTagsOnDeletion *bool `json:"allowResourceTagsOnDeletion,omitempty" tf:"allow_resource_tags_on_deletion,omitempty"` // Specifies column names to use for data clustering. // Up to four top-level columns are allowed, and should be specified in // descending priority order. // +kubebuilder:validation:Optional Clustering []*string `json:"clustering,omitempty" tf:"clustering,omitempty"` // The dataset ID to create the table in. // Changing this forces a new resource to be created. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/bigquery/v1beta2.Dataset // +kubebuilder:validation:Optional DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"` // Reference to a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDRef *v1.Reference `json:"datasetIdRef,omitempty" tf:"-"` // Selector for a Dataset in bigquery to populate datasetId. // +kubebuilder:validation:Optional DatasetIDSelector *v1.Selector `json:"datasetIdSelector,omitempty" tf:"-"` // When the field is set to false, deleting the table is allowed.. // +kubebuilder:validation:Optional DeletionProtection *bool `json:"deletionProtection,omitempty" tf:"deletion_protection,omitempty"` // The field description. // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // Specifies how the table should be encrypted. // If left blank, the table will be encrypted with a Google-managed key; that process // is transparent to the user. Structure is documented below. 
// +kubebuilder:validation:Optional EncryptionConfiguration *EncryptionConfigurationParameters `json:"encryptionConfiguration,omitempty" tf:"encryption_configuration,omitempty"` // The time when this table expires, in // milliseconds since the epoch. If not present, the table will persist // indefinitely. Expired tables will be deleted and their storage // reclaimed. // +kubebuilder:validation:Optional ExpirationTime *float64 `json:"expirationTime,omitempty" tf:"expiration_time,omitempty"` // Describes the data format, // location, and other properties of a table stored outside of BigQuery. // By defining these properties, the data source can then be queried as // if it were a standard BigQuery table. Structure is documented below. // +kubebuilder:validation:Optional ExternalDataConfiguration *ExternalDataConfigurationParameters `json:"externalDataConfiguration,omitempty" tf:"external_data_configuration,omitempty"` // A descriptive name for the table. // +kubebuilder:validation:Optional FriendlyName *string `json:"friendlyName,omitempty" tf:"friendly_name,omitempty"` // A mapping of labels to assign to the resource. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // If specified, configures this table as a materialized view. // Structure is documented below. // +kubebuilder:validation:Optional MaterializedView *MaterializedViewParameters `json:"materializedView,omitempty" tf:"materialized_view,omitempty"` // : The maximum staleness of data that could be // returned when the table (or stale MV) is queried. Staleness encoded as a // string encoding of SQL IntervalValue // type. // +kubebuilder:validation:Optional MaxStaleness *string `json:"maxStaleness,omitempty" tf:"max_staleness,omitempty"` // The ID of the project in which the resource belongs. If it // is not provided, the provider project is used. 
// +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // If specified, configures range-based // partitioning for this table. Structure is documented below. // +kubebuilder:validation:Optional RangePartitioning *RangePartitioningParameters `json:"rangePartitioning,omitempty" tf:"range_partitioning,omitempty"` // If set to true, queries over this table // require a partition filter that can be used for partition elimination to be // specified. // +kubebuilder:validation:Optional RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"` // The tags attached to this table. Tag keys are // globally unique. Tag key is expected to be in the namespaced format, for // example "123456789012/environment" where 123456789012 is the ID of the // parent organization or project resource for this tag key. Tag value is // expected to be the short name, for example "Production". // +kubebuilder:validation:Optional // +mapType=granular ResourceTags map[string]*string `json:"resourceTags,omitempty" tf:"resource_tags,omitempty"` // A JSON schema for the table. // +kubebuilder:validation:Optional Schema *string `json:"schema,omitempty" tf:"schema,omitempty"` // Defines the primary key and foreign keys. // Structure is documented below. // +kubebuilder:validation:Optional TableConstraints *TableConstraintsParameters `json:"tableConstraints,omitempty" tf:"table_constraints,omitempty"` // Replication info of a table created // using "AS REPLICA" DDL like: // CREATE MATERIALIZED VIEW mv1 AS REPLICA OF src_mv. // Structure is documented below. // +kubebuilder:validation:Optional TableReplicationInfo *TableReplicationInfoParameters `json:"tableReplicationInfo,omitempty" tf:"table_replication_info,omitempty"` // If specified, configures time-based // partitioning for this table. Structure is documented below. 
// +kubebuilder:validation:Optional TimePartitioning *TableTimePartitioningParameters `json:"timePartitioning,omitempty" tf:"time_partitioning,omitempty"` // If specified, configures this table as a view. // Structure is documented below. // +kubebuilder:validation:Optional View *TableViewParameters `json:"view,omitempty" tf:"view,omitempty"` }
func (*TableParameters) DeepCopy ¶
func (in *TableParameters) DeepCopy() *TableParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableParameters.
func (*TableParameters) DeepCopyInto ¶
func (in *TableParameters) DeepCopyInto(out *TableParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableReplicationInfoInitParameters ¶
type TableReplicationInfoInitParameters struct { // The interval at which the source // materialized view is polled for updates. The default is 300000. ReplicationIntervalMs *float64 `json:"replicationIntervalMs,omitempty" tf:"replication_interval_ms,omitempty"` // The ID of the source dataset. SourceDatasetID *string `json:"sourceDatasetId,omitempty" tf:"source_dataset_id,omitempty"` // The ID of the source project. SourceProjectID *string `json:"sourceProjectId,omitempty" tf:"source_project_id,omitempty"` // The ID of the source materialized view. SourceTableID *string `json:"sourceTableId,omitempty" tf:"source_table_id,omitempty"` }
func (*TableReplicationInfoInitParameters) DeepCopy ¶
func (in *TableReplicationInfoInitParameters) DeepCopy() *TableReplicationInfoInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableReplicationInfoInitParameters.
func (*TableReplicationInfoInitParameters) DeepCopyInto ¶
func (in *TableReplicationInfoInitParameters) DeepCopyInto(out *TableReplicationInfoInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableReplicationInfoObservation ¶
type TableReplicationInfoObservation struct {

	// The interval at which the source
	// materialized view is polled for updates. The default is 300000.
	ReplicationIntervalMs *float64 `json:"replicationIntervalMs,omitempty" tf:"replication_interval_ms,omitempty"`

	// The ID of the source dataset.
	SourceDatasetID *string `json:"sourceDatasetId,omitempty" tf:"source_dataset_id,omitempty"`

	// The ID of the source project.
	SourceProjectID *string `json:"sourceProjectId,omitempty" tf:"source_project_id,omitempty"`

	// The ID of the source materialized view.
	SourceTableID *string `json:"sourceTableId,omitempty" tf:"source_table_id,omitempty"`
}
func (*TableReplicationInfoObservation) DeepCopy ¶
func (in *TableReplicationInfoObservation) DeepCopy() *TableReplicationInfoObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableReplicationInfoObservation.
func (*TableReplicationInfoObservation) DeepCopyInto ¶
func (in *TableReplicationInfoObservation) DeepCopyInto(out *TableReplicationInfoObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableReplicationInfoParameters ¶
type TableReplicationInfoParameters struct {

	// The interval at which the source
	// materialized view is polled for updates. The default is 300000.
	// +kubebuilder:validation:Optional
	ReplicationIntervalMs *float64 `json:"replicationIntervalMs,omitempty" tf:"replication_interval_ms,omitempty"`

	// The ID of the source dataset.
	// +kubebuilder:validation:Optional
	SourceDatasetID *string `json:"sourceDatasetId" tf:"source_dataset_id,omitempty"`

	// The ID of the source project.
	// +kubebuilder:validation:Optional
	SourceProjectID *string `json:"sourceProjectId" tf:"source_project_id,omitempty"`

	// The ID of the source materialized view.
	// +kubebuilder:validation:Optional
	SourceTableID *string `json:"sourceTableId" tf:"source_table_id,omitempty"`
}
func (*TableReplicationInfoParameters) DeepCopy ¶
func (in *TableReplicationInfoParameters) DeepCopy() *TableReplicationInfoParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableReplicationInfoParameters.
func (*TableReplicationInfoParameters) DeepCopyInto ¶
func (in *TableReplicationInfoParameters) DeepCopyInto(out *TableReplicationInfoParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableSpec ¶
type TableSpec struct {
	v1.ResourceSpec `json:",inline"`
	ForProvider     TableParameters `json:"forProvider"`
	// THIS IS A BETA FIELD. It will be honored
	// unless the Management Policies feature flag is disabled.
	// InitProvider holds the same fields as ForProvider, with the exception
	// of Identifier and other resource reference fields. The fields that are
	// in InitProvider are merged into ForProvider when the resource is created.
	// The same fields are also added to the terraform ignore_changes hook, to
	// avoid updating them after creation. This is useful for fields that are
	// required on creation, but we do not desire to update them after creation,
	// for example because an external controller is managing them, like an
	// autoscaler.
	InitProvider TableInitParameters `json:"initProvider,omitempty"`
}
TableSpec defines the desired state of Table
func (*TableSpec) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSpec.
func (*TableSpec) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableStatus ¶
type TableStatus struct {
	v1.ResourceStatus `json:",inline"`
	AtProvider        TableObservation `json:"atProvider,omitempty"`
}
TableStatus defines the observed state of Table.
func (*TableStatus) DeepCopy ¶
func (in *TableStatus) DeepCopy() *TableStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableStatus.
func (*TableStatus) DeepCopyInto ¶
func (in *TableStatus) DeepCopyInto(out *TableStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableTimePartitioningInitParameters ¶
type TableTimePartitioningInitParameters struct {

	// Number of milliseconds for which to keep the
	// storage for a partition.
	ExpirationMs *float64 `json:"expirationMs,omitempty" tf:"expiration_ms,omitempty"`

	// The field used to determine how to create a time-based
	// partition. If time-based partitioning is enabled without this value, the
	// table is partitioned based on the load time.
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// If set to true, queries over this table
	// require a partition filter that can be used for partition elimination to be
	// specified. require_partition_filter is deprecated and will be removed in
	// a future major release. Use the top level field with the same name instead.
	RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"`

	// The supported types are DAY, HOUR, MONTH, and YEAR,
	// which will generate one partition per day, hour, month, and year, respectively.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}
func (*TableTimePartitioningInitParameters) DeepCopy ¶
func (in *TableTimePartitioningInitParameters) DeepCopy() *TableTimePartitioningInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableTimePartitioningInitParameters.
func (*TableTimePartitioningInitParameters) DeepCopyInto ¶
func (in *TableTimePartitioningInitParameters) DeepCopyInto(out *TableTimePartitioningInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableTimePartitioningObservation ¶
type TableTimePartitioningObservation struct {

	// Number of milliseconds for which to keep the
	// storage for a partition.
	ExpirationMs *float64 `json:"expirationMs,omitempty" tf:"expiration_ms,omitempty"`

	// The field used to determine how to create a time-based
	// partition. If time-based partitioning is enabled without this value, the
	// table is partitioned based on the load time.
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// If set to true, queries over this table
	// require a partition filter that can be used for partition elimination to be
	// specified. require_partition_filter is deprecated and will be removed in
	// a future major release. Use the top level field with the same name instead.
	RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"`

	// The supported types are DAY, HOUR, MONTH, and YEAR,
	// which will generate one partition per day, hour, month, and year, respectively.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}
func (*TableTimePartitioningObservation) DeepCopy ¶
func (in *TableTimePartitioningObservation) DeepCopy() *TableTimePartitioningObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableTimePartitioningObservation.
func (*TableTimePartitioningObservation) DeepCopyInto ¶
func (in *TableTimePartitioningObservation) DeepCopyInto(out *TableTimePartitioningObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableTimePartitioningParameters ¶
type TableTimePartitioningParameters struct {

	// Number of milliseconds for which to keep the
	// storage for a partition.
	// +kubebuilder:validation:Optional
	ExpirationMs *float64 `json:"expirationMs,omitempty" tf:"expiration_ms,omitempty"`

	// The field used to determine how to create a time-based
	// partition. If time-based partitioning is enabled without this value, the
	// table is partitioned based on the load time.
	// +kubebuilder:validation:Optional
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// If set to true, queries over this table
	// require a partition filter that can be used for partition elimination to be
	// specified. require_partition_filter is deprecated and will be removed in
	// a future major release. Use the top level field with the same name instead.
	// +kubebuilder:validation:Optional
	RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty" tf:"require_partition_filter,omitempty"`

	// The supported types are DAY, HOUR, MONTH, and YEAR,
	// which will generate one partition per day, hour, month, and year, respectively.
	// +kubebuilder:validation:Optional
	Type *string `json:"type" tf:"type,omitempty"`
}
func (*TableTimePartitioningParameters) DeepCopy ¶
func (in *TableTimePartitioningParameters) DeepCopy() *TableTimePartitioningParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableTimePartitioningParameters.
func (*TableTimePartitioningParameters) DeepCopyInto ¶
func (in *TableTimePartitioningParameters) DeepCopyInto(out *TableTimePartitioningParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableViewInitParameters ¶
type TableViewInitParameters struct {

	// A query that BigQuery executes when the view is referenced.
	Query *string `json:"query,omitempty" tf:"query,omitempty"`

	// Specifies whether to use BigQuery's legacy SQL for this view.
	// The default value is true. If set to false, the view will use BigQuery's standard SQL.
	UseLegacySQL *bool `json:"useLegacySql,omitempty" tf:"use_legacy_sql,omitempty"`
}
func (*TableViewInitParameters) DeepCopy ¶
func (in *TableViewInitParameters) DeepCopy() *TableViewInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableViewInitParameters.
func (*TableViewInitParameters) DeepCopyInto ¶
func (in *TableViewInitParameters) DeepCopyInto(out *TableViewInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableViewObservation ¶
type TableViewObservation struct {

	// A query that BigQuery executes when the view is referenced.
	Query *string `json:"query,omitempty" tf:"query,omitempty"`

	// Specifies whether to use BigQuery's legacy SQL for this view.
	// The default value is true. If set to false, the view will use BigQuery's standard SQL.
	UseLegacySQL *bool `json:"useLegacySql,omitempty" tf:"use_legacy_sql,omitempty"`
}
func (*TableViewObservation) DeepCopy ¶
func (in *TableViewObservation) DeepCopy() *TableViewObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableViewObservation.
func (*TableViewObservation) DeepCopyInto ¶
func (in *TableViewObservation) DeepCopyInto(out *TableViewObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableViewParameters ¶
type TableViewParameters struct {

	// A query that BigQuery executes when the view is referenced.
	// +kubebuilder:validation:Optional
	Query *string `json:"query" tf:"query,omitempty"`

	// Specifies whether to use BigQuery's legacy SQL for this view.
	// The default value is true. If set to false, the view will use BigQuery's standard SQL.
	// +kubebuilder:validation:Optional
	UseLegacySQL *bool `json:"useLegacySql,omitempty" tf:"use_legacy_sql,omitempty"`
}
func (*TableViewParameters) DeepCopy ¶
func (in *TableViewParameters) DeepCopy() *TableViewParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableViewParameters.
func (*TableViewParameters) DeepCopyInto ¶
func (in *TableViewParameters) DeepCopyInto(out *TableViewParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TimePartitioningInitParameters ¶
type TimePartitioningInitParameters struct {

	// Number of milliseconds for which to keep the storage for a partition. A wrapper is used here because 0 is an invalid value.
	ExpirationMs *string `json:"expirationMs,omitempty" tf:"expiration_ms,omitempty"`

	// If not set, the table is partitioned by pseudo column '_PARTITIONTIME'; if set, the table is partitioned by this field.
	// The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED.
	// A wrapper is used here because an empty string is an invalid value.
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// The only type supported is DAY, which will generate one partition per day. Providing an empty string used to cause an error,
	// but in OnePlatform the field will be treated as unset.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}
func (*TimePartitioningInitParameters) DeepCopy ¶
func (in *TimePartitioningInitParameters) DeepCopy() *TimePartitioningInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimePartitioningInitParameters.
func (*TimePartitioningInitParameters) DeepCopyInto ¶
func (in *TimePartitioningInitParameters) DeepCopyInto(out *TimePartitioningInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TimePartitioningObservation ¶
type TimePartitioningObservation struct {

	// Number of milliseconds for which to keep the storage for a partition. A wrapper is used here because 0 is an invalid value.
	ExpirationMs *string `json:"expirationMs,omitempty" tf:"expiration_ms,omitempty"`

	// If not set, the table is partitioned by pseudo column '_PARTITIONTIME'; if set, the table is partitioned by this field.
	// The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED.
	// A wrapper is used here because an empty string is an invalid value.
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// The only type supported is DAY, which will generate one partition per day. Providing an empty string used to cause an error,
	// but in OnePlatform the field will be treated as unset.
	Type *string `json:"type,omitempty" tf:"type,omitempty"`
}
func (*TimePartitioningObservation) DeepCopy ¶
func (in *TimePartitioningObservation) DeepCopy() *TimePartitioningObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimePartitioningObservation.
func (*TimePartitioningObservation) DeepCopyInto ¶
func (in *TimePartitioningObservation) DeepCopyInto(out *TimePartitioningObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TimePartitioningParameters ¶
type TimePartitioningParameters struct {

	// Number of milliseconds for which to keep the storage for a partition. A wrapper is used here because 0 is an invalid value.
	// +kubebuilder:validation:Optional
	ExpirationMs *string `json:"expirationMs,omitempty" tf:"expiration_ms,omitempty"`

	// If not set, the table is partitioned by pseudo column '_PARTITIONTIME'; if set, the table is partitioned by this field.
	// The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED.
	// A wrapper is used here because an empty string is an invalid value.
	// +kubebuilder:validation:Optional
	Field *string `json:"field,omitempty" tf:"field,omitempty"`

	// The only type supported is DAY, which will generate one partition per day. Providing an empty string used to cause an error,
	// but in OnePlatform the field will be treated as unset.
	// +kubebuilder:validation:Optional
	Type *string `json:"type" tf:"type,omitempty"`
}
func (*TimePartitioningParameters) DeepCopy ¶
func (in *TimePartitioningParameters) DeepCopy() *TimePartitioningParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimePartitioningParameters.
func (*TimePartitioningParameters) DeepCopyInto ¶
func (in *TimePartitioningParameters) DeepCopyInto(out *TimePartitioningParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type UserDefinedFunctionResourcesInitParameters ¶
type UserDefinedFunctionResourcesInitParameters struct {

	// An inline resource that contains code for a user-defined function (UDF).
	// Providing an inline code resource is equivalent to providing a URI for a file containing the same code.
	InlineCode *string `json:"inlineCode,omitempty" tf:"inline_code,omitempty"`

	// A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
	ResourceURI *string `json:"resourceUri,omitempty" tf:"resource_uri,omitempty"`
}
func (*UserDefinedFunctionResourcesInitParameters) DeepCopy ¶
func (in *UserDefinedFunctionResourcesInitParameters) DeepCopy() *UserDefinedFunctionResourcesInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new UserDefinedFunctionResourcesInitParameters.
func (*UserDefinedFunctionResourcesInitParameters) DeepCopyInto ¶
func (in *UserDefinedFunctionResourcesInitParameters) DeepCopyInto(out *UserDefinedFunctionResourcesInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type UserDefinedFunctionResourcesObservation ¶
type UserDefinedFunctionResourcesObservation struct {

	// An inline resource that contains code for a user-defined function (UDF).
	// Providing an inline code resource is equivalent to providing a URI for a file containing the same code.
	InlineCode *string `json:"inlineCode,omitempty" tf:"inline_code,omitempty"`

	// A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
	ResourceURI *string `json:"resourceUri,omitempty" tf:"resource_uri,omitempty"`
}
func (*UserDefinedFunctionResourcesObservation) DeepCopy ¶
func (in *UserDefinedFunctionResourcesObservation) DeepCopy() *UserDefinedFunctionResourcesObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new UserDefinedFunctionResourcesObservation.
func (*UserDefinedFunctionResourcesObservation) DeepCopyInto ¶
func (in *UserDefinedFunctionResourcesObservation) DeepCopyInto(out *UserDefinedFunctionResourcesObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type UserDefinedFunctionResourcesParameters ¶
type UserDefinedFunctionResourcesParameters struct {

	// An inline resource that contains code for a user-defined function (UDF).
	// Providing an inline code resource is equivalent to providing a URI for a file containing the same code.
	// +kubebuilder:validation:Optional
	InlineCode *string `json:"inlineCode,omitempty" tf:"inline_code,omitempty"`

	// A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
	// +kubebuilder:validation:Optional
	ResourceURI *string `json:"resourceUri,omitempty" tf:"resource_uri,omitempty"`
}
func (*UserDefinedFunctionResourcesParameters) DeepCopy ¶
func (in *UserDefinedFunctionResourcesParameters) DeepCopy() *UserDefinedFunctionResourcesParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new UserDefinedFunctionResourcesParameters.
func (*UserDefinedFunctionResourcesParameters) DeepCopyInto ¶
func (in *UserDefinedFunctionResourcesParameters) DeepCopyInto(out *UserDefinedFunctionResourcesParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ViewInitParameters ¶
type ViewInitParameters struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The ID of the table. The ID must contain only letters (a-z,
	// A-Z), numbers (0-9), or underscores (_). The maximum length
	// is 1,024 characters.
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`
}
func (*ViewInitParameters) DeepCopy ¶
func (in *ViewInitParameters) DeepCopy() *ViewInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewInitParameters.
func (*ViewInitParameters) DeepCopyInto ¶
func (in *ViewInitParameters) DeepCopyInto(out *ViewInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ViewObservation ¶
type ViewObservation struct {

	// The ID of the dataset containing this table.
	DatasetID *string `json:"datasetId,omitempty" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	ProjectID *string `json:"projectId,omitempty" tf:"project_id,omitempty"`

	// The ID of the table. The ID must contain only letters (a-z,
	// A-Z), numbers (0-9), or underscores (_). The maximum length
	// is 1,024 characters.
	TableID *string `json:"tableId,omitempty" tf:"table_id,omitempty"`
}
func (*ViewObservation) DeepCopy ¶
func (in *ViewObservation) DeepCopy() *ViewObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewObservation.
func (*ViewObservation) DeepCopyInto ¶
func (in *ViewObservation) DeepCopyInto(out *ViewObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ViewParameters ¶
type ViewParameters struct {

	// The ID of the dataset containing this table.
	// +kubebuilder:validation:Optional
	DatasetID *string `json:"datasetId" tf:"dataset_id,omitempty"`

	// The ID of the project containing this table.
	// +kubebuilder:validation:Optional
	ProjectID *string `json:"projectId" tf:"project_id,omitempty"`

	// The ID of the table. The ID must contain only letters (a-z,
	// A-Z), numbers (0-9), or underscores (_). The maximum length
	// is 1,024 characters.
	// +kubebuilder:validation:Optional
	TableID *string `json:"tableId" tf:"table_id,omitempty"`
}
func (*ViewParameters) DeepCopy ¶
func (in *ViewParameters) DeepCopy() *ViewParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewParameters.
func (*ViewParameters) DeepCopyInto ¶
func (in *ViewParameters) DeepCopyInto(out *ViewParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
Source Files ¶
- zz_analyticshubdataexchangeiammember_terraformed.go
- zz_analyticshubdataexchangeiammember_types.go
- zz_analyticshublisting_terraformed.go
- zz_analyticshublisting_types.go
- zz_connection_terraformed.go
- zz_connection_types.go
- zz_dataset_terraformed.go
- zz_dataset_types.go
- zz_datasetaccess_terraformed.go
- zz_datasetaccess_types.go
- zz_datasetiambinding_terraformed.go
- zz_datasetiambinding_types.go
- zz_datasetiammember_terraformed.go
- zz_datasetiammember_types.go
- zz_datatransferconfig_terraformed.go
- zz_datatransferconfig_types.go
- zz_generated.conversion_hubs.go
- zz_generated.deepcopy.go
- zz_generated.managed.go
- zz_generated.managedlist.go
- zz_generated.resolvers.go
- zz_groupversion_info.go
- zz_job_terraformed.go
- zz_job_types.go
- zz_reservation_terraformed.go
- zz_reservation_types.go
- zz_routine_terraformed.go
- zz_routine_types.go
- zz_table_terraformed.go
- zz_table_types.go
- zz_tableiambinding_terraformed.go
- zz_tableiambinding_types.go
- zz_tableiammember_terraformed.go
- zz_tableiammember_types.go