Documentation ¶
Overview ¶
+kubebuilder:object:generate=true
+groupName=dataproc.gcp.upbound.io
+versionName=v1beta1
Index ¶
- Constants
- Variables
- type AcceleratorsObservation
- type AcceleratorsParameters
- type AutoscalingConfigObservation
- type AutoscalingConfigParameters
- type AutoscalingObservation
- type AutoscalingParameters
- type AutoscalingPolicy
- func (in *AutoscalingPolicy) DeepCopy() *AutoscalingPolicy
- func (in *AutoscalingPolicy) DeepCopyInto(out *AutoscalingPolicy)
- func (in *AutoscalingPolicy) DeepCopyObject() runtime.Object
- func (mg *AutoscalingPolicy) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *AutoscalingPolicy) GetConnectionDetailsMapping() map[string]string
- func (mg *AutoscalingPolicy) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *AutoscalingPolicy) GetID() string
- func (mg *AutoscalingPolicy) GetManagementPolicy() xpv1.ManagementPolicy
- func (tr *AutoscalingPolicy) GetObservation() (map[string]any, error)
- func (tr *AutoscalingPolicy) GetParameters() (map[string]any, error)
- func (mg *AutoscalingPolicy) GetProviderConfigReference() *xpv1.Reference
- func (mg *AutoscalingPolicy) GetProviderReference() *xpv1.Reference
- func (mg *AutoscalingPolicy) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *AutoscalingPolicy) GetTerraformResourceType() string
- func (tr *AutoscalingPolicy) GetTerraformSchemaVersion() int
- func (mg *AutoscalingPolicy) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *AutoscalingPolicy) LateInitialize(attrs []byte) (bool, error)
- func (mg *AutoscalingPolicy) SetConditions(c ...xpv1.Condition)
- func (mg *AutoscalingPolicy) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *AutoscalingPolicy) SetManagementPolicy(r xpv1.ManagementPolicy)
- func (tr *AutoscalingPolicy) SetObservation(obs map[string]any) error
- func (tr *AutoscalingPolicy) SetParameters(params map[string]any) error
- func (mg *AutoscalingPolicy) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *AutoscalingPolicy) SetProviderReference(r *xpv1.Reference)
- func (mg *AutoscalingPolicy) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *AutoscalingPolicy) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type AutoscalingPolicyList
- type AutoscalingPolicyObservation
- type AutoscalingPolicyParameters
- type AutoscalingPolicySpec
- type AutoscalingPolicyStatus
- type AuxiliaryServicesConfigMetastoreConfigObservation
- type AuxiliaryServicesConfigMetastoreConfigParameters
- type AuxiliaryServicesConfigObservation
- type AuxiliaryServicesConfigParameters
- type BasicAlgorithmObservation
- type BasicAlgorithmParameters
- type Cluster
- func (in *Cluster) DeepCopy() *Cluster
- func (in *Cluster) DeepCopyInto(out *Cluster)
- func (in *Cluster) DeepCopyObject() runtime.Object
- func (mg *Cluster) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Cluster) GetConnectionDetailsMapping() map[string]string
- func (mg *Cluster) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Cluster) GetID() string
- func (mg *Cluster) GetManagementPolicy() xpv1.ManagementPolicy
- func (tr *Cluster) GetObservation() (map[string]any, error)
- func (tr *Cluster) GetParameters() (map[string]any, error)
- func (mg *Cluster) GetProviderConfigReference() *xpv1.Reference
- func (mg *Cluster) GetProviderReference() *xpv1.Reference
- func (mg *Cluster) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Cluster) GetTerraformResourceType() string
- func (tr *Cluster) GetTerraformSchemaVersion() int
- func (mg *Cluster) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Cluster) LateInitialize(attrs []byte) (bool, error)
- func (mg *Cluster) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Cluster) SetConditions(c ...xpv1.Condition)
- func (mg *Cluster) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Cluster) SetManagementPolicy(r xpv1.ManagementPolicy)
- func (tr *Cluster) SetObservation(obs map[string]any) error
- func (tr *Cluster) SetParameters(params map[string]any) error
- func (mg *Cluster) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Cluster) SetProviderReference(r *xpv1.Reference)
- func (mg *Cluster) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Cluster) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type ClusterConfigObservation
- type ClusterConfigParameters
- type ClusterConfigWorkerConfigObservation
- type ClusterConfigWorkerConfigParameters
- type ClusterList
- type ClusterObservation
- type ClusterParameters
- type ClusterSelectorObservation
- type ClusterSelectorParameters
- type ClusterSpec
- type ClusterStatus
- type ConfigAutoscalingConfigObservation
- type ConfigAutoscalingConfigParameters
- type ConfigEncryptionConfigObservation
- type ConfigEncryptionConfigParameters
- type ConfigEndpointConfigObservation
- type ConfigEndpointConfigParameters
- type ConfigGceClusterConfigObservation
- type ConfigGceClusterConfigParameters
- type ConfigLifecycleConfigObservation
- type ConfigLifecycleConfigParameters
- type ConfigMasterConfigObservation
- type ConfigMasterConfigParameters
- type ConfigObservation
- type ConfigParameters
- type ConfigSecondaryWorkerConfigObservation
- type ConfigSecondaryWorkerConfigParameters
- type ConfigSecurityConfigObservation
- type ConfigSecurityConfigParameters
- type ConfigSoftwareConfigObservation
- type ConfigSoftwareConfigParameters
- type ConfigWorkerConfigAcceleratorsObservation
- type ConfigWorkerConfigAcceleratorsParameters
- type ConfigWorkerConfigDiskConfigObservation
- type ConfigWorkerConfigDiskConfigParameters
- type ConfigWorkerConfigObservation
- type ConfigWorkerConfigParameters
- type ConsumersObservation
- type ConsumersParameters
- type DataprocMetricConfigObservation
- type DataprocMetricConfigParameters
- type DiskConfigObservation
- type DiskConfigParameters
- type EncryptionConfigObservation
- type EncryptionConfigParameters
- type EndpointConfigObservation
- type EndpointConfigParameters
- type GceClusterConfigNodeGroupAffinityObservation
- type GceClusterConfigNodeGroupAffinityParameters
- type GceClusterConfigObservation
- type GceClusterConfigParameters
- type GceClusterConfigReservationAffinityObservation
- type GceClusterConfigReservationAffinityParameters
- type GceClusterConfigShieldedInstanceConfigObservation
- type GceClusterConfigShieldedInstanceConfigParameters
- type GkeClusterConfigObservation
- type GkeClusterConfigParameters
- type HadoopConfigObservation
- type HadoopConfigParameters
- type HadoopJobLoggingConfigObservation
- type HadoopJobLoggingConfigParameters
- type HadoopJobObservation
- type HadoopJobParameters
- type HiveConfigObservation
- type HiveConfigParameters
- type HiveJobObservation
- type HiveJobParameters
- type HiveMetastoreConfigKerberosConfigObservation
- type HiveMetastoreConfigKerberosConfigParameters
- type HiveMetastoreConfigObservation
- type HiveMetastoreConfigParameters
- type InitializationActionObservation
- type InitializationActionParameters
- type InitializationActionsObservation
- type InitializationActionsParameters
- type Job
- func (in *Job) DeepCopy() *Job
- func (in *Job) DeepCopyInto(out *Job)
- func (in *Job) DeepCopyObject() runtime.Object
- func (mg *Job) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Job) GetConnectionDetailsMapping() map[string]string
- func (mg *Job) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Job) GetID() string
- func (mg *Job) GetManagementPolicy() xpv1.ManagementPolicy
- func (tr *Job) GetObservation() (map[string]any, error)
- func (tr *Job) GetParameters() (map[string]any, error)
- func (mg *Job) GetProviderConfigReference() *xpv1.Reference
- func (mg *Job) GetProviderReference() *xpv1.Reference
- func (mg *Job) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Job) GetTerraformResourceType() string
- func (tr *Job) GetTerraformSchemaVersion() int
- func (mg *Job) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Job) LateInitialize(attrs []byte) (bool, error)
- func (mg *Job) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Job) SetConditions(c ...xpv1.Condition)
- func (mg *Job) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Job) SetManagementPolicy(r xpv1.ManagementPolicy)
- func (tr *Job) SetObservation(obs map[string]any) error
- func (tr *Job) SetParameters(params map[string]any) error
- func (mg *Job) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Job) SetProviderReference(r *xpv1.Reference)
- func (mg *Job) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Job) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type JobList
- type JobObservation
- type JobParameters
- type JobSpec
- type JobStatus
- type JobsObservation
- type JobsParameters
- type JobsSchedulingObservation
- type JobsSchedulingParameters
- type KerberosConfigObservation
- type KerberosConfigParameters
- type KeytabObservation
- type KeytabParameters
- type KubernetesClusterConfigObservation
- type KubernetesClusterConfigParameters
- type KubernetesSoftwareConfigObservation
- type KubernetesSoftwareConfigParameters
- type LifecycleConfigObservation
- type LifecycleConfigParameters
- type LoggingConfigObservation
- type LoggingConfigParameters
- type MaintenanceWindowObservation
- type MaintenanceWindowParameters
- type ManagedClusterConfigObservation
- type ManagedClusterConfigParameters
- type ManagedClusterObservation
- type ManagedClusterParameters
- type ManagedGroupConfigObservation
- type ManagedGroupConfigParameters
- type MasterConfigAcceleratorsObservation
- type MasterConfigAcceleratorsParameters
- type MasterConfigDiskConfigObservation
- type MasterConfigDiskConfigParameters
- type MasterConfigObservation
- type MasterConfigParameters
- type MetastoreConfigObservation
- type MetastoreConfigParameters
- type MetastoreService
- func (in *MetastoreService) DeepCopy() *MetastoreService
- func (in *MetastoreService) DeepCopyInto(out *MetastoreService)
- func (in *MetastoreService) DeepCopyObject() runtime.Object
- func (mg *MetastoreService) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *MetastoreService) GetConnectionDetailsMapping() map[string]string
- func (mg *MetastoreService) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *MetastoreService) GetID() string
- func (mg *MetastoreService) GetManagementPolicy() xpv1.ManagementPolicy
- func (tr *MetastoreService) GetObservation() (map[string]any, error)
- func (tr *MetastoreService) GetParameters() (map[string]any, error)
- func (mg *MetastoreService) GetProviderConfigReference() *xpv1.Reference
- func (mg *MetastoreService) GetProviderReference() *xpv1.Reference
- func (mg *MetastoreService) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *MetastoreService) GetTerraformResourceType() string
- func (tr *MetastoreService) GetTerraformSchemaVersion() int
- func (mg *MetastoreService) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *MetastoreService) LateInitialize(attrs []byte) (bool, error)
- func (mg *MetastoreService) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *MetastoreService) SetConditions(c ...xpv1.Condition)
- func (mg *MetastoreService) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *MetastoreService) SetManagementPolicy(r xpv1.ManagementPolicy)
- func (tr *MetastoreService) SetObservation(obs map[string]any) error
- func (tr *MetastoreService) SetParameters(params map[string]any) error
- func (mg *MetastoreService) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *MetastoreService) SetProviderReference(r *xpv1.Reference)
- func (mg *MetastoreService) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *MetastoreService) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type MetastoreServiceEncryptionConfigObservation
- type MetastoreServiceEncryptionConfigParameters
- type MetastoreServiceList
- type MetastoreServiceObservation
- type MetastoreServiceParameters
- type MetastoreServiceSpec
- type MetastoreServiceStatus
- type MetricsObservation
- type MetricsParameters
- type NetworkConfigObservation
- type NetworkConfigParameters
- type NodeGroupAffinityObservation
- type NodeGroupAffinityParameters
- type NodePoolConfigObservation
- type NodePoolConfigParameters
- type NodePoolTargetObservation
- type NodePoolTargetParameters
- type ParametersObservation
- type ParametersParameters
- type PigConfigLoggingConfigObservation
- type PigConfigLoggingConfigParameters
- type PigConfigObservation
- type PigConfigParameters
- type PigJobLoggingConfigObservation
- type PigJobLoggingConfigParameters
- type PigJobObservation
- type PigJobParameters
- type PigJobQueryListObservation
- type PigJobQueryListParameters
- type PlacementObservation
- type PlacementParameters
- type PreemptibleWorkerConfigDiskConfigObservation
- type PreemptibleWorkerConfigDiskConfigParameters
- type PreemptibleWorkerConfigObservation
- type PreemptibleWorkerConfigParameters
- type PrestoConfigLoggingConfigObservation
- type PrestoConfigLoggingConfigParameters
- type PrestoConfigObservation
- type PrestoConfigParameters
- type PrestoJobLoggingConfigObservation
- type PrestoJobLoggingConfigParameters
- type PrestoJobObservation
- type PrestoJobParameters
- type PrestoJobQueryListObservation
- type PrestoJobQueryListParameters
- type PysparkConfigLoggingConfigObservation
- type PysparkConfigLoggingConfigParameters
- type PysparkConfigObservation
- type PysparkConfigParameters
- type PysparkJobLoggingConfigObservation
- type PysparkJobLoggingConfigParameters
- type PysparkJobObservation
- type PysparkJobParameters
- type QueryListObservation
- type QueryListParameters
- type ReferenceObservation
- type ReferenceParameters
- type RegexObservation
- type RegexParameters
- type ReservationAffinityObservation
- type ReservationAffinityParameters
- type SchedulingObservation
- type SchedulingParameters
- type SecondaryWorkerConfigAcceleratorsObservation
- type SecondaryWorkerConfigAcceleratorsParameters
- type SecondaryWorkerConfigDiskConfigObservation
- type SecondaryWorkerConfigDiskConfigParameters
- type SecondaryWorkerConfigManagedGroupConfigObservation
- type SecondaryWorkerConfigManagedGroupConfigParameters
- type SecondaryWorkerConfigObservation
- type SecondaryWorkerConfigParameters
- type SecurityConfigKerberosConfigObservation
- type SecurityConfigKerberosConfigParameters
- type SecurityConfigObservation
- type SecurityConfigParameters
- type ShieldedInstanceConfigObservation
- type ShieldedInstanceConfigParameters
- type SoftwareConfigObservation
- type SoftwareConfigParameters
- type SparkConfigLoggingConfigObservation
- type SparkConfigLoggingConfigParameters
- type SparkConfigObservation
- type SparkConfigParameters
- type SparkHistoryServerConfigObservation
- type SparkHistoryServerConfigParameters
- type SparkJobLoggingConfigObservation
- type SparkJobLoggingConfigParameters
- type SparkJobObservation
- type SparkJobParameters
- type SparkRJobLoggingConfigObservation
- type SparkRJobLoggingConfigParameters
- type SparkRJobObservation
- type SparkRJobParameters
- type SparkSQLJobLoggingConfigObservation
- type SparkSQLJobLoggingConfigParameters
- type SparkSQLJobObservation
- type SparkSQLJobParameters
- type SparkSQLJobQueryListObservation
- type SparkSQLJobQueryListParameters
- type SparksqlConfigLoggingConfigObservation
- type SparksqlConfigLoggingConfigParameters
- type SparksqlConfigObservation
- type SparksqlConfigParameters
- type StatusObservation
- type StatusParameters
- type TelemetryConfigObservation
- type TelemetryConfigParameters
- type ValidationObservation
- type ValidationParameters
- type ValuesObservation
- type ValuesParameters
- type VirtualClusterConfigObservation
- type VirtualClusterConfigParameters
- type WorkerConfigAcceleratorsObservation
- type WorkerConfigAcceleratorsParameters
- type WorkerConfigDiskConfigObservation
- type WorkerConfigDiskConfigParameters
- type WorkerConfigManagedGroupConfigObservation
- type WorkerConfigManagedGroupConfigParameters
- type WorkerConfigObservation
- type WorkerConfigParameters
- type WorkflowTemplate
- func (in *WorkflowTemplate) DeepCopy() *WorkflowTemplate
- func (in *WorkflowTemplate) DeepCopyInto(out *WorkflowTemplate)
- func (in *WorkflowTemplate) DeepCopyObject() runtime.Object
- func (mg *WorkflowTemplate) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *WorkflowTemplate) GetConnectionDetailsMapping() map[string]string
- func (mg *WorkflowTemplate) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *WorkflowTemplate) GetID() string
- func (mg *WorkflowTemplate) GetManagementPolicy() xpv1.ManagementPolicy
- func (tr *WorkflowTemplate) GetObservation() (map[string]any, error)
- func (tr *WorkflowTemplate) GetParameters() (map[string]any, error)
- func (mg *WorkflowTemplate) GetProviderConfigReference() *xpv1.Reference
- func (mg *WorkflowTemplate) GetProviderReference() *xpv1.Reference
- func (mg *WorkflowTemplate) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *WorkflowTemplate) GetTerraformResourceType() string
- func (tr *WorkflowTemplate) GetTerraformSchemaVersion() int
- func (mg *WorkflowTemplate) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *WorkflowTemplate) LateInitialize(attrs []byte) (bool, error)
- func (mg *WorkflowTemplate) SetConditions(c ...xpv1.Condition)
- func (mg *WorkflowTemplate) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *WorkflowTemplate) SetManagementPolicy(r xpv1.ManagementPolicy)
- func (tr *WorkflowTemplate) SetObservation(obs map[string]any) error
- func (tr *WorkflowTemplate) SetParameters(params map[string]any) error
- func (mg *WorkflowTemplate) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *WorkflowTemplate) SetProviderReference(r *xpv1.Reference)
- func (mg *WorkflowTemplate) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *WorkflowTemplate) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type WorkflowTemplateList
- type WorkflowTemplateObservation
- type WorkflowTemplateParameters
- type WorkflowTemplatePlacementObservation
- type WorkflowTemplatePlacementParameters
- type WorkflowTemplateSpec
- type WorkflowTemplateStatus
- type YarnConfigObservation
- type YarnConfigParameters
Constants ¶
const (
	CRDGroup   = "dataproc.gcp.upbound.io"
	CRDVersion = "v1beta1"
)
Package type metadata.
Variables ¶
var (
	AutoscalingPolicy_Kind             = "AutoscalingPolicy"
	AutoscalingPolicy_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: AutoscalingPolicy_Kind}.String()
	AutoscalingPolicy_KindAPIVersion   = AutoscalingPolicy_Kind + "." + CRDGroupVersion.String()
	AutoscalingPolicy_GroupVersionKind = CRDGroupVersion.WithKind(AutoscalingPolicy_Kind)
)
Repository type metadata.
var (
	Cluster_Kind             = "Cluster"
	Cluster_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: Cluster_Kind}.String()
	Cluster_KindAPIVersion   = Cluster_Kind + "." + CRDGroupVersion.String()
	Cluster_GroupVersionKind = CRDGroupVersion.WithKind(Cluster_Kind)
)
Repository type metadata.
var (
	// CRDGroupVersion is the API Group Version used to register the objects
	CRDGroupVersion = schema.GroupVersion{Group: CRDGroup, Version: CRDVersion}

	// SchemeBuilder is used to add go types to the GroupVersionKind scheme
	SchemeBuilder = &scheme.Builder{GroupVersion: CRDGroupVersion}

	// AddToScheme adds the types in this group-version to the given scheme.
	AddToScheme = SchemeBuilder.AddToScheme
)
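AddToScheme is typically called once at controller start-up to make these kinds known to a runtime.Scheme. A minimal sketch; the import path of this package is an assumption and should be adjusted to your provider module:

package main

import (
	"fmt"

	"k8s.io/apimachinery/pkg/runtime"

	dataprocv1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func main() {
	// Register every kind in dataproc.gcp.upbound.io/v1beta1 with a fresh scheme.
	s := runtime.NewScheme()
	if err := dataprocv1beta1.AddToScheme(s); err != nil {
		panic(err)
	}

	// The scheme now recognizes the registered GroupVersionKinds, e.g. Cluster.
	fmt.Println(s.Recognizes(dataprocv1beta1.Cluster_GroupVersionKind))
}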
var (
	Job_Kind             = "Job"
	Job_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: Job_Kind}.String()
	Job_KindAPIVersion   = Job_Kind + "." + CRDGroupVersion.String()
	Job_GroupVersionKind = CRDGroupVersion.WithKind(Job_Kind)
)
Repository type metadata.
var (
	MetastoreService_Kind             = "MetastoreService"
	MetastoreService_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: MetastoreService_Kind}.String()
	MetastoreService_KindAPIVersion   = MetastoreService_Kind + "." + CRDGroupVersion.String()
	MetastoreService_GroupVersionKind = CRDGroupVersion.WithKind(MetastoreService_Kind)
)
Repository type metadata.
var (
	WorkflowTemplate_Kind             = "WorkflowTemplate"
	WorkflowTemplate_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: WorkflowTemplate_Kind}.String()
	WorkflowTemplate_KindAPIVersion   = WorkflowTemplate_Kind + "." + CRDGroupVersion.String()
	WorkflowTemplate_GroupVersionKind = CRDGroupVersion.WithKind(WorkflowTemplate_Kind)
)
Repository type metadata.
Functions ¶
This section is empty.
Types ¶
type AcceleratorsObservation ¶
type AcceleratorsObservation struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*AcceleratorsObservation) DeepCopy ¶
func (in *AcceleratorsObservation) DeepCopy() *AcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AcceleratorsObservation.
func (*AcceleratorsObservation) DeepCopyInto ¶
func (in *AcceleratorsObservation) DeepCopyInto(out *AcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AcceleratorsParameters ¶
type AcceleratorsParameters struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	// +kubebuilder:validation:Required
	AcceleratorCount *float64 `json:"acceleratorCount" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	// +kubebuilder:validation:Required
	AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type,omitempty"`
}
func (*AcceleratorsParameters) DeepCopy ¶
func (in *AcceleratorsParameters) DeepCopy() *AcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AcceleratorsParameters.
func (*AcceleratorsParameters) DeepCopyInto ¶
func (in *AcceleratorsParameters) DeepCopyInto(out *AcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingConfigObservation ¶
type AutoscalingConfigObservation struct {
	// The autoscaling policy used by the cluster.
	PolicyURI *string `json:"policyUri,omitempty" tf:"policy_uri,omitempty"`
}
func (*AutoscalingConfigObservation) DeepCopy ¶
func (in *AutoscalingConfigObservation) DeepCopy() *AutoscalingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingConfigObservation.
func (*AutoscalingConfigObservation) DeepCopyInto ¶
func (in *AutoscalingConfigObservation) DeepCopyInto(out *AutoscalingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingConfigParameters ¶
type AutoscalingConfigParameters struct {
	// The autoscaling policy used by the cluster.
	// +kubebuilder:validation:Required
	PolicyURI *string `json:"policyUri" tf:"policy_uri,omitempty"`
}
func (*AutoscalingConfigParameters) DeepCopy ¶
func (in *AutoscalingConfigParameters) DeepCopy() *AutoscalingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingConfigParameters.
func (*AutoscalingConfigParameters) DeepCopyInto ¶
func (in *AutoscalingConfigParameters) DeepCopyInto(out *AutoscalingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingObservation ¶
type AutoscalingObservation struct {
	// The maximum number of nodes in the node pool. Must be >= minNodeCount, and must be > 0.
	MaxNodeCount *float64 `json:"maxNodeCount,omitempty" tf:"max_node_count,omitempty"`

	// The minimum number of nodes in the node pool. Must be >= 0 and <= maxNodeCount.
	MinNodeCount *float64 `json:"minNodeCount,omitempty" tf:"min_node_count,omitempty"`
}
func (*AutoscalingObservation) DeepCopy ¶
func (in *AutoscalingObservation) DeepCopy() *AutoscalingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingObservation.
func (*AutoscalingObservation) DeepCopyInto ¶
func (in *AutoscalingObservation) DeepCopyInto(out *AutoscalingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingParameters ¶
type AutoscalingParameters struct {
	// The maximum number of nodes in the node pool. Must be >= minNodeCount, and must be > 0.
	// +kubebuilder:validation:Optional
	MaxNodeCount *float64 `json:"maxNodeCount,omitempty" tf:"max_node_count,omitempty"`

	// The minimum number of nodes in the node pool. Must be >= 0 and <= maxNodeCount.
	// +kubebuilder:validation:Optional
	MinNodeCount *float64 `json:"minNodeCount,omitempty" tf:"min_node_count,omitempty"`
}
func (*AutoscalingParameters) DeepCopy ¶
func (in *AutoscalingParameters) DeepCopy() *AutoscalingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingParameters.
func (*AutoscalingParameters) DeepCopyInto ¶
func (in *AutoscalingParameters) DeepCopyInto(out *AutoscalingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicy ¶
type AutoscalingPolicy struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              AutoscalingPolicySpec   `json:"spec"`
	Status            AutoscalingPolicyStatus `json:"status,omitempty"`
}
AutoscalingPolicy is the Schema for the AutoscalingPolicys API. Describes an autoscaling policy for Dataproc cluster autoscaler.

+kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status"
+kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status"
+kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name"
+kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp"
+kubebuilder:subresource:status
+kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
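A hedged sketch of constructing an AutoscalingPolicy in Go. The import alias, the strPtr helper, and all field values are illustrative assumptions, not taken from this package; the required YarnConfig fields are documented under YarnConfigParameters and omitted here.

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	dataprocv1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

// strPtr is a small local helper; parameter fields in this package are pointers.
func strPtr(s string) *string { return &s }

func examplePolicy() *dataprocv1beta1.AutoscalingPolicy {
	return &dataprocv1beta1.AutoscalingPolicy{
		ObjectMeta: metav1.ObjectMeta{Name: "example-autoscaling-policy"},
		Spec: dataprocv1beta1.AutoscalingPolicySpec{
			ForProvider: dataprocv1beta1.AutoscalingPolicyParameters{
				// Location defaults to global when omitted.
				Location: strPtr("us-central1"),
				BasicAlgorithm: []dataprocv1beta1.BasicAlgorithmParameters{{
					// Bounds: [2m, 1d]. Default: 2m.
					CooldownPeriod: strPtr("120s"),
					// yarn_config is required by the CRD schema; its fields are omitted here.
					YarnConfig: []dataprocv1beta1.YarnConfigParameters{{}},
				}},
			},
		},
	}
}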
func (*AutoscalingPolicy) DeepCopy ¶
func (in *AutoscalingPolicy) DeepCopy() *AutoscalingPolicy
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicy.
func (*AutoscalingPolicy) DeepCopyInto ¶
func (in *AutoscalingPolicy) DeepCopyInto(out *AutoscalingPolicy)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AutoscalingPolicy) DeepCopyObject ¶
func (in *AutoscalingPolicy) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AutoscalingPolicy) GetCondition ¶
func (mg *AutoscalingPolicy) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetConnectionDetailsMapping ¶
func (tr *AutoscalingPolicy) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this AutoscalingPolicy
func (*AutoscalingPolicy) GetDeletionPolicy ¶
func (mg *AutoscalingPolicy) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetID ¶
func (tr *AutoscalingPolicy) GetID() string
GetID returns ID of underlying Terraform resource of this AutoscalingPolicy
func (*AutoscalingPolicy) GetManagementPolicy ¶ added in v0.31.0
func (mg *AutoscalingPolicy) GetManagementPolicy() xpv1.ManagementPolicy
GetManagementPolicy of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetObservation ¶
func (tr *AutoscalingPolicy) GetObservation() (map[string]any, error)
GetObservation of this AutoscalingPolicy
func (*AutoscalingPolicy) GetParameters ¶
func (tr *AutoscalingPolicy) GetParameters() (map[string]any, error)
GetParameters of this AutoscalingPolicy
func (*AutoscalingPolicy) GetProviderConfigReference ¶
func (mg *AutoscalingPolicy) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetProviderReference ¶
func (mg *AutoscalingPolicy) GetProviderReference() *xpv1.Reference
GetProviderReference of this AutoscalingPolicy. Deprecated: Use GetProviderConfigReference.
func (*AutoscalingPolicy) GetPublishConnectionDetailsTo ¶
func (mg *AutoscalingPolicy) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetTerraformResourceType ¶
func (mg *AutoscalingPolicy) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this AutoscalingPolicy
func (*AutoscalingPolicy) GetTerraformSchemaVersion ¶
func (tr *AutoscalingPolicy) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*AutoscalingPolicy) GetWriteConnectionSecretToReference ¶
func (mg *AutoscalingPolicy) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this AutoscalingPolicy.
func (*AutoscalingPolicy) LateInitialize ¶
func (tr *AutoscalingPolicy) LateInitialize(attrs []byte) (bool, error)
LateInitialize this AutoscalingPolicy using its observed tfState. Returns true if there are any spec changes for the resource.
func (*AutoscalingPolicy) SetConditions ¶
func (mg *AutoscalingPolicy) SetConditions(c ...xpv1.Condition)
SetConditions of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetDeletionPolicy ¶
func (mg *AutoscalingPolicy) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetManagementPolicy ¶ added in v0.31.0
func (mg *AutoscalingPolicy) SetManagementPolicy(r xpv1.ManagementPolicy)
SetManagementPolicy of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetObservation ¶
func (tr *AutoscalingPolicy) SetObservation(obs map[string]any) error
SetObservation for this AutoscalingPolicy
func (*AutoscalingPolicy) SetParameters ¶
func (tr *AutoscalingPolicy) SetParameters(params map[string]any) error
SetParameters for this AutoscalingPolicy
func (*AutoscalingPolicy) SetProviderConfigReference ¶
func (mg *AutoscalingPolicy) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetProviderReference ¶
func (mg *AutoscalingPolicy) SetProviderReference(r *xpv1.Reference)
SetProviderReference of this AutoscalingPolicy. Deprecated: Use SetProviderConfigReference.
func (*AutoscalingPolicy) SetPublishConnectionDetailsTo ¶
func (mg *AutoscalingPolicy) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetWriteConnectionSecretToReference ¶
func (mg *AutoscalingPolicy) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this AutoscalingPolicy.
type AutoscalingPolicyList ¶
type AutoscalingPolicyList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []AutoscalingPolicy `json:"items"`
}
AutoscalingPolicyList contains a list of AutoscalingPolicys
func (*AutoscalingPolicyList) DeepCopy ¶
func (in *AutoscalingPolicyList) DeepCopy() *AutoscalingPolicyList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyList.
func (*AutoscalingPolicyList) DeepCopyInto ¶
func (in *AutoscalingPolicyList) DeepCopyInto(out *AutoscalingPolicyList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AutoscalingPolicyList) DeepCopyObject ¶
func (in *AutoscalingPolicyList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AutoscalingPolicyList) GetItems ¶
func (l *AutoscalingPolicyList) GetItems() []resource.Managed
GetItems of this AutoscalingPolicyList.
type AutoscalingPolicyObservation ¶
type AutoscalingPolicyObservation struct {
	// Basic algorithm for autoscaling.
	// Structure is documented below.
	BasicAlgorithm []BasicAlgorithmObservation `json:"basicAlgorithm,omitempty" tf:"basic_algorithm,omitempty"`

	// an identifier for the resource with format projects/{{project}}/locations/{{location}}/autoscalingPolicies/{{policy_id}}
	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// The location where the autoscaling policy should reside.
	// The default value is global.
	Location *string `json:"location,omitempty" tf:"location,omitempty"`

	// The "resource name" of the autoscaling policy.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// The ID of the project in which the resource belongs.
	// If it is not provided, the provider project is used.
	Project *string `json:"project,omitempty" tf:"project,omitempty"`

	// Describes how the autoscaler will operate for secondary workers.
	// Structure is documented below.
	SecondaryWorkerConfig []SecondaryWorkerConfigObservation `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"`

	// Describes how the autoscaler will operate for primary workers.
	// Structure is documented below.
	WorkerConfig []WorkerConfigObservation `json:"workerConfig,omitempty" tf:"worker_config,omitempty"`
}
func (*AutoscalingPolicyObservation) DeepCopy ¶
func (in *AutoscalingPolicyObservation) DeepCopy() *AutoscalingPolicyObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyObservation.
func (*AutoscalingPolicyObservation) DeepCopyInto ¶
func (in *AutoscalingPolicyObservation) DeepCopyInto(out *AutoscalingPolicyObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicyParameters ¶
type AutoscalingPolicyParameters struct {
	// Basic algorithm for autoscaling.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	BasicAlgorithm []BasicAlgorithmParameters `json:"basicAlgorithm,omitempty" tf:"basic_algorithm,omitempty"`

	// The location where the autoscaling policy should reside.
	// The default value is global.
	// +kubebuilder:validation:Optional
	Location *string `json:"location,omitempty" tf:"location,omitempty"`

	// The ID of the project in which the resource belongs.
	// If it is not provided, the provider project is used.
	// +kubebuilder:validation:Optional
	Project *string `json:"project,omitempty" tf:"project,omitempty"`

	// Describes how the autoscaler will operate for secondary workers.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	SecondaryWorkerConfig []SecondaryWorkerConfigParameters `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"`

	// Describes how the autoscaler will operate for primary workers.
	// Structure is documented below.
	// +kubebuilder:validation:Optional
	WorkerConfig []WorkerConfigParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"`
}
func (*AutoscalingPolicyParameters) DeepCopy ¶
func (in *AutoscalingPolicyParameters) DeepCopy() *AutoscalingPolicyParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyParameters.
func (*AutoscalingPolicyParameters) DeepCopyInto ¶
func (in *AutoscalingPolicyParameters) DeepCopyInto(out *AutoscalingPolicyParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicySpec ¶
type AutoscalingPolicySpec struct {
	v1.ResourceSpec `json:",inline"`
	ForProvider     AutoscalingPolicyParameters `json:"forProvider"`
}
AutoscalingPolicySpec defines the desired state of AutoscalingPolicy
func (*AutoscalingPolicySpec) DeepCopy ¶
func (in *AutoscalingPolicySpec) DeepCopy() *AutoscalingPolicySpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpec.
func (*AutoscalingPolicySpec) DeepCopyInto ¶
func (in *AutoscalingPolicySpec) DeepCopyInto(out *AutoscalingPolicySpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicyStatus ¶
type AutoscalingPolicyStatus struct {
	v1.ResourceStatus `json:",inline"`
	AtProvider        AutoscalingPolicyObservation `json:"atProvider,omitempty"`
}
AutoscalingPolicyStatus defines the observed state of AutoscalingPolicy.
func (*AutoscalingPolicyStatus) DeepCopy ¶
func (in *AutoscalingPolicyStatus) DeepCopy() *AutoscalingPolicyStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyStatus.
func (*AutoscalingPolicyStatus) DeepCopyInto ¶
func (in *AutoscalingPolicyStatus) DeepCopyInto(out *AutoscalingPolicyStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigMetastoreConfigObservation ¶
type AuxiliaryServicesConfigMetastoreConfigObservation struct {
	// Resource name of an existing Dataproc Metastore service.
	DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"`
}
func (*AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopy ¶
func (in *AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopy() *AuxiliaryServicesConfigMetastoreConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigMetastoreConfigObservation.
func (*AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopyInto(out *AuxiliaryServicesConfigMetastoreConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigMetastoreConfigParameters ¶
type AuxiliaryServicesConfigMetastoreConfigParameters struct {
	// Resource name of an existing Dataproc Metastore service.
	// +kubebuilder:validation:Optional
	DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"`
}
func (*AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopy ¶
func (in *AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopy() *AuxiliaryServicesConfigMetastoreConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigMetastoreConfigParameters.
func (*AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopyInto(out *AuxiliaryServicesConfigMetastoreConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigObservation ¶
type AuxiliaryServicesConfigObservation struct {
	// The config setting for metastore service with the cluster.
	// Structure defined below.
	MetastoreConfig []AuxiliaryServicesConfigMetastoreConfigObservation `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"`

	// The Spark History Server configuration for the workload.
	SparkHistoryServerConfig []SparkHistoryServerConfigObservation `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"`
}
func (*AuxiliaryServicesConfigObservation) DeepCopy ¶
func (in *AuxiliaryServicesConfigObservation) DeepCopy() *AuxiliaryServicesConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigObservation.
func (*AuxiliaryServicesConfigObservation) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigObservation) DeepCopyInto(out *AuxiliaryServicesConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigParameters ¶
type AuxiliaryServicesConfigParameters struct {
	// The config setting for metastore service with the cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	MetastoreConfig []AuxiliaryServicesConfigMetastoreConfigParameters `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"`

	// The Spark History Server configuration for the workload.
	// +kubebuilder:validation:Optional
	SparkHistoryServerConfig []SparkHistoryServerConfigParameters `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"`
}
func (*AuxiliaryServicesConfigParameters) DeepCopy ¶
func (in *AuxiliaryServicesConfigParameters) DeepCopy() *AuxiliaryServicesConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigParameters.
func (*AuxiliaryServicesConfigParameters) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigParameters) DeepCopyInto(out *AuxiliaryServicesConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BasicAlgorithmObservation ¶
type BasicAlgorithmObservation struct {
	// Duration between scaling events. A scaling period starts after the
	// update operation from the previous event has completed.
	// Bounds: [2m, 1d]. Default: 2m.
	CooldownPeriod *string `json:"cooldownPeriod,omitempty" tf:"cooldown_period,omitempty"`

	// YARN autoscaling configuration.
	// Structure is documented below.
	YarnConfig []YarnConfigObservation `json:"yarnConfig,omitempty" tf:"yarn_config,omitempty"`
}
func (*BasicAlgorithmObservation) DeepCopy ¶
func (in *BasicAlgorithmObservation) DeepCopy() *BasicAlgorithmObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAlgorithmObservation.
func (*BasicAlgorithmObservation) DeepCopyInto ¶
func (in *BasicAlgorithmObservation) DeepCopyInto(out *BasicAlgorithmObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BasicAlgorithmParameters ¶
type BasicAlgorithmParameters struct {
	// Duration between scaling events. A scaling period starts after the
	// update operation from the previous event has completed.
	// Bounds: [2m, 1d]. Default: 2m.
	// +kubebuilder:validation:Optional
	CooldownPeriod *string `json:"cooldownPeriod,omitempty" tf:"cooldown_period,omitempty"`

	// YARN autoscaling configuration.
	// Structure is documented below.
	// +kubebuilder:validation:Required
	YarnConfig []YarnConfigParameters `json:"yarnConfig" tf:"yarn_config,omitempty"`
}
func (*BasicAlgorithmParameters) DeepCopy ¶
func (in *BasicAlgorithmParameters) DeepCopy() *BasicAlgorithmParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAlgorithmParameters.
func (*BasicAlgorithmParameters) DeepCopyInto ¶
func (in *BasicAlgorithmParameters) DeepCopyInto(out *BasicAlgorithmParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Cluster ¶
type Cluster struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.name)",message="name is a required parameter"
	Spec   ClusterSpec   `json:"spec"`
	Status ClusterStatus `json:"status,omitempty"`
}
Cluster is the Schema for the Clusters API. Manages a Cloud Dataproc cluster resource.

+kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status"
+kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status"
+kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name"
+kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp"
+kubebuilder:subresource:status
+kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
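A hedged sketch of a Cluster managed resource built in Go, showing the effect of the CEL validation rule above: spec.forProvider.name must be set unless spec.managementPolicy is ObserveOnly. The import alias and the Region field are assumptions (ClusterParameters is documented separately); all values are illustrative.

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	dataprocv1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

// strPtr is a small local helper; parameter fields in this package are pointers.
func strPtr(s string) *string { return &s }

func exampleCluster() *dataprocv1beta1.Cluster {
	return &dataprocv1beta1.Cluster{
		ObjectMeta: metav1.ObjectMeta{Name: "example-cluster"},
		Spec: dataprocv1beta1.ClusterSpec{
			ForProvider: dataprocv1beta1.ClusterParameters{
				// Required by the XValidation rule unless managementPolicy is ObserveOnly.
				Name: strPtr("example-cluster"),
				// Region is an assumed field of ClusterParameters; see its documentation.
				Region: strPtr("us-central1"),
			},
		},
	}
}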
func (*Cluster) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cluster.
func (*Cluster) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Cluster) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Cluster) GetCondition ¶
func (mg *Cluster) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Cluster.
func (*Cluster) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Cluster
func (*Cluster) GetDeletionPolicy ¶
func (mg *Cluster) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Cluster.
func (*Cluster) GetManagementPolicy ¶ added in v0.31.0
func (mg *Cluster) GetManagementPolicy() xpv1.ManagementPolicy
GetManagementPolicy of this Cluster.
func (*Cluster) GetObservation ¶
GetObservation of this Cluster
func (*Cluster) GetParameters ¶
GetParameters of this Cluster
func (*Cluster) GetProviderConfigReference ¶
GetProviderConfigReference of this Cluster.
func (*Cluster) GetProviderReference ¶
GetProviderReference of this Cluster. Deprecated: Use GetProviderConfigReference.
func (*Cluster) GetPublishConnectionDetailsTo ¶
func (mg *Cluster) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Cluster.
func (*Cluster) GetTerraformResourceType ¶
GetTerraformResourceType returns Terraform resource type for this Cluster
func (*Cluster) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Cluster) GetWriteConnectionSecretToReference ¶
func (mg *Cluster) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Cluster.
func (*Cluster) LateInitialize ¶
LateInitialize this Cluster using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Cluster) ResolveReferences ¶
ResolveReferences of this Cluster.
func (*Cluster) SetConditions ¶
SetConditions of this Cluster.
func (*Cluster) SetDeletionPolicy ¶
func (mg *Cluster) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Cluster.
func (*Cluster) SetManagementPolicy ¶ added in v0.31.0
func (mg *Cluster) SetManagementPolicy(r xpv1.ManagementPolicy)
SetManagementPolicy of this Cluster.
func (*Cluster) SetObservation ¶
SetObservation for this Cluster
func (*Cluster) SetParameters ¶
SetParameters for this Cluster
func (*Cluster) SetProviderConfigReference ¶
SetProviderConfigReference of this Cluster.
func (*Cluster) SetProviderReference ¶
SetProviderReference of this Cluster. Deprecated: Use SetProviderConfigReference.
func (*Cluster) SetPublishConnectionDetailsTo ¶
func (mg *Cluster) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Cluster.
func (*Cluster) SetWriteConnectionSecretToReference ¶
func (mg *Cluster) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Cluster.
type ClusterConfigObservation ¶
type ClusterConfigObservation struct {
	// The autoscaling policy config associated with the cluster.
	// Note that once set, if autoscaling_config is the only field set in cluster_config, it can
	// only be removed by setting policy_uri = "", rather than removing the whole block.
	// Structure defined below.
	AutoscalingConfig []AutoscalingConfigObservation `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"`

	// The name of the cloud storage bucket ultimately used to house the staging data
	// for the cluster. If staging_bucket is specified, it will contain this value, otherwise
	// it will be the auto generated name.
	Bucket *string `json:"bucket,omitempty" tf:"bucket,omitempty"`

	// The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times.
	// Structure defined below.
	DataprocMetricConfig []DataprocMetricConfigObservation `json:"dataprocMetricConfig,omitempty" tf:"dataproc_metric_config,omitempty"`

	// The Customer managed encryption keys settings for the cluster.
	// Structure defined below.
	EncryptionConfig []EncryptionConfigObservation `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"`

	// The config settings for port access on the cluster.
	// Structure defined below.
	EndpointConfig []EndpointConfigObservation `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"`

	// Common config settings for resources of Google Compute Engine cluster
	// instances, applicable to all instances in the cluster. Structure defined below.
	GceClusterConfig []GceClusterConfigObservation `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"`

	// Commands to execute on each node after config is completed.
	// You can specify multiple versions of these. Structure defined below.
	InitializationAction []InitializationActionObservation `json:"initializationAction,omitempty" tf:"initialization_action,omitempty"`

	// The settings for auto deletion cluster schedule.
	// Structure defined below.
	LifecycleConfig []LifecycleConfigObservation `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"`

	// The Google Compute Engine config settings for the master instances
	// in a cluster. Structure defined below.
	MasterConfig []MasterConfigObservation `json:"masterConfig,omitempty" tf:"master_config,omitempty"`

	// The config setting for metastore service with the cluster.
	// Structure defined below.
	MetastoreConfig []MetastoreConfigObservation `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"`

	// The Google Compute Engine config settings for the additional
	// instances in a cluster. Structure defined below.
	PreemptibleWorkerConfig []PreemptibleWorkerConfigObservation `json:"preemptibleWorkerConfig,omitempty" tf:"preemptible_worker_config,omitempty"`

	// Security related configuration. Structure defined below.
	SecurityConfig []SecurityConfigObservation `json:"securityConfig,omitempty" tf:"security_config,omitempty"`

	// The config settings for software inside the cluster.
	// Structure defined below.
	SoftwareConfig []SoftwareConfigObservation `json:"softwareConfig,omitempty" tf:"software_config,omitempty"`

	// The Cloud Storage staging bucket used to stage files,
	// such as Hadoop jars, between client machines and the cluster.
	// Note: If you don't explicitly specify a staging_bucket
	// then GCP will auto create / assign one for you. However, you are not guaranteed
	// an auto generated bucket which is solely dedicated to your cluster; it may be shared
	// with other clusters in the same region/zone also choosing to use the auto generation
	// option.
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"`

	// The Cloud Storage temp bucket used to store ephemeral cluster
	// and jobs data, such as Spark and MapReduce history files.
	// Note: If you don't explicitly specify a temp_bucket then GCP will auto create / assign one for you.
	TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"`

	// The Google Compute Engine config settings for the worker instances
	// in a cluster. Structure defined below.
	WorkerConfig []ClusterConfigWorkerConfigObservation `json:"workerConfig,omitempty" tf:"worker_config,omitempty"`
}
func (*ClusterConfigObservation) DeepCopy ¶
func (in *ClusterConfigObservation) DeepCopy() *ClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigObservation.
func (*ClusterConfigObservation) DeepCopyInto ¶
func (in *ClusterConfigObservation) DeepCopyInto(out *ClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigParameters ¶
type ClusterConfigParameters struct {
	// The autoscaling policy config associated with the cluster.
	// Note that once set, if autoscaling_config is the only field set in cluster_config, it can
	// only be removed by setting policy_uri = "", rather than removing the whole block.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	AutoscalingConfig []AutoscalingConfigParameters `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"`

	// The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	DataprocMetricConfig []DataprocMetricConfigParameters `json:"dataprocMetricConfig,omitempty" tf:"dataproc_metric_config,omitempty"`

	// The Customer managed encryption keys settings for the cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	EncryptionConfig []EncryptionConfigParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"`

	// The config settings for port access on the cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	EndpointConfig []EndpointConfigParameters `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"`

	// Common config settings for resources of Google Compute Engine cluster
	// instances, applicable to all instances in the cluster. Structure defined below.
	// +kubebuilder:validation:Optional
	GceClusterConfig []GceClusterConfigParameters `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"`

	// Commands to execute on each node after config is completed.
	// You can specify multiple versions of these. Structure defined below.
	// +kubebuilder:validation:Optional
	InitializationAction []InitializationActionParameters `json:"initializationAction,omitempty" tf:"initialization_action,omitempty"`

	// The settings for auto deletion cluster schedule.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	LifecycleConfig []LifecycleConfigParameters `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"`

	// The Google Compute Engine config settings for the master instances
	// in a cluster. Structure defined below.
	// +kubebuilder:validation:Optional
	MasterConfig []MasterConfigParameters `json:"masterConfig,omitempty" tf:"master_config,omitempty"`

	// The config setting for metastore service with the cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	MetastoreConfig []MetastoreConfigParameters `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"`

	// The Google Compute Engine config settings for the additional
	// instances in a cluster. Structure defined below.
	// +kubebuilder:validation:Optional
	PreemptibleWorkerConfig []PreemptibleWorkerConfigParameters `json:"preemptibleWorkerConfig,omitempty" tf:"preemptible_worker_config,omitempty"`

	// Security related configuration. Structure defined below.
	// +kubebuilder:validation:Optional
	SecurityConfig []SecurityConfigParameters `json:"securityConfig,omitempty" tf:"security_config,omitempty"`

	// The config settings for software inside the cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	SoftwareConfig []SoftwareConfigParameters `json:"softwareConfig,omitempty" tf:"software_config,omitempty"`

	// The Cloud Storage staging bucket used to stage files,
	// such as Hadoop jars, between client machines and the cluster.
	// Note: If you don't explicitly specify a staging_bucket
	// then GCP will auto create / assign one for you. However, you are not guaranteed
	// an auto generated bucket which is solely dedicated to your cluster; it may be shared
	// with other clusters in the same region/zone also choosing to use the auto generation
	// option.
	// +kubebuilder:validation:Optional
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"`

	// The Cloud Storage temp bucket used to store ephemeral cluster
	// and jobs data, such as Spark and MapReduce history files.
	// Note: If you don't explicitly specify a temp_bucket then GCP will auto create / assign one for you.
	// +kubebuilder:validation:Optional
	TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"`

	// The Google Compute Engine config settings for the worker instances
	// in a cluster. Structure defined below.
	// +kubebuilder:validation:Optional
	WorkerConfig []ClusterConfigWorkerConfigParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"`
}
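The note on autoscaling_config above has a practical consequence worth spelling out: when it is the only block set in cluster_config, the policy is detached by keeping the block and emptying policy_uri rather than by dropping the block. A minimal sketch, reusing the assumed dataprocv1beta1 import alias from the sketches above:

import dataprocv1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path

// detachAutoscaling keeps the autoscaling_config block but points it at an
// empty policy URI, which is how the policy is removed per the note above.
func detachAutoscaling() dataprocv1beta1.ClusterConfigParameters {
	empty := ""
	return dataprocv1beta1.ClusterConfigParameters{
		AutoscalingConfig: []dataprocv1beta1.AutoscalingConfigParameters{{
			PolicyURI: &empty,
		}},
	}
}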
func (*ClusterConfigParameters) DeepCopy ¶
func (in *ClusterConfigParameters) DeepCopy() *ClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigParameters.
func (*ClusterConfigParameters) DeepCopyInto ¶
func (in *ClusterConfigParameters) DeepCopyInto(out *ClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigWorkerConfigObservation ¶
type ClusterConfigWorkerConfigObservation struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. Accelerators []WorkerConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig []WorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // List of worker instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // The name of a Google Compute Engine machine type // to create for the worker nodes. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the worker nodes. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of worker nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 2). // There is currently a beta feature which allows you to run a // Single Node Cluster. // To take advantage of this you need to set // "dataproc:dataproc.allow.zero.workers" = "true" in // cluster_config.software_config.properties NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*ClusterConfigWorkerConfigObservation) DeepCopy ¶
func (in *ClusterConfigWorkerConfigObservation) DeepCopy() *ClusterConfigWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigWorkerConfigObservation.
func (*ClusterConfigWorkerConfigObservation) DeepCopyInto ¶
func (in *ClusterConfigWorkerConfigObservation) DeepCopyInto(out *ClusterConfigWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigWorkerConfigParameters ¶
type ClusterConfigWorkerConfigParameters struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. // +kubebuilder:validation:Optional Accelerators []WorkerConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config // +kubebuilder:validation:Optional DiskConfig []WorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. // +kubebuilder:validation:Optional ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // The name of a Google Compute Engine machine type // to create for the worker nodes. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the worker nodes. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of worker nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 2). // There is currently a beta feature which allows you to run a // Single Node Cluster. // To take advantage of this you need to set // "dataproc:dataproc.allow.zero.workers" = "true" in // cluster_config.software_config.properties // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*ClusterConfigWorkerConfigParameters) DeepCopy ¶
func (in *ClusterConfigWorkerConfigParameters) DeepCopy() *ClusterConfigWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigWorkerConfigParameters.
func (*ClusterConfigWorkerConfigParameters) DeepCopyInto ¶
func (in *ClusterConfigWorkerConfigParameters) DeepCopyInto(out *ClusterConfigWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterList ¶
type ClusterList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Cluster `json:"items"` }
ClusterList contains a list of Clusters
func (*ClusterList) DeepCopy ¶
func (in *ClusterList) DeepCopy() *ClusterList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterList.
func (*ClusterList) DeepCopyInto ¶
func (in *ClusterList) DeepCopyInto(out *ClusterList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ClusterList) DeepCopyObject ¶
func (in *ClusterList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*ClusterList) GetItems ¶
func (l *ClusterList) GetItems() []resource.Managed
GetItems of this ClusterList.
type ClusterObservation ¶
type ClusterObservation struct { // Allows you to configure various aspects of the cluster. // Structure defined below. ClusterConfig []ClusterConfigObservation `json:"clusterConfig,omitempty" tf:"cluster_config,omitempty"` // Does not affect autoscaling decommissioning from an autoscaling policy. // Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. // Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs). // Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day (see the JSON representation of // Duration). // Only supported on Dataproc image versions 1.2 and higher. // For more context, see the docs. GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` // The list of labels (key/value pairs) to be applied to // instances in the cluster. GCP generates some labels itself, including goog-dataproc-cluster-name, // which is the name of the cluster. Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The name of the cluster, unique within the project and // zone. Name *string `json:"name,omitempty" tf:"name,omitempty"` // The ID of the project in which the cluster will exist. If it // is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // The region in which the cluster and associated nodes will be created. // Defaults to global. Region *string `json:"region,omitempty" tf:"region,omitempty"` // Allows you to configure a virtual Dataproc on GKE cluster. // Structure defined below. VirtualClusterConfig []VirtualClusterConfigObservation `json:"virtualClusterConfig,omitempty" tf:"virtual_cluster_config,omitempty"` }
func (*ClusterObservation) DeepCopy ¶
func (in *ClusterObservation) DeepCopy() *ClusterObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterObservation.
func (*ClusterObservation) DeepCopyInto ¶
func (in *ClusterObservation) DeepCopyInto(out *ClusterObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterParameters ¶
type ClusterParameters struct { // Allows you to configure various aspects of the cluster. // Structure defined below. // +kubebuilder:validation:Optional ClusterConfig []ClusterConfigParameters `json:"clusterConfig,omitempty" tf:"cluster_config,omitempty"` // Does not affect autoscaling decommissioning from an autoscaling policy. // Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. // Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs). // Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day (see the JSON representation of // Duration). // Only supported on Dataproc image versions 1.2 and higher. // For more context, see the docs. // +kubebuilder:validation:Optional GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"` // The list of labels (key/value pairs) to be applied to // instances in the cluster. GCP generates some labels itself, including goog-dataproc-cluster-name, // which is the name of the cluster. // +kubebuilder:validation:Optional Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The name of the cluster, unique within the project and // zone. // +kubebuilder:validation:Optional Name *string `json:"name,omitempty" tf:"name,omitempty"` // The ID of the project in which the cluster will exist. If it // is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // The region in which the cluster and associated nodes will be created. // Defaults to global. // +kubebuilder:validation:Optional Region *string `json:"region,omitempty" tf:"region,omitempty"` // Allows you to configure a virtual Dataproc on GKE cluster. // Structure defined below. // +kubebuilder:validation:Optional VirtualClusterConfig []VirtualClusterConfigParameters `json:"virtualClusterConfig,omitempty" tf:"virtual_cluster_config,omitempty"` }
func (*ClusterParameters) DeepCopy ¶
func (in *ClusterParameters) DeepCopy() *ClusterParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterParameters.
func (*ClusterParameters) DeepCopyInto ¶
func (in *ClusterParameters) DeepCopyInto(out *ClusterParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSelectorObservation ¶
type ClusterSelectorObservation struct { // Required. The cluster labels. Cluster must have all labels to match. ClusterLabels map[string]*string `json:"clusterLabels,omitempty" tf:"cluster_labels,omitempty"` // Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*ClusterSelectorObservation) DeepCopy ¶
func (in *ClusterSelectorObservation) DeepCopy() *ClusterSelectorObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSelectorObservation.
func (*ClusterSelectorObservation) DeepCopyInto ¶
func (in *ClusterSelectorObservation) DeepCopyInto(out *ClusterSelectorObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSelectorParameters ¶
type ClusterSelectorParameters struct { // Required. The cluster labels. Cluster must have all labels to match. // +kubebuilder:validation:Required ClusterLabels map[string]*string `json:"clusterLabels" tf:"cluster_labels,omitempty"` // Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f // +kubebuilder:validation:Optional Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
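ClusterLabels is the only required field here and is a map of string pointers, which is slightly awkward to build inline. Below is a minimal sketch; the import path, the strPtr helper, and the label value are assumptions, not part of the generated API.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func strPtr(s string) *string { return &s }

func main() {
	// Select an existing cluster by label; the cluster must carry all of these labels.
	sel := v1beta1.ClusterSelectorParameters{
		ClusterLabels: map[string]*string{
			"env": strPtr("staging"), // hypothetical label
		},
		Zone: strPtr("us-central1-f"),
	}
	fmt.Println(*sel.ClusterLabels["env"], *sel.Zone)
}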
func (*ClusterSelectorParameters) DeepCopy ¶
func (in *ClusterSelectorParameters) DeepCopy() *ClusterSelectorParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSelectorParameters.
func (*ClusterSelectorParameters) DeepCopyInto ¶
func (in *ClusterSelectorParameters) DeepCopyInto(out *ClusterSelectorParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSpec ¶
type ClusterSpec struct { v1.ResourceSpec `json:",inline"` ForProvider ClusterParameters `json:"forProvider"` }
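ClusterSpec embeds the usual Crossplane ResourceSpec and carries the Dataproc-specific desired state under ForProvider. The sketch below shows how ClusterParameters fields such as Name, Region, Labels, and GracefulDecommissionTimeout slot into the spec; the import path and strPtr helper are assumptions, and the cluster name is hypothetical.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func strPtr(s string) *string { return &s }

func main() {
	spec := v1beta1.ClusterSpec{
		ForProvider: v1beta1.ClusterParameters{
			Name:   strPtr("analytics-cluster"), // hypothetical cluster name
			Region: strPtr("us-central1"),       // defaults to "global" if unset
			Labels: map[string]*string{"team": strPtr("data")},
			// GracefulDecommissionTimeout is a duration string, e.g. "120s".
			GracefulDecommissionTimeout: strPtr("120s"),
		},
	}
	fmt.Printf("%+v\n", spec.ForProvider)
}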
ClusterSpec defines the desired state of Cluster
func (*ClusterSpec) DeepCopy ¶
func (in *ClusterSpec) DeepCopy() *ClusterSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec.
func (*ClusterSpec) DeepCopyInto ¶
func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterStatus ¶
type ClusterStatus struct { v1.ResourceStatus `json:",inline"` AtProvider ClusterObservation `json:"atProvider,omitempty"` }
ClusterStatus defines the observed state of Cluster.
func (*ClusterStatus) DeepCopy ¶
func (in *ClusterStatus) DeepCopy() *ClusterStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterStatus.
func (*ClusterStatus) DeepCopyInto ¶
func (in *ClusterStatus) DeepCopyInto(out *ClusterStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigAutoscalingConfigObservation ¶
type ConfigAutoscalingConfigObservation struct { // Optional. The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region. Policy *string `json:"policy,omitempty" tf:"policy,omitempty"` }
func (*ConfigAutoscalingConfigObservation) DeepCopy ¶
func (in *ConfigAutoscalingConfigObservation) DeepCopy() *ConfigAutoscalingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigAutoscalingConfigObservation.
func (*ConfigAutoscalingConfigObservation) DeepCopyInto ¶
func (in *ConfigAutoscalingConfigObservation) DeepCopyInto(out *ConfigAutoscalingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigAutoscalingConfigParameters ¶
type ConfigAutoscalingConfigParameters struct { // Optional. The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region. // +kubebuilder:validation:Optional Policy *string `json:"policy,omitempty" tf:"policy,omitempty"` }
func (*ConfigAutoscalingConfigParameters) DeepCopy ¶
func (in *ConfigAutoscalingConfigParameters) DeepCopy() *ConfigAutoscalingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigAutoscalingConfigParameters.
func (*ConfigAutoscalingConfigParameters) DeepCopyInto ¶
func (in *ConfigAutoscalingConfigParameters) DeepCopyInto(out *ConfigAutoscalingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEncryptionConfigObservation ¶
type ConfigEncryptionConfigObservation struct { // Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster. GcePdKMSKeyName *string `json:"gcePdKmsKeyName,omitempty" tf:"gce_pd_kms_key_name,omitempty"` }
func (*ConfigEncryptionConfigObservation) DeepCopy ¶
func (in *ConfigEncryptionConfigObservation) DeepCopy() *ConfigEncryptionConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEncryptionConfigObservation.
func (*ConfigEncryptionConfigObservation) DeepCopyInto ¶
func (in *ConfigEncryptionConfigObservation) DeepCopyInto(out *ConfigEncryptionConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEncryptionConfigParameters ¶
type ConfigEncryptionConfigParameters struct { // Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster. // +kubebuilder:validation:Optional GcePdKMSKeyName *string `json:"gcePdKmsKeyName,omitempty" tf:"gce_pd_kms_key_name,omitempty"` }
func (*ConfigEncryptionConfigParameters) DeepCopy ¶
func (in *ConfigEncryptionConfigParameters) DeepCopy() *ConfigEncryptionConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEncryptionConfigParameters.
func (*ConfigEncryptionConfigParameters) DeepCopyInto ¶
func (in *ConfigEncryptionConfigParameters) DeepCopyInto(out *ConfigEncryptionConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEndpointConfigObservation ¶
type ConfigEndpointConfigObservation struct { // Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false. EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"` // Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true. HTTPPorts map[string]*string `json:"httpPorts,omitempty" tf:"http_ports,omitempty"` }
func (*ConfigEndpointConfigObservation) DeepCopy ¶
func (in *ConfigEndpointConfigObservation) DeepCopy() *ConfigEndpointConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEndpointConfigObservation.
func (*ConfigEndpointConfigObservation) DeepCopyInto ¶
func (in *ConfigEndpointConfigObservation) DeepCopyInto(out *ConfigEndpointConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEndpointConfigParameters ¶
type ConfigEndpointConfigParameters struct { // Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false. // +kubebuilder:validation:Optional EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"` }
func (*ConfigEndpointConfigParameters) DeepCopy ¶
func (in *ConfigEndpointConfigParameters) DeepCopy() *ConfigEndpointConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEndpointConfigParameters.
func (*ConfigEndpointConfigParameters) DeepCopyInto ¶
func (in *ConfigEndpointConfigParameters) DeepCopyInto(out *ConfigEndpointConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigGceClusterConfigObservation ¶
type ConfigGceClusterConfigObservation struct { // Optional. If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork-enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses. InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // The Compute Engine metadata entries to add to all instances (see https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata). Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // Optional. The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network". A full URL, partial URI, or short name are valid, for example .../regions/global/default or default. Network *string `json:"network,omitempty" tf:"network,omitempty"` // Optional. Node Group Affinity for sole-tenant clusters. NodeGroupAffinity []GceClusterConfigNodeGroupAffinityObservation `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Optional. The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL PrivateIPv6GoogleAccess *string `json:"privateIpv6GoogleAccess,omitempty" tf:"private_ipv6_google_access,omitempty"` // Optional. Reservation Affinity for consuming Zonal reservation. ReservationAffinity []GceClusterConfigReservationAffinityObservation `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // Optional. The service account used by the cluster VM instances. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used. ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // Optional. The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Optional. Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below. ShieldedInstanceConfig []GceClusterConfigShieldedInstanceConfigObservation `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // Optional. The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0 Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The Compute Engine tags to add to all instances (see https://cloud.google.com/compute/docs/label-or-tag-resources#tags). Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*ConfigGceClusterConfigObservation) DeepCopy ¶
func (in *ConfigGceClusterConfigObservation) DeepCopy() *ConfigGceClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigGceClusterConfigObservation.
func (*ConfigGceClusterConfigObservation) DeepCopyInto ¶
func (in *ConfigGceClusterConfigObservation) DeepCopyInto(out *ConfigGceClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigGceClusterConfigParameters ¶
type ConfigGceClusterConfigParameters struct { // Optional. If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork-enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses. // +kubebuilder:validation:Optional InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // The Compute Engine metadata entries to add to all instances (see https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata). // +kubebuilder:validation:Optional Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // Optional. The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network". A full URL, partial URI, or short name are valid, for example .../regions/global/default or default. // +kubebuilder:validation:Optional Network *string `json:"network,omitempty" tf:"network,omitempty"` // Optional. Node Group Affinity for sole-tenant clusters. // +kubebuilder:validation:Optional NodeGroupAffinity []GceClusterConfigNodeGroupAffinityParameters `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Optional. The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL // +kubebuilder:validation:Optional PrivateIPv6GoogleAccess *string `json:"privateIpv6GoogleAccess,omitempty" tf:"private_ipv6_google_access,omitempty"` // Optional. Reservation Affinity for consuming Zonal reservation. // +kubebuilder:validation:Optional ReservationAffinity []GceClusterConfigReservationAffinityParameters `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // Optional. The service account used by the cluster VM instances. If not specified, the Compute Engine default service account (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used. // +kubebuilder:validation:Optional ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // Optional. The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control // +kubebuilder:validation:Optional ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Optional. Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below. // +kubebuilder:validation:Optional ShieldedInstanceConfig []GceClusterConfigShieldedInstanceConfigParameters `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // Optional. The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0 // +kubebuilder:validation:Optional Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The Compute Engine tags to add to all instances (see https://cloud.google.com/compute/docs/label-or-tag-resources#tags). // +kubebuilder:validation:Optional Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f // +kubebuilder:validation:Optional Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
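Because network and subnetwork are mutually exclusive, a typical private-IP setup sets only Subnetwork together with InternalIPOnly. Below is a minimal sketch under that assumption; the import path, the pointer helpers, and the subnetwork/zone/tag values are assumptions for illustration.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func strPtr(s string) *string { return &s }
func boolPtr(b bool) *bool    { return &b }

func main() {
	// Private-IP cluster placed in an existing subnetwork.
	// Network is left unset because it cannot be combined with Subnetwork.
	gce := v1beta1.ConfigGceClusterConfigParameters{
		InternalIPOnly: boolPtr(true),
		Subnetwork:     strPtr("sub0"), // short-name form, as in the examples above
		Tags:           []*string{strPtr("dataproc")},
		Zone:           strPtr("us-east1-b"), // hypothetical zone
	}
	fmt.Printf("%+v\n", gce)
}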
func (*ConfigGceClusterConfigParameters) DeepCopy ¶
func (in *ConfigGceClusterConfigParameters) DeepCopy() *ConfigGceClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigGceClusterConfigParameters.
func (*ConfigGceClusterConfigParameters) DeepCopyInto ¶
func (in *ConfigGceClusterConfigParameters) DeepCopyInto(out *ConfigGceClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigLifecycleConfigObservation ¶
type ConfigLifecycleConfigObservation struct { // Optional. The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). AutoDeleteTTL *string `json:"autoDeleteTtl,omitempty" tf:"auto_delete_ttl,omitempty"` // Optional. The time when cluster will be auto-deleted (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json). IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` // Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). IdleStartTime *string `json:"idleStartTime,omitempty" tf:"idle_start_time,omitempty"` }
func (*ConfigLifecycleConfigObservation) DeepCopy ¶
func (in *ConfigLifecycleConfigObservation) DeepCopy() *ConfigLifecycleConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigLifecycleConfigObservation.
func (*ConfigLifecycleConfigObservation) DeepCopyInto ¶
func (in *ConfigLifecycleConfigObservation) DeepCopyInto(out *ConfigLifecycleConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigLifecycleConfigParameters ¶
type ConfigLifecycleConfigParameters struct { // Optional. The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). // +kubebuilder:validation:Optional AutoDeleteTTL *string `json:"autoDeleteTtl,omitempty" tf:"auto_delete_ttl,omitempty"` // Optional. The time when cluster will be auto-deleted (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). // +kubebuilder:validation:Optional AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json). // +kubebuilder:validation:Optional IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` }
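The TTL fields above take duration strings in the protobuf JSON form (a decimal number of seconds with an s suffix), while auto_delete_time takes a timestamp instead. A minimal sketch of an idle-delete policy follows; the import path and strPtr helper are assumptions.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func strPtr(s string) *string { return &s }

func main() {
	// Delete the cluster after 30 minutes of idleness, and no later than
	// 8 hours after it is created. Durations use the JSON form, e.g. "1800s".
	lc := v1beta1.ConfigLifecycleConfigParameters{
		IdleDeleteTTL: strPtr("1800s"),
		AutoDeleteTTL: strPtr("28800s"),
	}
	fmt.Printf("%+v\n", lc)
}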
func (*ConfigLifecycleConfigParameters) DeepCopy ¶
func (in *ConfigLifecycleConfigParameters) DeepCopy() *ConfigLifecycleConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigLifecycleConfigParameters.
func (*ConfigLifecycleConfigParameters) DeepCopyInto ¶
func (in *ConfigLifecycleConfigParameters) DeepCopyInto(out *ConfigLifecycleConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigObservation ¶
type ConfigMasterConfigObservation struct { // Optional. The Compute Engine accelerator configuration for these instances. Accelerators []MasterConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Optional. Disk option config settings. DiskConfig []MasterConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. Image *string `json:"image,omitempty" tf:"image,omitempty"` // Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Output only. Specifies that this instance group contains preemptible instances. IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible,omitempty"` // Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. ManagedGroupConfig []ManagedGroupConfigObservation `json:"managedGroupConfig,omitempty" tf:"managed_group_config,omitempty"` // Optional. Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigMasterConfigObservation) DeepCopy ¶
func (in *ConfigMasterConfigObservation) DeepCopy() *ConfigMasterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigObservation.
func (*ConfigMasterConfigObservation) DeepCopyInto ¶
func (in *ConfigMasterConfigObservation) DeepCopyInto(out *ConfigMasterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigParameters ¶
type ConfigMasterConfigParameters struct { // Optional. The Compute Engine accelerator configuration for these instances. // +kubebuilder:validation:Optional Accelerators []MasterConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Optional. Disk option config settings. // +kubebuilder:validation:Optional DiskConfig []MasterConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. // +kubebuilder:validation:Optional Image *string `json:"image,omitempty" tf:"image,omitempty"` // Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Optional. Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigMasterConfigParameters) DeepCopy ¶
func (in *ConfigMasterConfigParameters) DeepCopy() *ConfigMasterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigParameters.
func (*ConfigMasterConfigParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigParameters) DeepCopyInto(out *ConfigMasterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigObservation ¶
type ConfigObservation struct { // The number of local SSD disks to attach to the node, // which is limited by the maximum number of disks allowable per zone. LocalSsdCount *float64 `json:"localSsdCount,omitempty" tf:"local_ssd_count,omitempty"` // The name of a Compute Engine machine type. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Minimum CPU platform to be used by this instance. // The instance may be scheduled on the specified or a newer CPU platform. // Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge". MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Whether the nodes are created as preemptible VM instances. // Preemptible nodes cannot be used in a node pool with the CONTROLLER role or in the DEFAULT node pool if the // CONTROLLER role is not assigned (the DEFAULT node pool will assume the CONTROLLER role). Preemptible *bool `json:"preemptible,omitempty" tf:"preemptible,omitempty"` // Spot flag for enabling Spot VM, which is a rebrand of the existing preemptible flag. Spot *bool `json:"spot,omitempty" tf:"spot,omitempty"` }
func (*ConfigObservation) DeepCopy ¶
func (in *ConfigObservation) DeepCopy() *ConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigObservation.
func (*ConfigObservation) DeepCopyInto ¶
func (in *ConfigObservation) DeepCopyInto(out *ConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigParameters ¶
type ConfigParameters struct { // The number of local SSD disks to attach to the node, // which is limited by the maximum number of disks allowable per zone. // +kubebuilder:validation:Optional LocalSsdCount *float64 `json:"localSsdCount,omitempty" tf:"local_ssd_count,omitempty"` // The name of a Compute Engine machine type. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Minimum CPU platform to be used by this instance. // The instance may be scheduled on the specified or a newer CPU platform. // Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge". // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Whether the nodes are created as preemptible VM instances. // Preemptible nodes cannot be used in a node pool with the CONTROLLER role or in the DEFAULT node pool if the // CONTROLLER role is not assigned (the DEFAULT node pool will assume the CONTROLLER role). // +kubebuilder:validation:Optional Preemptible *bool `json:"preemptible,omitempty" tf:"preemptible,omitempty"` // Spot flag for enabling Spot VM, which is a rebrand of the existing preemptible flag. // +kubebuilder:validation:Optional Spot *bool `json:"spot,omitempty" tf:"spot,omitempty"` }
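For Dataproc-on-GKE node pools, Spot and Preemptible are the two cost-saving knobs, and the doc comments above note that preemptible nodes cannot take the CONTROLLER role. A minimal spot-VM sketch follows; the import path, pointer helpers, and machine type are assumptions.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func strPtr(s string) *string   { return &s }
func f64Ptr(f float64) *float64 { return &f }
func boolPtr(b bool) *bool      { return &b }

func main() {
	// Spot-VM node pool with one local SSD; Preemptible is left unset because
	// Spot is the newer form of the same flag.
	np := v1beta1.ConfigParameters{
		MachineType:   strPtr("n1-standard-4"),
		LocalSsdCount: f64Ptr(1),
		Spot:          boolPtr(true),
	}
	fmt.Printf("%+v\n", np)
}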
func (*ConfigParameters) DeepCopy ¶
func (in *ConfigParameters) DeepCopy() *ConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigParameters.
func (*ConfigParameters) DeepCopyInto ¶
func (in *ConfigParameters) DeepCopyInto(out *ConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecondaryWorkerConfigObservation ¶
type ConfigSecondaryWorkerConfigObservation struct { // Optional. The Compute Engine accelerator configuration for these instances. Accelerators []SecondaryWorkerConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Optional. Disk option config settings. DiskConfig []SecondaryWorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. Image *string `json:"image,omitempty" tf:"image,omitempty"` // Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Output only. Specifies that this instance group contains preemptible instances. IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible,omitempty"` // Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. ManagedGroupConfig []SecondaryWorkerConfigManagedGroupConfigObservation `json:"managedGroupConfig,omitempty" tf:"managed_group_config,omitempty"` // Optional. Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigSecondaryWorkerConfigObservation) DeepCopy ¶
func (in *ConfigSecondaryWorkerConfigObservation) DeepCopy() *ConfigSecondaryWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecondaryWorkerConfigObservation.
func (*ConfigSecondaryWorkerConfigObservation) DeepCopyInto ¶
func (in *ConfigSecondaryWorkerConfigObservation) DeepCopyInto(out *ConfigSecondaryWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecondaryWorkerConfigParameters ¶
type ConfigSecondaryWorkerConfigParameters struct { // Optional. The Compute Engine accelerator configuration for these instances. // +kubebuilder:validation:Optional Accelerators []SecondaryWorkerConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Optional. Disk option config settings. // +kubebuilder:validation:Optional DiskConfig []SecondaryWorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. // +kubebuilder:validation:Optional Image *string `json:"image,omitempty" tf:"image,omitempty"` // Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Optional. Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigSecondaryWorkerConfigParameters) DeepCopy ¶
func (in *ConfigSecondaryWorkerConfigParameters) DeepCopy() *ConfigSecondaryWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecondaryWorkerConfigParameters.
func (*ConfigSecondaryWorkerConfigParameters) DeepCopyInto ¶
func (in *ConfigSecondaryWorkerConfigParameters) DeepCopyInto(out *ConfigSecondaryWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecurityConfigObservation ¶
type ConfigSecurityConfigObservation struct { // Kerberos related configuration. KerberosConfig []SecurityConfigKerberosConfigObservation `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*ConfigSecurityConfigObservation) DeepCopy ¶
func (in *ConfigSecurityConfigObservation) DeepCopy() *ConfigSecurityConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecurityConfigObservation.
func (*ConfigSecurityConfigObservation) DeepCopyInto ¶
func (in *ConfigSecurityConfigObservation) DeepCopyInto(out *ConfigSecurityConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecurityConfigParameters ¶
type ConfigSecurityConfigParameters struct { // Kerberos related configuration. // +kubebuilder:validation:Optional KerberosConfig []SecurityConfigKerberosConfigParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*ConfigSecurityConfigParameters) DeepCopy ¶
func (in *ConfigSecurityConfigParameters) DeepCopy() *ConfigSecurityConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecurityConfigParameters.
func (*ConfigSecurityConfigParameters) DeepCopyInto ¶
func (in *ConfigSecurityConfigParameters) DeepCopyInto(out *ConfigSecurityConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSoftwareConfigObservation ¶
type ConfigSoftwareConfigObservation struct { // Optional. The version of software inside the cluster. It must be one of the supported (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version. ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"` OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*ConfigSoftwareConfigObservation) DeepCopy ¶
func (in *ConfigSoftwareConfigObservation) DeepCopy() *ConfigSoftwareConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSoftwareConfigObservation.
func (*ConfigSoftwareConfigObservation) DeepCopyInto ¶
func (in *ConfigSoftwareConfigObservation) DeepCopyInto(out *ConfigSoftwareConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSoftwareConfigParameters ¶
type ConfigSoftwareConfigParameters struct { // Optional. The version of software inside the cluster. It must be one of the supported (https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version. // +kubebuilder:validation:Optional ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"` // +kubebuilder:validation:Optional OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
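ImageVersion pins the image used inside the cluster and Properties is a free-form map; both are optional. The sketch below is illustrative only; the import path, strPtr helper, image version, and property key/value are assumptions rather than documented defaults.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func strPtr(s string) *string { return &s }

func main() {
	sw := v1beta1.ConfigSoftwareConfigParameters{
		ImageVersion: strPtr("2.1-debian11"), // illustrative image version
		Properties: map[string]*string{
			"spark:spark.executor.memory": strPtr("4g"), // illustrative property
		},
	}
	fmt.Printf("%+v\n", sw)
}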
func (*ConfigSoftwareConfigParameters) DeepCopy ¶
func (in *ConfigSoftwareConfigParameters) DeepCopy() *ConfigSoftwareConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSoftwareConfigParameters.
func (*ConfigSoftwareConfigParameters) DeepCopyInto ¶
func (in *ConfigSoftwareConfigParameters) DeepCopyInto(out *ConfigSoftwareConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigAcceleratorsObservation ¶
type ConfigWorkerConfigAcceleratorsObservation struct { // The number of the accelerator cards of this type exposed to this instance. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*ConfigWorkerConfigAcceleratorsObservation) DeepCopy ¶
func (in *ConfigWorkerConfigAcceleratorsObservation) DeepCopy() *ConfigWorkerConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigAcceleratorsObservation.
func (*ConfigWorkerConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *ConfigWorkerConfigAcceleratorsObservation) DeepCopyInto(out *ConfigWorkerConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigAcceleratorsParameters ¶
type ConfigWorkerConfigAcceleratorsParameters struct { // The number of the accelerator cards of this type exposed to this instance. // +kubebuilder:validation:Optional AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. // +kubebuilder:validation:Optional AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*ConfigWorkerConfigAcceleratorsParameters) DeepCopy ¶
func (in *ConfigWorkerConfigAcceleratorsParameters) DeepCopy() *ConfigWorkerConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigAcceleratorsParameters.
func (*ConfigWorkerConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigAcceleratorsParameters) DeepCopyInto(out *ConfigWorkerConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigDiskConfigObservation ¶
type ConfigWorkerConfigDiskConfigObservation struct { // Optional. Size in GB of the boot disk (default is 500GB). BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*ConfigWorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *ConfigWorkerConfigDiskConfigObservation) DeepCopy() *ConfigWorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigDiskConfigObservation.
func (*ConfigWorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *ConfigWorkerConfigDiskConfigObservation) DeepCopyInto(out *ConfigWorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigDiskConfigParameters ¶
type ConfigWorkerConfigDiskConfigParameters struct { // Optional. Size in GB of the boot disk (default is 500GB). // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*ConfigWorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *ConfigWorkerConfigDiskConfigParameters) DeepCopy() *ConfigWorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigDiskConfigParameters.
func (*ConfigWorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigDiskConfigParameters) DeepCopyInto(out *ConfigWorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigObservation ¶
type ConfigWorkerConfigObservation struct { // Optional. The Compute Engine accelerator configuration for these instances. Accelerators []ConfigWorkerConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Optional. Disk option config settings. DiskConfig []ConfigWorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: https://www.googleapis.com/compute/beta/projects/... If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. Image *string `json:"image,omitempty" tf:"image,omitempty"` // Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Output only. Specifies that this instance group contains preemptible instances. IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible,omitempty"` // Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name is valid, for example https://www.googleapis.com/compute/v1/projects/... If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. ManagedGroupConfig []WorkerConfigManagedGroupConfigObservation `json:"managedGroupConfig,omitempty" tf:"managed_group_config,omitempty"` // Optional. Specifies the minimum CPU platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigWorkerConfigObservation) DeepCopy ¶
func (in *ConfigWorkerConfigObservation) DeepCopy() *ConfigWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigObservation.
func (*ConfigWorkerConfigObservation) DeepCopyInto ¶
func (in *ConfigWorkerConfigObservation) DeepCopyInto(out *ConfigWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigParameters ¶
type ConfigWorkerConfigParameters struct { // Optional. The Compute Engine accelerator configuration for these instances. // +kubebuilder:validation:Optional Accelerators []ConfigWorkerConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Optional. Disk option config settings. // +kubebuilder:validation:Optional DiskConfig []ConfigWorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Optional. The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: https://www.googleapis.com/compute/beta/projects/... If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. // +kubebuilder:validation:Optional Image *string `json:"image,omitempty" tf:"image,omitempty"` // Optional. The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name is valid, for example https://www.googleapis.com/compute/v1/projects/... If you are using the Dataproc Auto Zone Placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Optional. Specifies the minimum CPU platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Optional. Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigWorkerConfigParameters) DeepCopy ¶
func (in *ConfigWorkerConfigParameters) DeepCopy() *ConfigWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigParameters.
func (*ConfigWorkerConfigParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigParameters) DeepCopyInto(out *ConfigWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
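As a sketch of how these worker-config parameters compose (reusing the assumed import path and pointer helpers from the accelerator example above), the fragment below describes a hypothetical two-node worker group; the values are illustrative, not defaults.

// Two n1-standard-4 workers with SSD boot disks and one local SSD each,
// explicitly non-preemptible. Omitted pointer fields fall back to API defaults.
workers := v1beta1.ConfigWorkerConfigParameters{
	MachineType:    str("n1-standard-4"),
	NumInstances:   f64(2),
	Preemptibility: str("NON_PREEMPTIBLE"),
	Accelerators: []v1beta1.ConfigWorkerConfigAcceleratorsParameters{{
		AcceleratorCount: f64(1),
		AcceleratorType:  str("nvidia-tesla-k80"),
	}},
	DiskConfig: []v1beta1.ConfigWorkerConfigDiskConfigParameters{{
		BootDiskSizeGb: f64(100),
		BootDiskType:   str("pd-ssd"),
		NumLocalSsds:   f64(1),
	}},
}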
type ConsumersObservation ¶ added in v0.27.0
type ConsumersObservation struct { // (Output) // The URI of the endpoint used to access the metastore service. EndpointURI *string `json:"endpointUri,omitempty" tf:"endpoint_uri,omitempty"` // The subnetwork of the customer project from which an IP address is reserved and used as the Dataproc Metastore service's endpoint. // It is accessible to hosts in the subnet and to all hosts in a subnet in the same region and same network. // There must be at least one IP address available in the subnet's primary range. The subnet is specified in the following form: // `projects/{projectNumber}/regions/{region_id}/subnetworks/{subnetwork_id} Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` }
func (*ConsumersObservation) DeepCopy ¶ added in v0.27.0
func (in *ConsumersObservation) DeepCopy() *ConsumersObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConsumersObservation.
func (*ConsumersObservation) DeepCopyInto ¶ added in v0.27.0
func (in *ConsumersObservation) DeepCopyInto(out *ConsumersObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConsumersParameters ¶ added in v0.27.0
type ConsumersParameters struct { // The subnetwork of the customer project from which an IP address is reserved and used as the Dataproc Metastore service's endpoint. // It is accessible to hosts in the subnet and to all hosts in a subnet in the same region and same network. // There must be at least one IP address available in the subnet's primary range. The subnet is specified in the following form: // `projects/{projectNumber}/regions/{region_id}/subnetworks/{subnetwork_id} // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/compute/v1beta1.Subnetwork // +crossplane:generate:reference:extractor=github.com/upbound/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // Reference to a Subnetwork in compute to populate subnetwork. // +kubebuilder:validation:Optional SubnetworkRef *v1.Reference `json:"subnetworkRef,omitempty" tf:"-"` // Selector for a Subnetwork in compute to populate subnetwork. // +kubebuilder:validation:Optional SubnetworkSelector *v1.Selector `json:"subnetworkSelector,omitempty" tf:"-"` }
func (*ConsumersParameters) DeepCopy ¶ added in v0.27.0
func (in *ConsumersParameters) DeepCopy() *ConsumersParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConsumersParameters.
func (*ConsumersParameters) DeepCopyInto ¶ added in v0.27.0
func (in *ConsumersParameters) DeepCopyInto(out *ConsumersParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
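The consumer subnetwork can be supplied directly, or resolved at reconcile time from another managed resource through the reference and selector fields. A minimal sketch, assuming the crossplane-runtime common types are imported as xpv1 (github.com/crossplane/crossplane-runtime/apis/common/v1) and reusing the scaffolding above; the object name and labels are hypothetical.

// Option 1: point at a managed Subnetwork object by name; the controller
// resolves it to the projects/.../subnetworks/... value when it reconciles.
byRef := v1beta1.ConsumersParameters{
	SubnetworkRef: &xpv1.Reference{Name: "metastore-subnet"},
}

// Option 2: select the Subnetwork by labels instead of a fixed name.
bySelector := v1beta1.ConsumersParameters{
	SubnetworkSelector: &xpv1.Selector{
		MatchLabels: map[string]string{"purpose": "metastore"},
	},
}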
type DataprocMetricConfigObservation ¶ added in v0.27.0
type DataprocMetricConfigObservation struct { // Metrics sources to enable. Metrics []MetricsObservation `json:"metrics,omitempty" tf:"metrics,omitempty"` }
func (*DataprocMetricConfigObservation) DeepCopy ¶ added in v0.27.0
func (in *DataprocMetricConfigObservation) DeepCopy() *DataprocMetricConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataprocMetricConfigObservation.
func (*DataprocMetricConfigObservation) DeepCopyInto ¶ added in v0.27.0
func (in *DataprocMetricConfigObservation) DeepCopyInto(out *DataprocMetricConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataprocMetricConfigParameters ¶ added in v0.27.0
type DataprocMetricConfigParameters struct { // Metrics sources to enable. // +kubebuilder:validation:Required Metrics []MetricsParameters `json:"metrics" tf:"metrics,omitempty"` }
func (*DataprocMetricConfigParameters) DeepCopy ¶ added in v0.27.0
func (in *DataprocMetricConfigParameters) DeepCopy() *DataprocMetricConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataprocMetricConfigParameters.
func (*DataprocMetricConfigParameters) DeepCopyInto ¶ added in v0.27.0
func (in *DataprocMetricConfigParameters) DeepCopyInto(out *DataprocMetricConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DiskConfigObservation ¶
type DiskConfigObservation struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*DiskConfigObservation) DeepCopy ¶
func (in *DiskConfigObservation) DeepCopy() *DiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskConfigObservation.
func (*DiskConfigObservation) DeepCopyInto ¶
func (in *DiskConfigObservation) DeepCopyInto(out *DiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DiskConfigParameters ¶
type DiskConfigParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*DiskConfigParameters) DeepCopy ¶
func (in *DiskConfigParameters) DeepCopy() *DiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskConfigParameters.
func (*DiskConfigParameters) DeepCopyInto ¶
func (in *DiskConfigParameters) DeepCopyInto(out *DiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
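Since every field here is an optional pointer, leaving a field nil defers to the documented service default (500 GB pd-standard boot disk, no local SSDs), while setting it overrides that default. A small sketch under the same assumptions as above:

disk := v1beta1.DiskConfigParameters{
	BootDiskSizeGb: f64(200),      // must be at least 10 GB
	BootDiskType:   str("pd-ssd"), // "pd-ssd" or "pd-standard"
	// NumLocalSsds left nil -> defaults to 0
}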
type EncryptionConfigObservation ¶
type EncryptionConfigObservation struct { // The Cloud KMS key name to use for PD disk encryption for // all instances in the cluster. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigObservation) DeepCopy ¶
func (in *EncryptionConfigObservation) DeepCopy() *EncryptionConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigObservation.
func (*EncryptionConfigObservation) DeepCopyInto ¶
func (in *EncryptionConfigObservation) DeepCopyInto(out *EncryptionConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigParameters ¶
type EncryptionConfigParameters struct { // The Cloud KMS key name to use for PD disk encryption for // all instances in the cluster. // +kubebuilder:validation:Required KMSKeyName *string `json:"kmsKeyName" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigParameters) DeepCopy ¶
func (in *EncryptionConfigParameters) DeepCopy() *EncryptionConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigParameters.
func (*EncryptionConfigParameters) DeepCopyInto ¶
func (in *EncryptionConfigParameters) DeepCopyInto(out *EncryptionConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
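KMSKeyName is the block's only field and is marked required, so an encryption_config entry without it is rejected by the CRD schema. A sketch with a hypothetical key name:

enc := v1beta1.EncryptionConfigParameters{
	KMSKeyName: str("projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key"),
}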
type EndpointConfigObservation ¶
type EndpointConfigObservation struct { // The flag to enable http access to specific ports // on the cluster from external sources (aka Component Gateway). Defaults to false. EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"` // The map of port descriptions to URLs. Will only be populated if // enable_http_port_access is true. HTTPPorts map[string]*string `json:"httpPorts,omitempty" tf:"http_ports,omitempty"` }
func (*EndpointConfigObservation) DeepCopy ¶
func (in *EndpointConfigObservation) DeepCopy() *EndpointConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EndpointConfigObservation.
func (*EndpointConfigObservation) DeepCopyInto ¶
func (in *EndpointConfigObservation) DeepCopyInto(out *EndpointConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EndpointConfigParameters ¶
type EndpointConfigParameters struct { // The flag to enable http access to specific ports // on the cluster from external sources (aka Component Gateway). Defaults to false. // +kubebuilder:validation:Required EnableHTTPPortAccess *bool `json:"enableHttpPortAccess" tf:"enable_http_port_access,omitempty"` }
func (*EndpointConfigParameters) DeepCopy ¶
func (in *EndpointConfigParameters) DeepCopy() *EndpointConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EndpointConfigParameters.
func (*EndpointConfigParameters) DeepCopyInto ¶
func (in *EndpointConfigParameters) DeepCopyInto(out *EndpointConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
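Only the enable flag is settable; once the Component Gateway is up, the resulting port-to-URL map surfaces in EndpointConfigObservation.HTTPPorts. Sketch under the same assumptions as the earlier examples:

endpoint := v1beta1.EndpointConfigParameters{
	// Exposes cluster web UIs (YARN, Spark history, etc.) through the Component Gateway.
	EnableHTTPPortAccess: boolp(true),
}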
type GceClusterConfigNodeGroupAffinityObservation ¶ added in v0.27.0
type GceClusterConfigNodeGroupAffinityObservation struct { // Required. The URI of a sole-tenant node group resource that the cluster will be created on, for example /zones/us-central1-a/nodeGroups/node-group-1 or simply node-group-1. NodeGroup *string `json:"nodeGroup,omitempty" tf:"node_group,omitempty"` }
func (*GceClusterConfigNodeGroupAffinityObservation) DeepCopy ¶ added in v0.27.0
func (in *GceClusterConfigNodeGroupAffinityObservation) DeepCopy() *GceClusterConfigNodeGroupAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigNodeGroupAffinityObservation.
func (*GceClusterConfigNodeGroupAffinityObservation) DeepCopyInto ¶ added in v0.27.0
func (in *GceClusterConfigNodeGroupAffinityObservation) DeepCopyInto(out *GceClusterConfigNodeGroupAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigNodeGroupAffinityParameters ¶ added in v0.27.0
type GceClusterConfigNodeGroupAffinityParameters struct { // Required. The URI of a sole-tenant node group resource that the cluster will be created on, for example /zones/us-central1-a/nodeGroups/node-group-1 or simply node-group-1. // +kubebuilder:validation:Required NodeGroup *string `json:"nodeGroup" tf:"node_group,omitempty"` }
func (*GceClusterConfigNodeGroupAffinityParameters) DeepCopy ¶ added in v0.27.0
func (in *GceClusterConfigNodeGroupAffinityParameters) DeepCopy() *GceClusterConfigNodeGroupAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigNodeGroupAffinityParameters.
func (*GceClusterConfigNodeGroupAffinityParameters) DeepCopyInto ¶ added in v0.27.0
func (in *GceClusterConfigNodeGroupAffinityParameters) DeepCopyInto(out *GceClusterConfigNodeGroupAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigObservation ¶
type GceClusterConfigObservation struct { // By default, clusters are not restricted to internal IP addresses, // and will have ephemeral external IP addresses assigned to each instance. If set to true, all // instances in the cluster will only have internal IP addresses. Note: Private Google Access // (also known as privateIpGoogleAccess) must be enabled on the subnetwork that the cluster // will be launched in. InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // A map of the Compute Engine metadata entries to add to all instances // (see Project and instance metadata). Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // The name or self_link of the Google Compute Engine // network to the cluster will be part of. Conflicts with subnetwork. // If neither is specified, this defaults to the "default" network. Network *string `json:"network,omitempty" tf:"network,omitempty"` // Node Group Affinity for sole-tenant clusters. NodeGroupAffinity []NodeGroupAffinityObservation `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Reservation Affinity for consuming zonal reservation. ReservationAffinity []ReservationAffinityObservation `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // The service account to be used by the Node VMs. // If not specified, the "default" service account is used. ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // The set of Google API scopes // to be made available on all of the node VMs under the service_account // specified. Both OAuth2 URLs and gcloud // short names are supported. To allow full access to all Cloud APIs, use the // cloud-platform scope. See a complete list of scopes here. ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Shielded Instance Config for clusters using Compute Engine Shielded VMs. ShieldedInstanceConfig []ShieldedInstanceConfigObservation `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // The name or self_link of the Google Compute Engine // subnetwork the cluster will be part of. Conflicts with network. Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The list of instance tags applied to instances in the cluster. // Tags are used to identify valid sources or targets for network firewalls. Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // The GCP zone where your data is stored and used (i.e. where // the master and the worker nodes will be created in). If region is set to 'global' (default) // then zone is mandatory, otherwise GCP is able to make use of Auto Zone Placement // to determine this automatically for you. // Note: This setting additionally determines and restricts // which computing resources are available for use with other configs such as // cluster_config.master_config.machine_type and cluster_config.worker_config.machine_type. Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*GceClusterConfigObservation) DeepCopy ¶
func (in *GceClusterConfigObservation) DeepCopy() *GceClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigObservation.
func (*GceClusterConfigObservation) DeepCopyInto ¶
func (in *GceClusterConfigObservation) DeepCopyInto(out *GceClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigParameters ¶
type GceClusterConfigParameters struct { // By default, clusters are not restricted to internal IP addresses, // and will have ephemeral external IP addresses assigned to each instance. If set to true, all // instances in the cluster will only have internal IP addresses. Note: Private Google Access // (also known as privateIpGoogleAccess) must be enabled on the subnetwork that the cluster // will be launched in. // +kubebuilder:validation:Optional InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // A map of the Compute Engine metadata entries to add to all instances // (see Project and instance metadata). // +kubebuilder:validation:Optional Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // The name or self_link of the Google Compute Engine // network to the cluster will be part of. Conflicts with subnetwork. // If neither is specified, this defaults to the "default" network. // +kubebuilder:validation:Optional Network *string `json:"network,omitempty" tf:"network,omitempty"` // Node Group Affinity for sole-tenant clusters. // +kubebuilder:validation:Optional NodeGroupAffinity []NodeGroupAffinityParameters `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Reservation Affinity for consuming zonal reservation. // +kubebuilder:validation:Optional ReservationAffinity []ReservationAffinityParameters `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // The service account to be used by the Node VMs. // If not specified, the "default" service account is used. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/upbound/upjet/pkg/resource.ExtractParamPath("email",true) // +kubebuilder:validation:Optional ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate serviceAccount. // +kubebuilder:validation:Optional ServiceAccountRef *v1.Reference `json:"serviceAccountRef,omitempty" tf:"-"` // The set of Google API scopes // to be made available on all of the node VMs under the service_account // specified. Both OAuth2 URLs and gcloud // short names are supported. To allow full access to all Cloud APIs, use the // cloud-platform scope. See a complete list of scopes here. // +kubebuilder:validation:Optional ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Selector for a ServiceAccount in cloudplatform to populate serviceAccount. // +kubebuilder:validation:Optional ServiceAccountSelector *v1.Selector `json:"serviceAccountSelector,omitempty" tf:"-"` // Shielded Instance Config for clusters using Compute Engine Shielded VMs. // +kubebuilder:validation:Optional ShieldedInstanceConfig []ShieldedInstanceConfigParameters `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // The name or self_link of the Google Compute Engine // subnetwork the cluster will be part of. Conflicts with network. // +kubebuilder:validation:Optional Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The list of instance tags applied to instances in the cluster. // Tags are used to identify valid sources or targets for network firewalls. // +kubebuilder:validation:Optional Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // The GCP zone where your data is stored and used (i.e. where // the master and the worker nodes will be created in). If region is set to 'global' (default) // then zone is mandatory, otherwise GCP is able to make use of Auto Zone Placement // to determine this automatically for you. // Note: This setting additionally determines and restricts // which computing resources are available for use with other configs such as // cluster_config.master_config.machine_type and cluster_config.worker_config.machine_type. // +kubebuilder:validation:Optional Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*GceClusterConfigParameters) DeepCopy ¶
func (in *GceClusterConfigParameters) DeepCopy() *GceClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigParameters.
func (*GceClusterConfigParameters) DeepCopyInto ¶
func (in *GceClusterConfigParameters) DeepCopyInto(out *GceClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
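The GCE cluster block mixes plain values, cross-resource references, and mutually exclusive fields (network versus subnetwork). The sketch below, with hypothetical names and the same assumed imports and helpers as the earlier examples, keeps the cluster on internal IPs, pins a subnetwork, and resolves the node service account by reference:

gce := v1beta1.GceClusterConfigParameters{
	InternalIPOnly: boolp(true), // requires Private Google Access on the subnetwork
	// network and subnetwork conflict; only one of the two may be set.
	Subnetwork:           str("projects/my-project/regions/us-central1/subnetworks/my-subnet"),
	ServiceAccountRef:    &xpv1.Reference{Name: "dataproc-nodes"}, // resolved to the account's email
	ServiceAccountScopes: []*string{str("cloud-platform")},
	Tags:                 []*string{str("dataproc")},
	Zone:                 str("us-central1-a"),
}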
type GceClusterConfigReservationAffinityObservation ¶ added in v0.27.0
type GceClusterConfigReservationAffinityObservation struct { // Optional. Type of reservation to consume Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Optional. Corresponds to the label key of reservation resource. Key *string `json:"key,omitempty" tf:"key,omitempty"` // Required. List of allowed values for the parameter. Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*GceClusterConfigReservationAffinityObservation) DeepCopy ¶ added in v0.27.0
func (in *GceClusterConfigReservationAffinityObservation) DeepCopy() *GceClusterConfigReservationAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigReservationAffinityObservation.
func (*GceClusterConfigReservationAffinityObservation) DeepCopyInto ¶ added in v0.27.0
func (in *GceClusterConfigReservationAffinityObservation) DeepCopyInto(out *GceClusterConfigReservationAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigReservationAffinityParameters ¶ added in v0.27.0
type GceClusterConfigReservationAffinityParameters struct { // Optional. Type of reservation to consume Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION // +kubebuilder:validation:Optional ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Optional. Corresponds to the label key of reservation resource. // +kubebuilder:validation:Optional Key *string `json:"key,omitempty" tf:"key,omitempty"` // Required. List of allowed values for the parameter. // +kubebuilder:validation:Optional Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*GceClusterConfigReservationAffinityParameters) DeepCopy ¶ added in v0.27.0
func (in *GceClusterConfigReservationAffinityParameters) DeepCopy() *GceClusterConfigReservationAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigReservationAffinityParameters.
func (*GceClusterConfigReservationAffinityParameters) DeepCopyInto ¶ added in v0.27.0
func (in *GceClusterConfigReservationAffinityParameters) DeepCopyInto(out *GceClusterConfigReservationAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigShieldedInstanceConfigObservation ¶
type GceClusterConfigShieldedInstanceConfigObservation struct { // Optional. Defines whether instances have Integrity Monitoring enabled. EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"` // Optional. Defines whether instances have Secure Boot enabled. EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"` // Optional. Defines whether instances have the vTPM enabled. EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"` }
func (*GceClusterConfigShieldedInstanceConfigObservation) DeepCopy ¶
func (in *GceClusterConfigShieldedInstanceConfigObservation) DeepCopy() *GceClusterConfigShieldedInstanceConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigShieldedInstanceConfigObservation.
func (*GceClusterConfigShieldedInstanceConfigObservation) DeepCopyInto ¶
func (in *GceClusterConfigShieldedInstanceConfigObservation) DeepCopyInto(out *GceClusterConfigShieldedInstanceConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigShieldedInstanceConfigParameters ¶
type GceClusterConfigShieldedInstanceConfigParameters struct { // Optional. Defines whether instances have Integrity Monitoring enabled. // +kubebuilder:validation:Optional EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"` // Optional. Defines whether instances have Secure Boot enabled. // +kubebuilder:validation:Optional EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"` // Optional. Defines whether instances have the vTPM enabled. // +kubebuilder:validation:Optional EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"` }
func (*GceClusterConfigShieldedInstanceConfigParameters) DeepCopy ¶
func (in *GceClusterConfigShieldedInstanceConfigParameters) DeepCopy() *GceClusterConfigShieldedInstanceConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigShieldedInstanceConfigParameters.
func (*GceClusterConfigShieldedInstanceConfigParameters) DeepCopyInto ¶
func (in *GceClusterConfigShieldedInstanceConfigParameters) DeepCopyInto(out *GceClusterConfigShieldedInstanceConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GkeClusterConfigObservation ¶
type GkeClusterConfigObservation struct { // A target GKE cluster to deploy to. It must be in the same project and region as the Dataproc cluster // (the GKE cluster can be zonal or regional) GkeClusterTarget *string `json:"gkeClusterTarget,omitempty" tf:"gke_cluster_target,omitempty"` // GKE node pools where workloads will be scheduled. At least one node pool must be assigned the DEFAULT // GkeNodePoolTarget.Role. If a GkeNodePoolTarget is not specified, Dataproc constructs a DEFAULT GkeNodePoolTarget. // Each role can be given to only one GkeNodePoolTarget. All node pools must have the same location settings. NodePoolTarget []NodePoolTargetObservation `json:"nodePoolTarget,omitempty" tf:"node_pool_target,omitempty"` }
func (*GkeClusterConfigObservation) DeepCopy ¶
func (in *GkeClusterConfigObservation) DeepCopy() *GkeClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GkeClusterConfigObservation.
func (*GkeClusterConfigObservation) DeepCopyInto ¶
func (in *GkeClusterConfigObservation) DeepCopyInto(out *GkeClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GkeClusterConfigParameters ¶
type GkeClusterConfigParameters struct { // A target GKE cluster to deploy to. It must be in the same project and region as the Dataproc cluster // (the GKE cluster can be zonal or regional) // +kubebuilder:validation:Optional GkeClusterTarget *string `json:"gkeClusterTarget,omitempty" tf:"gke_cluster_target,omitempty"` // GKE node pools where workloads will be scheduled. At least one node pool must be assigned the DEFAULT // GkeNodePoolTarget.Role. If a GkeNodePoolTarget is not specified, Dataproc constructs a DEFAULT GkeNodePoolTarget. // Each role can be given to only one GkeNodePoolTarget. All node pools must have the same location settings. // +kubebuilder:validation:Optional NodePoolTarget []NodePoolTargetParameters `json:"nodePoolTarget,omitempty" tf:"node_pool_target,omitempty"` }
func (*GkeClusterConfigParameters) DeepCopy ¶
func (in *GkeClusterConfigParameters) DeepCopy() *GkeClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GkeClusterConfigParameters.
func (*GkeClusterConfigParameters) DeepCopyInto ¶
func (in *GkeClusterConfigParameters) DeepCopyInto(out *GkeClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopConfigObservation ¶
type HadoopConfigObservation struct { // HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig []LoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopConfigObservation) DeepCopy ¶
func (in *HadoopConfigObservation) DeepCopy() *HadoopConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopConfigObservation.
func (*HadoopConfigObservation) DeepCopyInto ¶
func (in *HadoopConfigObservation) DeepCopyInto(out *HadoopConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopConfigParameters ¶
type HadoopConfigParameters struct { // HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig []LoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri // +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class // +kubebuilder:validation:Optional MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopConfigParameters) DeepCopy ¶
func (in *HadoopConfigParameters) DeepCopy() *HadoopConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopConfigParameters.
func (*HadoopConfigParameters) DeepCopyInto ¶
func (in *HadoopConfigParameters) DeepCopyInto(out *HadoopConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
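main_class and main_jar_file_uri are mutually exclusive: either name the class and ship its jar through jar_file_uris, or point main_jar_file_uri at the jar directly. A sketch of the second form, reusing the assumed import and helpers from the earlier examples (bucket names are hypothetical):

hadoop := v1beta1.HadoopConfigParameters{
	// Jar path taken from the field documentation above; main_class is left unset.
	MainJarFileURI: str("file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar"),
	Args: []*string{
		str("wordcount"),
		str("gs://my-bucket/input/"),
		str("gs://my-bucket/output/"),
	},
}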
type HadoopJobLoggingConfigObservation ¶
type HadoopJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*HadoopJobLoggingConfigObservation) DeepCopy ¶
func (in *HadoopJobLoggingConfigObservation) DeepCopy() *HadoopJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobLoggingConfigObservation.
func (*HadoopJobLoggingConfigObservation) DeepCopyInto ¶
func (in *HadoopJobLoggingConfigObservation) DeepCopyInto(out *HadoopJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobLoggingConfigParameters ¶
type HadoopJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*HadoopJobLoggingConfigParameters) DeepCopy ¶
func (in *HadoopJobLoggingConfigParameters) DeepCopy() *HadoopJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobLoggingConfigParameters.
func (*HadoopJobLoggingConfigParameters) DeepCopyInto ¶
func (in *HadoopJobLoggingConfigParameters) DeepCopyInto(out *HadoopJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobObservation ¶
type HadoopJobObservation struct { // Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. The runtime log config for job execution. LoggingConfig []HadoopJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopJobObservation) DeepCopy ¶
func (in *HadoopJobObservation) DeepCopy() *HadoopJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobObservation.
func (*HadoopJobObservation) DeepCopyInto ¶
func (in *HadoopJobObservation) DeepCopyInto(out *HadoopJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobParameters ¶
type HadoopJobParameters struct { // Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig []HadoopJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. // +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. // +kubebuilder:validation:Optional MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopJobParameters) DeepCopy ¶
func (in *HadoopJobParameters) DeepCopy() *HadoopJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobParameters.
func (*HadoopJobParameters) DeepCopyInto ¶
func (in *HadoopJobParameters) DeepCopyInto(out *HadoopJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveConfigObservation ¶
type HiveConfigObservation struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Hive script to execute as the job. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Hive queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";). ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveConfigObservation) DeepCopy ¶
func (in *HiveConfigObservation) DeepCopy() *HiveConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveConfigObservation.
func (*HiveConfigObservation) DeepCopyInto ¶
func (in *HiveConfigObservation) DeepCopyInto(out *HiveConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveConfigParameters ¶
type HiveConfigParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Hive script to execute as the job. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Hive queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";). // +kubebuilder:validation:Optional ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveConfigParameters) DeepCopy ¶
func (in *HiveConfigParameters) DeepCopy() *HiveConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveConfigParameters.
func (*HiveConfigParameters) DeepCopyInto ¶
func (in *HiveConfigParameters) DeepCopyInto(out *HiveConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
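query_file_uri and query_list conflict, so a Hive job supplies either a script in Cloud Storage or inline statements, not both. A sketch of the inline form under the same assumptions as the earlier examples:

hive := v1beta1.HiveConfigParameters{
	ContinueOnFailure: boolp(true), // keep going if an independent query fails
	QueryList: []*string{
		str("SHOW DATABASES"),
		str("CREATE DATABASE IF NOT EXISTS logs"),
	},
	ScriptVariables: map[string]*string{"env": str("dev")}, // hypothetical variable
}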
type HiveJobObservation ¶
type HiveJobObservation struct { // Optional. Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList []QueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` // Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveJobObservation) DeepCopy ¶
func (in *HiveJobObservation) DeepCopy() *HiveJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveJobObservation.
func (*HiveJobObservation) DeepCopyInto ¶
func (in *HiveJobObservation) DeepCopyInto(out *HiveJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveJobParameters ¶
type HiveJobParameters struct { // Optional. Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList []QueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +kubebuilder:validation:Optional ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveJobParameters) DeepCopy ¶
func (in *HiveJobParameters) DeepCopy() *HiveJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveJobParameters.
func (*HiveJobParameters) DeepCopyInto ¶
func (in *HiveJobParameters) DeepCopyInto(out *HiveJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigKerberosConfigObservation ¶ added in v0.27.0
type HiveMetastoreConfigKerberosConfigObservation struct { // A Kerberos keytab file that can be used to authenticate a service principal with a Kerberos Key Distribution Center (KDC). // Structure is documented below. Keytab []KeytabObservation `json:"keytab,omitempty" tf:"keytab,omitempty"` // A Cloud Storage URI that specifies the path to a krb5.conf file. It is of the form gs://{bucket_name}/path/to/krb5.conf, although the file does not need to be named krb5.conf explicitly. Krb5ConfigGcsURI *string `json:"krb5ConfigGcsUri,omitempty" tf:"krb5_config_gcs_uri,omitempty"` // A Kerberos principal that exists in both the keytab and the KDC to authenticate as. A typical principal is of the form "primary/instance@REALM", but there is no exact format. Principal *string `json:"principal,omitempty" tf:"principal,omitempty"` }
func (*HiveMetastoreConfigKerberosConfigObservation) DeepCopy ¶ added in v0.27.0
func (in *HiveMetastoreConfigKerberosConfigObservation) DeepCopy() *HiveMetastoreConfigKerberosConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigKerberosConfigObservation.
func (*HiveMetastoreConfigKerberosConfigObservation) DeepCopyInto ¶ added in v0.27.0
func (in *HiveMetastoreConfigKerberosConfigObservation) DeepCopyInto(out *HiveMetastoreConfigKerberosConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigKerberosConfigParameters ¶ added in v0.27.0
type HiveMetastoreConfigKerberosConfigParameters struct { // A Kerberos keytab file that can be used to authenticate a service principal with a Kerberos Key Distribution Center (KDC). // Structure is documented below. // +kubebuilder:validation:Required Keytab []KeytabParameters `json:"keytab" tf:"keytab,omitempty"` // A Cloud Storage URI that specifies the path to a krb5.conf file. It is of the form gs://{bucket_name}/path/to/krb5.conf, although the file does not need to be named krb5.conf explicitly. // +kubebuilder:validation:Required Krb5ConfigGcsURI *string `json:"krb5ConfigGcsUri" tf:"krb5_config_gcs_uri,omitempty"` // A Kerberos principal that exists in both the keytab and the KDC to authenticate as. A typical principal is of the form "primary/instance@REALM", but there is no exact format. // +kubebuilder:validation:Required Principal *string `json:"principal" tf:"principal,omitempty"` }
func (*HiveMetastoreConfigKerberosConfigParameters) DeepCopy ¶ added in v0.27.0
func (in *HiveMetastoreConfigKerberosConfigParameters) DeepCopy() *HiveMetastoreConfigKerberosConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigKerberosConfigParameters.
func (*HiveMetastoreConfigKerberosConfigParameters) DeepCopyInto ¶ added in v0.27.0
func (in *HiveMetastoreConfigKerberosConfigParameters) DeepCopyInto(out *HiveMetastoreConfigKerberosConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigObservation ¶ added in v0.27.0
type HiveMetastoreConfigObservation struct { // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // Information used to configure the Hive metastore service as a service principal in a Kerberos realm. // Structure is documented below. KerberosConfig []HiveMetastoreConfigKerberosConfigObservation `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` // The Hive metastore schema version. Version *string `json:"version,omitempty" tf:"version,omitempty"` }
func (*HiveMetastoreConfigObservation) DeepCopy ¶ added in v0.27.0
func (in *HiveMetastoreConfigObservation) DeepCopy() *HiveMetastoreConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigObservation.
func (*HiveMetastoreConfigObservation) DeepCopyInto ¶ added in v0.27.0
func (in *HiveMetastoreConfigObservation) DeepCopyInto(out *HiveMetastoreConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigParameters ¶ added in v0.27.0
type HiveMetastoreConfigParameters struct { // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +kubebuilder:validation:Optional ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // Information used to configure the Hive metastore service as a service principal in a Kerberos realm. // Structure is documented below. // +kubebuilder:validation:Optional KerberosConfig []HiveMetastoreConfigKerberosConfigParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` // The Hive metastore schema version. // +kubebuilder:validation:Required Version *string `json:"version" tf:"version,omitempty"` }
func (*HiveMetastoreConfigParameters) DeepCopy ¶ added in v0.27.0
func (in *HiveMetastoreConfigParameters) DeepCopy() *HiveMetastoreConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigParameters.
func (*HiveMetastoreConfigParameters) DeepCopyInto ¶ added in v0.27.0
func (in *HiveMetastoreConfigParameters) DeepCopyInto(out *HiveMetastoreConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
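Example (illustrative): a sketch combining HiveMetastoreConfigParameters with its Kerberos and keytab sub-blocks; the Secret Manager secret, bucket paths, principal, and schema version are placeholders.

package example

import (
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

// hiveMetastoreConfig sets the required schema version, one hive-site.xml
// override, and a Kerberos service principal whose keytab lives in Secret Manager.
func hiveMetastoreConfig() v1beta1.HiveMetastoreConfigParameters {
	return v1beta1.HiveMetastoreConfigParameters{
		Version: ptr("3.1.2"),
		ConfigOverrides: map[string]*string{
			"hive.metastore.warehouse.dir": ptr("gs://my-bucket/hive-warehouse"),
		},
		KerberosConfig: []v1beta1.HiveMetastoreConfigKerberosConfigParameters{{
			Keytab: []v1beta1.KeytabParameters{{
				CloudSecret: ptr("projects/1234567890/secrets/hive-keytab/versions/1"),
			}},
			Krb5ConfigGcsURI: ptr("gs://my-bucket/krb5.conf"),
			Principal:        ptr("hive/metastore.example.com@EXAMPLE.COM"),
		}},
	}
}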
type InitializationActionObservation ¶
type InitializationActionObservation struct { // The script to be executed during initialization of the cluster. // The script must be a GCS file with a gs:// prefix. Script *string `json:"script,omitempty" tf:"script,omitempty"` // The maximum duration (in seconds) which script is // allowed to take to execute its action. GCP will default to a predetermined // computed value if not set (currently 300). TimeoutSec *float64 `json:"timeoutSec,omitempty" tf:"timeout_sec,omitempty"` }
func (*InitializationActionObservation) DeepCopy ¶
func (in *InitializationActionObservation) DeepCopy() *InitializationActionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionObservation.
func (*InitializationActionObservation) DeepCopyInto ¶
func (in *InitializationActionObservation) DeepCopyInto(out *InitializationActionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionParameters ¶
type InitializationActionParameters struct { // The script to be executed during initialization of the cluster. // The script must be a GCS file with a gs:// prefix. // +kubebuilder:validation:Required Script *string `json:"script" tf:"script,omitempty"` // The maximum duration (in seconds) which script is // allowed to take to execute its action. GCP will default to a predetermined // computed value if not set (currently 300). // +kubebuilder:validation:Optional TimeoutSec *float64 `json:"timeoutSec,omitempty" tf:"timeout_sec,omitempty"` }
func (*InitializationActionParameters) DeepCopy ¶
func (in *InitializationActionParameters) DeepCopy() *InitializationActionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionParameters.
func (*InitializationActionParameters) DeepCopyInto ¶
func (in *InitializationActionParameters) DeepCopyInto(out *InitializationActionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
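Example (illustrative): a minimal InitializationActionParameters value; the script path is a placeholder and the timeout simply overrides the 300-second default mentioned above.

package example

import (
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

// initAction points at a startup script in Cloud Storage and allows it up to
// 500 seconds instead of the default 300.
func initAction() v1beta1.InitializationActionParameters {
	return v1beta1.InitializationActionParameters{
		Script:     ptr("gs://my-bucket/scripts/install-deps.sh"),
		TimeoutSec: ptr(float64(500)),
	}
}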
type InitializationActionsObservation ¶
type InitializationActionsObservation struct { // Required. Cloud Storage URI of executable file. ExecutableFile *string `json:"executableFile,omitempty" tf:"executable_file,omitempty"` // Optional. Amount of time executable has to complete. Default is 10 minutes (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period. ExecutionTimeout *string `json:"executionTimeout,omitempty" tf:"execution_timeout,omitempty"` }
func (*InitializationActionsObservation) DeepCopy ¶
func (in *InitializationActionsObservation) DeepCopy() *InitializationActionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionsObservation.
func (*InitializationActionsObservation) DeepCopyInto ¶
func (in *InitializationActionsObservation) DeepCopyInto(out *InitializationActionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionsParameters ¶
type InitializationActionsParameters struct { // Required. Cloud Storage URI of executable file. // +kubebuilder:validation:Optional ExecutableFile *string `json:"executableFile,omitempty" tf:"executable_file,omitempty"` // Optional. Amount of time executable has to complete. Default is 10 minutes (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period. // +kubebuilder:validation:Optional ExecutionTimeout *string `json:"executionTimeout,omitempty" tf:"execution_timeout,omitempty"` }
func (*InitializationActionsParameters) DeepCopy ¶
func (in *InitializationActionsParameters) DeepCopy() *InitializationActionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionsParameters.
func (*InitializationActionsParameters) DeepCopyInto ¶
func (in *InitializationActionsParameters) DeepCopyInto(out *InitializationActionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Job ¶
type Job struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.placement)",message="placement is a required parameter" Spec JobSpec `json:"spec"` Status JobStatus `json:"status,omitempty"` }
Job is the Schema for the Jobs API. Manages a job resource within a Dataproc cluster. +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:subresource:status +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
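Example (illustrative): a sketch of a complete Job managed resource built in Go, assuming the usual crossplane-runtime (xpv1) and apimachinery (metav1) packages; the object name, region, and labels are placeholders, and the placement block is left empty because its fields are documented on PlacementParameters. Note the validation rule above: unless managementPolicy is ObserveOnly, forProvider.placement must be set.

package main

import (
	"fmt"

	xpv1 "github.com/crossplane/crossplane-runtime/apis/common/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

func main() {
	job := v1beta1.Job{
		ObjectMeta: metav1.ObjectMeta{Name: "pyspark-word-count"},
		Spec: v1beta1.JobSpec{
			ResourceSpec: xpv1.ResourceSpec{
				ProviderConfigReference: &xpv1.Reference{Name: "default"},
			},
			ForProvider: v1beta1.JobParameters{
				Region:      ptr("us-central1"),
				ForceDelete: ptr(true),
				Labels:      map[string]*string{"team": ptr("data-eng")},
				// Placement is required by the XValidation rule; its cluster
				// fields are documented on PlacementParameters.
				Placement: []v1beta1.PlacementParameters{{}},
			},
		},
	}
	fmt.Println(job.GetTerraformResourceType())
}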
func (*Job) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job.
func (*Job) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Job) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Job) GetCondition ¶
func (mg *Job) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Job.
func (*Job) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Job
func (*Job) GetDeletionPolicy ¶
func (mg *Job) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Job.
func (*Job) GetManagementPolicy ¶ added in v0.31.0
func (mg *Job) GetManagementPolicy() xpv1.ManagementPolicy
GetManagementPolicy of this Job.
func (*Job) GetObservation ¶
GetObservation of this Job
func (*Job) GetParameters ¶
GetParameters of this Job
func (*Job) GetProviderConfigReference ¶
GetProviderConfigReference of this Job.
func (*Job) GetProviderReference ¶
GetProviderReference of this Job. Deprecated: Use GetProviderConfigReference.
func (*Job) GetPublishConnectionDetailsTo ¶
func (mg *Job) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Job.
func (*Job) GetTerraformResourceType ¶
GetTerraformResourceType returns Terraform resource type for this Job
func (*Job) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Job) GetWriteConnectionSecretToReference ¶
func (mg *Job) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Job.
func (*Job) LateInitialize ¶
LateInitialize this Job using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Job) ResolveReferences ¶
ResolveReferences of this Job.
func (*Job) SetConditions ¶
SetConditions of this Job.
func (*Job) SetDeletionPolicy ¶
func (mg *Job) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Job.
func (*Job) SetManagementPolicy ¶ added in v0.31.0
func (mg *Job) SetManagementPolicy(r xpv1.ManagementPolicy)
SetManagementPolicy of this Job.
func (*Job) SetObservation ¶
SetObservation for this Job
func (*Job) SetParameters ¶
SetParameters for this Job
func (*Job) SetProviderConfigReference ¶
SetProviderConfigReference of this Job.
func (*Job) SetProviderReference ¶
SetProviderReference of this Job. Deprecated: Use SetProviderConfigReference.
func (*Job) SetPublishConnectionDetailsTo ¶
func (mg *Job) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Job.
func (*Job) SetWriteConnectionSecretToReference ¶
func (mg *Job) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Job.
type JobList ¶
type JobList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Job `json:"items"` }
JobList contains a list of Jobs
func (*JobList) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList.
func (*JobList) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*JobList) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type JobObservation ¶
type JobObservation struct { // If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri. DriverControlsFilesURI *string `json:"driverControlsFilesUri,omitempty" tf:"driver_controls_files_uri,omitempty"` // A URI pointing to the location of the stdout of the job's driver program. DriverOutputResourceURI *string `json:"driverOutputResourceUri,omitempty" tf:"driver_output_resource_uri,omitempty"` // By default, you can only delete inactive jobs within // Dataproc. Setting this to true, and calling destroy, will ensure that the // job is first cancelled before issuing the delete. ForceDelete *bool `json:"forceDelete,omitempty" tf:"force_delete,omitempty"` HadoopConfig []HadoopConfigObservation `json:"hadoopConfig,omitempty" tf:"hadoop_config,omitempty"` HiveConfig []HiveConfigObservation `json:"hiveConfig,omitempty" tf:"hive_config,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` // The list of labels (key/value pairs) to add to the job. Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` PigConfig []PigConfigObservation `json:"pigConfig,omitempty" tf:"pig_config,omitempty"` Placement []PlacementObservation `json:"placement,omitempty" tf:"placement,omitempty"` PrestoConfig []PrestoConfigObservation `json:"prestoConfig,omitempty" tf:"presto_config,omitempty"` // The project in which the cluster can be found and jobs // subsequently run against. If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` PysparkConfig []PysparkConfigObservation `json:"pysparkConfig,omitempty" tf:"pyspark_config,omitempty"` Reference []ReferenceObservation `json:"reference,omitempty" tf:"reference,omitempty"` // The Cloud Dataproc region. This essentially determines which clusters are available // for this job to be submitted to. If not specified, defaults to global. Region *string `json:"region,omitempty" tf:"region,omitempty"` Scheduling []SchedulingObservation `json:"scheduling,omitempty" tf:"scheduling,omitempty"` SparkConfig []SparkConfigObservation `json:"sparkConfig,omitempty" tf:"spark_config,omitempty"` SparksqlConfig []SparksqlConfigObservation `json:"sparksqlConfig,omitempty" tf:"sparksql_config,omitempty"` Status []StatusObservation `json:"status,omitempty" tf:"status,omitempty"` }
func (*JobObservation) DeepCopy ¶
func (in *JobObservation) DeepCopy() *JobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobObservation.
func (*JobObservation) DeepCopyInto ¶
func (in *JobObservation) DeepCopyInto(out *JobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobParameters ¶
type JobParameters struct { // By default, you can only delete inactive jobs within // Dataproc. Setting this to true, and calling destroy, will ensure that the // job is first cancelled before issuing the delete. // +kubebuilder:validation:Optional ForceDelete *bool `json:"forceDelete,omitempty" tf:"force_delete,omitempty"` // +kubebuilder:validation:Optional HadoopConfig []HadoopConfigParameters `json:"hadoopConfig,omitempty" tf:"hadoop_config,omitempty"` // +kubebuilder:validation:Optional HiveConfig []HiveConfigParameters `json:"hiveConfig,omitempty" tf:"hive_config,omitempty"` // The list of labels (key/value pairs) to add to the job. // +kubebuilder:validation:Optional Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // +kubebuilder:validation:Optional PigConfig []PigConfigParameters `json:"pigConfig,omitempty" tf:"pig_config,omitempty"` // +kubebuilder:validation:Optional Placement []PlacementParameters `json:"placement,omitempty" tf:"placement,omitempty"` // +kubebuilder:validation:Optional PrestoConfig []PrestoConfigParameters `json:"prestoConfig,omitempty" tf:"presto_config,omitempty"` // The project in which the cluster can be found and jobs // subsequently run against. If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Optional PysparkConfig []PysparkConfigParameters `json:"pysparkConfig,omitempty" tf:"pyspark_config,omitempty"` // +kubebuilder:validation:Optional Reference []ReferenceParameters `json:"reference,omitempty" tf:"reference,omitempty"` // The Cloud Dataproc region. This essentially determines which clusters are available // for this job to be submitted to. If not specified, defaults to global. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta1.Cluster // +crossplane:generate:reference:extractor=github.com/upbound/upjet/pkg/resource.ExtractParamPath("region",false) // +kubebuilder:validation:Optional Region *string `json:"region,omitempty" tf:"region,omitempty"` // Reference to a Cluster in dataproc to populate region. // +kubebuilder:validation:Optional RegionRef *v1.Reference `json:"regionRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate region. // +kubebuilder:validation:Optional RegionSelector *v1.Selector `json:"regionSelector,omitempty" tf:"-"` // +kubebuilder:validation:Optional Scheduling []SchedulingParameters `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // +kubebuilder:validation:Optional SparkConfig []SparkConfigParameters `json:"sparkConfig,omitempty" tf:"spark_config,omitempty"` // +kubebuilder:validation:Optional SparksqlConfig []SparksqlConfigParameters `json:"sparksqlConfig,omitempty" tf:"sparksql_config,omitempty"` }
func (*JobParameters) DeepCopy ¶
func (in *JobParameters) DeepCopy() *JobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobParameters.
func (*JobParameters) DeepCopyInto ¶
func (in *JobParameters) DeepCopyInto(out *JobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
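Example (illustrative): per the reference markers on Region above, the region can be resolved from an existing Cluster managed resource instead of being hard-coded; a named reference is shown, with a label selector as the commented alternative. The cluster name and label are placeholders.

package example

import (
	xpv1 "github.com/crossplane/crossplane-runtime/apis/common/v1"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

// jobParamsRegionFromCluster leaves Region unset; the reference resolver
// extracts the referenced Cluster's region parameter and writes it into Region.
func jobParamsRegionFromCluster() v1beta1.JobParameters {
	return v1beta1.JobParameters{
		RegionRef: &xpv1.Reference{Name: "my-dataproc-cluster"},
		// RegionSelector: &xpv1.Selector{MatchLabels: map[string]string{"env": "prod"}},
	}
}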
type JobSpec ¶
type JobSpec struct { v1.ResourceSpec `json:",inline"` ForProvider JobParameters `json:"forProvider"` }
JobSpec defines the desired state of Job
func (*JobSpec) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpec.
func (*JobSpec) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobStatus ¶
type JobStatus struct { v1.ResourceStatus `json:",inline"` AtProvider JobObservation `json:"atProvider,omitempty"` }
JobStatus defines the observed state of Job.
func (*JobStatus) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus.
func (*JobStatus) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsObservation ¶
type JobsObservation struct { // Optional. Job is a Hadoop job. HadoopJob []HadoopJobObservation `json:"hadoopJob,omitempty" tf:"hadoop_job,omitempty"` // Optional. Job is a Hive job. HiveJob []HiveJobObservation `json:"hiveJob,omitempty" tf:"hive_job,omitempty"` // Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: {0,63} No more than 32 labels can be associated with a given job. Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Optional. Job is a Pig job. PigJob []PigJobObservation `json:"pigJob,omitempty" tf:"pig_job,omitempty"` // Optional. The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow. PrerequisiteStepIds []*string `json:"prerequisiteStepIds,omitempty" tf:"prerequisite_step_ids,omitempty"` // Optional. Job is a Presto job. PrestoJob []PrestoJobObservation `json:"prestoJob,omitempty" tf:"presto_job,omitempty"` // Optional. Job is a PySpark job. PysparkJob []PysparkJobObservation `json:"pysparkJob,omitempty" tf:"pyspark_job,omitempty"` // Optional. Job scheduling configuration. Scheduling []JobsSchedulingObservation `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // Optional. Job is a Spark job. SparkJob []SparkJobObservation `json:"sparkJob,omitempty" tf:"spark_job,omitempty"` // Optional. Job is a SparkR job. SparkRJob []SparkRJobObservation `json:"sparkRJob,omitempty" tf:"spark_r_job,omitempty"` // Optional. Job is a SparkSql job. SparkSQLJob []SparkSQLJobObservation `json:"sparkSqlJob,omitempty" tf:"spark_sql_job,omitempty"` // Required. The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job goog-dataproc-workflow-step-id label, and in field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. StepID *string `json:"stepId,omitempty" tf:"step_id,omitempty"` }
func (*JobsObservation) DeepCopy ¶
func (in *JobsObservation) DeepCopy() *JobsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsObservation.
func (*JobsObservation) DeepCopyInto ¶
func (in *JobsObservation) DeepCopyInto(out *JobsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsParameters ¶
type JobsParameters struct { // Optional. Job is a Hadoop job. // +kubebuilder:validation:Optional HadoopJob []HadoopJobParameters `json:"hadoopJob,omitempty" tf:"hadoop_job,omitempty"` // Optional. Job is a Hive job. // +kubebuilder:validation:Optional HiveJob []HiveJobParameters `json:"hiveJob,omitempty" tf:"hive_job,omitempty"` // Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: {0,63} No more than 32 labels can be associated with a given job. // +kubebuilder:validation:Optional Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Optional. Job is a Pig job. // +kubebuilder:validation:Optional PigJob []PigJobParameters `json:"pigJob,omitempty" tf:"pig_job,omitempty"` // Optional. The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow. // +kubebuilder:validation:Optional PrerequisiteStepIds []*string `json:"prerequisiteStepIds,omitempty" tf:"prerequisite_step_ids,omitempty"` // Optional. Job is a Presto job. // +kubebuilder:validation:Optional PrestoJob []PrestoJobParameters `json:"prestoJob,omitempty" tf:"presto_job,omitempty"` // Optional. Job is a PySpark job. // +kubebuilder:validation:Optional PysparkJob []PysparkJobParameters `json:"pysparkJob,omitempty" tf:"pyspark_job,omitempty"` // Optional. Job scheduling configuration. // +kubebuilder:validation:Optional Scheduling []JobsSchedulingParameters `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // Optional. Job is a Spark job. // +kubebuilder:validation:Optional SparkJob []SparkJobParameters `json:"sparkJob,omitempty" tf:"spark_job,omitempty"` // Optional. Job is a SparkR job. // +kubebuilder:validation:Optional SparkRJob []SparkRJobParameters `json:"sparkRJob,omitempty" tf:"spark_r_job,omitempty"` // Optional. Job is a SparkSql job. // +kubebuilder:validation:Optional SparkSQLJob []SparkSQLJobParameters `json:"sparkSqlJob,omitempty" tf:"spark_sql_job,omitempty"` // Required. The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job goog-dataproc-workflow-step-id label, and in field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. // +kubebuilder:validation:Required StepID *string `json:"stepId" tf:"step_id,omitempty"` }
func (*JobsParameters) DeepCopy ¶
func (in *JobsParameters) DeepCopy() *JobsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsParameters.
func (*JobsParameters) DeepCopyInto ¶
func (in *JobsParameters) DeepCopyInto(out *JobsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
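Example (illustrative): one workflow-template step expressed as JobsParameters; the step waits on a hypothetical "stage-raw-data" step, runs a Hive script, and caps driver restarts through the scheduling block. All identifiers and paths are placeholders.

package example

import (
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

func workflowStep() v1beta1.JobsParameters {
	return v1beta1.JobsParameters{
		StepID:              ptr("daily-hive-etl"),
		PrerequisiteStepIds: []*string{ptr("stage-raw-data")},
		HiveJob: []v1beta1.HiveJobParameters{{
			QueryFileURI: ptr("gs://my-bucket/queries/daily-report.hql"),
		}},
		Scheduling: []v1beta1.JobsSchedulingParameters{{
			MaxFailuresPerHour: ptr(float64(3)),
		}},
	}
}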
type JobsSchedulingObservation ¶
type JobsSchedulingObservation struct { // Optional. Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10. MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Optional. Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240 MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*JobsSchedulingObservation) DeepCopy ¶
func (in *JobsSchedulingObservation) DeepCopy() *JobsSchedulingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsSchedulingObservation.
func (*JobsSchedulingObservation) DeepCopyInto ¶
func (in *JobsSchedulingObservation) DeepCopyInto(out *JobsSchedulingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsSchedulingParameters ¶
type JobsSchedulingParameters struct { // Optional. Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10. // +kubebuilder:validation:Optional MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Optional. Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240 // +kubebuilder:validation:Optional MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*JobsSchedulingParameters) DeepCopy ¶
func (in *JobsSchedulingParameters) DeepCopy() *JobsSchedulingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsSchedulingParameters.
func (*JobsSchedulingParameters) DeepCopyInto ¶
func (in *JobsSchedulingParameters) DeepCopyInto(out *JobsSchedulingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KerberosConfigObservation ¶
type KerberosConfigObservation struct { // The admin server (IP or hostname) for the // remote trusted realm in a cross realm trust relationship. CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"` // The KDC (IP or hostname) for the // remote trusted realm in a cross realm trust relationship. CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"` // The remote realm the Dataproc on-cluster KDC will // trust, should the user enable cross realm trust. CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"` // encrypted file containing the shared password between the on-cluster Kerberos realm // and the remote trusted realm, in a cross realm trust relationship. CrossRealmTrustSharedPasswordURI *string `json:"crossRealmTrustSharedPasswordUri,omitempty" tf:"cross_realm_trust_shared_password_uri,omitempty"` // Flag to indicate whether to Kerberize the cluster. EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"` // The URI of the KMS key used to encrypt various sensitive files. KMSKeyURI *string `json:"kmsKeyUri,omitempty" tf:"kms_key_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the master key of the KDC database. KdcDBKeyURI *string `json:"kdcDbKeyUri,omitempty" tf:"kdc_db_key_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the password to the user provided key. For the self-signed certificate, this password // is generated by Dataproc. KeyPasswordURI *string `json:"keyPasswordUri,omitempty" tf:"key_password_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the password to the user provided keystore. For the self-signed certificate, the password // is generated by Dataproc. KeystorePasswordURI *string `json:"keystorePasswordUri,omitempty" tf:"keystore_password_uri,omitempty"` // The Cloud Storage URI of the keystore file used for SSL encryption. // If not provided, Dataproc will provide a self-signed certificate. KeystoreURI *string `json:"keystoreUri,omitempty" tf:"keystore_uri,omitempty"` // The name of the on-cluster Kerberos realm. If not specified, the // uppercased domain of hostnames will be the realm. Realm *string `json:"realm,omitempty" tf:"realm,omitempty"` // The Cloud Storage URI of a KMS encrypted file // containing the root principal password. RootPrincipalPasswordURI *string `json:"rootPrincipalPasswordUri,omitempty" tf:"root_principal_password_uri,omitempty"` // The lifetime of the ticket granting ticket, in hours. TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"` // The Cloud Storage URI of a KMS encrypted file // containing the password to the user provided truststore. For the self-signed // certificate, this password is generated by Dataproc. TruststorePasswordURI *string `json:"truststorePasswordUri,omitempty" tf:"truststore_password_uri,omitempty"` // The Cloud Storage URI of the truststore file used for // SSL encryption. If not provided, Dataproc will provide a self-signed certificate. TruststoreURI *string `json:"truststoreUri,omitempty" tf:"truststore_uri,omitempty"` }
func (*KerberosConfigObservation) DeepCopy ¶
func (in *KerberosConfigObservation) DeepCopy() *KerberosConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KerberosConfigObservation.
func (*KerberosConfigObservation) DeepCopyInto ¶
func (in *KerberosConfigObservation) DeepCopyInto(out *KerberosConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KerberosConfigParameters ¶
type KerberosConfigParameters struct { // The admin server (IP or hostname) for the // remote trusted realm in a cross realm trust relationship. // +kubebuilder:validation:Optional CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"` // The KDC (IP or hostname) for the // remote trusted realm in a cross realm trust relationship. // +kubebuilder:validation:Optional CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"` // The remote realm the Dataproc on-cluster KDC will // trust, should the user enable cross realm trust. // +kubebuilder:validation:Optional CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"` // encrypted file containing the shared password between the on-cluster Kerberos realm // and the remote trusted realm, in a cross realm trust relationship. // +kubebuilder:validation:Optional CrossRealmTrustSharedPasswordURI *string `json:"crossRealmTrustSharedPasswordUri,omitempty" tf:"cross_realm_trust_shared_password_uri,omitempty"` // Flag to indicate whether to Kerberize the cluster. // +kubebuilder:validation:Optional EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"` // The URI of the KMS key used to encrypt various sensitive files. // +kubebuilder:validation:Required KMSKeyURI *string `json:"kmsKeyUri" tf:"kms_key_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the master key of the KDC database. // +kubebuilder:validation:Optional KdcDBKeyURI *string `json:"kdcDbKeyUri,omitempty" tf:"kdc_db_key_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the password to the user provided key. For the self-signed certificate, this password // is generated by Dataproc. // +kubebuilder:validation:Optional KeyPasswordURI *string `json:"keyPasswordUri,omitempty" tf:"key_password_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the password to the user provided keystore. For the self-signed certificate, the password // is generated by Dataproc. // +kubebuilder:validation:Optional KeystorePasswordURI *string `json:"keystorePasswordUri,omitempty" tf:"keystore_password_uri,omitempty"` // The Cloud Storage URI of the keystore file used for SSL encryption. // If not provided, Dataproc will provide a self-signed certificate. // +kubebuilder:validation:Optional KeystoreURI *string `json:"keystoreUri,omitempty" tf:"keystore_uri,omitempty"` // The name of the on-cluster Kerberos realm. If not specified, the // uppercased domain of hostnames will be the realm. // +kubebuilder:validation:Optional Realm *string `json:"realm,omitempty" tf:"realm,omitempty"` // The Cloud Storage URI of a KMS encrypted file // containing the root principal password. // +kubebuilder:validation:Required RootPrincipalPasswordURI *string `json:"rootPrincipalPasswordUri" tf:"root_principal_password_uri,omitempty"` // The lifetime of the ticket granting ticket, in hours. // +kubebuilder:validation:Optional TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"` // The Cloud Storage URI of a KMS encrypted file // containing the password to the user provided truststore. For the self-signed // certificate, this password is generated by Dataproc. // +kubebuilder:validation:Optional TruststorePasswordURI *string `json:"truststorePasswordUri,omitempty" tf:"truststore_password_uri,omitempty"` // The Cloud Storage URI of the truststore file used for // SSL encryption. If not provided, Dataproc will provide a self-signed certificate. // +kubebuilder:validation:Optional TruststoreURI *string `json:"truststoreUri,omitempty" tf:"truststore_uri,omitempty"` }
func (*KerberosConfigParameters) DeepCopy ¶
func (in *KerberosConfigParameters) DeepCopy() *KerberosConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KerberosConfigParameters.
func (*KerberosConfigParameters) DeepCopyInto ¶
func (in *KerberosConfigParameters) DeepCopyInto(out *KerberosConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
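Example (illustrative): a KerberosConfigParameters sketch with the two required fields (kmsKeyUri, rootPrincipalPasswordUri) plus a couple of optional ones; the KMS key resource name and bucket path are placeholders.

package example

import (
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

// kerberosSecurity Kerberizes the cluster, pointing at the KMS key that
// encrypted the root principal password file.
func kerberosSecurity() v1beta1.KerberosConfigParameters {
	return v1beta1.KerberosConfigParameters{
		EnableKerberos:           ptr(true),
		KMSKeyURI:                ptr("projects/my-project/locations/global/keyRings/dataproc/cryptoKeys/kerberos"),
		RootPrincipalPasswordURI: ptr("gs://my-bucket/kerberos/root-password.encrypted"),
		TgtLifetimeHours:         ptr(float64(10)),
	}
}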
type KeytabObservation ¶ added in v0.27.0
type KeytabObservation struct { // The relative resource name of a Secret Manager secret version, in the following form: // "projects/{projectNumber}/secrets/{secret_id}/versions/{version_id}". CloudSecret *string `json:"cloudSecret,omitempty" tf:"cloud_secret,omitempty"` }
func (*KeytabObservation) DeepCopy ¶ added in v0.27.0
func (in *KeytabObservation) DeepCopy() *KeytabObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KeytabObservation.
func (*KeytabObservation) DeepCopyInto ¶ added in v0.27.0
func (in *KeytabObservation) DeepCopyInto(out *KeytabObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KeytabParameters ¶ added in v0.27.0
type KeytabParameters struct { // The relative resource name of a Secret Manager secret version, in the following form: // "projects/{projectNumber}/secrets/{secret_id}/versions/{version_id}". // +kubebuilder:validation:Required CloudSecret *string `json:"cloudSecret" tf:"cloud_secret,omitempty"` }
func (*KeytabParameters) DeepCopy ¶ added in v0.27.0
func (in *KeytabParameters) DeepCopy() *KeytabParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KeytabParameters.
func (*KeytabParameters) DeepCopyInto ¶ added in v0.27.0
func (in *KeytabParameters) DeepCopyInto(out *KeytabParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesClusterConfigObservation ¶
type KubernetesClusterConfigObservation struct { // The configuration for running the Dataproc cluster on GKE. GkeClusterConfig []GkeClusterConfigObservation `json:"gkeClusterConfig,omitempty" tf:"gke_cluster_config,omitempty"` // A namespace within the Kubernetes cluster to deploy into. // If this namespace does not exist, it is created. // If it exists, Dataproc verifies that another Dataproc VirtualCluster is not installed into it. // If not specified, the name of the Dataproc Cluster is used. KubernetesNamespace *string `json:"kubernetesNamespace,omitempty" tf:"kubernetes_namespace,omitempty"` // The software configuration for this Dataproc cluster running on Kubernetes. KubernetesSoftwareConfig []KubernetesSoftwareConfigObservation `json:"kubernetesSoftwareConfig,omitempty" tf:"kubernetes_software_config,omitempty"` }
func (*KubernetesClusterConfigObservation) DeepCopy ¶
func (in *KubernetesClusterConfigObservation) DeepCopy() *KubernetesClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesClusterConfigObservation.
func (*KubernetesClusterConfigObservation) DeepCopyInto ¶
func (in *KubernetesClusterConfigObservation) DeepCopyInto(out *KubernetesClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesClusterConfigParameters ¶
type KubernetesClusterConfigParameters struct { // The configuration for running the Dataproc cluster on GKE. // +kubebuilder:validation:Required GkeClusterConfig []GkeClusterConfigParameters `json:"gkeClusterConfig" tf:"gke_cluster_config,omitempty"` // A namespace within the Kubernetes cluster to deploy into. // If this namespace does not exist, it is created. // If it exists, Dataproc verifies that another Dataproc VirtualCluster is not installed into it. // If not specified, the name of the Dataproc Cluster is used. // +kubebuilder:validation:Optional KubernetesNamespace *string `json:"kubernetesNamespace,omitempty" tf:"kubernetes_namespace,omitempty"` // The software configuration for this Dataproc cluster running on Kubernetes. // +kubebuilder:validation:Required KubernetesSoftwareConfig []KubernetesSoftwareConfigParameters `json:"kubernetesSoftwareConfig" tf:"kubernetes_software_config,omitempty"` }
func (*KubernetesClusterConfigParameters) DeepCopy ¶
func (in *KubernetesClusterConfigParameters) DeepCopy() *KubernetesClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesClusterConfigParameters.
func (*KubernetesClusterConfigParameters) DeepCopyInto ¶
func (in *KubernetesClusterConfigParameters) DeepCopyInto(out *KubernetesClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
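Example (illustrative): KubernetesClusterConfigParameters for a Dataproc virtual cluster on GKE; the GKE target itself is configured through GkeClusterConfigParameters (documented separately) and is left empty here, and the namespace, component version, and container image are placeholders.

package example

import (
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

func virtualClusterOnGKE() v1beta1.KubernetesClusterConfigParameters {
	return v1beta1.KubernetesClusterConfigParameters{
		KubernetesNamespace: ptr("dataproc"),
		// The target GKE cluster and node pools go here; see GkeClusterConfigParameters.
		GkeClusterConfig: []v1beta1.GkeClusterConfigParameters{{}},
		KubernetesSoftwareConfig: []v1beta1.KubernetesSoftwareConfigParameters{{
			ComponentVersion: map[string]*string{"SPARK": ptr("3.1-dataproc-7")},
			Properties: map[string]*string{
				"spark:spark.kubernetes.container.image": ptr("gcr.io/my-project/spark:latest"),
			},
		}},
	}
}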
type KubernetesSoftwareConfigObservation ¶
type KubernetesSoftwareConfigObservation struct { // The components that should be installed in this Dataproc cluster. The key must be a string from the // KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. ComponentVersion map[string]*string `json:"componentVersion,omitempty" tf:"component_version,omitempty"` // The properties to set on daemon config files. Property keys are specified in prefix:property format, // for example spark:spark.kubernetes.container.image. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*KubernetesSoftwareConfigObservation) DeepCopy ¶
func (in *KubernetesSoftwareConfigObservation) DeepCopy() *KubernetesSoftwareConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesSoftwareConfigObservation.
func (*KubernetesSoftwareConfigObservation) DeepCopyInto ¶
func (in *KubernetesSoftwareConfigObservation) DeepCopyInto(out *KubernetesSoftwareConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesSoftwareConfigParameters ¶
type KubernetesSoftwareConfigParameters struct { // The components that should be installed in this Dataproc cluster. The key must be a string from the // KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. // +kubebuilder:validation:Required ComponentVersion map[string]*string `json:"componentVersion" tf:"component_version,omitempty"` // The properties to set on daemon config files. Property keys are specified in prefix:property format, // for example spark:spark.kubernetes.container.image. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*KubernetesSoftwareConfigParameters) DeepCopy ¶
func (in *KubernetesSoftwareConfigParameters) DeepCopy() *KubernetesSoftwareConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesSoftwareConfigParameters.
func (*KubernetesSoftwareConfigParameters) DeepCopyInto ¶
func (in *KubernetesSoftwareConfigParameters) DeepCopyInto(out *KubernetesSoftwareConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LifecycleConfigObservation ¶
type LifecycleConfigObservation struct { // The time when cluster will be auto-deleted. // A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. // Example: "2014-10-02T15:01:23.045123456Z". AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // The duration to keep the cluster alive while idling // (no jobs running). After this TTL, the cluster will be deleted. Valid range: [10m, 14d]. IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` // Time when the cluster became idle // (most recent job finished) and became eligible for deletion due to idleness. IdleStartTime *string `json:"idleStartTime,omitempty" tf:"idle_start_time,omitempty"` }
func (*LifecycleConfigObservation) DeepCopy ¶
func (in *LifecycleConfigObservation) DeepCopy() *LifecycleConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LifecycleConfigObservation.
func (*LifecycleConfigObservation) DeepCopyInto ¶
func (in *LifecycleConfigObservation) DeepCopyInto(out *LifecycleConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LifecycleConfigParameters ¶
type LifecycleConfigParameters struct { // The time when cluster will be auto-deleted. // A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. // Example: "2014-10-02T15:01:23.045123456Z". // +kubebuilder:validation:Optional AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // The duration to keep the cluster alive while idling // (no jobs running). After this TTL, the cluster will be deleted. Valid range: [10m, 14d]. // +kubebuilder:validation:Optional IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` }
func (*LifecycleConfigParameters) DeepCopy ¶
func (in *LifecycleConfigParameters) DeepCopy() *LifecycleConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LifecycleConfigParameters.
func (*LifecycleConfigParameters) DeepCopyInto ¶
func (in *LifecycleConfigParameters) DeepCopyInto(out *LifecycleConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
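Example (illustrative): a LifecycleConfigParameters sketch that deletes the cluster after 30 idle minutes and sets a hard auto-delete timestamp; both values are placeholders within the documented formats.

package example

import (
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func ptr[T any](v T) *T { return &v }

func clusterLifecycle() v1beta1.LifecycleConfigParameters {
	return v1beta1.LifecycleConfigParameters{
		// 1800s = 30 minutes of idleness before deletion (valid range 10m-14d).
		IdleDeleteTTL: ptr("1800s"),
		// RFC3339 UTC "Zulu" timestamp for unconditional deletion.
		AutoDeleteTime: ptr("2024-06-01T00:00:00.000Z"),
	}
}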
type LoggingConfigObservation ¶
type LoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*LoggingConfigObservation) DeepCopy ¶
func (in *LoggingConfigObservation) DeepCopy() *LoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingConfigObservation.
func (*LoggingConfigObservation) DeepCopyInto ¶
func (in *LoggingConfigObservation) DeepCopyInto(out *LoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoggingConfigParameters ¶
type LoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Required DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*LoggingConfigParameters) DeepCopy ¶
func (in *LoggingConfigParameters) DeepCopy() *LoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingConfigParameters.
func (*LoggingConfigParameters) DeepCopyInto ¶
func (in *LoggingConfigParameters) DeepCopyInto(out *LoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaintenanceWindowObservation ¶ added in v0.27.0
type MaintenanceWindowObservation struct { // The day of week, when the window starts. // Possible values are: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. DayOfWeek *string `json:"dayOfWeek,omitempty" tf:"day_of_week,omitempty"` // The hour of day (0-23) when the window starts. HourOfDay *float64 `json:"hourOfDay,omitempty" tf:"hour_of_day,omitempty"` }
func (*MaintenanceWindowObservation) DeepCopy ¶ added in v0.27.0
func (in *MaintenanceWindowObservation) DeepCopy() *MaintenanceWindowObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaintenanceWindowObservation.
func (*MaintenanceWindowObservation) DeepCopyInto ¶ added in v0.27.0
func (in *MaintenanceWindowObservation) DeepCopyInto(out *MaintenanceWindowObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaintenanceWindowParameters ¶ added in v0.27.0
type MaintenanceWindowParameters struct { // The day of week, when the window starts. // Possible values are: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. // +kubebuilder:validation:Required DayOfWeek *string `json:"dayOfWeek" tf:"day_of_week,omitempty"` // The hour of day (0-23) when the window starts. // +kubebuilder:validation:Required HourOfDay *float64 `json:"hourOfDay" tf:"hour_of_day,omitempty"` }
func (*MaintenanceWindowParameters) DeepCopy ¶ added in v0.27.0
func (in *MaintenanceWindowParameters) DeepCopy() *MaintenanceWindowParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaintenanceWindowParameters.
func (*MaintenanceWindowParameters) DeepCopyInto ¶ added in v0.27.0
func (in *MaintenanceWindowParameters) DeepCopyInto(out *MaintenanceWindowParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterConfigObservation ¶
type ManagedClusterConfigObservation struct { // Optional. Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset. AutoscalingConfig []ConfigAutoscalingConfigObservation `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // Optional. Encryption settings for the cluster. EncryptionConfig []ConfigEncryptionConfigObservation `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Optional. Port/endpoint configuration for this cluster EndpointConfig []ConfigEndpointConfigObservation `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // Optional. The shared Compute Engine config settings for all instances in a cluster. GceClusterConfig []ConfigGceClusterConfigObservation `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Optional. Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if ; then ... master specific actions ... else ... worker specific actions ... fi InitializationActions []InitializationActionsObservation `json:"initializationActions,omitempty" tf:"initialization_actions,omitempty"` // Optional. Lifecycle setting for the cluster. LifecycleConfig []ConfigLifecycleConfigObservation `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // Optional. The Compute Engine config settings for additional worker instances in a cluster. MasterConfig []ConfigMasterConfigObservation `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // Optional. The Compute Engine config settings for additional worker instances in a cluster. SecondaryWorkerConfig []ConfigSecondaryWorkerConfigObservation `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Optional. Security settings for the cluster. SecurityConfig []ConfigSecurityConfigObservation `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // Optional. The config settings for software inside the cluster. SoftwareConfig []ConfigSoftwareConfigObservation `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // Optional. A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // Optional. The Compute Engine config settings for additional worker instances in a cluster. WorkerConfig []ConfigWorkerConfigObservation `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*ManagedClusterConfigObservation) DeepCopy ¶
func (in *ManagedClusterConfigObservation) DeepCopy() *ManagedClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterConfigObservation.
func (*ManagedClusterConfigObservation) DeepCopyInto ¶
func (in *ManagedClusterConfigObservation) DeepCopyInto(out *ManagedClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterConfigParameters ¶
type ManagedClusterConfigParameters struct { // Optional. Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset. // +kubebuilder:validation:Optional AutoscalingConfig []ConfigAutoscalingConfigParameters `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // Optional. Encryption settings for the cluster. // +kubebuilder:validation:Optional EncryptionConfig []ConfigEncryptionConfigParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Optional. Port/endpoint configuration for this cluster. // +kubebuilder:validation:Optional EndpointConfig []ConfigEndpointConfigParameters `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // Optional. The shared Compute Engine config settings for all instances in a cluster. // +kubebuilder:validation:Optional GceClusterConfig []ConfigGceClusterConfigParameters `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Optional. Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if [[ "${ROLE}" == 'Master' ]]; then ... master specific actions ... else ... worker specific actions ... fi // +kubebuilder:validation:Optional InitializationActions []InitializationActionsParameters `json:"initializationActions,omitempty" tf:"initialization_actions,omitempty"` // Optional. Lifecycle setting for the cluster. // +kubebuilder:validation:Optional LifecycleConfig []ConfigLifecycleConfigParameters `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // Optional. The Compute Engine config settings for the cluster's master instance. // +kubebuilder:validation:Optional MasterConfig []ConfigMasterConfigParameters `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // Optional. The Compute Engine config settings for the cluster's secondary worker instances. // +kubebuilder:validation:Optional SecondaryWorkerConfig []ConfigSecondaryWorkerConfigParameters `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Optional. Security settings for the cluster. // +kubebuilder:validation:Optional SecurityConfig []ConfigSecurityConfigParameters `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // Optional. The config settings for software inside the cluster. // +kubebuilder:validation:Optional SoftwareConfig []ConfigSoftwareConfigParameters `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // Optional. A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket). // +kubebuilder:validation:Optional StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. // +kubebuilder:validation:Optional TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // Optional. The Compute Engine config settings for the cluster's worker instances. // +kubebuilder:validation:Optional WorkerConfig []ConfigWorkerConfigParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
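A minimal sketch of populating this struct from Go. The import path is assumed from the provider's repository layout, and the bucket names and the strPtr helper are illustrative only.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

// strPtr is a small helper for the pointer-typed string fields.
func strPtr(s string) *string { return &s }

func main() {
	// Configure only the staging and temp buckets; Dataproc manages defaults for the rest.
	cfg := dataproc.ManagedClusterConfigParameters{
		StagingBucket: strPtr("example-staging-bucket"),
		TempBucket:    strPtr("example-temp-bucket"),
	}
	_ = cfg
}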
func (*ManagedClusterConfigParameters) DeepCopy ¶
func (in *ManagedClusterConfigParameters) DeepCopy() *ManagedClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterConfigParameters.
func (*ManagedClusterConfigParameters) DeepCopyInto ¶
func (in *ManagedClusterConfigParameters) DeepCopyInto(out *ManagedClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterObservation ¶
type ManagedClusterObservation struct { // Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // Required. The cluster configuration. Config []ManagedClusterConfigObservation `json:"config,omitempty" tf:"config,omitempty"` // Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster. Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` }
func (*ManagedClusterObservation) DeepCopy ¶
func (in *ManagedClusterObservation) DeepCopy() *ManagedClusterObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterObservation.
func (*ManagedClusterObservation) DeepCopyInto ¶
func (in *ManagedClusterObservation) DeepCopyInto(out *ManagedClusterObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterParameters ¶
type ManagedClusterParameters struct { // Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. // +kubebuilder:validation:Required ClusterName *string `json:"clusterName" tf:"cluster_name,omitempty"` // Required. The cluster configuration. // +kubebuilder:validation:Required Config []ManagedClusterConfigParameters `json:"config" tf:"config,omitempty"` // Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster. // +kubebuilder:validation:Optional Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` }
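A sketch of a complete ManagedClusterParameters value, reusing the config sketch above; the cluster name, bucket, and labels are placeholders, and the import path is an assumption.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string { return &s }

func main() {
	// ClusterName is a prefix: lower-case letters, digits, and hyphens, 2-35 characters,
	// starting with a letter and not ending with a hyphen.
	mc := dataproc.ManagedClusterParameters{
		ClusterName: strPtr("analytics"),
		Config: []dataproc.ManagedClusterConfigParameters{{
			StagingBucket: strPtr("example-staging-bucket"),
		}},
		Labels: map[string]*string{"env": strPtr("dev")},
	}
	_ = mc
}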
func (*ManagedClusterParameters) DeepCopy ¶
func (in *ManagedClusterParameters) DeepCopy() *ManagedClusterParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterParameters.
func (*ManagedClusterParameters) DeepCopyInto ¶
func (in *ManagedClusterParameters) DeepCopyInto(out *ManagedClusterParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedGroupConfigObservation ¶
type ManagedGroupConfigObservation struct { // Output only. The name of the Instance Group Manager for this group. InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name,omitempty"` // Output only. The name of the Instance Template used for the Managed Instance Group. InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name,omitempty"` }
func (*ManagedGroupConfigObservation) DeepCopy ¶
func (in *ManagedGroupConfigObservation) DeepCopy() *ManagedGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedGroupConfigObservation.
func (*ManagedGroupConfigObservation) DeepCopyInto ¶
func (in *ManagedGroupConfigObservation) DeepCopyInto(out *ManagedGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedGroupConfigParameters ¶
type ManagedGroupConfigParameters struct { }
func (*ManagedGroupConfigParameters) DeepCopy ¶
func (in *ManagedGroupConfigParameters) DeepCopy() *ManagedGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedGroupConfigParameters.
func (*ManagedGroupConfigParameters) DeepCopyInto ¶
func (in *ManagedGroupConfigParameters) DeepCopyInto(out *ManagedGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigAcceleratorsObservation ¶
type MasterConfigAcceleratorsObservation struct { // The number of the accelerator cards of this type exposed to this instance. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. If you are using the auto zone placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*MasterConfigAcceleratorsObservation) DeepCopy ¶
func (in *MasterConfigAcceleratorsObservation) DeepCopy() *MasterConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigAcceleratorsObservation.
func (*MasterConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *MasterConfigAcceleratorsObservation) DeepCopyInto(out *MasterConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigAcceleratorsParameters ¶
type MasterConfigAcceleratorsParameters struct { // The number of the accelerator cards of this type exposed to this instance. // +kubebuilder:validation:Optional AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. If you are using the auto zone placement feature (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement), you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. // +kubebuilder:validation:Optional AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
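An illustrative Go sketch; the accelerator type and count are example values, and the import path and helpers are assumptions.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string   { return &s }
func f64Ptr(f float64) *float64 { return &f }

func main() {
	// One NVIDIA Tesla K80 per master instance, using the short type name
	// required by auto zone placement.
	acc := dataproc.MasterConfigAcceleratorsParameters{
		AcceleratorCount: f64Ptr(1),
		AcceleratorType:  strPtr("nvidia-tesla-k80"),
	}
	_ = acc
}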
func (*MasterConfigAcceleratorsParameters) DeepCopy ¶
func (in *MasterConfigAcceleratorsParameters) DeepCopy() *MasterConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigAcceleratorsParameters.
func (*MasterConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *MasterConfigAcceleratorsParameters) DeepCopyInto(out *MasterConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigDiskConfigObservation ¶
type MasterConfigDiskConfigObservation struct { // Optional. Size in GB of the boot disk (default is 500GB). BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*MasterConfigDiskConfigObservation) DeepCopy ¶
func (in *MasterConfigDiskConfigObservation) DeepCopy() *MasterConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigDiskConfigObservation.
func (*MasterConfigDiskConfigObservation) DeepCopyInto ¶
func (in *MasterConfigDiskConfigObservation) DeepCopyInto(out *MasterConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigDiskConfigParameters ¶
type MasterConfigDiskConfigParameters struct { // Optional. Size in GB of the boot disk (default is 500GB). // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
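A minimal sketch under the same assumptions as the earlier examples; the sizes and disk type are arbitrary sample values.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string   { return &s }
func f64Ptr(f float64) *float64 { return &f }

func main() {
	// A 100 GB pd-ssd boot disk with a single local SSD for runtime bulk data.
	disk := dataproc.MasterConfigDiskConfigParameters{
		BootDiskSizeGb: f64Ptr(100),
		BootDiskType:   strPtr("pd-ssd"),
		NumLocalSsds:   f64Ptr(1),
	}
	_ = disk
}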
func (*MasterConfigDiskConfigParameters) DeepCopy ¶
func (in *MasterConfigDiskConfigParameters) DeepCopy() *MasterConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigDiskConfigParameters.
func (*MasterConfigDiskConfigParameters) DeepCopyInto ¶
func (in *MasterConfigDiskConfigParameters) DeepCopyInto(out *MasterConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigObservation ¶
type MasterConfigObservation struct { // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. Accelerators []AcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig []DiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this master node. See the guide // for more information. ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // List of master instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // The name of a Google Compute Engine machine type // to create for the master. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 1). NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*MasterConfigObservation) DeepCopy ¶
func (in *MasterConfigObservation) DeepCopy() *MasterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigObservation.
func (*MasterConfigObservation) DeepCopyInto ¶
func (in *MasterConfigObservation) DeepCopyInto(out *MasterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigParameters ¶
type MasterConfigParameters struct { // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. // +kubebuilder:validation:Optional Accelerators []AcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config // +kubebuilder:validation:Optional DiskConfig []DiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this master node. See the guide // for more information. // +kubebuilder:validation:Optional ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // The name of a Google Compute Engine machine type // to create for the master. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 1). // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
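A sketch of a common master configuration; the machine type and count are illustrative, and the import path and helpers are assumed as before.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string   { return &s }
func f64Ptr(f float64) *float64 { return &f }

func main() {
	// A single n1-standard-4 master; DiskConfig and Accelerators can be nested
	// using the types documented elsewhere in this package.
	master := dataproc.MasterConfigParameters{
		MachineType:  strPtr("n1-standard-4"),
		NumInstances: f64Ptr(1),
	}
	_ = master
}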
func (*MasterConfigParameters) DeepCopy ¶
func (in *MasterConfigParameters) DeepCopy() *MasterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigParameters.
func (*MasterConfigParameters) DeepCopyInto ¶
func (in *MasterConfigParameters) DeepCopyInto(out *MasterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreConfigObservation ¶
type MetastoreConfigObservation struct { // Resource name of an existing Dataproc Metastore service. DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"` }
func (*MetastoreConfigObservation) DeepCopy ¶
func (in *MetastoreConfigObservation) DeepCopy() *MetastoreConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreConfigObservation.
func (*MetastoreConfigObservation) DeepCopyInto ¶
func (in *MetastoreConfigObservation) DeepCopyInto(out *MetastoreConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreConfigParameters ¶
type MetastoreConfigParameters struct { // Resource name of an existing Dataproc Metastore service. // +kubebuilder:validation:Required DataprocMetastoreService *string `json:"dataprocMetastoreService" tf:"dataproc_metastore_service,omitempty"` }
func (*MetastoreConfigParameters) DeepCopy ¶
func (in *MetastoreConfigParameters) DeepCopy() *MetastoreConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreConfigParameters.
func (*MetastoreConfigParameters) DeepCopyInto ¶
func (in *MetastoreConfigParameters) DeepCopyInto(out *MetastoreConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreService ¶ added in v0.27.0
type MetastoreService struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec MetastoreServiceSpec `json:"spec"` Status MetastoreServiceStatus `json:"status,omitempty"` }
MetastoreService is the Schema for the MetastoreServices API. A managed metastore service that serves metadata queries. +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:subresource:status +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
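A minimal sketch of building the managed resource in Go; the object name, location, and tier are placeholders, and the import paths are assumptions based on the provider layout.

package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func strPtr(s string) *string { return &s }

func main() {
	// A cluster-scoped managed resource; Crossplane fills in the status conditions
	// (Ready, Synced) that back the print columns above.
	svc := dataproc.MetastoreService{
		ObjectMeta: metav1.ObjectMeta{Name: "example-metastore"},
		Spec: dataproc.MetastoreServiceSpec{
			ForProvider: dataproc.MetastoreServiceParameters{
				Location: strPtr("us-central1"),
				Tier:     strPtr("DEVELOPER"),
			},
		},
	}
	_ = svc
}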
func (*MetastoreService) DeepCopy ¶ added in v0.27.0
func (in *MetastoreService) DeepCopy() *MetastoreService
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreService.
func (*MetastoreService) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreService) DeepCopyInto(out *MetastoreService)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*MetastoreService) DeepCopyObject ¶ added in v0.27.0
func (in *MetastoreService) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*MetastoreService) GetCondition ¶ added in v0.27.0
func (mg *MetastoreService) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this MetastoreService.
func (*MetastoreService) GetConnectionDetailsMapping ¶ added in v0.27.0
func (tr *MetastoreService) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this MetastoreService
func (*MetastoreService) GetDeletionPolicy ¶ added in v0.27.0
func (mg *MetastoreService) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this MetastoreService.
func (*MetastoreService) GetID ¶ added in v0.27.0
func (tr *MetastoreService) GetID() string
GetID returns ID of underlying Terraform resource of this MetastoreService
func (*MetastoreService) GetManagementPolicy ¶ added in v0.31.0
func (mg *MetastoreService) GetManagementPolicy() xpv1.ManagementPolicy
GetManagementPolicy of this MetastoreService.
func (*MetastoreService) GetObservation ¶ added in v0.27.0
func (tr *MetastoreService) GetObservation() (map[string]any, error)
GetObservation of this MetastoreService
func (*MetastoreService) GetParameters ¶ added in v0.27.0
func (tr *MetastoreService) GetParameters() (map[string]any, error)
GetParameters of this MetastoreService
func (*MetastoreService) GetProviderConfigReference ¶ added in v0.27.0
func (mg *MetastoreService) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this MetastoreService.
func (*MetastoreService) GetProviderReference ¶ added in v0.27.0
func (mg *MetastoreService) GetProviderReference() *xpv1.Reference
GetProviderReference of this MetastoreService. Deprecated: Use GetProviderConfigReference.
func (*MetastoreService) GetPublishConnectionDetailsTo ¶ added in v0.27.0
func (mg *MetastoreService) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this MetastoreService.
func (*MetastoreService) GetTerraformResourceType ¶ added in v0.27.0
func (mg *MetastoreService) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this MetastoreService
func (*MetastoreService) GetTerraformSchemaVersion ¶ added in v0.27.0
func (tr *MetastoreService) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*MetastoreService) GetWriteConnectionSecretToReference ¶ added in v0.27.0
func (mg *MetastoreService) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this MetastoreService.
func (*MetastoreService) LateInitialize ¶ added in v0.27.0
func (tr *MetastoreService) LateInitialize(attrs []byte) (bool, error)
LateInitialize this MetastoreService using its observed tfState. Returns true if there are any spec changes for the resource.
func (*MetastoreService) ResolveReferences ¶ added in v0.27.0
func (mg *MetastoreService) ResolveReferences(ctx context.Context, c client.Reader) error
ResolveReferences of this MetastoreService.
func (*MetastoreService) SetConditions ¶ added in v0.27.0
func (mg *MetastoreService) SetConditions(c ...xpv1.Condition)
SetConditions of this MetastoreService.
func (*MetastoreService) SetDeletionPolicy ¶ added in v0.27.0
func (mg *MetastoreService) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this MetastoreService.
func (*MetastoreService) SetManagementPolicy ¶ added in v0.31.0
func (mg *MetastoreService) SetManagementPolicy(r xpv1.ManagementPolicy)
SetManagementPolicy of this MetastoreService.
func (*MetastoreService) SetObservation ¶ added in v0.27.0
func (tr *MetastoreService) SetObservation(obs map[string]any) error
SetObservation for this MetastoreService
func (*MetastoreService) SetParameters ¶ added in v0.27.0
func (tr *MetastoreService) SetParameters(params map[string]any) error
SetParameters for this MetastoreService
func (*MetastoreService) SetProviderConfigReference ¶ added in v0.27.0
func (mg *MetastoreService) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this MetastoreService.
func (*MetastoreService) SetProviderReference ¶ added in v0.27.0
func (mg *MetastoreService) SetProviderReference(r *xpv1.Reference)
SetProviderReference of this MetastoreService. Deprecated: Use SetProviderConfigReference.
func (*MetastoreService) SetPublishConnectionDetailsTo ¶ added in v0.27.0
func (mg *MetastoreService) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this MetastoreService.
func (*MetastoreService) SetWriteConnectionSecretToReference ¶ added in v0.27.0
func (mg *MetastoreService) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this MetastoreService.
type MetastoreServiceEncryptionConfigObservation ¶ added in v0.27.0
type MetastoreServiceEncryptionConfigObservation struct { // The fully qualified customer provided Cloud KMS key name to use for customer data encryption. // Use the following format: projects/([^/]+)/locations/([^/]+)/keyRings/([^/]+)/cryptoKeys/([^/]+) KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"` }
func (*MetastoreServiceEncryptionConfigObservation) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceEncryptionConfigObservation) DeepCopy() *MetastoreServiceEncryptionConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceEncryptionConfigObservation.
func (*MetastoreServiceEncryptionConfigObservation) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceEncryptionConfigObservation) DeepCopyInto(out *MetastoreServiceEncryptionConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceEncryptionConfigParameters ¶ added in v0.27.0
type MetastoreServiceEncryptionConfigParameters struct { // The fully qualified customer provided Cloud KMS key name to use for customer data encryption. // Use the following format: projects/([^/]+)/locations/([^/]+)/keyRings/([^/]+)/cryptoKeys/([^/]+) // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta1.CryptoKey // +crossplane:generate:reference:extractor=github.com/upbound/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"` // Reference to a CryptoKey in kms to populate kmsKey. // +kubebuilder:validation:Optional KMSKeyRef *v1.Reference `json:"kmsKeyRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKey. // +kubebuilder:validation:Optional KMSKeySelector *v1.Selector `json:"kmsKeySelector,omitempty" tf:"-"` }
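A sketch showing the two ways to point at the KMS key: a literal key name, or a reference that the provider resolves from a managed CryptoKey. The key and object names are placeholders, and the import paths are assumptions.

package main

import (
	xpv1 "github.com/crossplane/crossplane-runtime/apis/common/v1"

	dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func strPtr(s string) *string { return &s }

func main() {
	// Option 1: set the fully qualified key name directly.
	direct := dataproc.MetastoreServiceEncryptionConfigParameters{
		KMSKey: strPtr("projects/example-project/locations/us-central1/keyRings/example-ring/cryptoKeys/example-key"),
	}
	// Option 2: reference a CryptoKey managed resource and let the provider resolve it.
	byRef := dataproc.MetastoreServiceEncryptionConfigParameters{
		KMSKeyRef: &xpv1.Reference{Name: "example-cryptokey"},
	}
	_, _ = direct, byRef
}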
func (*MetastoreServiceEncryptionConfigParameters) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceEncryptionConfigParameters) DeepCopy() *MetastoreServiceEncryptionConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceEncryptionConfigParameters.
func (*MetastoreServiceEncryptionConfigParameters) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceEncryptionConfigParameters) DeepCopyInto(out *MetastoreServiceEncryptionConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceList ¶ added in v0.27.0
type MetastoreServiceList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []MetastoreService `json:"items"` }
MetastoreServiceList contains a list of MetastoreServices
func (*MetastoreServiceList) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceList) DeepCopy() *MetastoreServiceList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceList.
func (*MetastoreServiceList) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceList) DeepCopyInto(out *MetastoreServiceList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*MetastoreServiceList) DeepCopyObject ¶ added in v0.27.0
func (in *MetastoreServiceList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*MetastoreServiceList) GetItems ¶ added in v0.27.0
func (l *MetastoreServiceList) GetItems() []resource.Managed
GetItems of this MetastoreServiceList.
type MetastoreServiceObservation ¶ added in v0.27.0
type MetastoreServiceObservation struct { // A Cloud Storage URI (starting with gs://) that specifies where artifacts related to the metastore service are stored. ArtifactGcsURI *string `json:"artifactGcsUri,omitempty" tf:"artifact_gcs_uri,omitempty"` // The database type that the Metastore service stores its data. // Default value is MYSQL. // Possible values are: MYSQL, SPANNER. DatabaseType *string `json:"databaseType,omitempty" tf:"database_type,omitempty"` // Information used to configure the Dataproc Metastore service to encrypt // customer data at rest. // Structure is documented below. EncryptionConfig []MetastoreServiceEncryptionConfigObservation `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // The URI of the endpoint used to access the metastore service. EndpointURI *string `json:"endpointUri,omitempty" tf:"endpoint_uri,omitempty"` // Configuration information specific to running Hive metastore software as the metastore service. // Structure is documented below. HiveMetastoreConfig []HiveMetastoreConfigObservation `json:"hiveMetastoreConfig,omitempty" tf:"hive_metastore_config,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/services/{{service_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // User-defined labels for the metastore service. Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The location where the metastore service should reside. // The default value is global. Location *string `json:"location,omitempty" tf:"location,omitempty"` // The one hour maintenance window of the metastore service. // This specifies when the service can be restarted for maintenance purposes in UTC time. // Maintenance window is not needed for services with the SPANNER database type. // Structure is documented below. MaintenanceWindow []MaintenanceWindowObservation `json:"maintenanceWindow,omitempty" tf:"maintenance_window,omitempty"` // The relative resource name of the metastore service. Name *string `json:"name,omitempty" tf:"name,omitempty"` // The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form: // "projects/{projectNumber}/global/networks/{network_id}". Network *string `json:"network,omitempty" tf:"network,omitempty"` // The configuration specifying the network settings for the Dataproc Metastore service. // Structure is documented below. NetworkConfig []NetworkConfigObservation `json:"networkConfig,omitempty" tf:"network_config,omitempty"` // The TCP port at which the metastore service is reached. Default: 9083. Port *float64 `json:"port,omitempty" tf:"port,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // The release channel of the service. If unspecified, defaults to STABLE. // Default value is STABLE. // Possible values are: CANARY, STABLE. ReleaseChannel *string `json:"releaseChannel,omitempty" tf:"release_channel,omitempty"` // The current state of the metastore service. State *string `json:"state,omitempty" tf:"state,omitempty"` // Additional information about the current state of the metastore service, if available. StateMessage *string `json:"stateMessage,omitempty" tf:"state_message,omitempty"` // The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified defaults to JSON. // Structure is documented below. 
TelemetryConfig []TelemetryConfigObservation `json:"telemetryConfig,omitempty" tf:"telemetry_config,omitempty"` // The tier of the service. // Possible values are: DEVELOPER, ENTERPRISE. Tier *string `json:"tier,omitempty" tf:"tier,omitempty"` // The globally unique resource identifier of the metastore service. UID *string `json:"uid,omitempty" tf:"uid,omitempty"` }
func (*MetastoreServiceObservation) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceObservation) DeepCopy() *MetastoreServiceObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceObservation.
func (*MetastoreServiceObservation) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceObservation) DeepCopyInto(out *MetastoreServiceObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceParameters ¶ added in v0.27.0
type MetastoreServiceParameters struct { // The database type that the Metastore service stores its data. // Default value is MYSQL. // Possible values are: MYSQL, SPANNER. // +kubebuilder:validation:Optional DatabaseType *string `json:"databaseType,omitempty" tf:"database_type,omitempty"` // Information used to configure the Dataproc Metastore service to encrypt // customer data at rest. // Structure is documented below. // +kubebuilder:validation:Optional EncryptionConfig []MetastoreServiceEncryptionConfigParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Configuration information specific to running Hive metastore software as the metastore service. // Structure is documented below. // +kubebuilder:validation:Optional HiveMetastoreConfig []HiveMetastoreConfigParameters `json:"hiveMetastoreConfig,omitempty" tf:"hive_metastore_config,omitempty"` // User-defined labels for the metastore service. // +kubebuilder:validation:Optional Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The location where the metastore service should reside. // The default value is global. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // The one hour maintenance window of the metastore service. // This specifies when the service can be restarted for maintenance purposes in UTC time. // Maintenance window is not needed for services with the SPANNER database type. // Structure is documented below. // +kubebuilder:validation:Optional MaintenanceWindow []MaintenanceWindowParameters `json:"maintenanceWindow,omitempty" tf:"maintenance_window,omitempty"` // The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form: // "projects/{projectNumber}/global/networks/{network_id}". // +kubebuilder:validation:Optional Network *string `json:"network,omitempty" tf:"network,omitempty"` // The configuration specifying the network settings for the Dataproc Metastore service. // Structure is documented below. // +kubebuilder:validation:Optional NetworkConfig []NetworkConfigParameters `json:"networkConfig,omitempty" tf:"network_config,omitempty"` // The TCP port at which the metastore service is reached. Default: 9083. // +kubebuilder:validation:Optional Port *float64 `json:"port,omitempty" tf:"port,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // The release channel of the service. If unspecified, defaults to STABLE. // Default value is STABLE. // Possible values are: CANARY, STABLE. // +kubebuilder:validation:Optional ReleaseChannel *string `json:"releaseChannel,omitempty" tf:"release_channel,omitempty"` // The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified defaults to JSON. // Structure is documented below. // +kubebuilder:validation:Optional TelemetryConfig []TelemetryConfigParameters `json:"telemetryConfig,omitempty" tf:"telemetry_config,omitempty"` // The tier of the service. // Possible values are: DEVELOPER, ENTERPRISE. // +kubebuilder:validation:Optional Tier *string `json:"tier,omitempty" tf:"tier,omitempty"` }
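A sketch of a typical forProvider block expressed as Go values; every value here is a placeholder, and the import path and helpers are assumed as in the earlier examples.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string   { return &s }
func f64Ptr(f float64) *float64 { return &f }

func main() {
	params := dataproc.MetastoreServiceParameters{
		Location:       strPtr("us-central1"),
		Port:           f64Ptr(9083),
		ReleaseChannel: strPtr("STABLE"),
		Tier:           strPtr("DEVELOPER"),
		Labels:         map[string]*string{"team": strPtr("data")},
	}
	_ = params
}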
func (*MetastoreServiceParameters) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceParameters) DeepCopy() *MetastoreServiceParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceParameters.
func (*MetastoreServiceParameters) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceParameters) DeepCopyInto(out *MetastoreServiceParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceSpec ¶ added in v0.27.0
type MetastoreServiceSpec struct { v1.ResourceSpec `json:",inline"` ForProvider MetastoreServiceParameters `json:"forProvider"` }
MetastoreServiceSpec defines the desired state of MetastoreService
func (*MetastoreServiceSpec) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceSpec) DeepCopy() *MetastoreServiceSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceSpec.
func (*MetastoreServiceSpec) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceSpec) DeepCopyInto(out *MetastoreServiceSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceStatus ¶ added in v0.27.0
type MetastoreServiceStatus struct { v1.ResourceStatus `json:",inline"` AtProvider MetastoreServiceObservation `json:"atProvider,omitempty"` }
MetastoreServiceStatus defines the observed state of MetastoreService.
func (*MetastoreServiceStatus) DeepCopy ¶ added in v0.27.0
func (in *MetastoreServiceStatus) DeepCopy() *MetastoreServiceStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceStatus.
func (*MetastoreServiceStatus) DeepCopyInto ¶ added in v0.27.0
func (in *MetastoreServiceStatus) DeepCopyInto(out *MetastoreServiceStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetricsObservation ¶ added in v0.27.0
type MetricsObservation struct { // One or more available OSS metrics (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics) to collect for the metric source. MetricOverrides []*string `json:"metricOverrides,omitempty" tf:"metric_overrides,omitempty"` // A source for the collection of Dataproc OSS metrics (see available OSS metrics). MetricSource *string `json:"metricSource,omitempty" tf:"metric_source,omitempty"` }
func (*MetricsObservation) DeepCopy ¶ added in v0.27.0
func (in *MetricsObservation) DeepCopy() *MetricsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricsObservation.
func (*MetricsObservation) DeepCopyInto ¶ added in v0.27.0
func (in *MetricsObservation) DeepCopyInto(out *MetricsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetricsParameters ¶ added in v0.27.0
type MetricsParameters struct { // One or more available OSS metrics (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics) to collect for the metric source. // +kubebuilder:validation:Optional MetricOverrides []*string `json:"metricOverrides,omitempty" tf:"metric_overrides,omitempty"` // A source for the collection of Dataproc OSS metrics (see available OSS metrics). // +kubebuilder:validation:Required MetricSource *string `json:"metricSource" tf:"metric_source,omitempty"` }
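A sketch of collecting Spark driver metrics; the metric source value and the override string follow the public Dataproc monitoring documentation, but treat them as illustrative here, along with the assumed import path and helper.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string { return &s }

func main() {
	// Collect the SPARK metric source and override one specific metric.
	m := dataproc.MetricsParameters{
		MetricSource:    strPtr("SPARK"),
		MetricOverrides: []*string{strPtr("spark:driver:DAGScheduler:job.allJobs")},
	}
	_ = m
}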
func (*MetricsParameters) DeepCopy ¶ added in v0.27.0
func (in *MetricsParameters) DeepCopy() *MetricsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricsParameters.
func (*MetricsParameters) DeepCopyInto ¶ added in v0.27.0
func (in *MetricsParameters) DeepCopyInto(out *MetricsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NetworkConfigObservation ¶ added in v0.27.0
type NetworkConfigObservation struct { // The consumer-side network configuration for the Dataproc Metastore instance. // Structure is documented below. Consumers []ConsumersObservation `json:"consumers,omitempty" tf:"consumers,omitempty"` }
func (*NetworkConfigObservation) DeepCopy ¶ added in v0.27.0
func (in *NetworkConfigObservation) DeepCopy() *NetworkConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NetworkConfigObservation.
func (*NetworkConfigObservation) DeepCopyInto ¶ added in v0.27.0
func (in *NetworkConfigObservation) DeepCopyInto(out *NetworkConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NetworkConfigParameters ¶ added in v0.27.0
type NetworkConfigParameters struct { // The consumer-side network configuration for the Dataproc Metastore instance. // Structure is documented below. // +kubebuilder:validation:Required Consumers []ConsumersParameters `json:"consumers" tf:"consumers,omitempty"` }
func (*NetworkConfigParameters) DeepCopy ¶ added in v0.27.0
func (in *NetworkConfigParameters) DeepCopy() *NetworkConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NetworkConfigParameters.
func (*NetworkConfigParameters) DeepCopyInto ¶ added in v0.27.0
func (in *NetworkConfigParameters) DeepCopyInto(out *NetworkConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupAffinityObservation ¶
type NodeGroupAffinityObservation struct { // The URI of a sole-tenant node group resource that the cluster will be created on. NodeGroupURI *string `json:"nodeGroupUri,omitempty" tf:"node_group_uri,omitempty"` }
func (*NodeGroupAffinityObservation) DeepCopy ¶
func (in *NodeGroupAffinityObservation) DeepCopy() *NodeGroupAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupAffinityObservation.
func (*NodeGroupAffinityObservation) DeepCopyInto ¶
func (in *NodeGroupAffinityObservation) DeepCopyInto(out *NodeGroupAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupAffinityParameters ¶
type NodeGroupAffinityParameters struct { // The URI of a sole-tenant node group resource that the cluster will be created on. // +kubebuilder:validation:Required NodeGroupURI *string `json:"nodeGroupUri" tf:"node_group_uri,omitempty"` }
func (*NodeGroupAffinityParameters) DeepCopy ¶
func (in *NodeGroupAffinityParameters) DeepCopy() *NodeGroupAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupAffinityParameters.
func (*NodeGroupAffinityParameters) DeepCopyInto ¶
func (in *NodeGroupAffinityParameters) DeepCopyInto(out *NodeGroupAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolConfigObservation ¶
type NodePoolConfigObservation struct { // The autoscaler configuration for this node pool. // The autoscaler is enabled only when a valid configuration is present. Autoscaling []AutoscalingObservation `json:"autoscaling,omitempty" tf:"autoscaling,omitempty"` // The node pool configuration. Config []ConfigObservation `json:"config,omitempty" tf:"config,omitempty"` // The list of Compute Engine zones where node pool nodes associated // with a Dataproc on GKE virtual cluster will be located. Locations []*string `json:"locations,omitempty" tf:"locations,omitempty"` }
func (*NodePoolConfigObservation) DeepCopy ¶
func (in *NodePoolConfigObservation) DeepCopy() *NodePoolConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolConfigObservation.
func (*NodePoolConfigObservation) DeepCopyInto ¶
func (in *NodePoolConfigObservation) DeepCopyInto(out *NodePoolConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolConfigParameters ¶
type NodePoolConfigParameters struct { // The autoscaler configuration for this node pool. // The autoscaler is enabled only when a valid configuration is present. // +kubebuilder:validation:Optional Autoscaling []AutoscalingParameters `json:"autoscaling,omitempty" tf:"autoscaling,omitempty"` // The node pool configuration. // +kubebuilder:validation:Optional Config []ConfigParameters `json:"config,omitempty" tf:"config,omitempty"` // The list of Compute Engine zones where node pool nodes associated // with a Dataproc on GKE virtual cluster will be located. // +kubebuilder:validation:Required Locations []*string `json:"locations" tf:"locations,omitempty"` }
func (*NodePoolConfigParameters) DeepCopy ¶
func (in *NodePoolConfigParameters) DeepCopy() *NodePoolConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolConfigParameters.
func (*NodePoolConfigParameters) DeepCopyInto ¶
func (in *NodePoolConfigParameters) DeepCopyInto(out *NodePoolConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolTargetObservation ¶
type NodePoolTargetObservation struct { // The target GKE node pool. NodePool *string `json:"nodePool,omitempty" tf:"node_pool,omitempty"` // (Input only) The configuration for the GKE node pool. // If specified, Dataproc attempts to create a node pool with the specified shape. // If one with the same name already exists, it is verified against all specified fields. // If a field differs, the virtual cluster creation will fail. NodePoolConfig []NodePoolConfigObservation `json:"nodePoolConfig,omitempty" tf:"node_pool_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". Roles []*string `json:"roles,omitempty" tf:"roles,omitempty"` }
func (*NodePoolTargetObservation) DeepCopy ¶
func (in *NodePoolTargetObservation) DeepCopy() *NodePoolTargetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolTargetObservation.
func (*NodePoolTargetObservation) DeepCopyInto ¶
func (in *NodePoolTargetObservation) DeepCopyInto(out *NodePoolTargetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolTargetParameters ¶
type NodePoolTargetParameters struct { // The target GKE node pool. // +kubebuilder:validation:Required NodePool *string `json:"nodePool" tf:"node_pool,omitempty"` // (Input only) The configuration for the GKE node pool. // If specified, Dataproc attempts to create a node pool with the specified shape. // If one with the same name already exists, it is verified against all specified fields. // If a field differs, the virtual cluster creation will fail. // +kubebuilder:validation:Optional NodePoolConfig []NodePoolConfigParameters `json:"nodePoolConfig,omitempty" tf:"node_pool_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". // +kubebuilder:validation:Required Roles []*string `json:"roles" tf:"roles,omitempty"` }
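A minimal sketch; the GKE node pool name format and the role are placeholders, and the import path and helper are assumptions.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string { return &s }

func main() {
	// Target an existing GKE node pool and give it the DEFAULT role.
	target := dataproc.NodePoolTargetParameters{
		NodePool: strPtr("projects/example-project/locations/us-central1/clusters/example-gke/nodePools/dataproc-default"),
		Roles:    []*string{strPtr("DEFAULT")},
	}
	_ = target
}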
func (*NodePoolTargetParameters) DeepCopy ¶
func (in *NodePoolTargetParameters) DeepCopy() *NodePoolTargetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolTargetParameters.
func (*NodePoolTargetParameters) DeepCopyInto ¶
func (in *NodePoolTargetParameters) DeepCopyInto(out *NodePoolTargetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParametersObservation ¶
type ParametersObservation struct { // Optional. Brief description of the parameter. Must not exceed 1024 characters. Description *string `json:"description,omitempty" tf:"description,omitempty"` // Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask, for example placement.clusterSelector.zone or jobs['step-id'].sparkJob.args. Fields []*string `json:"fields,omitempty" tf:"fields,omitempty"` // Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters. Name *string `json:"name,omitempty" tf:"name,omitempty"` // Optional. Validation rules to be applied to this parameter's value. Validation []ValidationObservation `json:"validation,omitempty" tf:"validation,omitempty"` }
func (*ParametersObservation) DeepCopy ¶
func (in *ParametersObservation) DeepCopy() *ParametersObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParametersObservation.
func (*ParametersObservation) DeepCopyInto ¶
func (in *ParametersObservation) DeepCopyInto(out *ParametersObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParametersParameters ¶
type ParametersParameters struct { // Optional. Brief description of the parameter. Must not exceed 1024 characters. // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask, for example placement.clusterSelector.zone or jobs['step-id'].sparkJob.args. // +kubebuilder:validation:Required Fields []*string `json:"fields" tf:"fields,omitempty"` // Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters. // +kubebuilder:validation:Required Name *string `json:"name" tf:"name,omitempty"` // Optional. Validation rules to be applied to this parameter's value. // +kubebuilder:validation:Optional Validation []ValidationParameters `json:"validation,omitempty" tf:"validation,omitempty"` }
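A sketch of a template parameter that substitutes a zone into the cluster selector; the field path follows the Dataproc workflow template documentation but is illustrative here, as are the import path and helper.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string { return &s }

func main() {
	// ZONE is supplied at instantiation time and replaces the referenced field path.
	p := dataproc.ParametersParameters{
		Name:        strPtr("ZONE"),
		Description: strPtr("The zone in which to place the cluster."),
		Fields:      []*string{strPtr("placement.clusterSelector.zone")},
	}
	_ = p
}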
func (*ParametersParameters) DeepCopy ¶
func (in *ParametersParameters) DeepCopy() *ParametersParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParametersParameters.
func (*ParametersParameters) DeepCopyInto ¶
func (in *ParametersParameters) DeepCopyInto(out *ParametersParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigLoggingConfigObservation ¶
type PigConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigConfigLoggingConfigObservation) DeepCopy ¶
func (in *PigConfigLoggingConfigObservation) DeepCopy() *PigConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigLoggingConfigObservation.
func (*PigConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *PigConfigLoggingConfigObservation) DeepCopyInto(out *PigConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigLoggingConfigParameters ¶
type PigConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Required DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*PigConfigLoggingConfigParameters) DeepCopy ¶
func (in *PigConfigLoggingConfigParameters) DeepCopy() *PigConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigLoggingConfigParameters.
func (*PigConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *PigConfigLoggingConfigParameters) DeepCopyInto(out *PigConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigObservation ¶
type PigConfigObservation struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig []PigConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing the Pig script to execute as the job. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Pig queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigConfigObservation) DeepCopy ¶
func (in *PigConfigObservation) DeepCopy() *PigConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigObservation.
func (*PigConfigObservation) DeepCopyInto ¶
func (in *PigConfigObservation) DeepCopyInto(out *PigConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigParameters ¶
type PigConfigParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig []PigConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing the Pig script to execute as the job. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Pig queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +kubebuilder:validation:Optional ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
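A sketch of an inline Pig job configuration; query_list and query_file_uri conflict, so only one is set here. The query text and variable value are placeholders, and the import path and helper are assumed.

package main

import dataproc "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"

func strPtr(s string) *string { return &s }

func main() {
	// Inline queries via QueryList; QueryFileURI is omitted because the two fields conflict.
	pig := dataproc.PigConfigParameters{
		QueryList:       []*string{strPtr("sh echo hello")},
		ScriptVariables: map[string]*string{"input": strPtr("gs://example-bucket/input")},
	}
	_ = pig
}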
func (*PigConfigParameters) DeepCopy ¶
func (in *PigConfigParameters) DeepCopy() *PigConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigParameters.
func (*PigConfigParameters) DeepCopyInto ¶
func (in *PigConfigParameters) DeepCopyInto(out *PigConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobLoggingConfigObservation ¶
type PigJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigJobLoggingConfigObservation) DeepCopy ¶
func (in *PigJobLoggingConfigObservation) DeepCopy() *PigJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobLoggingConfigObservation.
func (*PigJobLoggingConfigObservation) DeepCopyInto ¶
func (in *PigJobLoggingConfigObservation) DeepCopyInto(out *PigJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobLoggingConfigParameters ¶
type PigJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigJobLoggingConfigParameters) DeepCopy ¶
func (in *PigJobLoggingConfigParameters) DeepCopy() *PigJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobLoggingConfigParameters.
func (*PigJobLoggingConfigParameters) DeepCopyInto ¶
func (in *PigJobLoggingConfigParameters) DeepCopyInto(out *PigJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobObservation ¶
type PigJobObservation struct { // Optional. Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. The runtime log config for job execution. LoggingConfig []PigJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains the Pig queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList []PigJobQueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` // Optional. Mapping of query variable names to values (equivalent to the Pig command: name=[value]). ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigJobObservation) DeepCopy ¶
func (in *PigJobObservation) DeepCopy() *PigJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobObservation.
func (*PigJobObservation) DeepCopyInto ¶
func (in *PigJobObservation) DeepCopyInto(out *PigJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobParameters ¶
type PigJobParameters struct { // Optional. Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig []PigJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains the Pig queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList []PigJobQueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Optional. Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +kubebuilder:validation:Optional ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigJobParameters) DeepCopy ¶
func (in *PigJobParameters) DeepCopy() *PigJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobParameters.
func (*PigJobParameters) DeepCopyInto ¶
func (in *PigJobParameters) DeepCopyInto(out *PigJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobQueryListObservation ¶
type PigJobQueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*PigJobQueryListObservation) DeepCopy ¶
func (in *PigJobQueryListObservation) DeepCopy() *PigJobQueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobQueryListObservation.
func (*PigJobQueryListObservation) DeepCopyInto ¶
func (in *PigJobQueryListObservation) DeepCopyInto(out *PigJobQueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobQueryListParameters ¶
type PigJobQueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } // +kubebuilder:validation:Required Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*PigJobQueryListParameters) DeepCopy ¶
func (in *PigJobQueryListParameters) DeepCopy() *PigJobQueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobQueryListParameters.
func (*PigJobQueryListParameters) DeepCopyInto ¶
func (in *PigJobQueryListParameters) DeepCopyInto(out *PigJobQueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PlacementObservation ¶
type PlacementObservation struct { // The name of the cluster where the job // will be submitted. ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // A cluster UUID generated by the Cloud Dataproc service when the job is submitted. ClusterUUID *string `json:"clusterUuid,omitempty" tf:"cluster_uuid,omitempty"` }
func (*PlacementObservation) DeepCopy ¶
func (in *PlacementObservation) DeepCopy() *PlacementObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PlacementObservation.
func (*PlacementObservation) DeepCopyInto ¶
func (in *PlacementObservation) DeepCopyInto(out *PlacementObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PlacementParameters ¶
type PlacementParameters struct { // The name of the cluster where the job // will be submitted. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta1.Cluster // +crossplane:generate:reference:extractor=github.com/upbound/upjet/pkg/resource.ExtractParamPath("name",false) // +kubebuilder:validation:Optional ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // Reference to a Cluster in dataproc to populate clusterName. // +kubebuilder:validation:Optional ClusterNameRef *v1.Reference `json:"clusterNameRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate clusterName. // +kubebuilder:validation:Optional ClusterNameSelector *v1.Selector `json:"clusterNameSelector,omitempty" tf:"-"` }
func (*PlacementParameters) DeepCopy ¶
func (in *PlacementParameters) DeepCopy() *PlacementParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PlacementParameters.
func (*PlacementParameters) DeepCopyInto ¶
func (in *PlacementParameters) DeepCopyInto(out *PlacementParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
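As a hedged illustration (not generated code), PlacementParameters can either name the target cluster directly or resolve it through the Crossplane reference and selector fields shown above. The import alias for the crossplane-runtime common types and the referenced cluster name below are assumptions.

package main

import (
	"fmt"

	xpv1 "github.com/crossplane/crossplane-runtime/apis/common/v1"
	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func main() {
	// Resolve cluster_name from a referenced Cluster managed resource
	// rather than hard-coding the name.
	placement := v1beta1.PlacementParameters{
		ClusterNameRef: &xpv1.Reference{Name: "example-dataproc-cluster"},
	}
	fmt.Println(placement.ClusterNameRef.Name)
}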
type PreemptibleWorkerConfigDiskConfigObservation ¶
type PreemptibleWorkerConfigDiskConfigObservation struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // The amount of local SSD disks that will be // attached to each preemptible worker node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*PreemptibleWorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *PreemptibleWorkerConfigDiskConfigObservation) DeepCopy() *PreemptibleWorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigDiskConfigObservation.
func (*PreemptibleWorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigDiskConfigObservation) DeepCopyInto(out *PreemptibleWorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigDiskConfigParameters ¶
type PreemptibleWorkerConfigDiskConfigParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // The amount of local SSD disks that will be // attached to each preemptible worker node. Defaults to 0. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*PreemptibleWorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *PreemptibleWorkerConfigDiskConfigParameters) DeepCopy() *PreemptibleWorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigDiskConfigParameters.
func (*PreemptibleWorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigDiskConfigParameters) DeepCopyInto(out *PreemptibleWorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigObservation ¶
type PreemptibleWorkerConfigObservation struct { // Disk Config DiskConfig []PreemptibleWorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // List of worker instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Specifies the number of preemptible nodes to create. // Defaults to 0. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the secondary workers. The default value is PREEMPTIBLE. // Accepted values are: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE. Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*PreemptibleWorkerConfigObservation) DeepCopy ¶
func (in *PreemptibleWorkerConfigObservation) DeepCopy() *PreemptibleWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigObservation.
func (*PreemptibleWorkerConfigObservation) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigObservation) DeepCopyInto(out *PreemptibleWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigParameters ¶
type PreemptibleWorkerConfigParameters struct { // Disk Config // +kubebuilder:validation:Optional DiskConfig []PreemptibleWorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Specifies the number of preemptible nodes to create. // Defaults to 0. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the secondary workers. The default value is PREEMPTIBLE. // Accepted values are: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE. // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*PreemptibleWorkerConfigParameters) DeepCopy ¶
func (in *PreemptibleWorkerConfigParameters) DeepCopy() *PreemptibleWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigParameters.
func (*PreemptibleWorkerConfigParameters) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigParameters) DeepCopyInto(out *PreemptibleWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
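A minimal, illustrative sketch (assumptions: the import path above, two preemptible workers, 100 GB pd-standard boot disks) of how PreemptibleWorkerConfigParameters and its nested disk config fit together.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func f64(v float64) *float64 { return &v }
func str(s string) *string   { return &s }

func main() {
	workers := v1beta1.PreemptibleWorkerConfigParameters{
		NumInstances:   f64(2),
		Preemptibility: str("PREEMPTIBLE"),
		DiskConfig: []v1beta1.PreemptibleWorkerConfigDiskConfigParameters{{
			BootDiskSizeGb: f64(100),
			BootDiskType:   str("pd-standard"),
			NumLocalSsds:   f64(0),
		}},
	}
	fmt.Println(*workers.NumInstances)
}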
type PrestoConfigLoggingConfigObservation ¶
type PrestoConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PrestoConfigLoggingConfigObservation) DeepCopy ¶
func (in *PrestoConfigLoggingConfigObservation) DeepCopy() *PrestoConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigLoggingConfigObservation.
func (*PrestoConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *PrestoConfigLoggingConfigObservation) DeepCopyInto(out *PrestoConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigLoggingConfigParameters ¶
type PrestoConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Required DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*PrestoConfigLoggingConfigParameters) DeepCopy ¶
func (in *PrestoConfigLoggingConfigParameters) DeepCopy() *PrestoConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigLoggingConfigParameters.
func (*PrestoConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *PrestoConfigLoggingConfigParameters) DeepCopyInto(out *PrestoConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigObservation ¶
type PrestoConfigObservation struct { // Presto client tags to attach to this query. ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` LoggingConfig []PrestoConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats. OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoConfigObservation) DeepCopy ¶
func (in *PrestoConfigObservation) DeepCopy() *PrestoConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigObservation.
func (*PrestoConfigObservation) DeepCopyInto ¶
func (in *PrestoConfigObservation) DeepCopyInto(out *PrestoConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigParameters ¶
type PrestoConfigParameters struct { // Presto client tags to attach to this query. // +kubebuilder:validation:Optional ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // +kubebuilder:validation:Optional LoggingConfig []PrestoConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats. // +kubebuilder:validation:Optional OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoConfigParameters) DeepCopy ¶
func (in *PrestoConfigParameters) DeepCopy() *PrestoConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigParameters.
func (*PrestoConfigParameters) DeepCopyInto ¶
func (in *PrestoConfigParameters) DeepCopyInto(out *PrestoConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobLoggingConfigObservation ¶
type PrestoJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PrestoJobLoggingConfigObservation) DeepCopy ¶
func (in *PrestoJobLoggingConfigObservation) DeepCopy() *PrestoJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobLoggingConfigObservation.
func (*PrestoJobLoggingConfigObservation) DeepCopyInto ¶
func (in *PrestoJobLoggingConfigObservation) DeepCopyInto(out *PrestoJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobLoggingConfigParameters ¶
type PrestoJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PrestoJobLoggingConfigParameters) DeepCopy ¶
func (in *PrestoJobLoggingConfigParameters) DeepCopy() *PrestoJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobLoggingConfigParameters.
func (*PrestoJobLoggingConfigParameters) DeepCopyInto ¶
func (in *PrestoJobLoggingConfigParameters) DeepCopyInto(out *PrestoJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobObservation ¶
type PrestoJobObservation struct { // Optional. Presto client tags to attach to this query ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Optional. Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // Optional. The runtime log config for job execution. LoggingConfig []PrestoJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Optional. The format in which query output will be displayed. See the Presto documentation for supported output formats. OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // Optional. A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI. Properties that conflict with values set by the Dataproc API may be overwritten. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList []PrestoJobQueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoJobObservation) DeepCopy ¶
func (in *PrestoJobObservation) DeepCopy() *PrestoJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobObservation.
func (*PrestoJobObservation) DeepCopyInto ¶
func (in *PrestoJobObservation) DeepCopyInto(out *PrestoJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobParameters ¶
type PrestoJobParameters struct { // Optional. Presto client tags to attach to this query // +kubebuilder:validation:Optional ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Optional. Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // Optional. The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig []PrestoJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Optional. The format in which query output will be displayed. See the Presto documentation for supported output formats. // +kubebuilder:validation:Optional OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // Optional. A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList []PrestoJobQueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoJobParameters) DeepCopy ¶
func (in *PrestoJobParameters) DeepCopy() *PrestoJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobParameters.
func (*PrestoJobParameters) DeepCopyInto ¶
func (in *PrestoJobParameters) DeepCopyInto(out *PrestoJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobQueryListObservation ¶
type PrestoJobQueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*PrestoJobQueryListObservation) DeepCopy ¶
func (in *PrestoJobQueryListObservation) DeepCopy() *PrestoJobQueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobQueryListObservation.
func (*PrestoJobQueryListObservation) DeepCopyInto ¶
func (in *PrestoJobQueryListObservation) DeepCopyInto(out *PrestoJobQueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobQueryListParameters ¶
type PrestoJobQueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } // +kubebuilder:validation:Required Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*PrestoJobQueryListParameters) DeepCopy ¶
func (in *PrestoJobQueryListParameters) DeepCopy() *PrestoJobQueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobQueryListParameters.
func (*PrestoJobQueryListParameters) DeepCopyInto ¶
func (in *PrestoJobQueryListParameters) DeepCopyInto(out *PrestoJobQueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigLoggingConfigObservation ¶
type PysparkConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PysparkConfigLoggingConfigObservation) DeepCopy ¶
func (in *PysparkConfigLoggingConfigObservation) DeepCopy() *PysparkConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigLoggingConfigObservation.
func (*PysparkConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *PysparkConfigLoggingConfigObservation) DeepCopyInto(out *PysparkConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigLoggingConfigParameters ¶
type PysparkConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Required DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*PysparkConfigLoggingConfigParameters) DeepCopy ¶
func (in *PysparkConfigLoggingConfigParameters) DeepCopy() *PysparkConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigLoggingConfigParameters.
func (*PysparkConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *PysparkConfigLoggingConfigParameters) DeepCopyInto(out *PysparkConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigObservation ¶
type PysparkConfigObservation struct { // HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig []PysparkConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The HCFS URI of the main Python file to use as the driver. Must be a .py file. MainPythonFileURI *string `json:"mainPythonFileUri,omitempty" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkConfigObservation) DeepCopy ¶
func (in *PysparkConfigObservation) DeepCopy() *PysparkConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigObservation.
func (*PysparkConfigObservation) DeepCopyInto ¶
func (in *PysparkConfigObservation) DeepCopyInto(out *PysparkConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigParameters ¶
type PysparkConfigParameters struct { // HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig []PysparkConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The HCFS URI of the main Python file to use as the driver. Must be a .py file. // +kubebuilder:validation:Required MainPythonFileURI *string `json:"mainPythonFileUri" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. // +kubebuilder:validation:Optional PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkConfigParameters) DeepCopy ¶
func (in *PysparkConfigParameters) DeepCopy() *PysparkConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigParameters.
func (*PysparkConfigParameters) DeepCopyInto ¶
func (in *PysparkConfigParameters) DeepCopyInto(out *PysparkConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
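For orientation only, a sketch of PysparkConfigParameters. MainPythonFileURI is the single required field; the gs:// URIs and the Spark property value are hypothetical placeholders.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func str(s string) *string { return &s }

func main() {
	pyspark := v1beta1.PysparkConfigParameters{
		// Required: the driver script; must be a .py file.
		MainPythonFileURI: str("gs://example-bucket/jobs/wordcount.py"),
		Args:              []*string{str("gs://example-bucket/input"), str("gs://example-bucket/output")},
		PythonFileUris:    []*string{str("gs://example-bucket/libs/helpers.py")},
		Properties:        map[string]*string{"spark.executor.memory": str("2g")},
	}
	fmt.Println(*pyspark.MainPythonFileURI)
}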
type PysparkJobLoggingConfigObservation ¶
type PysparkJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PysparkJobLoggingConfigObservation) DeepCopy ¶
func (in *PysparkJobLoggingConfigObservation) DeepCopy() *PysparkJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobLoggingConfigObservation.
func (*PysparkJobLoggingConfigObservation) DeepCopyInto ¶
func (in *PysparkJobLoggingConfigObservation) DeepCopyInto(out *PysparkJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobLoggingConfigParameters ¶
type PysparkJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PysparkJobLoggingConfigParameters) DeepCopy ¶
func (in *PysparkJobLoggingConfigParameters) DeepCopy() *PysparkJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobLoggingConfigParameters.
func (*PysparkJobLoggingConfigParameters) DeepCopyInto ¶
func (in *PysparkJobLoggingConfigParameters) DeepCopyInto(out *PysparkJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobObservation ¶
type PysparkJobObservation struct { // Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. The runtime log config for job execution. LoggingConfig []PysparkJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file. MainPythonFileURI *string `json:"mainPythonFileUri,omitempty" tf:"main_python_file_uri,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkJobObservation) DeepCopy ¶
func (in *PysparkJobObservation) DeepCopy() *PysparkJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobObservation.
func (*PysparkJobObservation) DeepCopyInto ¶
func (in *PysparkJobObservation) DeepCopyInto(out *PysparkJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobParameters ¶
type PysparkJobParameters struct { // Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // Optional. The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig []PysparkJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file. // +kubebuilder:validation:Required MainPythonFileURI *string `json:"mainPythonFileUri" tf:"main_python_file_uri,omitempty"` // Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. // +kubebuilder:validation:Optional PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkJobParameters) DeepCopy ¶
func (in *PysparkJobParameters) DeepCopy() *PysparkJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobParameters.
func (*PysparkJobParameters) DeepCopyInto ¶
func (in *PysparkJobParameters) DeepCopyInto(out *PysparkJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryListObservation ¶
type QueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*QueryListObservation) DeepCopy ¶
func (in *QueryListObservation) DeepCopy() *QueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryListObservation.
func (*QueryListObservation) DeepCopyInto ¶
func (in *QueryListObservation) DeepCopyInto(out *QueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryListParameters ¶
type QueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } // +kubebuilder:validation:Required Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*QueryListParameters) DeepCopy ¶
func (in *QueryListParameters) DeepCopy() *QueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryListParameters.
func (*QueryListParameters) DeepCopyInto ¶
func (in *QueryListParameters) DeepCopyInto(out *QueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
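A small illustrative sketch of QueryListParameters: Queries is required, and one list element may itself bundle several semicolon-separated statements, as the field comment describes. The SQL text is hypothetical.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func str(s string) *string { return &s }

func main() {
	ql := v1beta1.QueryListParameters{
		Queries: []*string{
			str("SHOW DATABASES"),
			// Several statements may share one string, separated by semicolons.
			str("CREATE TABLE IF NOT EXISTS t (x INT); SELECT COUNT(*) FROM t"),
		},
	}
	fmt.Println(len(ql.Queries))
}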
type ReferenceObservation ¶
type ReferenceObservation struct {
JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"`
}
func (*ReferenceObservation) DeepCopy ¶
func (in *ReferenceObservation) DeepCopy() *ReferenceObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferenceObservation.
func (*ReferenceObservation) DeepCopyInto ¶
func (in *ReferenceObservation) DeepCopyInto(out *ReferenceObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferenceParameters ¶
type ReferenceParameters struct { // +kubebuilder:validation:Optional JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"` }
func (*ReferenceParameters) DeepCopy ¶
func (in *ReferenceParameters) DeepCopy() *ReferenceParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferenceParameters.
func (*ReferenceParameters) DeepCopyInto ¶
func (in *ReferenceParameters) DeepCopyInto(out *ReferenceParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RegexObservation ¶
type RegexObservation struct { // Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient). Regexes []*string `json:"regexes,omitempty" tf:"regexes,omitempty"` }
func (*RegexObservation) DeepCopy ¶
func (in *RegexObservation) DeepCopy() *RegexObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RegexObservation.
func (*RegexObservation) DeepCopyInto ¶
func (in *RegexObservation) DeepCopyInto(out *RegexObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RegexParameters ¶
type RegexParameters struct { // Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient). // +kubebuilder:validation:Required Regexes []*string `json:"regexes" tf:"regexes,omitempty"` }
func (*RegexParameters) DeepCopy ¶
func (in *RegexParameters) DeepCopy() *RegexParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RegexParameters.
func (*RegexParameters) DeepCopyInto ¶
func (in *RegexParameters) DeepCopyInto(out *RegexParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
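Illustrative only: RegexParameters carries RE2 patterns that a template parameter value must match in its entirety. The pattern below, which accepts lowercase names of up to 20 characters, is a hypothetical example.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func str(s string) *string { return &s }

func main() {
	validation := v1beta1.RegexParameters{
		// Full-match semantics: a substring match is not sufficient.
		Regexes: []*string{str("[a-z][a-z0-9-]{0,19}")},
	}
	fmt.Println(*validation.Regexes[0])
}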
type ReservationAffinityObservation ¶
type ReservationAffinityObservation struct { // Corresponds to the type of reservation consumption. ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. Key *string `json:"key,omitempty" tf:"key,omitempty"` // Corresponds to the label values of reservation resource. Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ReservationAffinityObservation) DeepCopy ¶
func (in *ReservationAffinityObservation) DeepCopy() *ReservationAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationAffinityObservation.
func (*ReservationAffinityObservation) DeepCopyInto ¶
func (in *ReservationAffinityObservation) DeepCopyInto(out *ReservationAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationAffinityParameters ¶
type ReservationAffinityParameters struct { // Corresponds to the type of reservation consumption. // +kubebuilder:validation:Optional ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. // +kubebuilder:validation:Optional Key *string `json:"key,omitempty" tf:"key,omitempty"` // Corresponds to the label values of reservation resource. // +kubebuilder:validation:Optional Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ReservationAffinityParameters) DeepCopy ¶
func (in *ReservationAffinityParameters) DeepCopy() *ReservationAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationAffinityParameters.
func (*ReservationAffinityParameters) DeepCopyInto ¶
func (in *ReservationAffinityParameters) DeepCopyInto(out *ReservationAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SchedulingObservation ¶
type SchedulingObservation struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*SchedulingObservation) DeepCopy ¶
func (in *SchedulingObservation) DeepCopy() *SchedulingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SchedulingObservation.
func (*SchedulingObservation) DeepCopyInto ¶
func (in *SchedulingObservation) DeepCopyInto(out *SchedulingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SchedulingParameters ¶
type SchedulingParameters struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. // +kubebuilder:validation:Required MaxFailuresPerHour *float64 `json:"maxFailuresPerHour" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. // +kubebuilder:validation:Required MaxFailuresTotal *float64 `json:"maxFailuresTotal" tf:"max_failures_total,omitempty"` }
func (*SchedulingParameters) DeepCopy ¶
func (in *SchedulingParameters) DeepCopy() *SchedulingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SchedulingParameters.
func (*SchedulingParameters) DeepCopyInto ¶
func (in *SchedulingParameters) DeepCopyInto(out *SchedulingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
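As a sketch (the values are arbitrary), both SchedulingParameters fields are required, so a valid spec sets the per-hour and total driver restart limits together.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1"
)

func f64(v float64) *float64 { return &v }

func main() {
	sched := v1beta1.SchedulingParameters{
		MaxFailuresPerHour: f64(1), // restart the driver at most once per hour
		MaxFailuresTotal:   f64(5), // and at most five times overall
	}
	fmt.Println(*sched.MaxFailuresPerHour, *sched.MaxFailuresTotal)
}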
type SecondaryWorkerConfigAcceleratorsObservation ¶
type SecondaryWorkerConfigAcceleratorsObservation struct { // The number of the accelerator cards of this type exposed to this instance. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. If you are using the auto zone placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*SecondaryWorkerConfigAcceleratorsObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigAcceleratorsObservation) DeepCopy() *SecondaryWorkerConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigAcceleratorsObservation.
func (*SecondaryWorkerConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigAcceleratorsObservation) DeepCopyInto(out *SecondaryWorkerConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigAcceleratorsParameters ¶
type SecondaryWorkerConfigAcceleratorsParameters struct { // The number of the accelerator cards of this type exposed to this instance. // +kubebuilder:validation:Optional AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. If you are using the auto zone placement (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. // +kubebuilder:validation:Optional AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*SecondaryWorkerConfigAcceleratorsParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigAcceleratorsParameters) DeepCopy() *SecondaryWorkerConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigAcceleratorsParameters.
func (*SecondaryWorkerConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigAcceleratorsParameters) DeepCopyInto(out *SecondaryWorkerConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigDiskConfigObservation ¶
type SecondaryWorkerConfigDiskConfigObservation struct { // Optional. Size in GB of the boot disk (default is 500GB). BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*SecondaryWorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigDiskConfigObservation) DeepCopy() *SecondaryWorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigDiskConfigObservation.
func (*SecondaryWorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigDiskConfigObservation) DeepCopyInto(out *SecondaryWorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigDiskConfigParameters ¶
type SecondaryWorkerConfigDiskConfigParameters struct { // Optional. Size in GB of the boot disk (default is 500GB). // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Optional. Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and HDFS (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*SecondaryWorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigDiskConfigParameters) DeepCopy() *SecondaryWorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigDiskConfigParameters.
func (*SecondaryWorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigDiskConfigParameters) DeepCopyInto(out *SecondaryWorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigManagedGroupConfigObservation ¶
type SecondaryWorkerConfigManagedGroupConfigObservation struct { // Output only. The name of the Instance Group Manager for this group. InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name,omitempty"` // Output only. The name of the Instance Template used for the Managed Instance Group. InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name,omitempty"` }
func (*SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopy() *SecondaryWorkerConfigManagedGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigManagedGroupConfigObservation.
func (*SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopyInto(out *SecondaryWorkerConfigManagedGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigManagedGroupConfigParameters ¶
type SecondaryWorkerConfigManagedGroupConfigParameters struct { }
func (*SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopy() *SecondaryWorkerConfigManagedGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigManagedGroupConfigParameters.
func (*SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopyInto(out *SecondaryWorkerConfigManagedGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigObservation ¶
type SecondaryWorkerConfigObservation struct {
	// Maximum number of instances for this group. Note that by default, clusters will not use
	// secondary workers. Required for secondary workers if the minimum secondary instances is set.
	// Bounds: [minInstances, ). Defaults to 0.
	MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"`

	// Minimum number of instances for this group. Bounds: [0, maxInstances]. Defaults to 0.
	MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"`

	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"`
}
func (*SecondaryWorkerConfigObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigObservation) DeepCopy() *SecondaryWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigObservation.
func (*SecondaryWorkerConfigObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigObservation) DeepCopyInto(out *SecondaryWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigParameters ¶
type SecondaryWorkerConfigParameters struct {
	// Maximum number of instances for this group. Note that by default, clusters will not use
	// secondary workers. Required for secondary workers if the minimum secondary instances is set.
	// Bounds: [minInstances, ). Defaults to 0.
	// +kubebuilder:validation:Optional
	MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"`

	// Minimum number of instances for this group. Bounds: [0, maxInstances]. Defaults to 0.
	// +kubebuilder:validation:Optional
	MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"`

	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	// +kubebuilder:validation:Optional
	Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"`
}
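A sketch of secondary-worker bounds for an autoscaling policy, using the same assumed import as the disk-config sketch above (values hypothetical):

    minInstances := 0.0
    maxInstances := 10.0
    weight := 1.0

    secondary := v1beta1.SecondaryWorkerConfigParameters{
        MinInstances: &minInstances,
        MaxInstances: &maxInstances, // required once a minimum is set
        Weight:       &weight,
    }
    _ = secondary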
func (*SecondaryWorkerConfigParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigParameters) DeepCopy() *SecondaryWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigParameters.
func (*SecondaryWorkerConfigParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigParameters) DeepCopyInto(out *SecondaryWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigKerberosConfigObservation ¶
type SecurityConfigKerberosConfigObservation struct {
	// Optional. The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// Optional. The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// Optional. The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	CrossRealmTrustSharedPassword *string `json:"crossRealmTrustSharedPassword,omitempty" tf:"cross_realm_trust_shared_password,omitempty"`

	// Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// Optional. The uri of the KMS key used to encrypt various sensitive files.
	KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	KdcDBKey *string `json:"kdcDbKey,omitempty" tf:"kdc_db_key,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	KeyPassword *string `json:"keyPassword,omitempty" tf:"key_password,omitempty"`

	// Optional. The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	Keystore *string `json:"keystore,omitempty" tf:"keystore,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
	KeystorePassword *string `json:"keystorePassword,omitempty" tf:"keystore_password,omitempty"`

	// Optional. The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	RootPrincipalPassword *string `json:"rootPrincipalPassword,omitempty" tf:"root_principal_password,omitempty"`

	// Optional. The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// Optional. The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	Truststore *string `json:"truststore,omitempty" tf:"truststore,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	TruststorePassword *string `json:"truststorePassword,omitempty" tf:"truststore_password,omitempty"`
}
func (*SecurityConfigKerberosConfigObservation) DeepCopy ¶
func (in *SecurityConfigKerberosConfigObservation) DeepCopy() *SecurityConfigKerberosConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigKerberosConfigObservation.
func (*SecurityConfigKerberosConfigObservation) DeepCopyInto ¶
func (in *SecurityConfigKerberosConfigObservation) DeepCopyInto(out *SecurityConfigKerberosConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigKerberosConfigParameters ¶
type SecurityConfigKerberosConfigParameters struct {
	// Optional. The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// Optional. The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// Optional. The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	// +kubebuilder:validation:Optional
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	// +kubebuilder:validation:Optional
	CrossRealmTrustSharedPassword *string `json:"crossRealmTrustSharedPassword,omitempty" tf:"cross_realm_trust_shared_password,omitempty"`

	// Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
	// +kubebuilder:validation:Optional
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// Optional. The uri of the KMS key used to encrypt various sensitive files.
	// +kubebuilder:validation:Optional
	KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	// +kubebuilder:validation:Optional
	KdcDBKey *string `json:"kdcDbKey,omitempty" tf:"kdc_db_key,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	KeyPassword *string `json:"keyPassword,omitempty" tf:"key_password,omitempty"`

	// Optional. The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +kubebuilder:validation:Optional
	Keystore *string `json:"keystore,omitempty" tf:"keystore,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	KeystorePassword *string `json:"keystorePassword,omitempty" tf:"keystore_password,omitempty"`

	// Optional. The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	// +kubebuilder:validation:Optional
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	// +kubebuilder:validation:Optional
	RootPrincipalPassword *string `json:"rootPrincipalPassword,omitempty" tf:"root_principal_password,omitempty"`

	// Optional. The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
	// +kubebuilder:validation:Optional
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// Optional. The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +kubebuilder:validation:Optional
	Truststore *string `json:"truststore,omitempty" tf:"truststore,omitempty"`

	// Optional. The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	TruststorePassword *string `json:"truststorePassword,omitempty" tf:"truststore_password,omitempty"`
}
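A sketch of a minimal Kerberized-cluster configuration for a workflow template, with hypothetical key and bucket names and the assumed import from the first sketch:

    enableKerberos := true
    kmsKey := "projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key" // hypothetical
    rootPw := "gs://my-bucket/kerberos/root-principal-password.encrypted"               // hypothetical

    kerberos := v1beta1.SecurityConfigKerberosConfigParameters{
        EnableKerberos:        &enableKerberos,
        KMSKey:                &kmsKey,
        RootPrincipalPassword: &rootPw,
    }
    _ = kerberos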
func (*SecurityConfigKerberosConfigParameters) DeepCopy ¶
func (in *SecurityConfigKerberosConfigParameters) DeepCopy() *SecurityConfigKerberosConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigKerberosConfigParameters.
func (*SecurityConfigKerberosConfigParameters) DeepCopyInto ¶
func (in *SecurityConfigKerberosConfigParameters) DeepCopyInto(out *SecurityConfigKerberosConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigObservation ¶
type SecurityConfigObservation struct { // Kerberos Configuration KerberosConfig []KerberosConfigObservation `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*SecurityConfigObservation) DeepCopy ¶
func (in *SecurityConfigObservation) DeepCopy() *SecurityConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigObservation.
func (*SecurityConfigObservation) DeepCopyInto ¶
func (in *SecurityConfigObservation) DeepCopyInto(out *SecurityConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigParameters ¶
type SecurityConfigParameters struct { // Kerberos Configuration // +kubebuilder:validation:Required KerberosConfig []KerberosConfigParameters `json:"kerberosConfig" tf:"kerberos_config,omitempty"` }
func (*SecurityConfigParameters) DeepCopy ¶
func (in *SecurityConfigParameters) DeepCopy() *SecurityConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigParameters.
func (*SecurityConfigParameters) DeepCopyInto ¶
func (in *SecurityConfigParameters) DeepCopyInto(out *SecurityConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ShieldedInstanceConfigObservation ¶
type ShieldedInstanceConfigObservation struct {
	// Defines whether instances have integrity monitoring enabled.
	EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"`

	// Defines whether instances have Secure Boot enabled.
	EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"`

	// Defines whether instances have the vTPM enabled.
	EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"`
}
func (*ShieldedInstanceConfigObservation) DeepCopy ¶
func (in *ShieldedInstanceConfigObservation) DeepCopy() *ShieldedInstanceConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ShieldedInstanceConfigObservation.
func (*ShieldedInstanceConfigObservation) DeepCopyInto ¶
func (in *ShieldedInstanceConfigObservation) DeepCopyInto(out *ShieldedInstanceConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ShieldedInstanceConfigParameters ¶
type ShieldedInstanceConfigParameters struct {
	// Defines whether instances have integrity monitoring enabled.
	// +kubebuilder:validation:Optional
	EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"`

	// Defines whether instances have Secure Boot enabled.
	// +kubebuilder:validation:Optional
	EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"`

	// Defines whether instances have the vTPM enabled.
	// +kubebuilder:validation:Optional
	EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"`
}
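A sketch enabling all three Shielded VM features, using the same assumed import as above:

    enabled := true
    shielded := v1beta1.ShieldedInstanceConfigParameters{
        EnableSecureBoot:          &enabled,
        EnableVtpm:                &enabled,
        EnableIntegrityMonitoring: &enabled,
    }
    _ = shielded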
func (*ShieldedInstanceConfigParameters) DeepCopy ¶
func (in *ShieldedInstanceConfigParameters) DeepCopy() *ShieldedInstanceConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ShieldedInstanceConfigParameters.
func (*ShieldedInstanceConfigParameters) DeepCopyInto ¶
func (in *ShieldedInstanceConfigParameters) DeepCopyInto(out *ShieldedInstanceConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SoftwareConfigObservation ¶
type SoftwareConfigObservation struct {
	// The Cloud Dataproc image version to use
	// for the cluster - this controls the sets of software versions
	// installed onto the nodes when you create clusters. If not specified, defaults to the
	// latest version. For a list of valid versions see
	// Cloud Dataproc versions
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of optional components to activate on the cluster. See Available Optional Components.
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A list of override and additional properties (key/value pairs)
	// used to modify various aspects of the common configuration files used when creating
	// a cluster. For a list of valid properties please see
	// Cluster properties
	OverrideProperties map[string]*string `json:"overrideProperties,omitempty" tf:"override_properties,omitempty"`

	// The properties to set on daemon config files. Property keys are specified in prefix:property format,
	// for example spark:spark.kubernetes.container.image.
	Properties map[string]string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*SoftwareConfigObservation) DeepCopy ¶
func (in *SoftwareConfigObservation) DeepCopy() *SoftwareConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SoftwareConfigObservation.
func (*SoftwareConfigObservation) DeepCopyInto ¶
func (in *SoftwareConfigObservation) DeepCopyInto(out *SoftwareConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SoftwareConfigParameters ¶
type SoftwareConfigParameters struct {
	// The Cloud Dataproc image version to use
	// for the cluster - this controls the sets of software versions
	// installed onto the nodes when you create clusters. If not specified, defaults to the
	// latest version. For a list of valid versions see
	// Cloud Dataproc versions
	// +kubebuilder:validation:Optional
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of optional components to activate on the cluster. See Available Optional Components.
	// +kubebuilder:validation:Optional
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A list of override and additional properties (key/value pairs)
	// used to modify various aspects of the common configuration files used when creating
	// a cluster. For a list of valid properties please see
	// Cluster properties
	// +kubebuilder:validation:Optional
	OverrideProperties map[string]*string `json:"overrideProperties,omitempty" tf:"override_properties,omitempty"`
}
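A sketch of a software configuration that pins an image version, enables optional components, and overrides one cluster property. The version string and property are hypothetical, and the component names should be checked against the Available Optional Components list:

    imageVersion := "2.1-debian11" // hypothetical version string
    jupyter := "JUPYTER"
    zeppelin := "ZEPPELIN"
    sparkMem := "4g" // hypothetical override value

    software := v1beta1.SoftwareConfigParameters{
        ImageVersion:       &imageVersion,
        OptionalComponents: []*string{&jupyter, &zeppelin},
        // Keys use the prefix:property form described above.
        OverrideProperties: map[string]*string{
            "spark:spark.executor.memory": &sparkMem,
        },
    }
    _ = software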
func (*SoftwareConfigParameters) DeepCopy ¶
func (in *SoftwareConfigParameters) DeepCopy() *SoftwareConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SoftwareConfigParameters.
func (*SoftwareConfigParameters) DeepCopyInto ¶
func (in *SoftwareConfigParameters) DeepCopyInto(out *SoftwareConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigLoggingConfigObservation ¶
type SparkConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkConfigLoggingConfigObservation) DeepCopy ¶
func (in *SparkConfigLoggingConfigObservation) DeepCopy() *SparkConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigLoggingConfigObservation.
func (*SparkConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkConfigLoggingConfigObservation) DeepCopyInto(out *SparkConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigLoggingConfigParameters ¶
type SparkConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Required DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*SparkConfigLoggingConfigParameters) DeepCopy ¶
func (in *SparkConfigLoggingConfigParameters) DeepCopy() *SparkConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigLoggingConfigParameters.
func (*SparkConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkConfigLoggingConfigParameters) DeepCopyInto(out *SparkConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigObservation ¶
type SparkConfigObservation struct {
	// HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// The arguments to pass to the driver.
	Args []*string `json:"args,omitempty" tf:"args,omitempty"`

	// HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	LoggingConfig []SparkConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// The class containing the main method of the driver. Must be in a
	// provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
	MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"`

	// The HCFS URI of jar file containing
	// the driver jar. Conflicts with main_class
	MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"`

	// A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*SparkConfigObservation) DeepCopy ¶
func (in *SparkConfigObservation) DeepCopy() *SparkConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigObservation.
func (*SparkConfigObservation) DeepCopyInto ¶
func (in *SparkConfigObservation) DeepCopyInto(out *SparkConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigParameters ¶
type SparkConfigParameters struct {
	// HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
	// +kubebuilder:validation:Optional
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// The arguments to pass to the driver.
	// +kubebuilder:validation:Optional
	Args []*string `json:"args,omitempty" tf:"args,omitempty"`

	// HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
	// +kubebuilder:validation:Optional
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	// +kubebuilder:validation:Optional
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	// +kubebuilder:validation:Optional
	LoggingConfig []SparkConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// The class containing the main method of the driver. Must be in a
	// provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
	// +kubebuilder:validation:Optional
	MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"`

	// The HCFS URI of jar file containing
	// the driver jar. Conflicts with main_class
	// +kubebuilder:validation:Optional
	MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"`

	// A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	// +kubebuilder:validation:Optional
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
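A sketch of a Spark step that sets main_class and therefore leaves main_jar_file_uri unset, since the two conflict (class name, URIs, and property values are hypothetical; same assumed import as above):

    mainClass := "org.example.WordCount"       // hypothetical
    jar := "gs://my-bucket/jars/wordcount.jar" // hypothetical
    arg := "gs://my-bucket/input/"
    logConf := "true"

    spark := v1beta1.SparkConfigParameters{
        MainClass:   &mainClass, // conflicts with MainJarFileURI, so that field stays nil
        JarFileUris: []*string{&jar},
        Args:        []*string{&arg},
        Properties:  map[string]*string{"spark.logConf": &logConf},
    }
    _ = spark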
func (*SparkConfigParameters) DeepCopy ¶
func (in *SparkConfigParameters) DeepCopy() *SparkConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigParameters.
func (*SparkConfigParameters) DeepCopyInto ¶
func (in *SparkConfigParameters) DeepCopyInto(out *SparkConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigObservation ¶
type SparkHistoryServerConfigObservation struct { // Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload. DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"` }
func (*SparkHistoryServerConfigObservation) DeepCopy ¶
func (in *SparkHistoryServerConfigObservation) DeepCopy() *SparkHistoryServerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigObservation.
func (*SparkHistoryServerConfigObservation) DeepCopyInto ¶
func (in *SparkHistoryServerConfigObservation) DeepCopyInto(out *SparkHistoryServerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigParameters ¶
type SparkHistoryServerConfigParameters struct { // Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload. // +kubebuilder:validation:Optional DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"` }
func (*SparkHistoryServerConfigParameters) DeepCopy ¶
func (in *SparkHistoryServerConfigParameters) DeepCopy() *SparkHistoryServerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigParameters.
func (*SparkHistoryServerConfigParameters) DeepCopyInto ¶
func (in *SparkHistoryServerConfigParameters) DeepCopyInto(out *SparkHistoryServerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobLoggingConfigObservation ¶
type SparkJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkJobLoggingConfigObservation) DeepCopy ¶
func (in *SparkJobLoggingConfigObservation) DeepCopy() *SparkJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobLoggingConfigObservation.
func (*SparkJobLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkJobLoggingConfigObservation) DeepCopyInto(out *SparkJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobLoggingConfigParameters ¶
type SparkJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkJobLoggingConfigParameters) DeepCopy ¶
func (in *SparkJobLoggingConfigParameters) DeepCopy() *SparkJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobLoggingConfigParameters.
func (*SparkJobLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkJobLoggingConfigParameters) DeepCopyInto(out *SparkJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobObservation ¶
type SparkJobObservation struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	Args []*string `json:"args,omitempty" tf:"args,omitempty"`

	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	// Optional. The runtime log config for job execution.
	LoggingConfig []SparkJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
	MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"`

	// The HCFS URI of the jar file that contains the main class.
	MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"`

	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*SparkJobObservation) DeepCopy ¶
func (in *SparkJobObservation) DeepCopy() *SparkJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobObservation.
func (*SparkJobObservation) DeepCopyInto ¶
func (in *SparkJobObservation) DeepCopyInto(out *SparkJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobParameters ¶
type SparkJobParameters struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	// +kubebuilder:validation:Optional
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	// +kubebuilder:validation:Optional
	Args []*string `json:"args,omitempty" tf:"args,omitempty"`

	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	// +kubebuilder:validation:Optional
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
	// +kubebuilder:validation:Optional
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	// Optional. The runtime log config for job execution.
	// +kubebuilder:validation:Optional
	LoggingConfig []SparkJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris.
	// +kubebuilder:validation:Optional
	MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"`

	// The HCFS URI of the jar file that contains the main class.
	// +kubebuilder:validation:Optional
	MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"`

	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	// +kubebuilder:validation:Optional
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
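A sketch of the workflow-template variant of a Spark job, showing the nested logging config (jar URI and log level are hypothetical; same assumed import):

    mainJar := "gs://my-bucket/jars/etl.jar" // hypothetical
    rootLevel := "INFO"

    job := v1beta1.SparkJobParameters{
        MainJarFileURI: &mainJar,
        LoggingConfig: []v1beta1.SparkJobLoggingConfigParameters{{
            DriverLogLevels: map[string]*string{"root": &rootLevel},
        }},
    }
    _ = job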
func (*SparkJobParameters) DeepCopy ¶
func (in *SparkJobParameters) DeepCopy() *SparkJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobParameters.
func (*SparkJobParameters) DeepCopyInto ¶
func (in *SparkJobParameters) DeepCopyInto(out *SparkJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobLoggingConfigObservation ¶
type SparkRJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkRJobLoggingConfigObservation) DeepCopy ¶
func (in *SparkRJobLoggingConfigObservation) DeepCopy() *SparkRJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobLoggingConfigObservation.
func (*SparkRJobLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkRJobLoggingConfigObservation) DeepCopyInto(out *SparkRJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobLoggingConfigParameters ¶
type SparkRJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkRJobLoggingConfigParameters) DeepCopy ¶
func (in *SparkRJobLoggingConfigParameters) DeepCopy() *SparkRJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobLoggingConfigParameters.
func (*SparkRJobLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkRJobLoggingConfigParameters) DeepCopyInto(out *SparkRJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobObservation ¶
type SparkRJobObservation struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	Args []*string `json:"args,omitempty" tf:"args,omitempty"`

	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// Optional. The runtime log config for job execution.
	LoggingConfig []SparkRJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
	MainRFileURI *string `json:"mainRFileUri,omitempty" tf:"main_r_file_uri,omitempty"`

	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*SparkRJobObservation) DeepCopy ¶
func (in *SparkRJobObservation) DeepCopy() *SparkRJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobObservation.
func (*SparkRJobObservation) DeepCopyInto ¶
func (in *SparkRJobObservation) DeepCopyInto(out *SparkRJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobParameters ¶
type SparkRJobParameters struct {
	// Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	// +kubebuilder:validation:Optional
	ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"`

	// Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	// +kubebuilder:validation:Optional
	Args []*string `json:"args,omitempty" tf:"args,omitempty"`

	// Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	// +kubebuilder:validation:Optional
	FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"`

	// Optional. The runtime log config for job execution.
	// +kubebuilder:validation:Optional
	LoggingConfig []SparkRJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// Required. The HCFS URI of the main R file to use as the driver. Must be a .R file.
	// +kubebuilder:validation:Required
	MainRFileURI *string `json:"mainRFileUri" tf:"main_r_file_uri,omitempty"`

	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	// +kubebuilder:validation:Optional
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
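A sketch of a SparkR job; MainRFileURI is the only required field (the path is hypothetical and must point to a .R file):

    mainR := "gs://my-bucket/jobs/analysis.R" // hypothetical

    rJob := v1beta1.SparkRJobParameters{
        MainRFileURI: &mainR,
    }
    _ = rJob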
func (*SparkRJobParameters) DeepCopy ¶
func (in *SparkRJobParameters) DeepCopy() *SparkRJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobParameters.
func (*SparkRJobParameters) DeepCopyInto ¶
func (in *SparkRJobParameters) DeepCopyInto(out *SparkRJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobLoggingConfigObservation ¶
type SparkSQLJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkSQLJobLoggingConfigObservation) DeepCopy ¶
func (in *SparkSQLJobLoggingConfigObservation) DeepCopy() *SparkSQLJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobLoggingConfigObservation.
func (*SparkSQLJobLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkSQLJobLoggingConfigObservation) DeepCopyInto(out *SparkSQLJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobLoggingConfigParameters ¶
type SparkSQLJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparkSQLJobLoggingConfigParameters) DeepCopy ¶
func (in *SparkSQLJobLoggingConfigParameters) DeepCopy() *SparkSQLJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobLoggingConfigParameters.
func (*SparkSQLJobLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkSQLJobLoggingConfigParameters) DeepCopyInto(out *SparkSQLJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobObservation ¶
type SparkSQLJobObservation struct {
	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	// Optional. The runtime log config for job execution.
	LoggingConfig []SparkSQLJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`

	// The HCFS URI of the script that contains SQL queries.
	QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"`

	// A list of queries.
	QueryList []SparkSQLJobQueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"`

	// Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"`
}
func (*SparkSQLJobObservation) DeepCopy ¶
func (in *SparkSQLJobObservation) DeepCopy() *SparkSQLJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobObservation.
func (*SparkSQLJobObservation) DeepCopyInto ¶
func (in *SparkSQLJobObservation) DeepCopyInto(out *SparkSQLJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobParameters ¶
type SparkSQLJobParameters struct {
	// Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
	// +kubebuilder:validation:Optional
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	// Optional. The runtime log config for job execution.
	// +kubebuilder:validation:Optional
	LoggingConfig []SparkSQLJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.
	// +kubebuilder:validation:Optional
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`

	// The HCFS URI of the script that contains SQL queries.
	// +kubebuilder:validation:Optional
	QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"`

	// A list of queries.
	// +kubebuilder:validation:Optional
	QueryList []SparkSQLJobQueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"`

	// Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	// +kubebuilder:validation:Optional
	ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"`
}
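A sketch of a Spark SQL job that inlines its queries via a query list and supplies a script variable; the statements and table name are hypothetical, and SparkSQLJobQueryListParameters is documented further down this page:

    q1 := "SHOW DATABASES"
    q2 := "SELECT * FROM ${table} LIMIT 10"
    table := "my_dataset.events" // hypothetical

    sqlJob := v1beta1.SparkSQLJobParameters{
        QueryList: []v1beta1.SparkSQLJobQueryListParameters{{
            Queries: []*string{&q1, &q2},
        }},
        ScriptVariables: map[string]*string{"table": &table},
    }
    _ = sqlJob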
func (*SparkSQLJobParameters) DeepCopy ¶
func (in *SparkSQLJobParameters) DeepCopy() *SparkSQLJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobParameters.
func (*SparkSQLJobParameters) DeepCopyInto ¶
func (in *SparkSQLJobParameters) DeepCopyInto(out *SparkSQLJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobQueryListObservation ¶
type SparkSQLJobQueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*SparkSQLJobQueryListObservation) DeepCopy ¶
func (in *SparkSQLJobQueryListObservation) DeepCopy() *SparkSQLJobQueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobQueryListObservation.
func (*SparkSQLJobQueryListObservation) DeepCopyInto ¶
func (in *SparkSQLJobQueryListObservation) DeepCopyInto(out *SparkSQLJobQueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobQueryListParameters ¶
type SparkSQLJobQueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": } } // +kubebuilder:validation:Required Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*SparkSQLJobQueryListParameters) DeepCopy ¶
func (in *SparkSQLJobQueryListParameters) DeepCopy() *SparkSQLJobQueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobQueryListParameters.
func (*SparkSQLJobQueryListParameters) DeepCopyInto ¶
func (in *SparkSQLJobQueryListParameters) DeepCopyInto(out *SparkSQLJobQueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigLoggingConfigObservation ¶
type SparksqlConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparksqlConfigLoggingConfigObservation) DeepCopy ¶
func (in *SparksqlConfigLoggingConfigObservation) DeepCopy() *SparksqlConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigLoggingConfigObservation.
func (*SparksqlConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *SparksqlConfigLoggingConfigObservation) DeepCopyInto(out *SparksqlConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigLoggingConfigParameters ¶
type SparksqlConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Required DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*SparksqlConfigLoggingConfigParameters) DeepCopy ¶
func (in *SparksqlConfigLoggingConfigParameters) DeepCopy() *SparksqlConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigLoggingConfigParameters.
func (*SparksqlConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *SparksqlConfigLoggingConfigParameters) DeepCopyInto(out *SparksqlConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigObservation ¶
type SparksqlConfigObservation struct {
	// HCFS URIs of jar files to be added to the Spark CLASSPATH.
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	LoggingConfig []SparksqlConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`

	// The HCFS URI of the script that contains SQL queries.
	// Conflicts with query_list
	QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"`

	// The list of SQL queries or statements to execute as part of the job.
	// Conflicts with query_file_uri
	QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"`

	// Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"`
}
func (*SparksqlConfigObservation) DeepCopy ¶
func (in *SparksqlConfigObservation) DeepCopy() *SparksqlConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigObservation.
func (*SparksqlConfigObservation) DeepCopyInto ¶
func (in *SparksqlConfigObservation) DeepCopyInto(out *SparksqlConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigParameters ¶
type SparksqlConfigParameters struct {
	// HCFS URIs of jar files to be added to the Spark CLASSPATH.
	// +kubebuilder:validation:Optional
	JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"`

	// +kubebuilder:validation:Optional
	LoggingConfig []SparksqlConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"`

	// A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
	// +kubebuilder:validation:Optional
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`

	// The HCFS URI of the script that contains SQL queries.
	// Conflicts with query_list
	// +kubebuilder:validation:Optional
	QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"`

	// The list of SQL queries or statements to execute as part of the job.
	// Conflicts with query_file_uri
	// +kubebuilder:validation:Optional
	QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"`

	// Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	// +kubebuilder:validation:Optional
	ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"`
}
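A sketch of the cluster-job variant, where the query list is a plain string slice and conflicts with query_file_uri (the query itself is hypothetical):

    q := "SELECT COUNT(*) FROM my_dataset.events" // hypothetical

    sqlCfg := v1beta1.SparksqlConfigParameters{
        QueryList: []*string{&q}, // leave QueryFileURI nil; the two fields conflict
    }
    _ = sqlCfg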
func (*SparksqlConfigParameters) DeepCopy ¶
func (in *SparksqlConfigParameters) DeepCopy() *SparksqlConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigParameters.
func (*SparksqlConfigParameters) DeepCopyInto ¶
func (in *SparksqlConfigParameters) DeepCopyInto(out *SparksqlConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusObservation ¶
type StatusObservation struct {
	// Optional job state details, such as an error description if the state is ERROR.
	Details *string `json:"details,omitempty" tf:"details,omitempty"`

	// A state message specifying the overall job state.
	State *string `json:"state,omitempty" tf:"state,omitempty"`

	// The time when this state was entered.
	StateStartTime *string `json:"stateStartTime,omitempty" tf:"state_start_time,omitempty"`

	// Additional state information, which includes status reported by the agent.
	Substate *string `json:"substate,omitempty" tf:"substate,omitempty"`
}
func (*StatusObservation) DeepCopy ¶
func (in *StatusObservation) DeepCopy() *StatusObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusObservation.
func (*StatusObservation) DeepCopyInto ¶
func (in *StatusObservation) DeepCopyInto(out *StatusObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusParameters ¶
type StatusParameters struct { }
func (*StatusParameters) DeepCopy ¶
func (in *StatusParameters) DeepCopy() *StatusParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusParameters.
func (*StatusParameters) DeepCopyInto ¶
func (in *StatusParameters) DeepCopyInto(out *StatusParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TelemetryConfigObservation ¶ added in v0.27.0
type TelemetryConfigObservation struct { // The output format of the Dataproc Metastore service's logs. // Default value is JSON. // Possible values are: LEGACY, JSON. LogFormat *string `json:"logFormat,omitempty" tf:"log_format,omitempty"` }
func (*TelemetryConfigObservation) DeepCopy ¶ added in v0.27.0
func (in *TelemetryConfigObservation) DeepCopy() *TelemetryConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TelemetryConfigObservation.
func (*TelemetryConfigObservation) DeepCopyInto ¶ added in v0.27.0
func (in *TelemetryConfigObservation) DeepCopyInto(out *TelemetryConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TelemetryConfigParameters ¶ added in v0.27.0
type TelemetryConfigParameters struct { // The output format of the Dataproc Metastore service's logs. // Default value is JSON. // Possible values are: LEGACY, JSON. // +kubebuilder:validation:Optional LogFormat *string `json:"logFormat,omitempty" tf:"log_format,omitempty"` }
func (*TelemetryConfigParameters) DeepCopy ¶ added in v0.27.0
func (in *TelemetryConfigParameters) DeepCopy() *TelemetryConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TelemetryConfigParameters.
func (*TelemetryConfigParameters) DeepCopyInto ¶ added in v0.27.0
func (in *TelemetryConfigParameters) DeepCopyInto(out *TelemetryConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValidationObservation ¶
type ValidationObservation struct { // Validation based on regular expressions. Regex []RegexObservation `json:"regex,omitempty" tf:"regex,omitempty"` // Required. List of allowed values for the parameter. Values []ValuesObservation `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValidationObservation) DeepCopy ¶
func (in *ValidationObservation) DeepCopy() *ValidationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidationObservation.
func (*ValidationObservation) DeepCopyInto ¶
func (in *ValidationObservation) DeepCopyInto(out *ValidationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValidationParameters ¶
type ValidationParameters struct { // Validation based on regular expressions. // +kubebuilder:validation:Optional Regex []RegexParameters `json:"regex,omitempty" tf:"regex,omitempty"` // Required. List of allowed values for the parameter. // +kubebuilder:validation:Optional Values []ValuesParameters `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValidationParameters) DeepCopy ¶
func (in *ValidationParameters) DeepCopy() *ValidationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidationParameters.
func (*ValidationParameters) DeepCopyInto ¶
func (in *ValidationParameters) DeepCopyInto(out *ValidationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValuesObservation ¶
type ValuesObservation struct {
	// Required. List of allowed values for the parameter.
	Values []*string `json:"values,omitempty" tf:"values,omitempty"`
}
func (*ValuesObservation) DeepCopy ¶
func (in *ValuesObservation) DeepCopy() *ValuesObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValuesObservation.
func (*ValuesObservation) DeepCopyInto ¶
func (in *ValuesObservation) DeepCopyInto(out *ValuesObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValuesParameters ¶
type ValuesParameters struct {
	// Required. List of allowed values for the parameter.
	// +kubebuilder:validation:Required
	Values []*string `json:"values" tf:"values,omitempty"`
}
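The sketch below shows one way a template parameter's allowed-value validation could be populated from Go. The import path and the ptr helper are assumptions for illustration; only the field names above come from this package.

package main

import (
	"fmt"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

// ptr takes the address of a literal; it is a local helper, not part of the API.
func ptr[T any](v T) *T { return &v }

func main() {
	// Restrict a workflow template parameter to an explicit list of allowed values.
	// Values is marked +kubebuilder:validation:Required, so the list must be non-empty.
	validation := v1beta1.ValidationParameters{
		Values: []v1beta1.ValuesParameters{{
			Values: []*string{ptr("n1-standard-4"), ptr("n1-standard-8")},
		}},
	}
	fmt.Println(len(validation.Values[0].Values)) // 2
}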
func (*ValuesParameters) DeepCopy ¶
func (in *ValuesParameters) DeepCopy() *ValuesParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValuesParameters.
func (*ValuesParameters) DeepCopyInto ¶
func (in *ValuesParameters) DeepCopyInto(out *ValuesParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type VirtualClusterConfigObservation ¶
type VirtualClusterConfigObservation struct {
	// Configuration of auxiliary services used by this cluster.
	// Structure defined below.
	AuxiliaryServicesConfig []AuxiliaryServicesConfigObservation `json:"auxiliaryServicesConfig,omitempty" tf:"auxiliary_services_config,omitempty"`

	// The configuration for running the Dataproc cluster on Kubernetes.
	// Structure defined below.
	KubernetesClusterConfig []KubernetesClusterConfigObservation `json:"kubernetesClusterConfig,omitempty" tf:"kubernetes_cluster_config,omitempty"`

	// The Cloud Storage staging bucket used to stage files,
	// such as Hadoop jars, between client machines and the cluster.
	// Note: If you don't explicitly specify a staging_bucket
	// then GCP will auto create / assign one for you. However, you are not guaranteed
	// an auto generated bucket which is solely dedicated to your cluster; it may be shared
	// with other clusters in the same region/zone also choosing to use the auto generation
	// option.
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"`
}
func (*VirtualClusterConfigObservation) DeepCopy ¶
func (in *VirtualClusterConfigObservation) DeepCopy() *VirtualClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VirtualClusterConfigObservation.
func (*VirtualClusterConfigObservation) DeepCopyInto ¶
func (in *VirtualClusterConfigObservation) DeepCopyInto(out *VirtualClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type VirtualClusterConfigParameters ¶
type VirtualClusterConfigParameters struct {
	// Configuration of auxiliary services used by this cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	AuxiliaryServicesConfig []AuxiliaryServicesConfigParameters `json:"auxiliaryServicesConfig,omitempty" tf:"auxiliary_services_config,omitempty"`

	// The configuration for running the Dataproc cluster on Kubernetes.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	KubernetesClusterConfig []KubernetesClusterConfigParameters `json:"kubernetesClusterConfig,omitempty" tf:"kubernetes_cluster_config,omitempty"`

	// The Cloud Storage staging bucket used to stage files,
	// such as Hadoop jars, between client machines and the cluster.
	// Note: If you don't explicitly specify a staging_bucket
	// then GCP will auto create / assign one for you. However, you are not guaranteed
	// an auto generated bucket which is solely dedicated to your cluster; it may be shared
	// with other clusters in the same region/zone also choosing to use the auto generation
	// option.
	// +kubebuilder:validation:Optional
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"`
}
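A minimal sketch of pinning the staging bucket so the cluster does not fall back to a shared, auto-generated bucket. It assumes the same import alias and ptr helper as the sketch above; the bucket name is hypothetical.

// Assumes: v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" and the ptr helper above.
func exampleVirtualClusterConfig() v1beta1.VirtualClusterConfigParameters {
	return v1beta1.VirtualClusterConfigParameters{
		// A dedicated bucket avoids sharing the auto-created staging bucket with other
		// clusters in the same region/zone.
		StagingBucket: ptr("my-dedicated-dataproc-staging"), // hypothetical bucket name
		// KubernetesClusterConfig would normally be set as well; its structure is
		// documented elsewhere in this package and omitted here.
	}
}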
func (*VirtualClusterConfigParameters) DeepCopy ¶
func (in *VirtualClusterConfigParameters) DeepCopy() *VirtualClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VirtualClusterConfigParameters.
func (*VirtualClusterConfigParameters) DeepCopyInto ¶
func (in *VirtualClusterConfigParameters) DeepCopyInto(out *VirtualClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigAcceleratorsObservation ¶
type WorkerConfigAcceleratorsObservation struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*WorkerConfigAcceleratorsObservation) DeepCopy ¶
func (in *WorkerConfigAcceleratorsObservation) DeepCopy() *WorkerConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigAcceleratorsObservation.
func (*WorkerConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *WorkerConfigAcceleratorsObservation) DeepCopyInto(out *WorkerConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigAcceleratorsParameters ¶
type WorkerConfigAcceleratorsParameters struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	// +kubebuilder:validation:Required
	AcceleratorCount *float64 `json:"acceleratorCount" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	// +kubebuilder:validation:Required
	AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type,omitempty"`
}
func (*WorkerConfigAcceleratorsParameters) DeepCopy ¶
func (in *WorkerConfigAcceleratorsParameters) DeepCopy() *WorkerConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigAcceleratorsParameters.
func (*WorkerConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *WorkerConfigAcceleratorsParameters) DeepCopyInto(out *WorkerConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigDiskConfigObservation ¶
type WorkerConfigDiskConfigObservation struct {
	// Size of the primary disk attached to each node, specified
	// in GB. The primary disk contains the boot volume and system libraries, and the
	// smallest allowed disk size is 10GB. GCP will default to a predetermined
	// computed value if not set (currently 500GB). Note: If SSDs are not
	// attached, it also contains the HDFS data blocks and Hadoop working directories.
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// The disk type of the primary disk attached to each node.
	// One of "pd-ssd" or "pd-standard". Defaults to "pd-standard".
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// The amount of local SSD disks that will be
	// attached to each worker cluster node. Defaults to 0.
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*WorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *WorkerConfigDiskConfigObservation) DeepCopy() *WorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigDiskConfigObservation.
func (*WorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *WorkerConfigDiskConfigObservation) DeepCopyInto(out *WorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigDiskConfigParameters ¶
type WorkerConfigDiskConfigParameters struct {
	// Size of the primary disk attached to each node, specified
	// in GB. The primary disk contains the boot volume and system libraries, and the
	// smallest allowed disk size is 10GB. GCP will default to a predetermined
	// computed value if not set (currently 500GB). Note: If SSDs are not
	// attached, it also contains the HDFS data blocks and Hadoop working directories.
	// +kubebuilder:validation:Optional
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// The disk type of the primary disk attached to each node.
	// One of "pd-ssd" or "pd-standard". Defaults to "pd-standard".
	// +kubebuilder:validation:Optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// The amount of local SSD disks that will be
	// attached to each worker cluster node. Defaults to 0.
	// +kubebuilder:validation:Optional
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
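A sketch of a worker disk configuration using these fields; it assumes the import alias and ptr helper from the earlier sketch, and the sizes are illustrative only.

// Assumes: v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" and the ptr helper above.
func exampleWorkerDiskConfig() v1beta1.WorkerConfigDiskConfigParameters {
	return v1beta1.WorkerConfigDiskConfigParameters{
		BootDiskSizeGb: ptr(100.0),    // in GB; smallest allowed is 10, GCP defaults to 500 when unset
		BootDiskType:   ptr("pd-ssd"), // "pd-ssd" or "pd-standard" (the default)
		NumLocalSsds:   ptr(1.0),      // defaults to 0
	}
}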
func (*WorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *WorkerConfigDiskConfigParameters) DeepCopy() *WorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigDiskConfigParameters.
func (*WorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *WorkerConfigDiskConfigParameters) DeepCopyInto(out *WorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigManagedGroupConfigObservation ¶
type WorkerConfigManagedGroupConfigObservation struct {
	// Output only. The name of the Instance Group Manager for this group.
	InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name,omitempty"`

	// Output only. The name of the Instance Template used for the Managed Instance Group.
	InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name,omitempty"`
}
func (*WorkerConfigManagedGroupConfigObservation) DeepCopy ¶
func (in *WorkerConfigManagedGroupConfigObservation) DeepCopy() *WorkerConfigManagedGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigManagedGroupConfigObservation.
func (*WorkerConfigManagedGroupConfigObservation) DeepCopyInto ¶
func (in *WorkerConfigManagedGroupConfigObservation) DeepCopyInto(out *WorkerConfigManagedGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigManagedGroupConfigParameters ¶
type WorkerConfigManagedGroupConfigParameters struct { }
func (*WorkerConfigManagedGroupConfigParameters) DeepCopy ¶
func (in *WorkerConfigManagedGroupConfigParameters) DeepCopy() *WorkerConfigManagedGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigManagedGroupConfigParameters.
func (*WorkerConfigManagedGroupConfigParameters) DeepCopyInto ¶
func (in *WorkerConfigManagedGroupConfigParameters) DeepCopyInto(out *WorkerConfigManagedGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigObservation ¶
type WorkerConfigObservation struct {
	// Maximum number of instances for this group.
	MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"`

	// Minimum number of instances for this group. Bounds: [2, maxInstances]. Defaults to 2.
	MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"`

	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"`
}
func (*WorkerConfigObservation) DeepCopy ¶
func (in *WorkerConfigObservation) DeepCopy() *WorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigObservation.
func (*WorkerConfigObservation) DeepCopyInto ¶
func (in *WorkerConfigObservation) DeepCopyInto(out *WorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigParameters ¶
type WorkerConfigParameters struct {
	// Maximum number of instances for this group.
	// +kubebuilder:validation:Required
	MaxInstances *float64 `json:"maxInstances" tf:"max_instances,omitempty"`

	// Minimum number of instances for this group. Bounds: [2, maxInstances]. Defaults to 2.
	// +kubebuilder:validation:Optional
	MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"`

	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	// +kubebuilder:validation:Optional
	Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"`
}
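The weight semantics described above can be illustrated with a short sketch; it assumes the import alias and ptr helper from the earlier sketch, and the numbers are illustrative.

// Assumes: v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" and the ptr helper above.
func examplePrimaryWorkerConfig() v1beta1.WorkerConfigParameters {
	return v1beta1.WorkerConfigParameters{
		MaxInstances: ptr(10.0), // required
		MinInstances: ptr(2.0),  // defaults to 2
		// With weight 2 here and weight 1 on the secondary worker group, the autoscaler
		// targets roughly two primary workers for every secondary worker.
		Weight: ptr(2.0),
	}
}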
func (*WorkerConfigParameters) DeepCopy ¶
func (in *WorkerConfigParameters) DeepCopy() *WorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigParameters.
func (*WorkerConfigParameters) DeepCopyInto ¶
func (in *WorkerConfigParameters) DeepCopyInto(out *WorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplate ¶
type WorkflowTemplate struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.jobs)",message="jobs is a required parameter"
	// +kubebuilder:validation:XValidation:rule="self.managementPolicy == 'ObserveOnly' || has(self.forProvider.placement)",message="placement is a required parameter"
	Spec   WorkflowTemplateSpec   `json:"spec"`
	Status WorkflowTemplateStatus `json:"status,omitempty"`
}
WorkflowTemplate is the Schema for the WorkflowTemplates API. A Workflow Template is a reusable workflow configuration.
+kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status"
+kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status"
+kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name"
+kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp"
+kubebuilder:subresource:status
+kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
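A minimal sketch of the generated managed-resource helpers in use. The provider's apis import path is an assumption; the condition helpers come from crossplane-runtime's common v1 package.

package main

import (
	"fmt"

	xpv1 "github.com/crossplane/crossplane-runtime/apis/common/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" // assumed import path
)

func main() {
	wt := &v1beta1.WorkflowTemplate{
		ObjectMeta: metav1.ObjectMeta{Name: "example-template"}, // hypothetical name
	}

	// Set a Ready condition and read it back through the generated accessors.
	wt.SetConditions(xpv1.Available())
	fmt.Println(wt.GetCondition(xpv1.TypeReady).Status) // True

	// DeepCopy returns an independent copy; mutating the copy leaves the original untouched.
	cp := wt.DeepCopy()
	cp.SetConditions(xpv1.Unavailable())
	fmt.Println(wt.GetCondition(xpv1.TypeReady).Status, cp.GetCondition(xpv1.TypeReady).Status) // True False
}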
func (*WorkflowTemplate) DeepCopy ¶
func (in *WorkflowTemplate) DeepCopy() *WorkflowTemplate
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplate.
func (*WorkflowTemplate) DeepCopyInto ¶
func (in *WorkflowTemplate) DeepCopyInto(out *WorkflowTemplate)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*WorkflowTemplate) DeepCopyObject ¶
func (in *WorkflowTemplate) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*WorkflowTemplate) GetCondition ¶
func (mg *WorkflowTemplate) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this WorkflowTemplate.
func (*WorkflowTemplate) GetConnectionDetailsMapping ¶
func (tr *WorkflowTemplate) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this WorkflowTemplate
func (*WorkflowTemplate) GetDeletionPolicy ¶
func (mg *WorkflowTemplate) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this WorkflowTemplate.
func (*WorkflowTemplate) GetID ¶
func (tr *WorkflowTemplate) GetID() string
GetID returns ID of underlying Terraform resource of this WorkflowTemplate
func (*WorkflowTemplate) GetManagementPolicy ¶ added in v0.31.0
func (mg *WorkflowTemplate) GetManagementPolicy() xpv1.ManagementPolicy
GetManagementPolicy of this WorkflowTemplate.
func (*WorkflowTemplate) GetObservation ¶
func (tr *WorkflowTemplate) GetObservation() (map[string]any, error)
GetObservation of this WorkflowTemplate
func (*WorkflowTemplate) GetParameters ¶
func (tr *WorkflowTemplate) GetParameters() (map[string]any, error)
GetParameters of this WorkflowTemplate
func (*WorkflowTemplate) GetProviderConfigReference ¶
func (mg *WorkflowTemplate) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this WorkflowTemplate.
func (*WorkflowTemplate) GetProviderReference ¶
func (mg *WorkflowTemplate) GetProviderReference() *xpv1.Reference
GetProviderReference of this WorkflowTemplate. Deprecated: Use GetProviderConfigReference.
func (*WorkflowTemplate) GetPublishConnectionDetailsTo ¶
func (mg *WorkflowTemplate) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this WorkflowTemplate.
func (*WorkflowTemplate) GetTerraformResourceType ¶
func (mg *WorkflowTemplate) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this WorkflowTemplate
func (*WorkflowTemplate) GetTerraformSchemaVersion ¶
func (tr *WorkflowTemplate) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*WorkflowTemplate) GetWriteConnectionSecretToReference ¶
func (mg *WorkflowTemplate) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this WorkflowTemplate.
func (*WorkflowTemplate) LateInitialize ¶
func (tr *WorkflowTemplate) LateInitialize(attrs []byte) (bool, error)
LateInitialize this WorkflowTemplate using its observed tfState. It returns true if there are any spec changes for the resource.
func (*WorkflowTemplate) SetConditions ¶
func (mg *WorkflowTemplate) SetConditions(c ...xpv1.Condition)
SetConditions of this WorkflowTemplate.
func (*WorkflowTemplate) SetDeletionPolicy ¶
func (mg *WorkflowTemplate) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this WorkflowTemplate.
func (*WorkflowTemplate) SetManagementPolicy ¶ added in v0.31.0
func (mg *WorkflowTemplate) SetManagementPolicy(r xpv1.ManagementPolicy)
SetManagementPolicy of this WorkflowTemplate.
func (*WorkflowTemplate) SetObservation ¶
func (tr *WorkflowTemplate) SetObservation(obs map[string]any) error
SetObservation for this WorkflowTemplate
func (*WorkflowTemplate) SetParameters ¶
func (tr *WorkflowTemplate) SetParameters(params map[string]any) error
SetParameters for this WorkflowTemplate
func (*WorkflowTemplate) SetProviderConfigReference ¶
func (mg *WorkflowTemplate) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this WorkflowTemplate.
func (*WorkflowTemplate) SetProviderReference ¶
func (mg *WorkflowTemplate) SetProviderReference(r *xpv1.Reference)
SetProviderReference of this WorkflowTemplate. Deprecated: Use SetProviderConfigReference.
func (*WorkflowTemplate) SetPublishConnectionDetailsTo ¶
func (mg *WorkflowTemplate) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this WorkflowTemplate.
func (*WorkflowTemplate) SetWriteConnectionSecretToReference ¶
func (mg *WorkflowTemplate) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this WorkflowTemplate.
type WorkflowTemplateList ¶
type WorkflowTemplateList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []WorkflowTemplate `json:"items"`
}
WorkflowTemplateList contains a list of WorkflowTemplates
func (*WorkflowTemplateList) DeepCopy ¶
func (in *WorkflowTemplateList) DeepCopy() *WorkflowTemplateList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateList.
func (*WorkflowTemplateList) DeepCopyInto ¶
func (in *WorkflowTemplateList) DeepCopyInto(out *WorkflowTemplateList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*WorkflowTemplateList) DeepCopyObject ¶
func (in *WorkflowTemplateList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*WorkflowTemplateList) GetItems ¶
func (l *WorkflowTemplateList) GetItems() []resource.Managed
GetItems of this WorkflowTemplateList.
type WorkflowTemplateObservation ¶
type WorkflowTemplateObservation struct {
	// Output only. The time template was created.
	CreateTime *string `json:"createTime,omitempty" tf:"create_time,omitempty"`

	// (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
	DagTimeout *string `json:"dagTimeout,omitempty" tf:"dag_timeout,omitempty"`

	// an identifier for the resource with format projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}}
	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// Required. The Directed Acyclic Graph of Jobs to submit.
	Jobs []JobsObservation `json:"jobs,omitempty" tf:"jobs,omitempty"`

	// Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
	Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"`

	// The location for the resource
	Location *string `json:"location,omitempty" tf:"location,omitempty"`

	// Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
	Parameters []ParametersObservation `json:"parameters,omitempty" tf:"parameters,omitempty"`

	// Required. WorkflowTemplate scheduling information.
	Placement []WorkflowTemplatePlacementObservation `json:"placement,omitempty" tf:"placement,omitempty"`

	// The project for the resource
	Project *string `json:"project,omitempty" tf:"project,omitempty"`

	// Output only. The time template was last updated.
	UpdateTime *string `json:"updateTime,omitempty" tf:"update_time,omitempty"`

	// Optional. Used to perform a consistent read-modify-write. This field should be left blank for a CreateWorkflowTemplate request. It is required for an UpdateWorkflowTemplate request, and must match the current server version. A typical update template flow would fetch the current template with a GetWorkflowTemplate request, which will return the current template with the version field filled in with the current server version. The user updates other fields in the template, then returns it as part of the UpdateWorkflowTemplate request.
	Version *float64 `json:"version,omitempty" tf:"version,omitempty"`
}
func (*WorkflowTemplateObservation) DeepCopy ¶
func (in *WorkflowTemplateObservation) DeepCopy() *WorkflowTemplateObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateObservation.
func (*WorkflowTemplateObservation) DeepCopyInto ¶
func (in *WorkflowTemplateObservation) DeepCopyInto(out *WorkflowTemplateObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateParameters ¶
type WorkflowTemplateParameters struct {
	// (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
	// +kubebuilder:validation:Optional
	DagTimeout *string `json:"dagTimeout,omitempty" tf:"dag_timeout,omitempty"`

	// Required. The Directed Acyclic Graph of Jobs to submit.
	// +kubebuilder:validation:Optional
	Jobs []JobsParameters `json:"jobs,omitempty" tf:"jobs,omitempty"`

	// Optional. The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster.
	// +kubebuilder:validation:Optional
	Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"`

	// The location for the resource
	// +kubebuilder:validation:Required
	Location *string `json:"location" tf:"location,omitempty"`

	// Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.
	// +kubebuilder:validation:Optional
	Parameters []ParametersParameters `json:"parameters,omitempty" tf:"parameters,omitempty"`

	// Required. WorkflowTemplate scheduling information.
	// +kubebuilder:validation:Optional
	Placement []WorkflowTemplatePlacementParameters `json:"placement,omitempty" tf:"placement,omitempty"`

	// The project for the resource
	// +kubebuilder:validation:Optional
	Project *string `json:"project,omitempty" tf:"project,omitempty"`

	// Optional. Used to perform a consistent read-modify-write. This field should be left blank for a CreateWorkflowTemplate request. It is required for an UpdateWorkflowTemplate request, and must match the current server version. A typical update template flow would fetch the current template with a GetWorkflowTemplate request, which will return the current template with the version field filled in with the current server version. The user updates other fields in the template, then returns it as part of the UpdateWorkflowTemplate request.
	// +kubebuilder:validation:Optional
	Version *float64 `json:"version,omitempty" tf:"version,omitempty"`
}
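A sketch of a forProvider block built from these parameters, assuming the import alias and ptr helper from the earlier sketches. Jobs and Placement are elided for brevity even though the CRD's validation requires them unless the resource is ObserveOnly.

// Assumes: v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" and the ptr helper above.
func exampleWorkflowTemplateForProvider() v1beta1.WorkflowTemplateParameters {
	return v1beta1.WorkflowTemplateParameters{
		Location:   ptr("us-central1"), // required
		DagTimeout: ptr("30m"),         // must be between "10m" and "24h" (or "1d")
		Labels:     map[string]*string{"env": ptr("dev")},
		// Jobs and Placement are required by the CRD's CEL rules (unless managementPolicy is
		// ObserveOnly); their structures are documented earlier in this package.
	}
}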
func (*WorkflowTemplateParameters) DeepCopy ¶
func (in *WorkflowTemplateParameters) DeepCopy() *WorkflowTemplateParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateParameters.
func (*WorkflowTemplateParameters) DeepCopyInto ¶
func (in *WorkflowTemplateParameters) DeepCopyInto(out *WorkflowTemplateParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplatePlacementObservation ¶
type WorkflowTemplatePlacementObservation struct {
	// Optional. A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
	ClusterSelector []ClusterSelectorObservation `json:"clusterSelector,omitempty" tf:"cluster_selector,omitempty"`

	// A cluster that is managed by the workflow.
	ManagedCluster []ManagedClusterObservation `json:"managedCluster,omitempty" tf:"managed_cluster,omitempty"`
}
func (*WorkflowTemplatePlacementObservation) DeepCopy ¶
func (in *WorkflowTemplatePlacementObservation) DeepCopy() *WorkflowTemplatePlacementObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplatePlacementObservation.
func (*WorkflowTemplatePlacementObservation) DeepCopyInto ¶
func (in *WorkflowTemplatePlacementObservation) DeepCopyInto(out *WorkflowTemplatePlacementObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplatePlacementParameters ¶
type WorkflowTemplatePlacementParameters struct {
	// Optional. A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted.
	// +kubebuilder:validation:Optional
	ClusterSelector []ClusterSelectorParameters `json:"clusterSelector,omitempty" tf:"cluster_selector,omitempty"`

	// A cluster that is managed by the workflow.
	// +kubebuilder:validation:Optional
	ManagedCluster []ManagedClusterParameters `json:"managedCluster,omitempty" tf:"managed_cluster,omitempty"`
}
func (*WorkflowTemplatePlacementParameters) DeepCopy ¶
func (in *WorkflowTemplatePlacementParameters) DeepCopy() *WorkflowTemplatePlacementParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplatePlacementParameters.
func (*WorkflowTemplatePlacementParameters) DeepCopyInto ¶
func (in *WorkflowTemplatePlacementParameters) DeepCopyInto(out *WorkflowTemplatePlacementParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateSpec ¶
type WorkflowTemplateSpec struct {
	v1.ResourceSpec `json:",inline"`
	ForProvider     WorkflowTemplateParameters `json:"forProvider"`
}
WorkflowTemplateSpec defines the desired state of WorkflowTemplate
func (*WorkflowTemplateSpec) DeepCopy ¶
func (in *WorkflowTemplateSpec) DeepCopy() *WorkflowTemplateSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpec.
func (*WorkflowTemplateSpec) DeepCopyInto ¶
func (in *WorkflowTemplateSpec) DeepCopyInto(out *WorkflowTemplateSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateStatus ¶
type WorkflowTemplateStatus struct {
	v1.ResourceStatus `json:",inline"`
	AtProvider        WorkflowTemplateObservation `json:"atProvider,omitempty"`
}
WorkflowTemplateStatus defines the observed state of WorkflowTemplate.
func (*WorkflowTemplateStatus) DeepCopy ¶
func (in *WorkflowTemplateStatus) DeepCopy() *WorkflowTemplateStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateStatus.
func (*WorkflowTemplateStatus) DeepCopyInto ¶
func (in *WorkflowTemplateStatus) DeepCopyInto(out *WorkflowTemplateStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type YarnConfigObservation ¶
type YarnConfigObservation struct {
	// Timeout for YARN graceful decommissioning of Node Managers. Specifies the
	// duration to wait for jobs to complete before forcefully removing workers
	// (and potentially interrupting jobs). Only applicable to downscaling operations.
	// Bounds: [0s, 1d].
	GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"`

	// Fraction of average pending memory in the last cooldown period for which to
	// remove workers. A scale-down factor of 1 will result in scaling down so that there
	// is no available memory remaining after the update (more aggressive scaling).
	// A scale-down factor of 0 disables removing workers, which can be beneficial for
	// autoscaling a single job.
	// Bounds: [0.0, 1.0].
	ScaleDownFactor *float64 `json:"scaleDownFactor,omitempty" tf:"scale_down_factor,omitempty"`

	// Minimum scale-down threshold as a fraction of total cluster size before scaling occurs.
	// For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler must
	// recommend at least a 2 worker scale-down for the cluster to scale. A threshold of 0
	// means the autoscaler will scale down on any recommended change.
	// Bounds: [0.0, 1.0]. Default: 0.0.
	ScaleDownMinWorkerFraction *float64 `json:"scaleDownMinWorkerFraction,omitempty" tf:"scale_down_min_worker_fraction,omitempty"`

	// Fraction of average pending memory in the last cooldown period for which to
	// add workers. A scale-up factor of 1.0 will result in scaling up so that there
	// is no pending memory remaining after the update (more aggressive scaling).
	// A scale-up factor closer to 0 will result in a smaller magnitude of scaling up
	// (less aggressive scaling).
	// Bounds: [0.0, 1.0].
	ScaleUpFactor *float64 `json:"scaleUpFactor,omitempty" tf:"scale_up_factor,omitempty"`

	// Minimum scale-up threshold as a fraction of total cluster size before scaling
	// occurs. For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler
	// must recommend at least a 2-worker scale-up for the cluster to scale. A threshold of
	// 0 means the autoscaler will scale up on any recommended change.
	// Bounds: [0.0, 1.0]. Default: 0.0.
	ScaleUpMinWorkerFraction *float64 `json:"scaleUpMinWorkerFraction,omitempty" tf:"scale_up_min_worker_fraction,omitempty"`
}
func (*YarnConfigObservation) DeepCopy ¶
func (in *YarnConfigObservation) DeepCopy() *YarnConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new YarnConfigObservation.
func (*YarnConfigObservation) DeepCopyInto ¶
func (in *YarnConfigObservation) DeepCopyInto(out *YarnConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type YarnConfigParameters ¶
type YarnConfigParameters struct {
	// Timeout for YARN graceful decommissioning of Node Managers. Specifies the
	// duration to wait for jobs to complete before forcefully removing workers
	// (and potentially interrupting jobs). Only applicable to downscaling operations.
	// Bounds: [0s, 1d].
	// +kubebuilder:validation:Required
	GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout" tf:"graceful_decommission_timeout,omitempty"`

	// Fraction of average pending memory in the last cooldown period for which to
	// remove workers. A scale-down factor of 1 will result in scaling down so that there
	// is no available memory remaining after the update (more aggressive scaling).
	// A scale-down factor of 0 disables removing workers, which can be beneficial for
	// autoscaling a single job.
	// Bounds: [0.0, 1.0].
	// +kubebuilder:validation:Required
	ScaleDownFactor *float64 `json:"scaleDownFactor" tf:"scale_down_factor,omitempty"`

	// Minimum scale-down threshold as a fraction of total cluster size before scaling occurs.
	// For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler must
	// recommend at least a 2 worker scale-down for the cluster to scale. A threshold of 0
	// means the autoscaler will scale down on any recommended change.
	// Bounds: [0.0, 1.0]. Default: 0.0.
	// +kubebuilder:validation:Optional
	ScaleDownMinWorkerFraction *float64 `json:"scaleDownMinWorkerFraction,omitempty" tf:"scale_down_min_worker_fraction,omitempty"`

	// Fraction of average pending memory in the last cooldown period for which to
	// add workers. A scale-up factor of 1.0 will result in scaling up so that there
	// is no pending memory remaining after the update (more aggressive scaling).
	// A scale-up factor closer to 0 will result in a smaller magnitude of scaling up
	// (less aggressive scaling).
	// Bounds: [0.0, 1.0].
	// +kubebuilder:validation:Required
	ScaleUpFactor *float64 `json:"scaleUpFactor" tf:"scale_up_factor,omitempty"`

	// Minimum scale-up threshold as a fraction of total cluster size before scaling
	// occurs. For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler
	// must recommend at least a 2-worker scale-up for the cluster to scale. A threshold of
	// 0 means the autoscaler will scale up on any recommended change.
	// Bounds: [0.0, 1.0]. Default: 0.0.
	// +kubebuilder:validation:Optional
	ScaleUpMinWorkerFraction *float64 `json:"scaleUpMinWorkerFraction,omitempty" tf:"scale_up_min_worker_fraction,omitempty"`
}
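A sketch of a YARN autoscaling configuration using these fields; it assumes the import alias and ptr helper from the earlier sketches, and the chosen factors are illustrative rather than recommendations.

// Assumes: v1beta1 "github.com/upbound/provider-gcp/apis/dataproc/v1beta1" and the ptr helper above.
func exampleYarnConfig() v1beta1.YarnConfigParameters {
	return v1beta1.YarnConfigParameters{
		GracefulDecommissionTimeout: ptr("300s"), // wait up to 5 minutes for jobs before removing workers
		ScaleUpFactor:               ptr(0.5),    // add workers for half of the average pending memory
		ScaleDownFactor:             ptr(1.0),    // aggressive downscaling; 0.0 would disable removing workers
		ScaleUpMinWorkerFraction:    ptr(0.1),    // ignore scale-up recommendations below 10% of the cluster
		ScaleDownMinWorkerFraction:  ptr(0.0),    // act on any recommended scale-down
	}
}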
func (*YarnConfigParameters) DeepCopy ¶
func (in *YarnConfigParameters) DeepCopy() *YarnConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new YarnConfigParameters.
func (*YarnConfigParameters) DeepCopyInto ¶
func (in *YarnConfigParameters) DeepCopyInto(out *YarnConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.