Documentation ¶
Overview ¶
Code-generation markers for this package: `+kubebuilder:object:generate=true`, `+groupName=dataproc.gcp.upbound.io`, `+versionName=v1beta2` — these configure DeepCopy generation and set the CRD group/version to dataproc.gcp.upbound.io/v1beta2.
Index ¶
- Constants
- Variables
- type AcceleratorsInitParameters
- type AcceleratorsObservation
- type AcceleratorsParameters
- type AutoscalingConfigInitParameters
- type AutoscalingConfigObservation
- type AutoscalingConfigParameters
- type AutoscalingInitParameters
- type AutoscalingObservation
- type AutoscalingParameters
- type AutoscalingPolicy
- func (in *AutoscalingPolicy) DeepCopy() *AutoscalingPolicy
- func (in *AutoscalingPolicy) DeepCopyInto(out *AutoscalingPolicy)
- func (in *AutoscalingPolicy) DeepCopyObject() runtime.Object
- func (mg *AutoscalingPolicy) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *AutoscalingPolicy) GetConnectionDetailsMapping() map[string]string
- func (mg *AutoscalingPolicy) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *AutoscalingPolicy) GetID() string
- func (tr *AutoscalingPolicy) GetInitParameters() (map[string]any, error)
- func (mg *AutoscalingPolicy) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *AutoscalingPolicy) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *AutoscalingPolicy) GetObservation() (map[string]any, error)
- func (tr *AutoscalingPolicy) GetParameters() (map[string]any, error)
- func (mg *AutoscalingPolicy) GetProviderConfigReference() *xpv1.Reference
- func (mg *AutoscalingPolicy) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *AutoscalingPolicy) GetTerraformResourceType() string
- func (tr *AutoscalingPolicy) GetTerraformSchemaVersion() int
- func (mg *AutoscalingPolicy) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *AutoscalingPolicy) Hub()
- func (tr *AutoscalingPolicy) LateInitialize(attrs []byte) (bool, error)
- func (mg *AutoscalingPolicy) SetConditions(c ...xpv1.Condition)
- func (mg *AutoscalingPolicy) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *AutoscalingPolicy) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *AutoscalingPolicy) SetObservation(obs map[string]any) error
- func (tr *AutoscalingPolicy) SetParameters(params map[string]any) error
- func (mg *AutoscalingPolicy) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *AutoscalingPolicy) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *AutoscalingPolicy) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type AutoscalingPolicyInitParameters
- type AutoscalingPolicyList
- type AutoscalingPolicyObservation
- type AutoscalingPolicyParameters
- type AutoscalingPolicySpec
- type AutoscalingPolicyStatus
- type AuxiliaryNodeGroupsInitParameters
- type AuxiliaryNodeGroupsObservation
- type AuxiliaryNodeGroupsParameters
- type AuxiliaryServicesConfigInitParameters
- type AuxiliaryServicesConfigMetastoreConfigInitParameters
- type AuxiliaryServicesConfigMetastoreConfigObservation
- type AuxiliaryServicesConfigMetastoreConfigParameters
- type AuxiliaryServicesConfigObservation
- type AuxiliaryServicesConfigParameters
- type AuxiliaryVersionsInitParameters
- type AuxiliaryVersionsObservation
- type AuxiliaryVersionsParameters
- type BasicAlgorithmInitParameters
- type BasicAlgorithmObservation
- type BasicAlgorithmParameters
- type Cluster
- func (in *Cluster) DeepCopy() *Cluster
- func (in *Cluster) DeepCopyInto(out *Cluster)
- func (in *Cluster) DeepCopyObject() runtime.Object
- func (mg *Cluster) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Cluster) GetConnectionDetailsMapping() map[string]string
- func (mg *Cluster) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Cluster) GetID() string
- func (tr *Cluster) GetInitParameters() (map[string]any, error)
- func (mg *Cluster) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Cluster) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Cluster) GetObservation() (map[string]any, error)
- func (tr *Cluster) GetParameters() (map[string]any, error)
- func (mg *Cluster) GetProviderConfigReference() *xpv1.Reference
- func (mg *Cluster) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Cluster) GetTerraformResourceType() string
- func (tr *Cluster) GetTerraformSchemaVersion() int
- func (mg *Cluster) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Cluster) Hub()
- func (tr *Cluster) LateInitialize(attrs []byte) (bool, error)
- func (mg *Cluster) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Cluster) SetConditions(c ...xpv1.Condition)
- func (mg *Cluster) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Cluster) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Cluster) SetObservation(obs map[string]any) error
- func (tr *Cluster) SetParameters(params map[string]any) error
- func (mg *Cluster) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Cluster) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Cluster) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type ClusterConfigInitParameters
- type ClusterConfigObservation
- type ClusterConfigParameters
- type ClusterConfigWorkerConfigInitParameters
- type ClusterConfigWorkerConfigObservation
- type ClusterConfigWorkerConfigParameters
- type ClusterInitParameters
- type ClusterList
- type ClusterObservation
- type ClusterParameters
- type ClusterSelectorInitParameters
- type ClusterSelectorObservation
- type ClusterSelectorParameters
- type ClusterSpec
- type ClusterStatus
- type ConfigAutoscalingConfigInitParameters
- type ConfigAutoscalingConfigObservation
- type ConfigAutoscalingConfigParameters
- type ConfigEncryptionConfigInitParameters
- type ConfigEncryptionConfigObservation
- type ConfigEncryptionConfigParameters
- type ConfigEndpointConfigInitParameters
- type ConfigEndpointConfigObservation
- type ConfigEndpointConfigParameters
- type ConfigGceClusterConfigInitParameters
- type ConfigGceClusterConfigObservation
- type ConfigGceClusterConfigParameters
- type ConfigInitParameters
- type ConfigLifecycleConfigInitParameters
- type ConfigLifecycleConfigObservation
- type ConfigLifecycleConfigParameters
- type ConfigMasterConfigAcceleratorsInitParameters
- type ConfigMasterConfigAcceleratorsObservation
- type ConfigMasterConfigAcceleratorsParameters
- type ConfigMasterConfigDiskConfigInitParameters
- type ConfigMasterConfigDiskConfigObservation
- type ConfigMasterConfigDiskConfigParameters
- type ConfigMasterConfigInitParameters
- type ConfigMasterConfigObservation
- type ConfigMasterConfigParameters
- type ConfigObservation
- type ConfigParameters
- type ConfigSecondaryWorkerConfigInitParameters
- type ConfigSecondaryWorkerConfigObservation
- type ConfigSecondaryWorkerConfigParameters
- type ConfigSecurityConfigInitParameters
- type ConfigSecurityConfigObservation
- type ConfigSecurityConfigParameters
- type ConfigSoftwareConfigInitParameters
- type ConfigSoftwareConfigObservation
- type ConfigSoftwareConfigParameters
- type ConfigWorkerConfigAcceleratorsInitParameters
- type ConfigWorkerConfigAcceleratorsObservation
- type ConfigWorkerConfigAcceleratorsParameters
- type ConfigWorkerConfigDiskConfigInitParameters
- type ConfigWorkerConfigDiskConfigObservation
- type ConfigWorkerConfigDiskConfigParameters
- type ConfigWorkerConfigInitParameters
- type ConfigWorkerConfigObservation
- type ConfigWorkerConfigParameters
- type ConsumersInitParameters
- type ConsumersObservation
- type ConsumersParameters
- type DataCatalogConfigInitParameters
- type DataCatalogConfigObservation
- type DataCatalogConfigParameters
- type DataprocMetricConfigInitParameters
- type DataprocMetricConfigObservation
- type DataprocMetricConfigParameters
- type DiskConfigInitParameters
- type DiskConfigObservation
- type DiskConfigParameters
- type EncryptionConfigInitParameters
- type EncryptionConfigObservation
- type EncryptionConfigParameters
- type EndpointConfigInitParameters
- type EndpointConfigObservation
- type EndpointConfigParameters
- type GceClusterConfigInitParameters
- type GceClusterConfigNodeGroupAffinityInitParameters
- type GceClusterConfigNodeGroupAffinityObservation
- type GceClusterConfigNodeGroupAffinityParameters
- type GceClusterConfigObservation
- type GceClusterConfigParameters
- type GceClusterConfigReservationAffinityInitParameters
- type GceClusterConfigReservationAffinityObservation
- type GceClusterConfigReservationAffinityParameters
- type GceClusterConfigShieldedInstanceConfigInitParameters
- type GceClusterConfigShieldedInstanceConfigObservation
- type GceClusterConfigShieldedInstanceConfigParameters
- type GkeClusterConfigInitParameters
- type GkeClusterConfigObservation
- type GkeClusterConfigParameters
- type HadoopConfigInitParameters
- type HadoopConfigObservation
- type HadoopConfigParameters
- type HadoopJobInitParameters
- type HadoopJobLoggingConfigInitParameters
- type HadoopJobLoggingConfigObservation
- type HadoopJobLoggingConfigParameters
- type HadoopJobObservation
- type HadoopJobParameters
- type HiveConfigInitParameters
- type HiveConfigObservation
- type HiveConfigParameters
- type HiveJobInitParameters
- type HiveJobObservation
- type HiveJobParameters
- type HiveMetastoreConfigInitParameters
- type HiveMetastoreConfigKerberosConfigInitParameters
- type HiveMetastoreConfigKerberosConfigObservation
- type HiveMetastoreConfigKerberosConfigParameters
- type HiveMetastoreConfigObservation
- type HiveMetastoreConfigParameters
- type InitializationActionInitParameters
- type InitializationActionObservation
- type InitializationActionParameters
- type InitializationActionsInitParameters
- type InitializationActionsObservation
- type InitializationActionsParameters
- type InstanceFlexibilityPolicyInitParameters
- type InstanceFlexibilityPolicyObservation
- type InstanceFlexibilityPolicyParameters
- type InstanceSelectionListInitParameters
- type InstanceSelectionListObservation
- type InstanceSelectionListParameters
- type InstanceSelectionResultsInitParameters
- type InstanceSelectionResultsObservation
- type InstanceSelectionResultsParameters
- type Job
- func (in *Job) DeepCopy() *Job
- func (in *Job) DeepCopyInto(out *Job)
- func (in *Job) DeepCopyObject() runtime.Object
- func (mg *Job) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *Job) GetConnectionDetailsMapping() map[string]string
- func (mg *Job) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *Job) GetID() string
- func (tr *Job) GetInitParameters() (map[string]any, error)
- func (mg *Job) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *Job) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *Job) GetObservation() (map[string]any, error)
- func (tr *Job) GetParameters() (map[string]any, error)
- func (mg *Job) GetProviderConfigReference() *xpv1.Reference
- func (mg *Job) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *Job) GetTerraformResourceType() string
- func (tr *Job) GetTerraformSchemaVersion() int
- func (mg *Job) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *Job) Hub()
- func (tr *Job) LateInitialize(attrs []byte) (bool, error)
- func (mg *Job) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *Job) SetConditions(c ...xpv1.Condition)
- func (mg *Job) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *Job) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *Job) SetObservation(obs map[string]any) error
- func (tr *Job) SetParameters(params map[string]any) error
- func (mg *Job) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *Job) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *Job) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type JobInitParameters
- type JobList
- type JobObservation
- type JobParameters
- type JobSpec
- type JobStatus
- type JobsInitParameters
- type JobsObservation
- type JobsParameters
- type JobsSchedulingInitParameters
- type JobsSchedulingObservation
- type JobsSchedulingParameters
- type KerberosConfigInitParameters
- type KerberosConfigObservation
- type KerberosConfigParameters
- type KeytabInitParameters
- type KeytabObservation
- type KeytabParameters
- type KubernetesClusterConfigInitParameters
- type KubernetesClusterConfigObservation
- type KubernetesClusterConfigParameters
- type KubernetesSoftwareConfigInitParameters
- type KubernetesSoftwareConfigObservation
- type KubernetesSoftwareConfigParameters
- type LifecycleConfigInitParameters
- type LifecycleConfigObservation
- type LifecycleConfigParameters
- type LoggingConfigInitParameters
- type LoggingConfigObservation
- type LoggingConfigParameters
- type MaintenanceWindowInitParameters
- type MaintenanceWindowObservation
- type MaintenanceWindowParameters
- type ManagedClusterConfigInitParameters
- type ManagedClusterConfigObservation
- type ManagedClusterConfigParameters
- type ManagedClusterInitParameters
- type ManagedClusterObservation
- type ManagedClusterParameters
- type ManagedGroupConfigInitParameters
- type ManagedGroupConfigObservation
- type ManagedGroupConfigParameters
- type MasterConfigAcceleratorsInitParameters
- type MasterConfigAcceleratorsObservation
- type MasterConfigAcceleratorsParameters
- type MasterConfigDiskConfigInitParameters
- type MasterConfigDiskConfigObservation
- type MasterConfigDiskConfigParameters
- type MasterConfigInitParameters
- type MasterConfigObservation
- type MasterConfigParameters
- type MetadataIntegrationInitParameters
- type MetadataIntegrationObservation
- type MetadataIntegrationParameters
- type MetastoreConfigInitParameters
- type MetastoreConfigObservation
- type MetastoreConfigParameters
- type MetastoreService
- func (in *MetastoreService) DeepCopy() *MetastoreService
- func (in *MetastoreService) DeepCopyInto(out *MetastoreService)
- func (in *MetastoreService) DeepCopyObject() runtime.Object
- func (mg *MetastoreService) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *MetastoreService) GetConnectionDetailsMapping() map[string]string
- func (mg *MetastoreService) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *MetastoreService) GetID() string
- func (tr *MetastoreService) GetInitParameters() (map[string]any, error)
- func (mg *MetastoreService) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *MetastoreService) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *MetastoreService) GetObservation() (map[string]any, error)
- func (tr *MetastoreService) GetParameters() (map[string]any, error)
- func (mg *MetastoreService) GetProviderConfigReference() *xpv1.Reference
- func (mg *MetastoreService) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *MetastoreService) GetTerraformResourceType() string
- func (tr *MetastoreService) GetTerraformSchemaVersion() int
- func (mg *MetastoreService) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *MetastoreService) Hub()
- func (tr *MetastoreService) LateInitialize(attrs []byte) (bool, error)
- func (mg *MetastoreService) ResolveReferences(ctx context.Context, c client.Reader) error
- func (mg *MetastoreService) SetConditions(c ...xpv1.Condition)
- func (mg *MetastoreService) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *MetastoreService) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *MetastoreService) SetObservation(obs map[string]any) error
- func (tr *MetastoreService) SetParameters(params map[string]any) error
- func (mg *MetastoreService) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *MetastoreService) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *MetastoreService) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type MetastoreServiceEncryptionConfigInitParameters
- type MetastoreServiceEncryptionConfigObservation
- type MetastoreServiceEncryptionConfigParameters
- type MetastoreServiceInitParameters
- type MetastoreServiceList
- type MetastoreServiceObservation
- type MetastoreServiceParameters
- type MetastoreServiceSpec
- type MetastoreServiceStatus
- type MetricsInitParameters
- type MetricsObservation
- type MetricsParameters
- type NetworkConfigInitParameters
- type NetworkConfigObservation
- type NetworkConfigParameters
- type NodeGroupAffinityInitParameters
- type NodeGroupAffinityObservation
- type NodeGroupAffinityParameters
- type NodeGroupConfigInitParameters
- type NodeGroupConfigObservation
- type NodeGroupConfigParameters
- type NodeGroupInitParameters
- type NodeGroupObservation
- type NodeGroupParameters
- type NodePoolConfigInitParameters
- type NodePoolConfigObservation
- type NodePoolConfigParameters
- type NodePoolTargetInitParameters
- type NodePoolTargetObservation
- type NodePoolTargetParameters
- type ParametersInitParameters
- type ParametersObservation
- type ParametersParameters
- type PigConfigInitParameters
- type PigConfigLoggingConfigInitParameters
- type PigConfigLoggingConfigObservation
- type PigConfigLoggingConfigParameters
- type PigConfigObservation
- type PigConfigParameters
- type PigJobInitParameters
- type PigJobLoggingConfigInitParameters
- type PigJobLoggingConfigObservation
- type PigJobLoggingConfigParameters
- type PigJobObservation
- type PigJobParameters
- type PigJobQueryListInitParameters
- type PigJobQueryListObservation
- type PigJobQueryListParameters
- type PlacementInitParameters
- type PlacementObservation
- type PlacementParameters
- type PreemptibleWorkerConfigDiskConfigInitParameters
- type PreemptibleWorkerConfigDiskConfigObservation
- type PreemptibleWorkerConfigDiskConfigParameters
- type PreemptibleWorkerConfigInitParameters
- type PreemptibleWorkerConfigObservation
- type PreemptibleWorkerConfigParameters
- type PrestoConfigInitParameters
- type PrestoConfigLoggingConfigInitParameters
- type PrestoConfigLoggingConfigObservation
- type PrestoConfigLoggingConfigParameters
- type PrestoConfigObservation
- type PrestoConfigParameters
- type PrestoJobInitParameters
- type PrestoJobLoggingConfigInitParameters
- type PrestoJobLoggingConfigObservation
- type PrestoJobLoggingConfigParameters
- type PrestoJobObservation
- type PrestoJobParameters
- type PrestoJobQueryListInitParameters
- type PrestoJobQueryListObservation
- type PrestoJobQueryListParameters
- type PysparkConfigInitParameters
- type PysparkConfigLoggingConfigInitParameters
- type PysparkConfigLoggingConfigObservation
- type PysparkConfigLoggingConfigParameters
- type PysparkConfigObservation
- type PysparkConfigParameters
- type PysparkJobInitParameters
- type PysparkJobLoggingConfigInitParameters
- type PysparkJobLoggingConfigObservation
- type PysparkJobLoggingConfigParameters
- type PysparkJobObservation
- type PysparkJobParameters
- type QueryListInitParameters
- type QueryListObservation
- type QueryListParameters
- type ReferenceInitParameters
- type ReferenceObservation
- type ReferenceParameters
- type RegexInitParameters
- type RegexObservation
- type RegexParameters
- type ReservationAffinityInitParameters
- type ReservationAffinityObservation
- type ReservationAffinityParameters
- type ScalingConfigInitParameters
- type ScalingConfigObservation
- type ScalingConfigParameters
- type ScheduledBackupInitParameters
- type ScheduledBackupObservation
- type ScheduledBackupParameters
- type SchedulingInitParameters
- type SchedulingObservation
- type SchedulingParameters
- type SecondaryWorkerConfigAcceleratorsInitParameters
- type SecondaryWorkerConfigAcceleratorsObservation
- type SecondaryWorkerConfigAcceleratorsParameters
- type SecondaryWorkerConfigDiskConfigInitParameters
- type SecondaryWorkerConfigDiskConfigObservation
- type SecondaryWorkerConfigDiskConfigParameters
- type SecondaryWorkerConfigInitParameters
- type SecondaryWorkerConfigManagedGroupConfigInitParameters
- type SecondaryWorkerConfigManagedGroupConfigObservation
- type SecondaryWorkerConfigManagedGroupConfigParameters
- type SecondaryWorkerConfigObservation
- type SecondaryWorkerConfigParameters
- type SecurityConfigInitParameters
- type SecurityConfigKerberosConfigInitParameters
- type SecurityConfigKerberosConfigObservation
- type SecurityConfigKerberosConfigParameters
- type SecurityConfigObservation
- type SecurityConfigParameters
- type ShieldedInstanceConfigInitParameters
- type ShieldedInstanceConfigObservation
- type ShieldedInstanceConfigParameters
- type SoftwareConfigInitParameters
- type SoftwareConfigObservation
- type SoftwareConfigParameters
- type SparkConfigInitParameters
- type SparkConfigLoggingConfigInitParameters
- type SparkConfigLoggingConfigObservation
- type SparkConfigLoggingConfigParameters
- type SparkConfigObservation
- type SparkConfigParameters
- type SparkHistoryServerConfigInitParameters
- type SparkHistoryServerConfigObservation
- type SparkHistoryServerConfigParameters
- type SparkJobInitParameters
- type SparkJobLoggingConfigInitParameters
- type SparkJobLoggingConfigObservation
- type SparkJobLoggingConfigParameters
- type SparkJobObservation
- type SparkJobParameters
- type SparkRJobInitParameters
- type SparkRJobLoggingConfigInitParameters
- type SparkRJobLoggingConfigObservation
- type SparkRJobLoggingConfigParameters
- type SparkRJobObservation
- type SparkRJobParameters
- type SparkSQLJobInitParameters
- type SparkSQLJobLoggingConfigInitParameters
- type SparkSQLJobLoggingConfigObservation
- type SparkSQLJobLoggingConfigParameters
- type SparkSQLJobObservation
- type SparkSQLJobParameters
- type SparkSQLJobQueryListInitParameters
- type SparkSQLJobQueryListObservation
- type SparkSQLJobQueryListParameters
- type SparksqlConfigInitParameters
- type SparksqlConfigLoggingConfigInitParameters
- type SparksqlConfigLoggingConfigObservation
- type SparksqlConfigLoggingConfigParameters
- type SparksqlConfigObservation
- type SparksqlConfigParameters
- type StatusInitParameters
- type StatusObservation
- type StatusParameters
- type TelemetryConfigInitParameters
- type TelemetryConfigObservation
- type TelemetryConfigParameters
- type ValidationInitParameters
- type ValidationObservation
- type ValidationParameters
- type ValuesInitParameters
- type ValuesObservation
- type ValuesParameters
- type VirtualClusterConfigInitParameters
- type VirtualClusterConfigObservation
- type VirtualClusterConfigParameters
- type WorkerConfigAcceleratorsInitParameters
- type WorkerConfigAcceleratorsObservation
- type WorkerConfigAcceleratorsParameters
- type WorkerConfigDiskConfigInitParameters
- type WorkerConfigDiskConfigObservation
- type WorkerConfigDiskConfigParameters
- type WorkerConfigInitParameters
- type WorkerConfigManagedGroupConfigInitParameters
- type WorkerConfigManagedGroupConfigObservation
- type WorkerConfigManagedGroupConfigParameters
- type WorkerConfigObservation
- type WorkerConfigParameters
- type WorkflowTemplate
- func (in *WorkflowTemplate) DeepCopy() *WorkflowTemplate
- func (in *WorkflowTemplate) DeepCopyInto(out *WorkflowTemplate)
- func (in *WorkflowTemplate) DeepCopyObject() runtime.Object
- func (mg *WorkflowTemplate) GetCondition(ct xpv1.ConditionType) xpv1.Condition
- func (tr *WorkflowTemplate) GetConnectionDetailsMapping() map[string]string
- func (mg *WorkflowTemplate) GetDeletionPolicy() xpv1.DeletionPolicy
- func (tr *WorkflowTemplate) GetID() string
- func (tr *WorkflowTemplate) GetInitParameters() (map[string]any, error)
- func (mg *WorkflowTemplate) GetManagementPolicies() xpv1.ManagementPolicies
- func (tr *WorkflowTemplate) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
- func (tr *WorkflowTemplate) GetObservation() (map[string]any, error)
- func (tr *WorkflowTemplate) GetParameters() (map[string]any, error)
- func (mg *WorkflowTemplate) GetProviderConfigReference() *xpv1.Reference
- func (mg *WorkflowTemplate) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
- func (mg *WorkflowTemplate) GetTerraformResourceType() string
- func (tr *WorkflowTemplate) GetTerraformSchemaVersion() int
- func (mg *WorkflowTemplate) GetWriteConnectionSecretToReference() *xpv1.SecretReference
- func (tr *WorkflowTemplate) Hub()
- func (tr *WorkflowTemplate) LateInitialize(attrs []byte) (bool, error)
- func (mg *WorkflowTemplate) SetConditions(c ...xpv1.Condition)
- func (mg *WorkflowTemplate) SetDeletionPolicy(r xpv1.DeletionPolicy)
- func (mg *WorkflowTemplate) SetManagementPolicies(r xpv1.ManagementPolicies)
- func (tr *WorkflowTemplate) SetObservation(obs map[string]any) error
- func (tr *WorkflowTemplate) SetParameters(params map[string]any) error
- func (mg *WorkflowTemplate) SetProviderConfigReference(r *xpv1.Reference)
- func (mg *WorkflowTemplate) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
- func (mg *WorkflowTemplate) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
- type WorkflowTemplateInitParameters
- type WorkflowTemplateList
- type WorkflowTemplateObservation
- type WorkflowTemplateParameters
- type WorkflowTemplatePlacementInitParameters
- type WorkflowTemplatePlacementObservation
- type WorkflowTemplatePlacementParameters
- type WorkflowTemplateSpec
- type WorkflowTemplateStatus
- type YarnConfigInitParameters
- type YarnConfigObservation
- type YarnConfigParameters
Constants ¶
// Package type metadata: the CRD API group and version shared by every
// resource kind declared in this package.
const (
	CRDGroup   = "dataproc.gcp.upbound.io"
	CRDVersion = "v1beta2"
)
Variables ¶
// Repository type metadata for the AutoscalingPolicy kind: its kind name,
// group-kind string, kind.apiVersion string, and GroupVersionKind.
var (
	AutoscalingPolicy_Kind             = "AutoscalingPolicy"
	AutoscalingPolicy_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: AutoscalingPolicy_Kind}.String()
	AutoscalingPolicy_KindAPIVersion   = AutoscalingPolicy_Kind + "." + CRDGroupVersion.String()
	AutoscalingPolicy_GroupVersionKind = CRDGroupVersion.WithKind(AutoscalingPolicy_Kind)
)
// Repository type metadata for the Cluster kind: its kind name,
// group-kind string, kind.apiVersion string, and GroupVersionKind.
var (
	Cluster_Kind             = "Cluster"
	Cluster_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: Cluster_Kind}.String()
	Cluster_KindAPIVersion   = Cluster_Kind + "." + CRDGroupVersion.String()
	Cluster_GroupVersionKind = CRDGroupVersion.WithKind(Cluster_Kind)
)
var (
	// CRDGroupVersion is the API Group Version used to register the objects
	CRDGroupVersion = schema.GroupVersion{Group: CRDGroup, Version: CRDVersion}

	// SchemeBuilder is used to add go types to the GroupVersionKind scheme
	SchemeBuilder = &scheme.Builder{GroupVersion: CRDGroupVersion}

	// AddToScheme adds the types in this group-version to the given scheme.
	AddToScheme = SchemeBuilder.AddToScheme
)
// Repository type metadata for the Job kind: its kind name,
// group-kind string, kind.apiVersion string, and GroupVersionKind.
var (
	Job_Kind             = "Job"
	Job_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: Job_Kind}.String()
	Job_KindAPIVersion   = Job_Kind + "." + CRDGroupVersion.String()
	Job_GroupVersionKind = CRDGroupVersion.WithKind(Job_Kind)
)
// Repository type metadata for the MetastoreService kind: its kind name,
// group-kind string, kind.apiVersion string, and GroupVersionKind.
var (
	MetastoreService_Kind             = "MetastoreService"
	MetastoreService_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: MetastoreService_Kind}.String()
	MetastoreService_KindAPIVersion   = MetastoreService_Kind + "." + CRDGroupVersion.String()
	MetastoreService_GroupVersionKind = CRDGroupVersion.WithKind(MetastoreService_Kind)
)
// Repository type metadata for the WorkflowTemplate kind: its kind name,
// group-kind string, kind.apiVersion string, and GroupVersionKind.
var (
	WorkflowTemplate_Kind             = "WorkflowTemplate"
	WorkflowTemplate_GroupKind        = schema.GroupKind{Group: CRDGroup, Kind: WorkflowTemplate_Kind}.String()
	WorkflowTemplate_KindAPIVersion   = WorkflowTemplate_Kind + "." + CRDGroupVersion.String()
	WorkflowTemplate_GroupVersionKind = CRDGroupVersion.WithKind(WorkflowTemplate_Kind)
)
Functions ¶
This section is empty.
Types ¶
type AcceleratorsInitParameters ¶
// AcceleratorsInitParameters describes the accelerator cards (count and
// type) to expose to an instance.
type AcceleratorsInitParameters struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*AcceleratorsInitParameters) DeepCopy ¶
func (in *AcceleratorsInitParameters) DeepCopy() *AcceleratorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AcceleratorsInitParameters.
func (*AcceleratorsInitParameters) DeepCopyInto ¶
func (in *AcceleratorsInitParameters) DeepCopyInto(out *AcceleratorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AcceleratorsObservation ¶
// AcceleratorsObservation records the observed accelerator cards (count
// and type) exposed to an instance.
type AcceleratorsObservation struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*AcceleratorsObservation) DeepCopy ¶
func (in *AcceleratorsObservation) DeepCopy() *AcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AcceleratorsObservation.
func (*AcceleratorsObservation) DeepCopyInto ¶
func (in *AcceleratorsObservation) DeepCopyInto(out *AcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AcceleratorsParameters ¶
// AcceleratorsParameters configures the accelerator cards (count and type)
// to expose to an instance. Note the JSON tags here carry no omitempty,
// unlike the InitParameters/Observation variants.
type AcceleratorsParameters struct {
	// The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8.
	// +kubebuilder:validation:Optional
	AcceleratorCount *float64 `json:"acceleratorCount" tf:"accelerator_count,omitempty"`

	// The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80.
	// +kubebuilder:validation:Optional
	AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type,omitempty"`
}
func (*AcceleratorsParameters) DeepCopy ¶
func (in *AcceleratorsParameters) DeepCopy() *AcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AcceleratorsParameters.
func (*AcceleratorsParameters) DeepCopyInto ¶
func (in *AcceleratorsParameters) DeepCopyInto(out *AcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingConfigInitParameters ¶
// AutoscalingConfigInitParameters holds the autoscaling policy reference
// for a cluster.
type AutoscalingConfigInitParameters struct {
	// The autoscaling policy used by the cluster.
	PolicyURI *string `json:"policyUri,omitempty" tf:"policy_uri,omitempty"`
}
func (*AutoscalingConfigInitParameters) DeepCopy ¶
func (in *AutoscalingConfigInitParameters) DeepCopy() *AutoscalingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingConfigInitParameters.
func (*AutoscalingConfigInitParameters) DeepCopyInto ¶
func (in *AutoscalingConfigInitParameters) DeepCopyInto(out *AutoscalingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingConfigObservation ¶
// AutoscalingConfigObservation records the observed autoscaling policy
// reference for a cluster.
type AutoscalingConfigObservation struct {
	// The autoscaling policy used by the cluster.
	PolicyURI *string `json:"policyUri,omitempty" tf:"policy_uri,omitempty"`
}
func (*AutoscalingConfigObservation) DeepCopy ¶
func (in *AutoscalingConfigObservation) DeepCopy() *AutoscalingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingConfigObservation.
func (*AutoscalingConfigObservation) DeepCopyInto ¶
func (in *AutoscalingConfigObservation) DeepCopyInto(out *AutoscalingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingConfigParameters ¶
// AutoscalingConfigParameters configures the autoscaling policy reference
// for a cluster. Note the JSON tag carries no omitempty, unlike the
// InitParameters/Observation variants.
type AutoscalingConfigParameters struct {
	// The autoscaling policy used by the cluster.
	// +kubebuilder:validation:Optional
	PolicyURI *string `json:"policyUri" tf:"policy_uri,omitempty"`
}
func (*AutoscalingConfigParameters) DeepCopy ¶
func (in *AutoscalingConfigParameters) DeepCopy() *AutoscalingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingConfigParameters.
func (*AutoscalingConfigParameters) DeepCopyInto ¶
func (in *AutoscalingConfigParameters) DeepCopyInto(out *AutoscalingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingInitParameters ¶
type AutoscalingInitParameters struct { // The maximum number of nodes in the node pool. Must be >= minNodeCount, and must be > 0. MaxNodeCount *float64 `json:"maxNodeCount,omitempty" tf:"max_node_count,omitempty"` // The minimum number of nodes in the node pool. Must be >= 0 and <= maxNodeCount. MinNodeCount *float64 `json:"minNodeCount,omitempty" tf:"min_node_count,omitempty"` }
func (*AutoscalingInitParameters) DeepCopy ¶
func (in *AutoscalingInitParameters) DeepCopy() *AutoscalingInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingInitParameters.
func (*AutoscalingInitParameters) DeepCopyInto ¶
func (in *AutoscalingInitParameters) DeepCopyInto(out *AutoscalingInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingObservation ¶
type AutoscalingObservation struct { // The maximum number of nodes in the node pool. Must be >= minNodeCount, and must be > 0. MaxNodeCount *float64 `json:"maxNodeCount,omitempty" tf:"max_node_count,omitempty"` // The minimum number of nodes in the node pool. Must be >= 0 and <= maxNodeCount. MinNodeCount *float64 `json:"minNodeCount,omitempty" tf:"min_node_count,omitempty"` }
func (*AutoscalingObservation) DeepCopy ¶
func (in *AutoscalingObservation) DeepCopy() *AutoscalingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingObservation.
func (*AutoscalingObservation) DeepCopyInto ¶
func (in *AutoscalingObservation) DeepCopyInto(out *AutoscalingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingParameters ¶
type AutoscalingParameters struct { // The maximum number of nodes in the node pool. Must be >= minNodeCount, and must be > 0. // +kubebuilder:validation:Optional MaxNodeCount *float64 `json:"maxNodeCount,omitempty" tf:"max_node_count,omitempty"` // The minimum number of nodes in the node pool. Must be >= 0 and <= maxNodeCount. // +kubebuilder:validation:Optional MinNodeCount *float64 `json:"minNodeCount,omitempty" tf:"min_node_count,omitempty"` }
func (*AutoscalingParameters) DeepCopy ¶
func (in *AutoscalingParameters) DeepCopy() *AutoscalingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingParameters.
func (*AutoscalingParameters) DeepCopyInto ¶
func (in *AutoscalingParameters) DeepCopyInto(out *AutoscalingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicy ¶
type AutoscalingPolicy struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec AutoscalingPolicySpec `json:"spec"` Status AutoscalingPolicyStatus `json:"status,omitempty"` }
AutoscalingPolicy is the Schema for the AutoscalingPolicies API. Describes an autoscaling policy for Dataproc cluster autoscaler. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*AutoscalingPolicy) DeepCopy ¶
func (in *AutoscalingPolicy) DeepCopy() *AutoscalingPolicy
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicy.
func (*AutoscalingPolicy) DeepCopyInto ¶
func (in *AutoscalingPolicy) DeepCopyInto(out *AutoscalingPolicy)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AutoscalingPolicy) DeepCopyObject ¶
func (in *AutoscalingPolicy) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AutoscalingPolicy) GetCondition ¶
func (mg *AutoscalingPolicy) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetConnectionDetailsMapping ¶
func (tr *AutoscalingPolicy) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this AutoscalingPolicy
func (*AutoscalingPolicy) GetDeletionPolicy ¶
func (mg *AutoscalingPolicy) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetID ¶
func (tr *AutoscalingPolicy) GetID() string
GetID returns ID of underlying Terraform resource of this AutoscalingPolicy
func (*AutoscalingPolicy) GetInitParameters ¶
func (tr *AutoscalingPolicy) GetInitParameters() (map[string]any, error)
GetInitParameters of this AutoscalingPolicy
func (*AutoscalingPolicy) GetManagementPolicies ¶
func (mg *AutoscalingPolicy) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetMergedParameters ¶
func (tr *AutoscalingPolicy) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this AutoscalingPolicy; merges spec.initProvider into spec.forProvider when shouldMergeInitProvider is true
func (*AutoscalingPolicy) GetObservation ¶
func (tr *AutoscalingPolicy) GetObservation() (map[string]any, error)
GetObservation of this AutoscalingPolicy
func (*AutoscalingPolicy) GetParameters ¶
func (tr *AutoscalingPolicy) GetParameters() (map[string]any, error)
GetParameters of this AutoscalingPolicy
func (*AutoscalingPolicy) GetProviderConfigReference ¶
func (mg *AutoscalingPolicy) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetPublishConnectionDetailsTo ¶
func (mg *AutoscalingPolicy) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this AutoscalingPolicy.
func (*AutoscalingPolicy) GetTerraformResourceType ¶
func (mg *AutoscalingPolicy) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this AutoscalingPolicy
func (*AutoscalingPolicy) GetTerraformSchemaVersion ¶
func (tr *AutoscalingPolicy) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*AutoscalingPolicy) GetWriteConnectionSecretToReference ¶
func (mg *AutoscalingPolicy) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this AutoscalingPolicy.
func (*AutoscalingPolicy) Hub ¶
func (tr *AutoscalingPolicy) Hub()
Hub marks this type as a conversion hub.
func (*AutoscalingPolicy) LateInitialize ¶
func (tr *AutoscalingPolicy) LateInitialize(attrs []byte) (bool, error)
LateInitialize this AutoscalingPolicy using its observed tfState. Returns true if there are any spec changes for the resource.
func (*AutoscalingPolicy) SetConditions ¶
func (mg *AutoscalingPolicy) SetConditions(c ...xpv1.Condition)
SetConditions of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetDeletionPolicy ¶
func (mg *AutoscalingPolicy) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetManagementPolicies ¶
func (mg *AutoscalingPolicy) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetObservation ¶
func (tr *AutoscalingPolicy) SetObservation(obs map[string]any) error
SetObservation for this AutoscalingPolicy
func (*AutoscalingPolicy) SetParameters ¶
func (tr *AutoscalingPolicy) SetParameters(params map[string]any) error
SetParameters for this AutoscalingPolicy
func (*AutoscalingPolicy) SetProviderConfigReference ¶
func (mg *AutoscalingPolicy) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetPublishConnectionDetailsTo ¶
func (mg *AutoscalingPolicy) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this AutoscalingPolicy.
func (*AutoscalingPolicy) SetWriteConnectionSecretToReference ¶
func (mg *AutoscalingPolicy) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this AutoscalingPolicy.
type AutoscalingPolicyInitParameters ¶
type AutoscalingPolicyInitParameters struct { // Basic algorithm for autoscaling. // Structure is documented below. BasicAlgorithm *BasicAlgorithmInitParameters `json:"basicAlgorithm,omitempty" tf:"basic_algorithm,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Describes how the autoscaler will operate for secondary workers. // Structure is documented below. SecondaryWorkerConfig *SecondaryWorkerConfigInitParameters `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Describes how the autoscaler will operate for primary workers. // Structure is documented below. WorkerConfig *WorkerConfigInitParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*AutoscalingPolicyInitParameters) DeepCopy ¶
func (in *AutoscalingPolicyInitParameters) DeepCopy() *AutoscalingPolicyInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyInitParameters.
func (*AutoscalingPolicyInitParameters) DeepCopyInto ¶
func (in *AutoscalingPolicyInitParameters) DeepCopyInto(out *AutoscalingPolicyInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicyList ¶
type AutoscalingPolicyList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []AutoscalingPolicy `json:"items"` }
AutoscalingPolicyList contains a list of AutoscalingPolicies
func (*AutoscalingPolicyList) DeepCopy ¶
func (in *AutoscalingPolicyList) DeepCopy() *AutoscalingPolicyList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyList.
func (*AutoscalingPolicyList) DeepCopyInto ¶
func (in *AutoscalingPolicyList) DeepCopyInto(out *AutoscalingPolicyList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*AutoscalingPolicyList) DeepCopyObject ¶
func (in *AutoscalingPolicyList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*AutoscalingPolicyList) GetItems ¶
func (l *AutoscalingPolicyList) GetItems() []resource.Managed
GetItems of this AutoscalingPolicyList.
type AutoscalingPolicyObservation ¶
type AutoscalingPolicyObservation struct { // Basic algorithm for autoscaling. // Structure is documented below. BasicAlgorithm *BasicAlgorithmObservation `json:"basicAlgorithm,omitempty" tf:"basic_algorithm,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/autoscalingPolicies/{{policy_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // The location where the autoscaling policy should reside. // The default value is global. Location *string `json:"location,omitempty" tf:"location,omitempty"` // The "resource name" of the autoscaling policy. Name *string `json:"name,omitempty" tf:"name,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // Describes how the autoscaler will operate for secondary workers. // Structure is documented below. SecondaryWorkerConfig *SecondaryWorkerConfigObservation `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Describes how the autoscaler will operate for primary workers. // Structure is documented below. WorkerConfig *WorkerConfigObservation `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*AutoscalingPolicyObservation) DeepCopy ¶
func (in *AutoscalingPolicyObservation) DeepCopy() *AutoscalingPolicyObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyObservation.
func (*AutoscalingPolicyObservation) DeepCopyInto ¶
func (in *AutoscalingPolicyObservation) DeepCopyInto(out *AutoscalingPolicyObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicyParameters ¶
type AutoscalingPolicyParameters struct { // Basic algorithm for autoscaling. // Structure is documented below. // +kubebuilder:validation:Optional BasicAlgorithm *BasicAlgorithmParameters `json:"basicAlgorithm,omitempty" tf:"basic_algorithm,omitempty"` // The location where the autoscaling policy should reside. // The default value is global. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Describes how the autoscaler will operate for secondary workers. // Structure is documented below. // +kubebuilder:validation:Optional SecondaryWorkerConfig *SecondaryWorkerConfigParameters `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Describes how the autoscaler will operate for primary workers. // Structure is documented below. // +kubebuilder:validation:Optional WorkerConfig *WorkerConfigParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*AutoscalingPolicyParameters) DeepCopy ¶
func (in *AutoscalingPolicyParameters) DeepCopy() *AutoscalingPolicyParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyParameters.
func (*AutoscalingPolicyParameters) DeepCopyInto ¶
func (in *AutoscalingPolicyParameters) DeepCopyInto(out *AutoscalingPolicyParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicySpec ¶
type AutoscalingPolicySpec struct { v1.ResourceSpec `json:",inline"` ForProvider AutoscalingPolicyParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider AutoscalingPolicyInitParameters `json:"initProvider,omitempty"` }
AutoscalingPolicySpec defines the desired state of AutoscalingPolicy
func (*AutoscalingPolicySpec) DeepCopy ¶
func (in *AutoscalingPolicySpec) DeepCopy() *AutoscalingPolicySpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicySpec.
func (*AutoscalingPolicySpec) DeepCopyInto ¶
func (in *AutoscalingPolicySpec) DeepCopyInto(out *AutoscalingPolicySpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AutoscalingPolicyStatus ¶
type AutoscalingPolicyStatus struct { v1.ResourceStatus `json:",inline"` AtProvider AutoscalingPolicyObservation `json:"atProvider,omitempty"` }
AutoscalingPolicyStatus defines the observed state of AutoscalingPolicy.
func (*AutoscalingPolicyStatus) DeepCopy ¶
func (in *AutoscalingPolicyStatus) DeepCopy() *AutoscalingPolicyStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingPolicyStatus.
func (*AutoscalingPolicyStatus) DeepCopyInto ¶
func (in *AutoscalingPolicyStatus) DeepCopyInto(out *AutoscalingPolicyStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryNodeGroupsInitParameters ¶
type AuxiliaryNodeGroupsInitParameters struct { // Node group configuration. NodeGroup []NodeGroupInitParameters `json:"nodeGroup,omitempty" tf:"node_group,omitempty"` NodeGroupID *string `json:"nodeGroupId,omitempty" tf:"node_group_id,omitempty"` }
func (*AuxiliaryNodeGroupsInitParameters) DeepCopy ¶
func (in *AuxiliaryNodeGroupsInitParameters) DeepCopy() *AuxiliaryNodeGroupsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryNodeGroupsInitParameters.
func (*AuxiliaryNodeGroupsInitParameters) DeepCopyInto ¶
func (in *AuxiliaryNodeGroupsInitParameters) DeepCopyInto(out *AuxiliaryNodeGroupsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryNodeGroupsObservation ¶
type AuxiliaryNodeGroupsObservation struct { // Node group configuration. NodeGroup []NodeGroupObservation `json:"nodeGroup,omitempty" tf:"node_group,omitempty"` NodeGroupID *string `json:"nodeGroupId,omitempty" tf:"node_group_id,omitempty"` }
func (*AuxiliaryNodeGroupsObservation) DeepCopy ¶
func (in *AuxiliaryNodeGroupsObservation) DeepCopy() *AuxiliaryNodeGroupsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryNodeGroupsObservation.
func (*AuxiliaryNodeGroupsObservation) DeepCopyInto ¶
func (in *AuxiliaryNodeGroupsObservation) DeepCopyInto(out *AuxiliaryNodeGroupsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryNodeGroupsParameters ¶
type AuxiliaryNodeGroupsParameters struct { // Node group configuration. // +kubebuilder:validation:Optional NodeGroup []NodeGroupParameters `json:"nodeGroup" tf:"node_group,omitempty"` // +kubebuilder:validation:Optional NodeGroupID *string `json:"nodeGroupId,omitempty" tf:"node_group_id,omitempty"` }
func (*AuxiliaryNodeGroupsParameters) DeepCopy ¶
func (in *AuxiliaryNodeGroupsParameters) DeepCopy() *AuxiliaryNodeGroupsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryNodeGroupsParameters.
func (*AuxiliaryNodeGroupsParameters) DeepCopyInto ¶
func (in *AuxiliaryNodeGroupsParameters) DeepCopyInto(out *AuxiliaryNodeGroupsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigInitParameters ¶
type AuxiliaryServicesConfigInitParameters struct { // The config setting for metastore service with the cluster. // Structure defined below. MetastoreConfig *AuxiliaryServicesConfigMetastoreConfigInitParameters `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"` // The Spark History Server configuration for the workload. SparkHistoryServerConfig *SparkHistoryServerConfigInitParameters `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"` }
func (*AuxiliaryServicesConfigInitParameters) DeepCopy ¶
func (in *AuxiliaryServicesConfigInitParameters) DeepCopy() *AuxiliaryServicesConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigInitParameters.
func (*AuxiliaryServicesConfigInitParameters) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigInitParameters) DeepCopyInto(out *AuxiliaryServicesConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigMetastoreConfigInitParameters ¶
type AuxiliaryServicesConfigMetastoreConfigInitParameters struct { // Resource name of an existing Dataproc Metastore service. DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"` }
func (*AuxiliaryServicesConfigMetastoreConfigInitParameters) DeepCopy ¶
func (in *AuxiliaryServicesConfigMetastoreConfigInitParameters) DeepCopy() *AuxiliaryServicesConfigMetastoreConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigMetastoreConfigInitParameters.
func (*AuxiliaryServicesConfigMetastoreConfigInitParameters) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigMetastoreConfigInitParameters) DeepCopyInto(out *AuxiliaryServicesConfigMetastoreConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigMetastoreConfigObservation ¶
type AuxiliaryServicesConfigMetastoreConfigObservation struct { // Resource name of an existing Dataproc Metastore service. DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"` }
func (*AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopy ¶
func (in *AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopy() *AuxiliaryServicesConfigMetastoreConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigMetastoreConfigObservation.
func (*AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigMetastoreConfigObservation) DeepCopyInto(out *AuxiliaryServicesConfigMetastoreConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigMetastoreConfigParameters ¶
type AuxiliaryServicesConfigMetastoreConfigParameters struct { // Resource name of an existing Dataproc Metastore service. // +kubebuilder:validation:Optional DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"` }
func (*AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopy ¶
func (in *AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopy() *AuxiliaryServicesConfigMetastoreConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigMetastoreConfigParameters.
func (*AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigMetastoreConfigParameters) DeepCopyInto(out *AuxiliaryServicesConfigMetastoreConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigObservation ¶
type AuxiliaryServicesConfigObservation struct { // The config setting for metastore service with the cluster. // Structure defined below. MetastoreConfig *AuxiliaryServicesConfigMetastoreConfigObservation `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"` // The Spark History Server configuration for the workload. SparkHistoryServerConfig *SparkHistoryServerConfigObservation `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"` }
func (*AuxiliaryServicesConfigObservation) DeepCopy ¶
func (in *AuxiliaryServicesConfigObservation) DeepCopy() *AuxiliaryServicesConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigObservation.
func (*AuxiliaryServicesConfigObservation) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigObservation) DeepCopyInto(out *AuxiliaryServicesConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryServicesConfigParameters ¶
type AuxiliaryServicesConfigParameters struct { // The config setting for metastore service with the cluster. // Structure defined below. // +kubebuilder:validation:Optional MetastoreConfig *AuxiliaryServicesConfigMetastoreConfigParameters `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"` // The Spark History Server configuration for the workload. // +kubebuilder:validation:Optional SparkHistoryServerConfig *SparkHistoryServerConfigParameters `json:"sparkHistoryServerConfig,omitempty" tf:"spark_history_server_config,omitempty"` }
func (*AuxiliaryServicesConfigParameters) DeepCopy ¶
func (in *AuxiliaryServicesConfigParameters) DeepCopy() *AuxiliaryServicesConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryServicesConfigParameters.
func (*AuxiliaryServicesConfigParameters) DeepCopyInto ¶
func (in *AuxiliaryServicesConfigParameters) DeepCopyInto(out *AuxiliaryServicesConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryVersionsInitParameters ¶
type AuxiliaryVersionsInitParameters struct { // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +mapType=granular ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // The identifier for this object. Format specified above. Key *string `json:"key,omitempty" tf:"key,omitempty"` // The Hive metastore schema version. Version *string `json:"version,omitempty" tf:"version,omitempty"` }
func (*AuxiliaryVersionsInitParameters) DeepCopy ¶
func (in *AuxiliaryVersionsInitParameters) DeepCopy() *AuxiliaryVersionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryVersionsInitParameters.
func (*AuxiliaryVersionsInitParameters) DeepCopyInto ¶
func (in *AuxiliaryVersionsInitParameters) DeepCopyInto(out *AuxiliaryVersionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryVersionsObservation ¶
type AuxiliaryVersionsObservation struct { // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +mapType=granular ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // The identifier for this object. Format specified above. Key *string `json:"key,omitempty" tf:"key,omitempty"` // The Hive metastore schema version. Version *string `json:"version,omitempty" tf:"version,omitempty"` }
func (*AuxiliaryVersionsObservation) DeepCopy ¶
func (in *AuxiliaryVersionsObservation) DeepCopy() *AuxiliaryVersionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryVersionsObservation.
func (*AuxiliaryVersionsObservation) DeepCopyInto ¶
func (in *AuxiliaryVersionsObservation) DeepCopyInto(out *AuxiliaryVersionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type AuxiliaryVersionsParameters ¶
type AuxiliaryVersionsParameters struct { // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +kubebuilder:validation:Optional // +mapType=granular ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // The identifier for this object. Format specified above. // +kubebuilder:validation:Optional Key *string `json:"key" tf:"key,omitempty"` // The Hive metastore schema version. // +kubebuilder:validation:Optional Version *string `json:"version" tf:"version,omitempty"` }
func (*AuxiliaryVersionsParameters) DeepCopy ¶
func (in *AuxiliaryVersionsParameters) DeepCopy() *AuxiliaryVersionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AuxiliaryVersionsParameters.
func (*AuxiliaryVersionsParameters) DeepCopyInto ¶
func (in *AuxiliaryVersionsParameters) DeepCopyInto(out *AuxiliaryVersionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BasicAlgorithmInitParameters ¶
type BasicAlgorithmInitParameters struct { // Duration between scaling events. A scaling period starts after the // update operation from the previous event has completed. // Bounds: [2m, 1d]. Default: 2m. CooldownPeriod *string `json:"cooldownPeriod,omitempty" tf:"cooldown_period,omitempty"` // YARN autoscaling configuration. // Structure is documented below. YarnConfig *YarnConfigInitParameters `json:"yarnConfig,omitempty" tf:"yarn_config,omitempty"` }
func (*BasicAlgorithmInitParameters) DeepCopy ¶
func (in *BasicAlgorithmInitParameters) DeepCopy() *BasicAlgorithmInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAlgorithmInitParameters.
func (*BasicAlgorithmInitParameters) DeepCopyInto ¶
func (in *BasicAlgorithmInitParameters) DeepCopyInto(out *BasicAlgorithmInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BasicAlgorithmObservation ¶
type BasicAlgorithmObservation struct { // Duration between scaling events. A scaling period starts after the // update operation from the previous event has completed. // Bounds: [2m, 1d]. Default: 2m. CooldownPeriod *string `json:"cooldownPeriod,omitempty" tf:"cooldown_period,omitempty"` // YARN autoscaling configuration. // Structure is documented below. YarnConfig *YarnConfigObservation `json:"yarnConfig,omitempty" tf:"yarn_config,omitempty"` }
func (*BasicAlgorithmObservation) DeepCopy ¶
func (in *BasicAlgorithmObservation) DeepCopy() *BasicAlgorithmObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAlgorithmObservation.
func (*BasicAlgorithmObservation) DeepCopyInto ¶
func (in *BasicAlgorithmObservation) DeepCopyInto(out *BasicAlgorithmObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BasicAlgorithmParameters ¶
type BasicAlgorithmParameters struct { // Duration between scaling events. A scaling period starts after the // update operation from the previous event has completed. // Bounds: [2m, 1d]. Default: 2m. // +kubebuilder:validation:Optional CooldownPeriod *string `json:"cooldownPeriod,omitempty" tf:"cooldown_period,omitempty"` // YARN autoscaling configuration. // Structure is documented below. // +kubebuilder:validation:Optional YarnConfig *YarnConfigParameters `json:"yarnConfig" tf:"yarn_config,omitempty"` }
func (*BasicAlgorithmParameters) DeepCopy ¶
func (in *BasicAlgorithmParameters) DeepCopy() *BasicAlgorithmParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BasicAlgorithmParameters.
func (*BasicAlgorithmParameters) DeepCopyInto ¶
func (in *BasicAlgorithmParameters) DeepCopyInto(out *BasicAlgorithmParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Cluster ¶
type Cluster struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.name) || (has(self.initProvider) && has(self.initProvider.name))",message="spec.forProvider.name is a required parameter" Spec ClusterSpec `json:"spec"` Status ClusterStatus `json:"status,omitempty"` }
Cluster is the Schema for the Clusters API. Manages a Cloud Dataproc cluster resource. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Cluster) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Cluster.
func (*Cluster) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Cluster) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Cluster) GetCondition ¶
func (mg *Cluster) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Cluster.
func (*Cluster) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Cluster
func (*Cluster) GetDeletionPolicy ¶
func (mg *Cluster) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Cluster.
func (*Cluster) GetInitParameters ¶
GetInitParameters of this Cluster
func (*Cluster) GetManagementPolicies ¶
func (mg *Cluster) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Cluster.
func (*Cluster) GetMergedParameters ¶
GetMergedParameters of this Cluster
func (*Cluster) GetObservation ¶
GetObservation of this Cluster
func (*Cluster) GetParameters ¶
GetParameters of this Cluster
func (*Cluster) GetProviderConfigReference ¶
GetProviderConfigReference of this Cluster.
func (*Cluster) GetPublishConnectionDetailsTo ¶
func (mg *Cluster) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Cluster.
func (*Cluster) GetTerraformResourceType ¶
GetTerraformResourceType returns the Terraform resource type for this Cluster
func (*Cluster) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*Cluster) GetWriteConnectionSecretToReference ¶
func (mg *Cluster) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Cluster.
func (*Cluster) LateInitialize ¶
LateInitialize this Cluster using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Cluster) ResolveReferences ¶
func (*Cluster) SetConditions ¶
SetConditions of this Cluster.
func (*Cluster) SetDeletionPolicy ¶
func (mg *Cluster) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Cluster.
func (*Cluster) SetManagementPolicies ¶
func (mg *Cluster) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Cluster.
func (*Cluster) SetObservation ¶
SetObservation for this Cluster
func (*Cluster) SetParameters ¶
SetParameters for this Cluster
func (*Cluster) SetProviderConfigReference ¶
SetProviderConfigReference of this Cluster.
func (*Cluster) SetPublishConnectionDetailsTo ¶
func (mg *Cluster) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Cluster.
func (*Cluster) SetWriteConnectionSecretToReference ¶
func (mg *Cluster) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Cluster.
type ClusterConfigInitParameters ¶
type ClusterConfigInitParameters struct { // The autoscaling policy config associated with the cluster. // Note that once set, if autoscaling_config is the only field set in cluster_config, it can // only be removed by setting policy_uri = "", rather than removing the whole block. // Structure defined below. AutoscalingConfig *AutoscalingConfigInitParameters `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // A Dataproc NodeGroup resource is a group of Dataproc cluster nodes that execute an assigned role. // Structure defined below. AuxiliaryNodeGroups []AuxiliaryNodeGroupsInitParameters `json:"auxiliaryNodeGroups,omitempty" tf:"auxiliary_node_groups,omitempty"` // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. // Structure defined below. DataprocMetricConfig *DataprocMetricConfigInitParameters `json:"dataprocMetricConfig,omitempty" tf:"dataproc_metric_config,omitempty"` // The Customer managed encryption keys settings for the cluster. // Structure defined below. EncryptionConfig *EncryptionConfigInitParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // The config settings for port access on the cluster. // Structure defined below. EndpointConfig *EndpointConfigInitParameters `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // Common config settings for resources of Google Compute Engine cluster // instances, applicable to all instances in the cluster. Structure defined below. GceClusterConfig *GceClusterConfigInitParameters `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Commands to execute on each node after config is completed. // You can specify multiple versions of these. Structure defined below. InitializationAction []InitializationActionInitParameters `json:"initializationAction,omitempty" tf:"initialization_action,omitempty"` // The settings for auto deletion cluster schedule. // Structure defined below. 
LifecycleConfig *LifecycleConfigInitParameters `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // The Google Compute Engine config settings for the master instances // in a cluster. Structure defined below. MasterConfig *MasterConfigInitParameters `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // The config setting for metastore service with the cluster. // Structure defined below. MetastoreConfig *MetastoreConfigInitParameters `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"` // The Google Compute Engine config settings for the additional // instances in a cluster. Structure defined below. PreemptibleWorkerConfig *PreemptibleWorkerConfigInitParameters `json:"preemptibleWorkerConfig,omitempty" tf:"preemptible_worker_config,omitempty"` // Security related configuration. Structure defined below. SecurityConfig *SecurityConfigInitParameters `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // The config settings for software inside the cluster. // Structure defined below. SoftwareConfig *SoftwareConfigInitParameters `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // The Cloud Storage staging bucket used to stage files, // such as Hadoop jars, between client machines and the cluster. // Note: If you don't explicitly specify a staging_bucket // then GCP will auto create / assign one for you. However, you are not guaranteed // an auto generated bucket which is solely dedicated to your cluster; it may be shared // with other clusters in the same region/zone also choosing to use the auto generation // option. StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // The Cloud Storage temp bucket used to store ephemeral cluster // and jobs data, such as Spark and MapReduce history files. // Note: If you don't explicitly specify a temp_bucket then GCP will auto create / assign one for you. 
TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // The Google Compute Engine config settings for the worker instances // in a cluster. Structure defined below. WorkerConfig *ClusterConfigWorkerConfigInitParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*ClusterConfigInitParameters) DeepCopy ¶
func (in *ClusterConfigInitParameters) DeepCopy() *ClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigInitParameters.
func (*ClusterConfigInitParameters) DeepCopyInto ¶
func (in *ClusterConfigInitParameters) DeepCopyInto(out *ClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigObservation ¶
type ClusterConfigObservation struct { // The autoscaling policy config associated with the cluster. // Note that once set, if autoscaling_config is the only field set in cluster_config, it can // only be removed by setting policy_uri = "", rather than removing the whole block. // Structure defined below. AutoscalingConfig *AutoscalingConfigObservation `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // A Dataproc NodeGroup resource is a group of Dataproc cluster nodes that execute an assigned role. // Structure defined below. AuxiliaryNodeGroups []AuxiliaryNodeGroupsObservation `json:"auxiliaryNodeGroups,omitempty" tf:"auxiliary_node_groups,omitempty"` // The name of the cloud storage bucket ultimately used to house the staging data // for the cluster. If staging_bucket is specified, it will contain this value, otherwise // it will be the auto generated name. Bucket *string `json:"bucket,omitempty" tf:"bucket,omitempty"` // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. // Structure defined below. DataprocMetricConfig *DataprocMetricConfigObservation `json:"dataprocMetricConfig,omitempty" tf:"dataproc_metric_config,omitempty"` // The Customer managed encryption keys settings for the cluster. // Structure defined below. EncryptionConfig *EncryptionConfigObservation `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // The config settings for port access on the cluster. // Structure defined below. EndpointConfig *EndpointConfigObservation `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // Common config settings for resources of Google Compute Engine cluster // instances, applicable to all instances in the cluster. Structure defined below. GceClusterConfig *GceClusterConfigObservation `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Commands to execute on each node after config is completed. 
// You can specify multiple versions of these. Structure defined below. InitializationAction []InitializationActionObservation `json:"initializationAction,omitempty" tf:"initialization_action,omitempty"` // The settings for auto deletion cluster schedule. // Structure defined below. LifecycleConfig *LifecycleConfigObservation `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // The Google Compute Engine config settings for the master instances // in a cluster. Structure defined below. MasterConfig *MasterConfigObservation `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // The config setting for metastore service with the cluster. // Structure defined below. MetastoreConfig *MetastoreConfigObservation `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"` // The Google Compute Engine config settings for the additional // instances in a cluster. Structure defined below. PreemptibleWorkerConfig *PreemptibleWorkerConfigObservation `json:"preemptibleWorkerConfig,omitempty" tf:"preemptible_worker_config,omitempty"` // Security related configuration. Structure defined below. SecurityConfig *SecurityConfigObservation `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // The config settings for software inside the cluster. // Structure defined below. SoftwareConfig *SoftwareConfigObservation `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // The Cloud Storage staging bucket used to stage files, // such as Hadoop jars, between client machines and the cluster. // Note: If you don't explicitly specify a staging_bucket // then GCP will auto create / assign one for you. However, you are not guaranteed // an auto generated bucket which is solely dedicated to your cluster; it may be shared // with other clusters in the same region/zone also choosing to use the auto generation // option. 
StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // The Cloud Storage temp bucket used to store ephemeral cluster // and jobs data, such as Spark and MapReduce history files. // Note: If you don't explicitly specify a temp_bucket then GCP will auto create / assign one for you. TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // The Google Compute Engine config settings for the worker instances // in a cluster. Structure defined below. WorkerConfig *ClusterConfigWorkerConfigObservation `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*ClusterConfigObservation) DeepCopy ¶
func (in *ClusterConfigObservation) DeepCopy() *ClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigObservation.
func (*ClusterConfigObservation) DeepCopyInto ¶
func (in *ClusterConfigObservation) DeepCopyInto(out *ClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigParameters ¶
type ClusterConfigParameters struct { // The autoscaling policy config associated with the cluster. // Note that once set, if autoscaling_config is the only field set in cluster_config, it can // only be removed by setting policy_uri = "", rather than removing the whole block. // Structure defined below. // +kubebuilder:validation:Optional AutoscalingConfig *AutoscalingConfigParameters `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // A Dataproc NodeGroup resource is a group of Dataproc cluster nodes that execute an assigned role. // Structure defined below. // +kubebuilder:validation:Optional AuxiliaryNodeGroups []AuxiliaryNodeGroupsParameters `json:"auxiliaryNodeGroups,omitempty" tf:"auxiliary_node_groups,omitempty"` // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. // Structure defined below. // +kubebuilder:validation:Optional DataprocMetricConfig *DataprocMetricConfigParameters `json:"dataprocMetricConfig,omitempty" tf:"dataproc_metric_config,omitempty"` // The Customer managed encryption keys settings for the cluster. // Structure defined below. // +kubebuilder:validation:Optional EncryptionConfig *EncryptionConfigParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // The config settings for port access on the cluster. // Structure defined below. // +kubebuilder:validation:Optional EndpointConfig *EndpointConfigParameters `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // Common config settings for resources of Google Compute Engine cluster // instances, applicable to all instances in the cluster. Structure defined below. // +kubebuilder:validation:Optional GceClusterConfig *GceClusterConfigParameters `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Commands to execute on each node after config is completed. // You can specify multiple versions of these. Structure defined below. 
// +kubebuilder:validation:Optional InitializationAction []InitializationActionParameters `json:"initializationAction,omitempty" tf:"initialization_action,omitempty"` // The settings for auto deletion cluster schedule. // Structure defined below. // +kubebuilder:validation:Optional LifecycleConfig *LifecycleConfigParameters `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // The Google Compute Engine config settings for the master instances // in a cluster. Structure defined below. // +kubebuilder:validation:Optional MasterConfig *MasterConfigParameters `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // The config setting for metastore service with the cluster. // Structure defined below. // +kubebuilder:validation:Optional MetastoreConfig *MetastoreConfigParameters `json:"metastoreConfig,omitempty" tf:"metastore_config,omitempty"` // The Google Compute Engine config settings for the additional // instances in a cluster. Structure defined below. // +kubebuilder:validation:Optional PreemptibleWorkerConfig *PreemptibleWorkerConfigParameters `json:"preemptibleWorkerConfig,omitempty" tf:"preemptible_worker_config,omitempty"` // Security related configuration. Structure defined below. // +kubebuilder:validation:Optional SecurityConfig *SecurityConfigParameters `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // The config settings for software inside the cluster. // Structure defined below. // +kubebuilder:validation:Optional SoftwareConfig *SoftwareConfigParameters `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // The Cloud Storage staging bucket used to stage files, // such as Hadoop jars, between client machines and the cluster. // Note: If you don't explicitly specify a staging_bucket // then GCP will auto create / assign one for you. 
However, you are not guaranteed // an auto generated bucket which is solely dedicated to your cluster; it may be shared // with other clusters in the same region/zone also choosing to use the auto generation // option. // +kubebuilder:validation:Optional StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // The Cloud Storage temp bucket used to store ephemeral cluster // and jobs data, such as Spark and MapReduce history files. // Note: If you don't explicitly specify a temp_bucket then GCP will auto create / assign one for you. // +kubebuilder:validation:Optional TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // The Google Compute Engine config settings for the worker instances // in a cluster. Structure defined below. // +kubebuilder:validation:Optional WorkerConfig *ClusterConfigWorkerConfigParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*ClusterConfigParameters) DeepCopy ¶
func (in *ClusterConfigParameters) DeepCopy() *ClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigParameters.
func (*ClusterConfigParameters) DeepCopyInto ¶
func (in *ClusterConfigParameters) DeepCopyInto(out *ClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigWorkerConfigInitParameters ¶
type ClusterConfigWorkerConfigInitParameters struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. Accelerators []WorkerConfigAcceleratorsInitParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig *WorkerConfigDiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // The name of a Google Compute Engine machine type // to create for the worker nodes. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The minimum number of primary worker instances to create. If min_num_instances is set, cluster creation will succeed if the number of primary workers created is at least equal to the min_num_instances number. MinNumInstances *float64 `json:"minNumInstances,omitempty" tf:"min_num_instances,omitempty"` // Specifies the number of worker nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 2). // There is currently a beta feature which allows you to run a // Single Node Cluster. // In order to take advantage of this you need to set // "dataproc:dataproc.allow.zero.workers" = "true" in // cluster_config.software_config.properties NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*ClusterConfigWorkerConfigInitParameters) DeepCopy ¶
func (in *ClusterConfigWorkerConfigInitParameters) DeepCopy() *ClusterConfigWorkerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigWorkerConfigInitParameters.
func (*ClusterConfigWorkerConfigInitParameters) DeepCopyInto ¶
func (in *ClusterConfigWorkerConfigInitParameters) DeepCopyInto(out *ClusterConfigWorkerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigWorkerConfigObservation ¶
type ClusterConfigWorkerConfigObservation struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. Accelerators []WorkerConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig *WorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // List of worker instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // The name of a Google Compute Engine machine type // to create for the worker nodes. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The minimum number of primary worker instances to create. If min_num_instances is set, cluster creation will succeed if the number of primary workers created is at least equal to the min_num_instances number. MinNumInstances *float64 `json:"minNumInstances,omitempty" tf:"min_num_instances,omitempty"` // Specifies the number of worker nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 2). // There is currently a beta feature which allows you to run a // Single Node Cluster. 
// In order to take advantage of this you need to set // "dataproc:dataproc.allow.zero.workers" = "true" in // cluster_config.software_config.properties NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*ClusterConfigWorkerConfigObservation) DeepCopy ¶
func (in *ClusterConfigWorkerConfigObservation) DeepCopy() *ClusterConfigWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigWorkerConfigObservation.
func (*ClusterConfigWorkerConfigObservation) DeepCopyInto ¶
func (in *ClusterConfigWorkerConfigObservation) DeepCopyInto(out *ClusterConfigWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterConfigWorkerConfigParameters ¶
type ClusterConfigWorkerConfigParameters struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. // +kubebuilder:validation:Optional Accelerators []WorkerConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config // +kubebuilder:validation:Optional DiskConfig *WorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. // +kubebuilder:validation:Optional ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // The name of a Google Compute Engine machine type // to create for the worker nodes. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The minimum number of primary worker instances to create. If min_num_instances is set, cluster creation will succeed if the number of primary workers created is at least equal to the min_num_instances number. // +kubebuilder:validation:Optional MinNumInstances *float64 `json:"minNumInstances,omitempty" tf:"min_num_instances,omitempty"` // Specifies the number of worker nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 2). // There is currently a beta feature which allows you to run a // Single Node Cluster. 
// In order to take advantage of this you need to set // "dataproc:dataproc.allow.zero.workers" = "true" in // cluster_config.software_config.properties // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*ClusterConfigWorkerConfigParameters) DeepCopy ¶
func (in *ClusterConfigWorkerConfigParameters) DeepCopy() *ClusterConfigWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterConfigWorkerConfigParameters.
func (*ClusterConfigWorkerConfigParameters) DeepCopyInto ¶
func (in *ClusterConfigWorkerConfigParameters) DeepCopyInto(out *ClusterConfigWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterInitParameters ¶
type ClusterInitParameters struct { // Allows you to configure various aspects of the cluster. // Structure defined below. ClusterConfig *ClusterConfigInitParameters `json:"clusterConfig,omitempty" tf:"cluster_config,omitempty"` // Does not affect auto scaling decomissioning from an autoscaling policy. // Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. // Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs). // Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day. (see JSON representation of // Duration). // Only supported on Dataproc image versions 1.2 and higher. // For more context see the docs GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"` // Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The name of the cluster, unique within the project and // zone. Name *string `json:"name,omitempty" tf:"name,omitempty"` // The ID of the project in which the cluster will exist. If it // is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // The region in which the cluster and associated nodes will be created in. // Defaults to global. Region *string `json:"region,omitempty" tf:"region,omitempty"` // Allows you to configure a virtual Dataproc on GKE cluster. // Structure defined below. VirtualClusterConfig *VirtualClusterConfigInitParameters `json:"virtualClusterConfig,omitempty" tf:"virtual_cluster_config,omitempty"` }
func (*ClusterInitParameters) DeepCopy ¶
func (in *ClusterInitParameters) DeepCopy() *ClusterInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterInitParameters.
func (*ClusterInitParameters) DeepCopyInto ¶
func (in *ClusterInitParameters) DeepCopyInto(out *ClusterInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterList ¶
type ClusterList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Cluster `json:"items"` }
ClusterList contains a list of Clusters
func (*ClusterList) DeepCopy ¶
func (in *ClusterList) DeepCopy() *ClusterList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterList.
func (*ClusterList) DeepCopyInto ¶
func (in *ClusterList) DeepCopyInto(out *ClusterList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*ClusterList) DeepCopyObject ¶
func (in *ClusterList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*ClusterList) GetItems ¶
func (l *ClusterList) GetItems() []resource.Managed
GetItems of this ClusterList.
type ClusterObservation ¶
// ClusterObservation captures the observed state of a Dataproc Cluster as
// reported by the provider.
type ClusterObservation struct {
	// Allows you to configure various aspects of the cluster.
	// Structure defined below.
	ClusterConfig *ClusterConfigObservation `json:"clusterConfig,omitempty" tf:"cluster_config,omitempty"`

	// (Computed) The list of labels (key/value pairs) to be applied to
	// instances in the cluster. GCP generates some itself including goog-dataproc-cluster-name
	// which is the name of the cluster.
	// +mapType=granular
	EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"`

	// Does not affect auto scaling decommissioning from an autoscaling policy.
	// Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress.
	// Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs).
	// Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day. (see JSON representation of
	// Duration).
	// Only supported on Dataproc image versions 1.2 and higher.
	// For more context see the docs
	GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"`

	ID *string `json:"id,omitempty" tf:"id,omitempty"`

	// Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
	// +mapType=granular
	Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"`

	// The name of the cluster, unique within the project and
	// zone.
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// The ID of the project in which the cluster will exist. If it
	// is not provided, the provider project is used.
	Project *string `json:"project,omitempty" tf:"project,omitempty"`

	// The region in which the cluster and associated nodes will be created in.
	// Defaults to global.
	Region *string `json:"region,omitempty" tf:"region,omitempty"`

	// The combination of labels configured directly on the resource and default labels configured on the provider.
	// +mapType=granular
	TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"`

	// Allows you to configure a virtual Dataproc on GKE cluster.
	// Structure defined below.
	VirtualClusterConfig *VirtualClusterConfigObservation `json:"virtualClusterConfig,omitempty" tf:"virtual_cluster_config,omitempty"`
}
func (*ClusterObservation) DeepCopy ¶
func (in *ClusterObservation) DeepCopy() *ClusterObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterObservation.
func (*ClusterObservation) DeepCopyInto ¶
func (in *ClusterObservation) DeepCopyInto(out *ClusterObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterParameters ¶
// ClusterParameters defines the desired (for-provider) configuration of a
// Dataproc Cluster.
type ClusterParameters struct {
	// Allows you to configure various aspects of the cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	ClusterConfig *ClusterConfigParameters `json:"clusterConfig,omitempty" tf:"cluster_config,omitempty"`

	// Does not affect auto scaling decommissioning from an autoscaling policy.
	// Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress.
	// Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs).
	// Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day. (see JSON representation of
	// Duration).
	// Only supported on Dataproc image versions 1.2 and higher.
	// For more context see the docs
	// +kubebuilder:validation:Optional
	GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"`

	// Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
	// +kubebuilder:validation:Optional
	// +mapType=granular
	Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"`

	// The name of the cluster, unique within the project and
	// zone.
	// +kubebuilder:validation:Optional
	Name *string `json:"name,omitempty" tf:"name,omitempty"`

	// The ID of the project in which the cluster will exist. If it
	// is not provided, the provider project is used.
	// +kubebuilder:validation:Optional
	Project *string `json:"project,omitempty" tf:"project,omitempty"`

	// The region in which the cluster and associated nodes will be created in.
	// Defaults to global.
	// +kubebuilder:validation:Optional
	Region *string `json:"region,omitempty" tf:"region,omitempty"`

	// Allows you to configure a virtual Dataproc on GKE cluster.
	// Structure defined below.
	// +kubebuilder:validation:Optional
	VirtualClusterConfig *VirtualClusterConfigParameters `json:"virtualClusterConfig,omitempty" tf:"virtual_cluster_config,omitempty"`
}
func (*ClusterParameters) DeepCopy ¶
func (in *ClusterParameters) DeepCopy() *ClusterParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterParameters.
func (*ClusterParameters) DeepCopyInto ¶
func (in *ClusterParameters) DeepCopyInto(out *ClusterParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSelectorInitParameters ¶
// ClusterSelectorInitParameters selects an existing cluster by labels for a
// workflow-template placement (init-provider form).
type ClusterSelectorInitParameters struct {
	// Required. The cluster labels. Cluster must have all labels to match.
	// +mapType=granular
	ClusterLabels map[string]*string `json:"clusterLabels,omitempty" tf:"cluster_labels,omitempty"`

	// The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
	Zone *string `json:"zone,omitempty" tf:"zone,omitempty"`
}
func (*ClusterSelectorInitParameters) DeepCopy ¶
func (in *ClusterSelectorInitParameters) DeepCopy() *ClusterSelectorInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSelectorInitParameters.
func (*ClusterSelectorInitParameters) DeepCopyInto ¶
func (in *ClusterSelectorInitParameters) DeepCopyInto(out *ClusterSelectorInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSelectorObservation ¶
// ClusterSelectorObservation is the observed form of the cluster selector.
type ClusterSelectorObservation struct {
	// Required. The cluster labels. Cluster must have all labels to match.
	// +mapType=granular
	ClusterLabels map[string]*string `json:"clusterLabels,omitempty" tf:"cluster_labels,omitempty"`

	// The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
	Zone *string `json:"zone,omitempty" tf:"zone,omitempty"`
}
func (*ClusterSelectorObservation) DeepCopy ¶
func (in *ClusterSelectorObservation) DeepCopy() *ClusterSelectorObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSelectorObservation.
func (*ClusterSelectorObservation) DeepCopyInto ¶
func (in *ClusterSelectorObservation) DeepCopyInto(out *ClusterSelectorObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSelectorParameters ¶
// ClusterSelectorParameters is the for-provider form of the cluster selector.
// NOTE(review): ClusterLabels is documented "Required" and its json tag lacks
// omitempty, while the marker says Optional — this mismatch comes from the
// code generator; confirm against the CRD schema before relying on it.
type ClusterSelectorParameters struct {
	// Required. The cluster labels. Cluster must have all labels to match.
	// +kubebuilder:validation:Optional
	// +mapType=granular
	ClusterLabels map[string]*string `json:"clusterLabels" tf:"cluster_labels,omitempty"`

	// The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
	// +kubebuilder:validation:Optional
	Zone *string `json:"zone,omitempty" tf:"zone,omitempty"`
}
func (*ClusterSelectorParameters) DeepCopy ¶
func (in *ClusterSelectorParameters) DeepCopy() *ClusterSelectorParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSelectorParameters.
func (*ClusterSelectorParameters) DeepCopyInto ¶
func (in *ClusterSelectorParameters) DeepCopyInto(out *ClusterSelectorParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterSpec ¶
// ClusterSpec defines the desired state of Cluster.
type ClusterSpec struct {
	v1.ResourceSpec `json:",inline"`
	ForProvider     ClusterParameters `json:"forProvider"`
	// THIS IS A BETA FIELD. It will be honored
	// unless the Management Policies feature flag is disabled.
	// InitProvider holds the same fields as ForProvider, with the exception
	// of Identifier and other resource reference fields. The fields that are
	// in InitProvider are merged into ForProvider when the resource is created.
	// The same fields are also added to the terraform ignore_changes hook, to
	// avoid updating them after creation. This is useful for fields that are
	// required on creation, but we do not desire to update them after creation,
	// for example because of an external controller is managing them, like an
	// autoscaler.
	InitProvider ClusterInitParameters `json:"initProvider,omitempty"`
}
ClusterSpec defines the desired state of Cluster
func (*ClusterSpec) DeepCopy ¶
func (in *ClusterSpec) DeepCopy() *ClusterSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterSpec.
func (*ClusterSpec) DeepCopyInto ¶
func (in *ClusterSpec) DeepCopyInto(out *ClusterSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ClusterStatus ¶
// ClusterStatus defines the observed state of Cluster.
type ClusterStatus struct {
	v1.ResourceStatus `json:",inline"`
	AtProvider        ClusterObservation `json:"atProvider,omitempty"`
}
ClusterStatus defines the observed state of Cluster.
func (*ClusterStatus) DeepCopy ¶
func (in *ClusterStatus) DeepCopy() *ClusterStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ClusterStatus.
func (*ClusterStatus) DeepCopyInto ¶
func (in *ClusterStatus) DeepCopyInto(out *ClusterStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigAutoscalingConfigInitParameters ¶
// ConfigAutoscalingConfigInitParameters references an autoscaling policy for
// the cluster (init-provider form).
type ConfigAutoscalingConfigInitParameters struct {
	// The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
	Policy *string `json:"policy,omitempty" tf:"policy,omitempty"`
}
func (*ConfigAutoscalingConfigInitParameters) DeepCopy ¶
func (in *ConfigAutoscalingConfigInitParameters) DeepCopy() *ConfigAutoscalingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigAutoscalingConfigInitParameters.
func (*ConfigAutoscalingConfigInitParameters) DeepCopyInto ¶
func (in *ConfigAutoscalingConfigInitParameters) DeepCopyInto(out *ConfigAutoscalingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigAutoscalingConfigObservation ¶
// ConfigAutoscalingConfigObservation is the observed autoscaling-policy reference.
type ConfigAutoscalingConfigObservation struct {
	// The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
	Policy *string `json:"policy,omitempty" tf:"policy,omitempty"`
}
func (*ConfigAutoscalingConfigObservation) DeepCopy ¶
func (in *ConfigAutoscalingConfigObservation) DeepCopy() *ConfigAutoscalingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigAutoscalingConfigObservation.
func (*ConfigAutoscalingConfigObservation) DeepCopyInto ¶
func (in *ConfigAutoscalingConfigObservation) DeepCopyInto(out *ConfigAutoscalingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigAutoscalingConfigParameters ¶
// ConfigAutoscalingConfigParameters is the for-provider autoscaling-policy reference.
type ConfigAutoscalingConfigParameters struct {
	// The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ Note that the policy must be in the same project and Dataproc region.
	// +kubebuilder:validation:Optional
	Policy *string `json:"policy,omitempty" tf:"policy,omitempty"`
}
func (*ConfigAutoscalingConfigParameters) DeepCopy ¶
func (in *ConfigAutoscalingConfigParameters) DeepCopy() *ConfigAutoscalingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigAutoscalingConfigParameters.
func (*ConfigAutoscalingConfigParameters) DeepCopyInto ¶
func (in *ConfigAutoscalingConfigParameters) DeepCopyInto(out *ConfigAutoscalingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEncryptionConfigInitParameters ¶
// ConfigEncryptionConfigInitParameters configures CMEK disk encryption
// (init-provider form).
type ConfigEncryptionConfigInitParameters struct {
	// The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
	GcePdKMSKeyName *string `json:"gcePdKmsKeyName,omitempty" tf:"gce_pd_kms_key_name,omitempty"`
}
func (*ConfigEncryptionConfigInitParameters) DeepCopy ¶
func (in *ConfigEncryptionConfigInitParameters) DeepCopy() *ConfigEncryptionConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEncryptionConfigInitParameters.
func (*ConfigEncryptionConfigInitParameters) DeepCopyInto ¶
func (in *ConfigEncryptionConfigInitParameters) DeepCopyInto(out *ConfigEncryptionConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEncryptionConfigObservation ¶
// ConfigEncryptionConfigObservation is the observed CMEK disk-encryption setting.
type ConfigEncryptionConfigObservation struct {
	// The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
	GcePdKMSKeyName *string `json:"gcePdKmsKeyName,omitempty" tf:"gce_pd_kms_key_name,omitempty"`
}
func (*ConfigEncryptionConfigObservation) DeepCopy ¶
func (in *ConfigEncryptionConfigObservation) DeepCopy() *ConfigEncryptionConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEncryptionConfigObservation.
func (*ConfigEncryptionConfigObservation) DeepCopyInto ¶
func (in *ConfigEncryptionConfigObservation) DeepCopyInto(out *ConfigEncryptionConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEncryptionConfigParameters ¶
// ConfigEncryptionConfigParameters is the for-provider CMEK disk-encryption setting.
type ConfigEncryptionConfigParameters struct {
	// The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
	// +kubebuilder:validation:Optional
	GcePdKMSKeyName *string `json:"gcePdKmsKeyName,omitempty" tf:"gce_pd_kms_key_name,omitempty"`
}
func (*ConfigEncryptionConfigParameters) DeepCopy ¶
func (in *ConfigEncryptionConfigParameters) DeepCopy() *ConfigEncryptionConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEncryptionConfigParameters.
func (*ConfigEncryptionConfigParameters) DeepCopyInto ¶
func (in *ConfigEncryptionConfigParameters) DeepCopyInto(out *ConfigEncryptionConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEndpointConfigInitParameters ¶
// ConfigEndpointConfigInitParameters toggles HTTP port access to the cluster
// (init-provider form).
type ConfigEndpointConfigInitParameters struct {
	// If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
	EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"`
}
func (*ConfigEndpointConfigInitParameters) DeepCopy ¶
func (in *ConfigEndpointConfigInitParameters) DeepCopy() *ConfigEndpointConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEndpointConfigInitParameters.
func (*ConfigEndpointConfigInitParameters) DeepCopyInto ¶
func (in *ConfigEndpointConfigInitParameters) DeepCopyInto(out *ConfigEndpointConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEndpointConfigObservation ¶
// ConfigEndpointConfigObservation is the observed endpoint configuration,
// including the server-populated port-to-URL map.
type ConfigEndpointConfigObservation struct {
	// If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
	EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"`

	// Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.
	// +mapType=granular
	HTTPPorts map[string]*string `json:"httpPorts,omitempty" tf:"http_ports,omitempty"`
}
func (*ConfigEndpointConfigObservation) DeepCopy ¶
func (in *ConfigEndpointConfigObservation) DeepCopy() *ConfigEndpointConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEndpointConfigObservation.
func (*ConfigEndpointConfigObservation) DeepCopyInto ¶
func (in *ConfigEndpointConfigObservation) DeepCopyInto(out *ConfigEndpointConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigEndpointConfigParameters ¶
// ConfigEndpointConfigParameters is the for-provider endpoint configuration.
type ConfigEndpointConfigParameters struct {
	// If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
	// +kubebuilder:validation:Optional
	EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"`
}
func (*ConfigEndpointConfigParameters) DeepCopy ¶
func (in *ConfigEndpointConfigParameters) DeepCopy() *ConfigEndpointConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigEndpointConfigParameters.
func (*ConfigEndpointConfigParameters) DeepCopyInto ¶
func (in *ConfigEndpointConfigParameters) DeepCopyInto(out *ConfigEndpointConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigGceClusterConfigInitParameters ¶
// ConfigGceClusterConfigInitParameters holds the Compute Engine settings
// shared by all cluster instances (init-provider form).
type ConfigGceClusterConfigInitParameters struct {
	// If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
	InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"`

	// The Compute Engine metadata entries to add to all instances (see (https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
	// +mapType=granular
	Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"`

	// The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see /regions/global/default*default`
	Network *string `json:"network,omitempty" tf:"network,omitempty"`

	// Node Group Affinity for sole-tenant clusters.
	NodeGroupAffinity *GceClusterConfigNodeGroupAffinityInitParameters `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"`

	// The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
	PrivateIPv6GoogleAccess *string `json:"privateIpv6GoogleAccess,omitempty" tf:"private_ipv6_google_access,omitempty"`

	// Reservation Affinity for consuming Zonal reservation.
	ReservationAffinity *GceClusterConfigReservationAffinityInitParameters `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"`

	// The (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
	ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"`

	// The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
	ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"`

	// Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
	ShieldedInstanceConfig *GceClusterConfigShieldedInstanceConfigInitParameters `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"`

	// The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
	Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"`

	// The Compute Engine tags to add to all instances (see (https://cloud.google.com/compute/docs/label-or-tag-resources#tags)).
	// +listType=set
	Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"`

	// The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
	Zone *string `json:"zone,omitempty" tf:"zone,omitempty"`
}
func (*ConfigGceClusterConfigInitParameters) DeepCopy ¶
func (in *ConfigGceClusterConfigInitParameters) DeepCopy() *ConfigGceClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigGceClusterConfigInitParameters.
func (*ConfigGceClusterConfigInitParameters) DeepCopyInto ¶
func (in *ConfigGceClusterConfigInitParameters) DeepCopyInto(out *ConfigGceClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigGceClusterConfigObservation ¶
// ConfigGceClusterConfigObservation is the observed Compute Engine
// configuration shared by all cluster instances.
type ConfigGceClusterConfigObservation struct {
	// If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
	InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"`

	// The Compute Engine metadata entries to add to all instances (see (https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
	// +mapType=granular
	Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"`

	// The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see /regions/global/default*default`
	Network *string `json:"network,omitempty" tf:"network,omitempty"`

	// Node Group Affinity for sole-tenant clusters.
	NodeGroupAffinity *GceClusterConfigNodeGroupAffinityObservation `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"`

	// The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
	PrivateIPv6GoogleAccess *string `json:"privateIpv6GoogleAccess,omitempty" tf:"private_ipv6_google_access,omitempty"`

	// Reservation Affinity for consuming Zonal reservation.
	ReservationAffinity *GceClusterConfigReservationAffinityObservation `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"`

	// The (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
	ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"`

	// The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
	ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"`

	// Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
	ShieldedInstanceConfig *GceClusterConfigShieldedInstanceConfigObservation `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"`

	// The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
	Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"`

	// The Compute Engine tags to add to all instances (see (https://cloud.google.com/compute/docs/label-or-tag-resources#tags)).
	// +listType=set
	Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"`

	// The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
	Zone *string `json:"zone,omitempty" tf:"zone,omitempty"`
}
func (*ConfigGceClusterConfigObservation) DeepCopy ¶
func (in *ConfigGceClusterConfigObservation) DeepCopy() *ConfigGceClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigGceClusterConfigObservation.
func (*ConfigGceClusterConfigObservation) DeepCopyInto ¶
func (in *ConfigGceClusterConfigObservation) DeepCopyInto(out *ConfigGceClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigGceClusterConfigParameters ¶
// ConfigGceClusterConfigParameters is the for-provider Compute Engine
// configuration shared by all cluster instances.
type ConfigGceClusterConfigParameters struct {
	// If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This internal_ip_only restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses.
	// +kubebuilder:validation:Optional
	InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"`

	// The Compute Engine metadata entries to add to all instances (see (https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
	// +kubebuilder:validation:Optional
	// +mapType=granular
	Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"`

	// The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither network_uri nor subnetwork_uri is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see /regions/global/default*default`
	// +kubebuilder:validation:Optional
	Network *string `json:"network,omitempty" tf:"network,omitempty"`

	// Node Group Affinity for sole-tenant clusters.
	// +kubebuilder:validation:Optional
	NodeGroupAffinity *GceClusterConfigNodeGroupAffinityParameters `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"`

	// The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL
	// +kubebuilder:validation:Optional
	PrivateIPv6GoogleAccess *string `json:"privateIpv6GoogleAccess,omitempty" tf:"private_ipv6_google_access,omitempty"`

	// Reservation Affinity for consuming Zonal reservation.
	// +kubebuilder:validation:Optional
	ReservationAffinity *GceClusterConfigReservationAffinityParameters `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"`

	// The (https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used.
	// +kubebuilder:validation:Optional
	ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"`

	// The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control
	// +kubebuilder:validation:Optional
	ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"`

	// Shielded Instance Config for clusters using Compute Engine Shielded VMs. Structure defined below.
	// +kubebuilder:validation:Optional
	ShieldedInstanceConfig *GceClusterConfigShieldedInstanceConfigParameters `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"`

	// The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects//regions/us-east1/subnetworks/sub0 * sub0
	// +kubebuilder:validation:Optional
	Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"`

	// The Compute Engine tags to add to all instances (see (https://cloud.google.com/compute/docs/label-or-tag-resources#tags)).
	// +kubebuilder:validation:Optional
	// +listType=set
	Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"`

	// The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/ * us-central1-f
	// +kubebuilder:validation:Optional
	Zone *string `json:"zone,omitempty" tf:"zone,omitempty"`
}
func (*ConfigGceClusterConfigParameters) DeepCopy ¶
func (in *ConfigGceClusterConfigParameters) DeepCopy() *ConfigGceClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigGceClusterConfigParameters.
func (*ConfigGceClusterConfigParameters) DeepCopyInto ¶
func (in *ConfigGceClusterConfigParameters) DeepCopyInto(out *ConfigGceClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigInitParameters ¶
// ConfigInitParameters holds init-provider instance settings for a node
// pool: machine shape, local SSD count, minimum CPU platform, and
// preemptible/spot provisioning.
type ConfigInitParameters struct {

	// The number of local SSD disks to attach to the node,
	// which is limited by the maximum number of disks allowable per zone.
	LocalSsdCount *float64 `json:"localSsdCount,omitempty" tf:"local_ssd_count,omitempty"`

	// The name of a Compute Engine machine type.
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Minimum CPU platform to be used by this instance.
	// The instance may be scheduled on the specified or a newer CPU platform.
	// Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge".
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// Whether the nodes are created as preemptible VM instances.
	// Preemptible nodes cannot be used in a node pool with the CONTROLLER role or in the DEFAULT node pool if the
	// CONTROLLER role is not assigned (the DEFAULT node pool will assume the CONTROLLER role).
	Preemptible *bool `json:"preemptible,omitempty" tf:"preemptible,omitempty"`

	// Spot flag for enabling Spot VM, which is a rebrand of the existing preemptible flag.
	Spot *bool `json:"spot,omitempty" tf:"spot,omitempty"`
}
func (*ConfigInitParameters) DeepCopy ¶
func (in *ConfigInitParameters) DeepCopy() *ConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigInitParameters.
func (*ConfigInitParameters) DeepCopyInto ¶
func (in *ConfigInitParameters) DeepCopyInto(out *ConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigLifecycleConfigInitParameters ¶
// ConfigLifecycleConfigInitParameters holds init-provider auto-deletion
// settings for a cluster's lifecycle (TTL, fixed deletion time, idle TTL).
type ConfigLifecycleConfigInitParameters struct {

	// The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	AutoDeleteTTL *string `json:"autoDeleteTtl,omitempty" tf:"auto_delete_ttl,omitempty"`

	// The time when cluster will be auto-deleted (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"`

	// The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json).
	IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"`
}
func (*ConfigLifecycleConfigInitParameters) DeepCopy ¶
func (in *ConfigLifecycleConfigInitParameters) DeepCopy() *ConfigLifecycleConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigLifecycleConfigInitParameters.
func (*ConfigLifecycleConfigInitParameters) DeepCopyInto ¶
func (in *ConfigLifecycleConfigInitParameters) DeepCopyInto(out *ConfigLifecycleConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigLifecycleConfigObservation ¶
// ConfigLifecycleConfigObservation mirrors ConfigLifecycleConfigInitParameters
// and adds the server-reported idle start time.
type ConfigLifecycleConfigObservation struct {

	// The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	AutoDeleteTTL *string `json:"autoDeleteTtl,omitempty" tf:"auto_delete_ttl,omitempty"`

	// The time when cluster will be auto-deleted (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"`

	// The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json).
	IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"`

	// Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	IdleStartTime *string `json:"idleStartTime,omitempty" tf:"idle_start_time,omitempty"`
}
func (*ConfigLifecycleConfigObservation) DeepCopy ¶
func (in *ConfigLifecycleConfigObservation) DeepCopy() *ConfigLifecycleConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigLifecycleConfigObservation.
func (*ConfigLifecycleConfigObservation) DeepCopyInto ¶
func (in *ConfigLifecycleConfigObservation) DeepCopyInto(out *ConfigLifecycleConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigLifecycleConfigParameters ¶
// ConfigLifecycleConfigParameters is the spec-facing form of the cluster
// lifecycle settings; all fields are optional.
type ConfigLifecycleConfigParameters struct {

	// The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	// +kubebuilder:validation:Optional
	AutoDeleteTTL *string `json:"autoDeleteTtl,omitempty" tf:"auto_delete_ttl,omitempty"`

	// The time when cluster will be auto-deleted (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)).
	// +kubebuilder:validation:Optional
	AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"`

	// The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json).
	// +kubebuilder:validation:Optional
	IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"`
}
func (*ConfigLifecycleConfigParameters) DeepCopy ¶
func (in *ConfigLifecycleConfigParameters) DeepCopy() *ConfigLifecycleConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigLifecycleConfigParameters.
func (*ConfigLifecycleConfigParameters) DeepCopyInto ¶
func (in *ConfigLifecycleConfigParameters) DeepCopyInto(out *ConfigLifecycleConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigAcceleratorsInitParameters ¶
// ConfigMasterConfigAcceleratorsInitParameters holds init-provider
// accelerator (GPU) attachment settings for master instances.
type ConfigMasterConfigAcceleratorsInitParameters struct {

	// The number of the accelerator cards of this type exposed to this instance.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*ConfigMasterConfigAcceleratorsInitParameters) DeepCopy ¶
func (in *ConfigMasterConfigAcceleratorsInitParameters) DeepCopy() *ConfigMasterConfigAcceleratorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigAcceleratorsInitParameters.
func (*ConfigMasterConfigAcceleratorsInitParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigAcceleratorsInitParameters) DeepCopyInto(out *ConfigMasterConfigAcceleratorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigAcceleratorsObservation ¶
// ConfigMasterConfigAcceleratorsObservation is the status-facing form of
// the master-instance accelerator settings.
type ConfigMasterConfigAcceleratorsObservation struct {

	// The number of the accelerator cards of this type exposed to this instance.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*ConfigMasterConfigAcceleratorsObservation) DeepCopy ¶
func (in *ConfigMasterConfigAcceleratorsObservation) DeepCopy() *ConfigMasterConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigAcceleratorsObservation.
func (*ConfigMasterConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *ConfigMasterConfigAcceleratorsObservation) DeepCopyInto(out *ConfigMasterConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigAcceleratorsParameters ¶
// ConfigMasterConfigAcceleratorsParameters is the spec-facing form of the
// master-instance accelerator settings; all fields are optional.
type ConfigMasterConfigAcceleratorsParameters struct {

	// The number of the accelerator cards of this type exposed to this instance.
	// +kubebuilder:validation:Optional
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80.
	// +kubebuilder:validation:Optional
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*ConfigMasterConfigAcceleratorsParameters) DeepCopy ¶
func (in *ConfigMasterConfigAcceleratorsParameters) DeepCopy() *ConfigMasterConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigAcceleratorsParameters.
func (*ConfigMasterConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigAcceleratorsParameters) DeepCopyInto(out *ConfigMasterConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigDiskConfigInitParameters ¶
// ConfigMasterConfigDiskConfigInitParameters holds init-provider disk
// settings (boot disk size/type, local SSD count) for master instances.
type ConfigMasterConfigDiskConfigInitParameters struct {

	// Size in GB of the boot disk (default is 500GB).
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*ConfigMasterConfigDiskConfigInitParameters) DeepCopy ¶
func (in *ConfigMasterConfigDiskConfigInitParameters) DeepCopy() *ConfigMasterConfigDiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigDiskConfigInitParameters.
func (*ConfigMasterConfigDiskConfigInitParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigDiskConfigInitParameters) DeepCopyInto(out *ConfigMasterConfigDiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigDiskConfigObservation ¶
// ConfigMasterConfigDiskConfigObservation is the status-facing form of the
// master-instance disk settings.
type ConfigMasterConfigDiskConfigObservation struct {

	// Size in GB of the boot disk (default is 500GB).
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*ConfigMasterConfigDiskConfigObservation) DeepCopy ¶
func (in *ConfigMasterConfigDiskConfigObservation) DeepCopy() *ConfigMasterConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigDiskConfigObservation.
func (*ConfigMasterConfigDiskConfigObservation) DeepCopyInto ¶
func (in *ConfigMasterConfigDiskConfigObservation) DeepCopyInto(out *ConfigMasterConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigDiskConfigParameters ¶
// ConfigMasterConfigDiskConfigParameters is the spec-facing form of the
// master-instance disk settings; all fields are optional.
type ConfigMasterConfigDiskConfigParameters struct {

	// Size in GB of the boot disk (default is 500GB).
	// +kubebuilder:validation:Optional
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive).
	// +kubebuilder:validation:Optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	// +kubebuilder:validation:Optional
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*ConfigMasterConfigDiskConfigParameters) DeepCopy ¶
func (in *ConfigMasterConfigDiskConfigParameters) DeepCopy() *ConfigMasterConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigDiskConfigParameters.
func (*ConfigMasterConfigDiskConfigParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigDiskConfigParameters) DeepCopyInto(out *ConfigMasterConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigInitParameters ¶
// ConfigMasterConfigInitParameters holds init-provider settings for the
// master instance group: accelerators, disks, image, machine type, CPU
// platform, instance count and preemptibility.
type ConfigMasterConfigInitParameters struct {

	// The Compute Engine accelerator configuration for these instances.
	Accelerators []ConfigMasterConfigAcceleratorsInitParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"`

	// Disk option config settings.
	DiskConfig *ConfigMasterConfigDiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"`

	// The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
	Image *string `json:"image,omitempty" tf:"image,omitempty"`

	// The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`.
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// The number of VM instances in the instance group. For master instance groups, must be set to 1.
	NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"`

	// Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"`
}
func (*ConfigMasterConfigInitParameters) DeepCopy ¶
func (in *ConfigMasterConfigInitParameters) DeepCopy() *ConfigMasterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigInitParameters.
func (*ConfigMasterConfigInitParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigInitParameters) DeepCopyInto(out *ConfigMasterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigObservation ¶
// ConfigMasterConfigObservation mirrors ConfigMasterConfigInitParameters
// and adds server-reported output-only fields (instance names, preemptible
// status, managed group config).
type ConfigMasterConfigObservation struct {

	// The Compute Engine accelerator configuration for these instances.
	Accelerators []ConfigMasterConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"`

	// Disk option config settings.
	DiskConfig *ConfigMasterConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"`

	// The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
	Image *string `json:"image,omitempty" tf:"image,omitempty"`

	// Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group.
	InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"`

	// Output only. Specifies that this instance group contains preemptible instances.
	IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible,omitempty"`

	// The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`.
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups.
	ManagedGroupConfig []ManagedGroupConfigObservation `json:"managedGroupConfig,omitempty" tf:"managed_group_config,omitempty"`

	// Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// The number of VM instances in the instance group. For master instance groups, must be set to 1.
	NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"`

	// Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"`
}
func (*ConfigMasterConfigObservation) DeepCopy ¶
func (in *ConfigMasterConfigObservation) DeepCopy() *ConfigMasterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigObservation.
func (*ConfigMasterConfigObservation) DeepCopyInto ¶
func (in *ConfigMasterConfigObservation) DeepCopyInto(out *ConfigMasterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigMasterConfigParameters ¶
// ConfigMasterConfigParameters is the spec-facing form of the master
// instance group settings; all fields are optional.
type ConfigMasterConfigParameters struct {

	// The Compute Engine accelerator configuration for these instances.
	// +kubebuilder:validation:Optional
	Accelerators []ConfigMasterConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"`

	// Disk option config settings.
	// +kubebuilder:validation:Optional
	DiskConfig *ConfigMasterConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"`

	// The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
	// +kubebuilder:validation:Optional
	Image *string `json:"image,omitempty" tf:"image,omitempty"`

	// The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`.
	// +kubebuilder:validation:Optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	// +kubebuilder:validation:Optional
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// The number of VM instances in the instance group. For master instance groups, must be set to 1.
	// +kubebuilder:validation:Optional
	NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"`

	// Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	// +kubebuilder:validation:Optional
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"`
}
func (*ConfigMasterConfigParameters) DeepCopy ¶
func (in *ConfigMasterConfigParameters) DeepCopy() *ConfigMasterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigMasterConfigParameters.
func (*ConfigMasterConfigParameters) DeepCopyInto ¶
func (in *ConfigMasterConfigParameters) DeepCopyInto(out *ConfigMasterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigObservation ¶
// ConfigObservation is the status-facing form of the node-pool instance
// settings (same fields as ConfigInitParameters; this type carries no
// extra output-only fields).
type ConfigObservation struct {

	// The number of local SSD disks to attach to the node,
	// which is limited by the maximum number of disks allowable per zone.
	LocalSsdCount *float64 `json:"localSsdCount,omitempty" tf:"local_ssd_count,omitempty"`

	// The name of a Compute Engine machine type.
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Minimum CPU platform to be used by this instance.
	// The instance may be scheduled on the specified or a newer CPU platform.
	// Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge".
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// Whether the nodes are created as preemptible VM instances.
	// Preemptible nodes cannot be used in a node pool with the CONTROLLER role or in the DEFAULT node pool if the
	// CONTROLLER role is not assigned (the DEFAULT node pool will assume the CONTROLLER role).
	Preemptible *bool `json:"preemptible,omitempty" tf:"preemptible,omitempty"`

	// Spot flag for enabling Spot VM, which is a rebrand of the existing preemptible flag.
	Spot *bool `json:"spot,omitempty" tf:"spot,omitempty"`
}
func (*ConfigObservation) DeepCopy ¶
func (in *ConfigObservation) DeepCopy() *ConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigObservation.
func (*ConfigObservation) DeepCopyInto ¶
func (in *ConfigObservation) DeepCopyInto(out *ConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigParameters ¶
// ConfigParameters is the spec-facing form of the node-pool instance
// settings; all fields are optional.
type ConfigParameters struct {

	// The number of local SSD disks to attach to the node,
	// which is limited by the maximum number of disks allowable per zone.
	// +kubebuilder:validation:Optional
	LocalSsdCount *float64 `json:"localSsdCount,omitempty" tf:"local_ssd_count,omitempty"`

	// The name of a Compute Engine machine type.
	// +kubebuilder:validation:Optional
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Minimum CPU platform to be used by this instance.
	// The instance may be scheduled on the specified or a newer CPU platform.
	// Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge".
	// +kubebuilder:validation:Optional
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// Whether the nodes are created as preemptible VM instances.
	// Preemptible nodes cannot be used in a node pool with the CONTROLLER role or in the DEFAULT node pool if the
	// CONTROLLER role is not assigned (the DEFAULT node pool will assume the CONTROLLER role).
	// +kubebuilder:validation:Optional
	Preemptible *bool `json:"preemptible,omitempty" tf:"preemptible,omitempty"`

	// Spot flag for enabling Spot VM, which is a rebrand of the existing preemptible flag.
	// +kubebuilder:validation:Optional
	Spot *bool `json:"spot,omitempty" tf:"spot,omitempty"`
}
func (*ConfigParameters) DeepCopy ¶
func (in *ConfigParameters) DeepCopy() *ConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigParameters.
func (*ConfigParameters) DeepCopyInto ¶
func (in *ConfigParameters) DeepCopyInto(out *ConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecondaryWorkerConfigInitParameters ¶
// ConfigSecondaryWorkerConfigInitParameters holds init-provider settings
// for the secondary (preemptible) worker instance group.
type ConfigSecondaryWorkerConfigInitParameters struct {

	// The Compute Engine accelerator configuration for these instances.
	Accelerators []SecondaryWorkerConfigAcceleratorsInitParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"`

	// Disk option config settings.
	DiskConfig *SecondaryWorkerConfigDiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"`

	// The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default.
	Image *string `json:"image,omitempty" tf:"image,omitempty"`

	// The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`.
	MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"`

	// Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"`

	// The number of VM instances in the instance group. For master instance groups, must be set to 1.
	NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"`

	// Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"`
}
func (*ConfigSecondaryWorkerConfigInitParameters) DeepCopy ¶
func (in *ConfigSecondaryWorkerConfigInitParameters) DeepCopy() *ConfigSecondaryWorkerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecondaryWorkerConfigInitParameters.
func (*ConfigSecondaryWorkerConfigInitParameters) DeepCopyInto ¶
func (in *ConfigSecondaryWorkerConfigInitParameters) DeepCopyInto(out *ConfigSecondaryWorkerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecondaryWorkerConfigObservation ¶
type ConfigSecondaryWorkerConfigObservation struct { // The Compute Engine accelerator configuration for these instances. Accelerators []SecondaryWorkerConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk option config settings. DiskConfig *SecondaryWorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. Image *string `json:"image,omitempty" tf:"image,omitempty"` // Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Output only. Specifies that this instance group contains preemptible instances. IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible,omitempty"` // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. ManagedGroupConfig []SecondaryWorkerConfigManagedGroupConfigObservation `json:"managedGroupConfig,omitempty" tf:"managed_group_config,omitempty"` // Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). 
MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The number of VM instances in the instance group. For master instance groups, must be set to 1. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigSecondaryWorkerConfigObservation) DeepCopy ¶
func (in *ConfigSecondaryWorkerConfigObservation) DeepCopy() *ConfigSecondaryWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecondaryWorkerConfigObservation.
func (*ConfigSecondaryWorkerConfigObservation) DeepCopyInto ¶
func (in *ConfigSecondaryWorkerConfigObservation) DeepCopyInto(out *ConfigSecondaryWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecondaryWorkerConfigParameters ¶
type ConfigSecondaryWorkerConfigParameters struct { // The Compute Engine accelerator configuration for these instances. // +kubebuilder:validation:Optional Accelerators []SecondaryWorkerConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk option config settings. // +kubebuilder:validation:Optional DiskConfig *SecondaryWorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. // +kubebuilder:validation:Optional Image *string `json:"image,omitempty" tf:"image,omitempty"` // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The number of VM instances in the instance group. For master instance groups, must be set to 1. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. 
The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigSecondaryWorkerConfigParameters) DeepCopy ¶
func (in *ConfigSecondaryWorkerConfigParameters) DeepCopy() *ConfigSecondaryWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecondaryWorkerConfigParameters.
func (*ConfigSecondaryWorkerConfigParameters) DeepCopyInto ¶
func (in *ConfigSecondaryWorkerConfigParameters) DeepCopyInto(out *ConfigSecondaryWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecurityConfigInitParameters ¶
type ConfigSecurityConfigInitParameters struct { // Kerberos related configuration. KerberosConfig *SecurityConfigKerberosConfigInitParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*ConfigSecurityConfigInitParameters) DeepCopy ¶
func (in *ConfigSecurityConfigInitParameters) DeepCopy() *ConfigSecurityConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecurityConfigInitParameters.
func (*ConfigSecurityConfigInitParameters) DeepCopyInto ¶
func (in *ConfigSecurityConfigInitParameters) DeepCopyInto(out *ConfigSecurityConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecurityConfigObservation ¶
type ConfigSecurityConfigObservation struct { // Kerberos related configuration. KerberosConfig *SecurityConfigKerberosConfigObservation `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*ConfigSecurityConfigObservation) DeepCopy ¶
func (in *ConfigSecurityConfigObservation) DeepCopy() *ConfigSecurityConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecurityConfigObservation.
func (*ConfigSecurityConfigObservation) DeepCopyInto ¶
func (in *ConfigSecurityConfigObservation) DeepCopyInto(out *ConfigSecurityConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSecurityConfigParameters ¶
type ConfigSecurityConfigParameters struct { // Kerberos related configuration. // +kubebuilder:validation:Optional KerberosConfig *SecurityConfigKerberosConfigParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*ConfigSecurityConfigParameters) DeepCopy ¶
func (in *ConfigSecurityConfigParameters) DeepCopy() *ConfigSecurityConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSecurityConfigParameters.
func (*ConfigSecurityConfigParameters) DeepCopyInto ¶
func (in *ConfigSecurityConfigParameters) DeepCopyInto(out *ConfigSecurityConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSoftwareConfigInitParameters ¶
// ConfigSoftwareConfigInitParameters holds the init-provider software
// settings for the cluster (image version, optional components, properties).
type ConfigSoftwareConfigInitParameters struct {

	// The version of software inside the cluster. It must be one of the
	// supported Dataproc Versions, such as "1.2" (including a subminor version,
	// such as "1.2.29"), or the "preview" version. If unspecified, it defaults
	// to the latest Debian version.
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of components to activate on the cluster.
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A mapping of property names to values, used to configure Spark SQL's
	// SparkConf. Properties that conflict with values set by the Dataproc API
	// may be overwritten.
	// +mapType=granular
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*ConfigSoftwareConfigInitParameters) DeepCopy ¶
func (in *ConfigSoftwareConfigInitParameters) DeepCopy() *ConfigSoftwareConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSoftwareConfigInitParameters.
func (*ConfigSoftwareConfigInitParameters) DeepCopyInto ¶
func (in *ConfigSoftwareConfigInitParameters) DeepCopyInto(out *ConfigSoftwareConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSoftwareConfigObservation ¶
// ConfigSoftwareConfigObservation reflects the observed software settings of
// the cluster (image version, optional components, properties).
type ConfigSoftwareConfigObservation struct {

	// The version of software inside the cluster. It must be one of the
	// supported Dataproc Versions, such as "1.2" (including a subminor version,
	// such as "1.2.29"), or the "preview" version. If unspecified, it defaults
	// to the latest Debian version.
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of components to activate on the cluster.
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A mapping of property names to values, used to configure Spark SQL's
	// SparkConf. Properties that conflict with values set by the Dataproc API
	// may be overwritten.
	// +mapType=granular
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*ConfigSoftwareConfigObservation) DeepCopy ¶
func (in *ConfigSoftwareConfigObservation) DeepCopy() *ConfigSoftwareConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSoftwareConfigObservation.
func (*ConfigSoftwareConfigObservation) DeepCopyInto ¶
func (in *ConfigSoftwareConfigObservation) DeepCopyInto(out *ConfigSoftwareConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigSoftwareConfigParameters ¶
// ConfigSoftwareConfigParameters holds the forProvider software settings for
// the cluster (image version, optional components, properties).
type ConfigSoftwareConfigParameters struct {

	// The version of software inside the cluster. It must be one of the
	// supported Dataproc Versions, such as "1.2" (including a subminor version,
	// such as "1.2.29"), or the "preview" version. If unspecified, it defaults
	// to the latest Debian version.
	// +kubebuilder:validation:Optional
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of components to activate on the cluster.
	// +kubebuilder:validation:Optional
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A mapping of property names to values, used to configure Spark SQL's
	// SparkConf. Properties that conflict with values set by the Dataproc API
	// may be overwritten.
	// +kubebuilder:validation:Optional
	// +mapType=granular
	Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*ConfigSoftwareConfigParameters) DeepCopy ¶
func (in *ConfigSoftwareConfigParameters) DeepCopy() *ConfigSoftwareConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigSoftwareConfigParameters.
func (*ConfigSoftwareConfigParameters) DeepCopyInto ¶
func (in *ConfigSoftwareConfigParameters) DeepCopyInto(out *ConfigSoftwareConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigAcceleratorsInitParameters ¶
// ConfigWorkerConfigAcceleratorsInitParameters holds the init-provider
// accelerator (GPU) settings for worker instances.
type ConfigWorkerConfigAcceleratorsInitParameters struct {

	// The number of the accelerator cards of this type exposed to this
	// instance.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// Full URL, partial URI, or short name of the accelerator type resource to
	// expose to this instance. When using the auto-zone-placement feature, you
	// must use the short name of the accelerator type resource, for example,
	// nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*ConfigWorkerConfigAcceleratorsInitParameters) DeepCopy ¶
func (in *ConfigWorkerConfigAcceleratorsInitParameters) DeepCopy() *ConfigWorkerConfigAcceleratorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigAcceleratorsInitParameters.
func (*ConfigWorkerConfigAcceleratorsInitParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigAcceleratorsInitParameters) DeepCopyInto(out *ConfigWorkerConfigAcceleratorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigAcceleratorsObservation ¶
// ConfigWorkerConfigAcceleratorsObservation reflects the observed accelerator
// (GPU) settings of worker instances.
type ConfigWorkerConfigAcceleratorsObservation struct {

	// The number of the accelerator cards of this type exposed to this
	// instance.
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// Full URL, partial URI, or short name of the accelerator type resource to
	// expose to this instance. When using the auto-zone-placement feature, you
	// must use the short name of the accelerator type resource, for example,
	// nvidia-tesla-k80.
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*ConfigWorkerConfigAcceleratorsObservation) DeepCopy ¶
func (in *ConfigWorkerConfigAcceleratorsObservation) DeepCopy() *ConfigWorkerConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigAcceleratorsObservation.
func (*ConfigWorkerConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *ConfigWorkerConfigAcceleratorsObservation) DeepCopyInto(out *ConfigWorkerConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigAcceleratorsParameters ¶
// ConfigWorkerConfigAcceleratorsParameters holds the forProvider accelerator
// (GPU) settings for worker instances.
type ConfigWorkerConfigAcceleratorsParameters struct {

	// The number of the accelerator cards of this type exposed to this
	// instance.
	// +kubebuilder:validation:Optional
	AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"`

	// Full URL, partial URI, or short name of the accelerator type resource to
	// expose to this instance. When using the auto-zone-placement feature, you
	// must use the short name of the accelerator type resource, for example,
	// nvidia-tesla-k80.
	// +kubebuilder:validation:Optional
	AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"`
}
func (*ConfigWorkerConfigAcceleratorsParameters) DeepCopy ¶
func (in *ConfigWorkerConfigAcceleratorsParameters) DeepCopy() *ConfigWorkerConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigAcceleratorsParameters.
func (*ConfigWorkerConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigAcceleratorsParameters) DeepCopyInto(out *ConfigWorkerConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigDiskConfigInitParameters ¶
// ConfigWorkerConfigDiskConfigInitParameters holds the init-provider disk
// settings for worker instances.
type ConfigWorkerConfigDiskConfigInitParameters struct {

	// Size in GB of the boot disk (default is 500GB).
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd"
	// (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk
	// Hard Disk Drive).
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not
	// attached, the boot disk is used to store runtime logs and HDFS data. If
	// one or more SSDs are attached, this runtime bulk data is spread across
	// them, and the boot disk contains only basic config and installed
	// binaries.
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*ConfigWorkerConfigDiskConfigInitParameters) DeepCopy ¶
func (in *ConfigWorkerConfigDiskConfigInitParameters) DeepCopy() *ConfigWorkerConfigDiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigDiskConfigInitParameters.
func (*ConfigWorkerConfigDiskConfigInitParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigDiskConfigInitParameters) DeepCopyInto(out *ConfigWorkerConfigDiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigDiskConfigObservation ¶
// ConfigWorkerConfigDiskConfigObservation reflects the observed disk settings
// of worker instances.
type ConfigWorkerConfigDiskConfigObservation struct {

	// Size in GB of the boot disk (default is 500GB).
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd"
	// (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk
	// Hard Disk Drive).
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not
	// attached, the boot disk is used to store runtime logs and HDFS data. If
	// one or more SSDs are attached, this runtime bulk data is spread across
	// them, and the boot disk contains only basic config and installed
	// binaries.
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*ConfigWorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *ConfigWorkerConfigDiskConfigObservation) DeepCopy() *ConfigWorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigDiskConfigObservation.
func (*ConfigWorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *ConfigWorkerConfigDiskConfigObservation) DeepCopyInto(out *ConfigWorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigDiskConfigParameters ¶
// ConfigWorkerConfigDiskConfigParameters holds the forProvider disk settings
// for worker instances.
type ConfigWorkerConfigDiskConfigParameters struct {

	// Size in GB of the boot disk (default is 500GB).
	// +kubebuilder:validation:Optional
	BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"`

	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd"
	// (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk
	// Hard Disk Drive).
	// +kubebuilder:validation:Optional
	BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"`

	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not
	// attached, the boot disk is used to store runtime logs and HDFS data. If
	// one or more SSDs are attached, this runtime bulk data is spread across
	// them, and the boot disk contains only basic config and installed
	// binaries.
	// +kubebuilder:validation:Optional
	NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"`
}
func (*ConfigWorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *ConfigWorkerConfigDiskConfigParameters) DeepCopy() *ConfigWorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigDiskConfigParameters.
func (*ConfigWorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigDiskConfigParameters) DeepCopyInto(out *ConfigWorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigInitParameters ¶
type ConfigWorkerConfigInitParameters struct { // The Compute Engine accelerator configuration for these instances. Accelerators []ConfigWorkerConfigAcceleratorsInitParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk option config settings. DiskConfig *ConfigWorkerConfigDiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. Image *string `json:"image,omitempty" tf:"image,omitempty"` // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The number of VM instances in the instance group. For master instance groups, must be set to 1. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigWorkerConfigInitParameters) DeepCopy ¶
func (in *ConfigWorkerConfigInitParameters) DeepCopy() *ConfigWorkerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigInitParameters.
func (*ConfigWorkerConfigInitParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigInitParameters) DeepCopyInto(out *ConfigWorkerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigObservation ¶
type ConfigWorkerConfigObservation struct { // The Compute Engine accelerator configuration for these instances. Accelerators []ConfigWorkerConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk option config settings. DiskConfig *ConfigWorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. Image *string `json:"image,omitempty" tf:"image,omitempty"` // Output only. The list of instance names. Dataproc derives the names from cluster_name, num_instances, and the instance group. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Output only. Specifies that this instance group contains preemptible instances. IsPreemptible *bool `json:"isPreemptible,omitempty" tf:"is_preemptible,omitempty"` // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Output only. The config for Compute Engine Instance Group Manager that manages this group. This is only used for preemptible instance groups. ManagedGroupConfig []WorkerConfigManagedGroupConfigObservation `json:"managedGroupConfig,omitempty" tf:"managed_group_config,omitempty"` // Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). 
MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The number of VM instances in the instance group. For master instance groups, must be set to 1. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigWorkerConfigObservation) DeepCopy ¶
func (in *ConfigWorkerConfigObservation) DeepCopy() *ConfigWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigObservation.
func (*ConfigWorkerConfigObservation) DeepCopyInto ¶
func (in *ConfigWorkerConfigObservation) DeepCopyInto(out *ConfigWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigWorkerConfigParameters ¶
type ConfigWorkerConfigParameters struct { // The Compute Engine accelerator configuration for these instances. // +kubebuilder:validation:Optional Accelerators []ConfigWorkerConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk option config settings. // +kubebuilder:validation:Optional DiskConfig *ConfigWorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * https://www.googleapis.com/compute/beta/projects/ If the URI is unspecified, it will be inferred from SoftwareConfig.image_version or the system default. // +kubebuilder:validation:Optional Image *string `json:"image,omitempty" tf:"image,omitempty"` // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * https://www.googleapis.com/compute/v1/projects/(https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, n1-standard-2`. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Specifies the minimum cpu platform for the Instance Group. See (https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // The number of VM instances in the instance group. For master instance groups, must be set to 1. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the instance group. The default value for master and worker groups is NON_PREEMPTIBLE. This default cannot be changed. 
The default value for secondary instances is PREEMPTIBLE. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*ConfigWorkerConfigParameters) DeepCopy ¶
func (in *ConfigWorkerConfigParameters) DeepCopy() *ConfigWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigWorkerConfigParameters.
func (*ConfigWorkerConfigParameters) DeepCopyInto ¶
func (in *ConfigWorkerConfigParameters) DeepCopyInto(out *ConfigWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConsumersInitParameters ¶
type ConsumersInitParameters struct { // The subnetwork of the customer project from which an IP address is reserved and used as the Dataproc Metastore service's endpoint. // It is accessible to hosts in the subnet and to all hosts in a subnet in the same region and same network. // There must be at least one IP address available in the subnet's primary range. The subnet is specified in the following form: // `projects/{projectNumber}/regions/{region_id}/subnetworks/{subnetwork_id} // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/compute/v1beta2.Subnetwork // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // Reference to a Subnetwork in compute to populate subnetwork. // +kubebuilder:validation:Optional SubnetworkRef *v1.Reference `json:"subnetworkRef,omitempty" tf:"-"` // Selector for a Subnetwork in compute to populate subnetwork. // +kubebuilder:validation:Optional SubnetworkSelector *v1.Selector `json:"subnetworkSelector,omitempty" tf:"-"` }
func (*ConsumersInitParameters) DeepCopy ¶
func (in *ConsumersInitParameters) DeepCopy() *ConsumersInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConsumersInitParameters.
func (*ConsumersInitParameters) DeepCopyInto ¶
func (in *ConsumersInitParameters) DeepCopyInto(out *ConsumersInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConsumersObservation ¶
// ConsumersObservation reflects the observed consumer network settings of the
// Dataproc Metastore service endpoint.
type ConsumersObservation struct {

	// The URI of the endpoint used to access the metastore service.
	EndpointURI *string `json:"endpointUri,omitempty" tf:"endpoint_uri,omitempty"`

	// The subnetwork of the customer project from which an IP address is
	// reserved and used as the Dataproc Metastore service's endpoint. It is
	// accessible to hosts in the subnet and to all hosts in a subnet in the
	// same region and same network. There must be at least one IP address
	// available in the subnet's primary range. The subnet is specified in the
	// following form:
	// `projects/{projectNumber}/regions/{region_id}/subnetworks/{subnetwork_id}`
	Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"`
}
func (*ConsumersObservation) DeepCopy ¶
func (in *ConsumersObservation) DeepCopy() *ConsumersObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConsumersObservation.
func (*ConsumersObservation) DeepCopyInto ¶
func (in *ConsumersObservation) DeepCopyInto(out *ConsumersObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConsumersParameters ¶
type ConsumersParameters struct { // The subnetwork of the customer project from which an IP address is reserved and used as the Dataproc Metastore service's endpoint. // It is accessible to hosts in the subnet and to all hosts in a subnet in the same region and same network. // There must be at least one IP address available in the subnet's primary range. The subnet is specified in the following form: // `projects/{projectNumber}/regions/{region_id}/subnetworks/{subnetwork_id}` // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/compute/v1beta2.Subnetwork // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // Reference to a Subnetwork in compute to populate subnetwork. // +kubebuilder:validation:Optional SubnetworkRef *v1.Reference `json:"subnetworkRef,omitempty" tf:"-"` // Selector for a Subnetwork in compute to populate subnetwork. // +kubebuilder:validation:Optional SubnetworkSelector *v1.Selector `json:"subnetworkSelector,omitempty" tf:"-"` }
func (*ConsumersParameters) DeepCopy ¶
func (in *ConsumersParameters) DeepCopy() *ConsumersParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConsumersParameters.
func (*ConsumersParameters) DeepCopyInto ¶
func (in *ConsumersParameters) DeepCopyInto(out *ConsumersParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataCatalogConfigInitParameters ¶
type DataCatalogConfigInitParameters struct { // Defines whether the metastore metadata should be synced to Data Catalog. The default value is to disable syncing metastore metadata to Data Catalog. Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` }
func (*DataCatalogConfigInitParameters) DeepCopy ¶
func (in *DataCatalogConfigInitParameters) DeepCopy() *DataCatalogConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCatalogConfigInitParameters.
func (*DataCatalogConfigInitParameters) DeepCopyInto ¶
func (in *DataCatalogConfigInitParameters) DeepCopyInto(out *DataCatalogConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataCatalogConfigObservation ¶
type DataCatalogConfigObservation struct { // Defines whether the metastore metadata should be synced to Data Catalog. The default value is to disable syncing metastore metadata to Data Catalog. Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` }
func (*DataCatalogConfigObservation) DeepCopy ¶
func (in *DataCatalogConfigObservation) DeepCopy() *DataCatalogConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCatalogConfigObservation.
func (*DataCatalogConfigObservation) DeepCopyInto ¶
func (in *DataCatalogConfigObservation) DeepCopyInto(out *DataCatalogConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataCatalogConfigParameters ¶
type DataCatalogConfigParameters struct { // Defines whether the metastore metadata should be synced to Data Catalog. The default value is to disable syncing metastore metadata to Data Catalog. // +kubebuilder:validation:Optional Enabled *bool `json:"enabled" tf:"enabled,omitempty"` }
func (*DataCatalogConfigParameters) DeepCopy ¶
func (in *DataCatalogConfigParameters) DeepCopy() *DataCatalogConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataCatalogConfigParameters.
func (*DataCatalogConfigParameters) DeepCopyInto ¶
func (in *DataCatalogConfigParameters) DeepCopyInto(out *DataCatalogConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataprocMetricConfigInitParameters ¶
type DataprocMetricConfigInitParameters struct { // Metrics sources to enable. Metrics []MetricsInitParameters `json:"metrics,omitempty" tf:"metrics,omitempty"` }
func (*DataprocMetricConfigInitParameters) DeepCopy ¶
func (in *DataprocMetricConfigInitParameters) DeepCopy() *DataprocMetricConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataprocMetricConfigInitParameters.
func (*DataprocMetricConfigInitParameters) DeepCopyInto ¶
func (in *DataprocMetricConfigInitParameters) DeepCopyInto(out *DataprocMetricConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataprocMetricConfigObservation ¶
type DataprocMetricConfigObservation struct { // Metrics sources to enable. Metrics []MetricsObservation `json:"metrics,omitempty" tf:"metrics,omitempty"` }
func (*DataprocMetricConfigObservation) DeepCopy ¶
func (in *DataprocMetricConfigObservation) DeepCopy() *DataprocMetricConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataprocMetricConfigObservation.
func (*DataprocMetricConfigObservation) DeepCopyInto ¶
func (in *DataprocMetricConfigObservation) DeepCopyInto(out *DataprocMetricConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DataprocMetricConfigParameters ¶
type DataprocMetricConfigParameters struct { // Metrics sources to enable. // +kubebuilder:validation:Optional Metrics []MetricsParameters `json:"metrics" tf:"metrics,omitempty"` }
func (*DataprocMetricConfigParameters) DeepCopy ¶
func (in *DataprocMetricConfigParameters) DeepCopy() *DataprocMetricConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataprocMetricConfigParameters.
func (*DataprocMetricConfigParameters) DeepCopyInto ¶
func (in *DataprocMetricConfigParameters) DeepCopyInto(out *DataprocMetricConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DiskConfigInitParameters ¶
type DiskConfigInitParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*DiskConfigInitParameters) DeepCopy ¶
func (in *DiskConfigInitParameters) DeepCopy() *DiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskConfigInitParameters.
func (*DiskConfigInitParameters) DeepCopyInto ¶
func (in *DiskConfigInitParameters) DeepCopyInto(out *DiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DiskConfigObservation ¶
type DiskConfigObservation struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*DiskConfigObservation) DeepCopy ¶
func (in *DiskConfigObservation) DeepCopy() *DiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskConfigObservation.
func (*DiskConfigObservation) DeepCopyInto ¶
func (in *DiskConfigObservation) DeepCopyInto(out *DiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DiskConfigParameters ¶
type DiskConfigParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. // +kubebuilder:validation:Optional LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*DiskConfigParameters) DeepCopy ¶
func (in *DiskConfigParameters) DeepCopy() *DiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DiskConfigParameters.
func (*DiskConfigParameters) DeepCopyInto ¶
func (in *DiskConfigParameters) DeepCopyInto(out *DiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigInitParameters ¶
type EncryptionConfigInitParameters struct { // The Cloud KMS key name to use for PD disk encryption for // all instances in the cluster. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigInitParameters) DeepCopy ¶
func (in *EncryptionConfigInitParameters) DeepCopy() *EncryptionConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigInitParameters.
func (*EncryptionConfigInitParameters) DeepCopyInto ¶
func (in *EncryptionConfigInitParameters) DeepCopyInto(out *EncryptionConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigObservation ¶
type EncryptionConfigObservation struct { // The Cloud KMS key name to use for PD disk encryption for // all instances in the cluster. KMSKeyName *string `json:"kmsKeyName,omitempty" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigObservation) DeepCopy ¶
func (in *EncryptionConfigObservation) DeepCopy() *EncryptionConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigObservation.
func (*EncryptionConfigObservation) DeepCopyInto ¶
func (in *EncryptionConfigObservation) DeepCopyInto(out *EncryptionConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfigParameters ¶
type EncryptionConfigParameters struct { // The Cloud KMS key name to use for PD disk encryption for // all instances in the cluster. // +kubebuilder:validation:Optional KMSKeyName *string `json:"kmsKeyName" tf:"kms_key_name,omitempty"` }
func (*EncryptionConfigParameters) DeepCopy ¶
func (in *EncryptionConfigParameters) DeepCopy() *EncryptionConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfigParameters.
func (*EncryptionConfigParameters) DeepCopyInto ¶
func (in *EncryptionConfigParameters) DeepCopyInto(out *EncryptionConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EndpointConfigInitParameters ¶
type EndpointConfigInitParameters struct { // The flag to enable http access to specific ports // on the cluster from external sources (aka Component Gateway). Defaults to false. EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"` }
func (*EndpointConfigInitParameters) DeepCopy ¶
func (in *EndpointConfigInitParameters) DeepCopy() *EndpointConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EndpointConfigInitParameters.
func (*EndpointConfigInitParameters) DeepCopyInto ¶
func (in *EndpointConfigInitParameters) DeepCopyInto(out *EndpointConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EndpointConfigObservation ¶
type EndpointConfigObservation struct { // The flag to enable http access to specific ports // on the cluster from external sources (aka Component Gateway). Defaults to false. EnableHTTPPortAccess *bool `json:"enableHttpPortAccess,omitempty" tf:"enable_http_port_access,omitempty"` // The map of port descriptions to URLs. Will only be populated if // enable_http_port_access is true. // +mapType=granular HTTPPorts map[string]*string `json:"httpPorts,omitempty" tf:"http_ports,omitempty"` }
func (*EndpointConfigObservation) DeepCopy ¶
func (in *EndpointConfigObservation) DeepCopy() *EndpointConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EndpointConfigObservation.
func (*EndpointConfigObservation) DeepCopyInto ¶
func (in *EndpointConfigObservation) DeepCopyInto(out *EndpointConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EndpointConfigParameters ¶
type EndpointConfigParameters struct { // The flag to enable http access to specific ports // on the cluster from external sources (aka Component Gateway). Defaults to false. // +kubebuilder:validation:Optional EnableHTTPPortAccess *bool `json:"enableHttpPortAccess" tf:"enable_http_port_access,omitempty"` }
func (*EndpointConfigParameters) DeepCopy ¶
func (in *EndpointConfigParameters) DeepCopy() *EndpointConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EndpointConfigParameters.
func (*EndpointConfigParameters) DeepCopyInto ¶
func (in *EndpointConfigParameters) DeepCopyInto(out *EndpointConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigInitParameters ¶
type GceClusterConfigInitParameters struct { // By default, clusters are not restricted to internal IP addresses, // and will have ephemeral external IP addresses assigned to each instance. If set to true, all // instances in the cluster will only have internal IP addresses. Note: Private Google Access // (also known as privateIpGoogleAccess) must be enabled on the subnetwork that the cluster // will be launched in. InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // A map of the Compute Engine metadata entries to add to all instances // (see Project and instance metadata). // +mapType=granular Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // The name or self_link of the Google Compute Engine // network to the cluster will be part of. Conflicts with subnetwork. // If neither is specified, this defaults to the "default" network. Network *string `json:"network,omitempty" tf:"network,omitempty"` // Node Group Affinity for sole-tenant clusters. NodeGroupAffinity *NodeGroupAffinityInitParameters `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Reservation Affinity for consuming zonal reservation. ReservationAffinity *ReservationAffinityInitParameters `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // The service account to be used by the Node VMs. // If not specified, the "default" service account is used. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("email",true) ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate serviceAccount. 
// +kubebuilder:validation:Optional ServiceAccountRef *v1.Reference `json:"serviceAccountRef,omitempty" tf:"-"` // The set of Google API scopes // to be made available on all of the node VMs under the service_account // specified. Both OAuth2 URLs and gcloud // short names are supported. To allow full access to all Cloud APIs, use the // cloud-platform scope. See a complete list of scopes here. // +listType=set ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Selector for a ServiceAccount in cloudplatform to populate serviceAccount. // +kubebuilder:validation:Optional ServiceAccountSelector *v1.Selector `json:"serviceAccountSelector,omitempty" tf:"-"` // Shielded Instance Config for clusters using Compute Engine Shielded VMs. ShieldedInstanceConfig *ShieldedInstanceConfigInitParameters `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // The name or self_link of the Google Compute Engine // subnetwork the cluster will be part of. Conflicts with network. Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The list of instance tags applied to instances in the cluster. // Tags are used to identify valid sources or targets for network firewalls. // +listType=set Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // The GCP zone where your data is stored and used (i.e. where // the master and the worker nodes will be created in). If region is set to 'global' (default) // then zone is mandatory, otherwise GCP is able to make use of Auto Zone Placement // to determine this automatically for you. // Note: This setting additionally determines and restricts // which computing resources are available for use with other configs such as // cluster_config.master_config.machine_type and cluster_config.worker_config.machine_type. Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*GceClusterConfigInitParameters) DeepCopy ¶
func (in *GceClusterConfigInitParameters) DeepCopy() *GceClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigInitParameters.
func (*GceClusterConfigInitParameters) DeepCopyInto ¶
func (in *GceClusterConfigInitParameters) DeepCopyInto(out *GceClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigNodeGroupAffinityInitParameters ¶
type GceClusterConfigNodeGroupAffinityInitParameters struct { // Required. The URI of a sole-tenant node group resource that the cluster will be created on. Accepted formats: `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`, `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`, or `node-group-1` NodeGroup *string `json:"nodeGroup,omitempty" tf:"node_group,omitempty"` }
func (*GceClusterConfigNodeGroupAffinityInitParameters) DeepCopy ¶
func (in *GceClusterConfigNodeGroupAffinityInitParameters) DeepCopy() *GceClusterConfigNodeGroupAffinityInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigNodeGroupAffinityInitParameters.
func (*GceClusterConfigNodeGroupAffinityInitParameters) DeepCopyInto ¶
func (in *GceClusterConfigNodeGroupAffinityInitParameters) DeepCopyInto(out *GceClusterConfigNodeGroupAffinityInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigNodeGroupAffinityObservation ¶
type GceClusterConfigNodeGroupAffinityObservation struct { // Required. The URI of a sole-tenant node group resource that the cluster will be created on. Accepted formats: `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`, `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`, or `node-group-1` NodeGroup *string `json:"nodeGroup,omitempty" tf:"node_group,omitempty"` }
func (*GceClusterConfigNodeGroupAffinityObservation) DeepCopy ¶
func (in *GceClusterConfigNodeGroupAffinityObservation) DeepCopy() *GceClusterConfigNodeGroupAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigNodeGroupAffinityObservation.
func (*GceClusterConfigNodeGroupAffinityObservation) DeepCopyInto ¶
func (in *GceClusterConfigNodeGroupAffinityObservation) DeepCopyInto(out *GceClusterConfigNodeGroupAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigNodeGroupAffinityParameters ¶
type GceClusterConfigNodeGroupAffinityParameters struct { // Required. The URI of a sole-tenant node group resource that the cluster will be created on. Accepted formats: `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`, `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`, or `node-group-1` // +kubebuilder:validation:Optional NodeGroup *string `json:"nodeGroup" tf:"node_group,omitempty"` }
func (*GceClusterConfigNodeGroupAffinityParameters) DeepCopy ¶
func (in *GceClusterConfigNodeGroupAffinityParameters) DeepCopy() *GceClusterConfigNodeGroupAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigNodeGroupAffinityParameters.
func (*GceClusterConfigNodeGroupAffinityParameters) DeepCopyInto ¶
func (in *GceClusterConfigNodeGroupAffinityParameters) DeepCopyInto(out *GceClusterConfigNodeGroupAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigObservation ¶
type GceClusterConfigObservation struct { // By default, clusters are not restricted to internal IP addresses, // and will have ephemeral external IP addresses assigned to each instance. If set to true, all // instances in the cluster will only have internal IP addresses. Note: Private Google Access // (also known as privateIpGoogleAccess) must be enabled on the subnetwork that the cluster // will be launched in. InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // A map of the Compute Engine metadata entries to add to all instances // (see Project and instance metadata). // +mapType=granular Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // The name or self_link of the Google Compute Engine // network to the cluster will be part of. Conflicts with subnetwork. // If neither is specified, this defaults to the "default" network. Network *string `json:"network,omitempty" tf:"network,omitempty"` // Node Group Affinity for sole-tenant clusters. NodeGroupAffinity *NodeGroupAffinityObservation `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Reservation Affinity for consuming zonal reservation. ReservationAffinity *ReservationAffinityObservation `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // The service account to be used by the Node VMs. // If not specified, the "default" service account is used. ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // The set of Google API scopes // to be made available on all of the node VMs under the service_account // specified. Both OAuth2 URLs and gcloud // short names are supported. To allow full access to all Cloud APIs, use the // cloud-platform scope. See a complete list of scopes here. 
// +listType=set ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Shielded Instance Config for clusters using Compute Engine Shielded VMs. ShieldedInstanceConfig *ShieldedInstanceConfigObservation `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // The name or self_link of the Google Compute Engine // subnetwork the cluster will be part of. Conflicts with network. Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The list of instance tags applied to instances in the cluster. // Tags are used to identify valid sources or targets for network firewalls. // +listType=set Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // The GCP zone where your data is stored and used (i.e. where // the master and the worker nodes will be created in). If region is set to 'global' (default) // then zone is mandatory, otherwise GCP is able to make use of Auto Zone Placement // to determine this automatically for you. // Note: This setting additionally determines and restricts // which computing resources are available for use with other configs such as // cluster_config.master_config.machine_type and cluster_config.worker_config.machine_type. Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*GceClusterConfigObservation) DeepCopy ¶
func (in *GceClusterConfigObservation) DeepCopy() *GceClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigObservation.
func (*GceClusterConfigObservation) DeepCopyInto ¶
func (in *GceClusterConfigObservation) DeepCopyInto(out *GceClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigParameters ¶
type GceClusterConfigParameters struct { // By default, clusters are not restricted to internal IP addresses, // and will have ephemeral external IP addresses assigned to each instance. If set to true, all // instances in the cluster will only have internal IP addresses. Note: Private Google Access // (also known as privateIpGoogleAccess) must be enabled on the subnetwork that the cluster // will be launched in. // +kubebuilder:validation:Optional InternalIPOnly *bool `json:"internalIpOnly,omitempty" tf:"internal_ip_only,omitempty"` // A map of the Compute Engine metadata entries to add to all instances // (see Project and instance metadata). // +kubebuilder:validation:Optional // +mapType=granular Metadata map[string]*string `json:"metadata,omitempty" tf:"metadata,omitempty"` // The name or self_link of the Google Compute Engine // network to the cluster will be part of. Conflicts with subnetwork. // If neither is specified, this defaults to the "default" network. // +kubebuilder:validation:Optional Network *string `json:"network,omitempty" tf:"network,omitempty"` // Node Group Affinity for sole-tenant clusters. // +kubebuilder:validation:Optional NodeGroupAffinity *NodeGroupAffinityParameters `json:"nodeGroupAffinity,omitempty" tf:"node_group_affinity,omitempty"` // Reservation Affinity for consuming zonal reservation. // +kubebuilder:validation:Optional ReservationAffinity *ReservationAffinityParameters `json:"reservationAffinity,omitempty" tf:"reservation_affinity,omitempty"` // The service account to be used by the Node VMs. // If not specified, the "default" service account is used. 
// +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/cloudplatform/v1beta1.ServiceAccount // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("email",true) // +kubebuilder:validation:Optional ServiceAccount *string `json:"serviceAccount,omitempty" tf:"service_account,omitempty"` // Reference to a ServiceAccount in cloudplatform to populate serviceAccount. // +kubebuilder:validation:Optional ServiceAccountRef *v1.Reference `json:"serviceAccountRef,omitempty" tf:"-"` // The set of Google API scopes // to be made available on all of the node VMs under the service_account // specified. Both OAuth2 URLs and gcloud // short names are supported. To allow full access to all Cloud APIs, use the // cloud-platform scope. See a complete list of scopes here. // +kubebuilder:validation:Optional // +listType=set ServiceAccountScopes []*string `json:"serviceAccountScopes,omitempty" tf:"service_account_scopes,omitempty"` // Selector for a ServiceAccount in cloudplatform to populate serviceAccount. // +kubebuilder:validation:Optional ServiceAccountSelector *v1.Selector `json:"serviceAccountSelector,omitempty" tf:"-"` // Shielded Instance Config for clusters using Compute Engine Shielded VMs. // +kubebuilder:validation:Optional ShieldedInstanceConfig *ShieldedInstanceConfigParameters `json:"shieldedInstanceConfig,omitempty" tf:"shielded_instance_config,omitempty"` // The name or self_link of the Google Compute Engine // subnetwork the cluster will be part of. Conflicts with network. // +kubebuilder:validation:Optional Subnetwork *string `json:"subnetwork,omitempty" tf:"subnetwork,omitempty"` // The list of instance tags applied to instances in the cluster. // Tags are used to identify valid sources or targets for network firewalls. // +kubebuilder:validation:Optional // +listType=set Tags []*string `json:"tags,omitempty" tf:"tags,omitempty"` // The GCP zone where your data is stored and used (i.e. 
where // the master and the worker nodes will be created in). If region is set to 'global' (default) // then zone is mandatory, otherwise GCP is able to make use of Auto Zone Placement // to determine this automatically for you. // Note: This setting additionally determines and restricts // which computing resources are available for use with other configs such as // cluster_config.master_config.machine_type and cluster_config.worker_config.machine_type. // +kubebuilder:validation:Optional Zone *string `json:"zone,omitempty" tf:"zone,omitempty"` }
func (*GceClusterConfigParameters) DeepCopy ¶
func (in *GceClusterConfigParameters) DeepCopy() *GceClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigParameters.
func (*GceClusterConfigParameters) DeepCopyInto ¶
func (in *GceClusterConfigParameters) DeepCopyInto(out *GceClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigReservationAffinityInitParameters ¶
type GceClusterConfigReservationAffinityInitParameters struct { // Type of reservation to consume Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. Key *string `json:"key,omitempty" tf:"key,omitempty"` // Required. List of allowed values for the parameter. Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*GceClusterConfigReservationAffinityInitParameters) DeepCopy ¶
func (in *GceClusterConfigReservationAffinityInitParameters) DeepCopy() *GceClusterConfigReservationAffinityInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigReservationAffinityInitParameters.
func (*GceClusterConfigReservationAffinityInitParameters) DeepCopyInto ¶
func (in *GceClusterConfigReservationAffinityInitParameters) DeepCopyInto(out *GceClusterConfigReservationAffinityInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigReservationAffinityObservation ¶
type GceClusterConfigReservationAffinityObservation struct { // Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. Key *string `json:"key,omitempty" tf:"key,omitempty"` // Required. List of allowed values for the parameter. Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*GceClusterConfigReservationAffinityObservation) DeepCopy ¶
func (in *GceClusterConfigReservationAffinityObservation) DeepCopy() *GceClusterConfigReservationAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigReservationAffinityObservation.
func (*GceClusterConfigReservationAffinityObservation) DeepCopyInto ¶
func (in *GceClusterConfigReservationAffinityObservation) DeepCopyInto(out *GceClusterConfigReservationAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigReservationAffinityParameters ¶
type GceClusterConfigReservationAffinityParameters struct { // Type of reservation to consume. Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION // +kubebuilder:validation:Optional ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. // +kubebuilder:validation:Optional Key *string `json:"key,omitempty" tf:"key,omitempty"` // Required. List of allowed values for the parameter. // +kubebuilder:validation:Optional Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*GceClusterConfigReservationAffinityParameters) DeepCopy ¶
func (in *GceClusterConfigReservationAffinityParameters) DeepCopy() *GceClusterConfigReservationAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigReservationAffinityParameters.
func (*GceClusterConfigReservationAffinityParameters) DeepCopyInto ¶
func (in *GceClusterConfigReservationAffinityParameters) DeepCopyInto(out *GceClusterConfigReservationAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigShieldedInstanceConfigInitParameters ¶
type GceClusterConfigShieldedInstanceConfigInitParameters struct { // Defines whether instances have Integrity Monitoring enabled. EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"` // Defines whether instances have Secure Boot enabled. EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"` // Defines whether instances have the vTPM enabled. EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"` }
func (*GceClusterConfigShieldedInstanceConfigInitParameters) DeepCopy ¶
func (in *GceClusterConfigShieldedInstanceConfigInitParameters) DeepCopy() *GceClusterConfigShieldedInstanceConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigShieldedInstanceConfigInitParameters.
func (*GceClusterConfigShieldedInstanceConfigInitParameters) DeepCopyInto ¶
func (in *GceClusterConfigShieldedInstanceConfigInitParameters) DeepCopyInto(out *GceClusterConfigShieldedInstanceConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigShieldedInstanceConfigObservation ¶
type GceClusterConfigShieldedInstanceConfigObservation struct { // Defines whether instances have Integrity Monitoring enabled. EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"` // Defines whether instances have Secure Boot enabled. EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"` // Defines whether instances have the vTPM enabled. EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"` }
func (*GceClusterConfigShieldedInstanceConfigObservation) DeepCopy ¶
func (in *GceClusterConfigShieldedInstanceConfigObservation) DeepCopy() *GceClusterConfigShieldedInstanceConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigShieldedInstanceConfigObservation.
func (*GceClusterConfigShieldedInstanceConfigObservation) DeepCopyInto ¶
func (in *GceClusterConfigShieldedInstanceConfigObservation) DeepCopyInto(out *GceClusterConfigShieldedInstanceConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GceClusterConfigShieldedInstanceConfigParameters ¶
type GceClusterConfigShieldedInstanceConfigParameters struct { // Defines whether instances have Integrity Monitoring enabled. // +kubebuilder:validation:Optional EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"` // Defines whether instances have Secure Boot enabled. // +kubebuilder:validation:Optional EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"` // Defines whether instances have the vTPM enabled. // +kubebuilder:validation:Optional EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"` }
func (*GceClusterConfigShieldedInstanceConfigParameters) DeepCopy ¶
func (in *GceClusterConfigShieldedInstanceConfigParameters) DeepCopy() *GceClusterConfigShieldedInstanceConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GceClusterConfigShieldedInstanceConfigParameters.
func (*GceClusterConfigShieldedInstanceConfigParameters) DeepCopyInto ¶
func (in *GceClusterConfigShieldedInstanceConfigParameters) DeepCopyInto(out *GceClusterConfigShieldedInstanceConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GkeClusterConfigInitParameters ¶
type GkeClusterConfigInitParameters struct { // A target GKE cluster to deploy to. It must be in the same project and region as the Dataproc cluster // (the GKE cluster can be zonal or regional) GkeClusterTarget *string `json:"gkeClusterTarget,omitempty" tf:"gke_cluster_target,omitempty"` // GKE node pools where workloads will be scheduled. At least one node pool must be assigned the DEFAULT // GkeNodePoolTarget.Role. If a GkeNodePoolTarget is not specified, Dataproc constructs a DEFAULT GkeNodePoolTarget. // Each role can be given to only one GkeNodePoolTarget. All node pools must have the same location settings. NodePoolTarget []NodePoolTargetInitParameters `json:"nodePoolTarget,omitempty" tf:"node_pool_target,omitempty"` }
func (*GkeClusterConfigInitParameters) DeepCopy ¶
func (in *GkeClusterConfigInitParameters) DeepCopy() *GkeClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GkeClusterConfigInitParameters.
func (*GkeClusterConfigInitParameters) DeepCopyInto ¶
func (in *GkeClusterConfigInitParameters) DeepCopyInto(out *GkeClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GkeClusterConfigObservation ¶
type GkeClusterConfigObservation struct { // A target GKE cluster to deploy to. It must be in the same project and region as the Dataproc cluster // (the GKE cluster can be zonal or regional) GkeClusterTarget *string `json:"gkeClusterTarget,omitempty" tf:"gke_cluster_target,omitempty"` // GKE node pools where workloads will be scheduled. At least one node pool must be assigned the DEFAULT // GkeNodePoolTarget.Role. If a GkeNodePoolTarget is not specified, Dataproc constructs a DEFAULT GkeNodePoolTarget. // Each role can be given to only one GkeNodePoolTarget. All node pools must have the same location settings. NodePoolTarget []NodePoolTargetObservation `json:"nodePoolTarget,omitempty" tf:"node_pool_target,omitempty"` }
func (*GkeClusterConfigObservation) DeepCopy ¶
func (in *GkeClusterConfigObservation) DeepCopy() *GkeClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GkeClusterConfigObservation.
func (*GkeClusterConfigObservation) DeepCopyInto ¶
func (in *GkeClusterConfigObservation) DeepCopyInto(out *GkeClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GkeClusterConfigParameters ¶
type GkeClusterConfigParameters struct { // A target GKE cluster to deploy to. It must be in the same project and region as the Dataproc cluster // (the GKE cluster can be zonal or regional) // +kubebuilder:validation:Optional GkeClusterTarget *string `json:"gkeClusterTarget,omitempty" tf:"gke_cluster_target,omitempty"` // GKE node pools where workloads will be scheduled. At least one node pool must be assigned the DEFAULT // GkeNodePoolTarget.Role. If a GkeNodePoolTarget is not specified, Dataproc constructs a DEFAULT GkeNodePoolTarget. // Each role can be given to only one GkeNodePoolTarget. All node pools must have the same location settings. // +kubebuilder:validation:Optional NodePoolTarget []NodePoolTargetParameters `json:"nodePoolTarget,omitempty" tf:"node_pool_target,omitempty"` }
func (*GkeClusterConfigParameters) DeepCopy ¶
func (in *GkeClusterConfigParameters) DeepCopy() *GkeClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GkeClusterConfigParameters.
func (*GkeClusterConfigParameters) DeepCopyInto ¶
func (in *GkeClusterConfigParameters) DeepCopyInto(out *GkeClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopConfigInitParameters ¶
type HadoopConfigInitParameters struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *LoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopConfigInitParameters) DeepCopy ¶
func (in *HadoopConfigInitParameters) DeepCopy() *HadoopConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopConfigInitParameters.
func (*HadoopConfigInitParameters) DeepCopyInto ¶
func (in *HadoopConfigInitParameters) DeepCopyInto(out *HadoopConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopConfigObservation ¶
type HadoopConfigObservation struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *LoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopConfigObservation) DeepCopy ¶
func (in *HadoopConfigObservation) DeepCopy() *HadoopConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopConfigObservation.
func (*HadoopConfigObservation) DeepCopyInto ¶
func (in *HadoopConfigObservation) DeepCopyInto(out *HadoopConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopConfigParameters ¶
type HadoopConfigParameters struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig *LoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri // +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class // +kubebuilder:validation:Optional MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. 
Can include properties set in /etc/hadoop/conf/*-site and classes in user code. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopConfigParameters) DeepCopy ¶
func (in *HadoopConfigParameters) DeepCopy() *HadoopConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopConfigParameters.
func (*HadoopConfigParameters) DeepCopyInto ¶
func (in *HadoopConfigParameters) DeepCopyInto(out *HadoopConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobInitParameters ¶
type HadoopJobInitParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *HadoopJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopJobInitParameters) DeepCopy ¶
func (in *HadoopJobInitParameters) DeepCopy() *HadoopJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobInitParameters.
func (*HadoopJobInitParameters) DeepCopyInto ¶
func (in *HadoopJobInitParameters) DeepCopyInto(out *HadoopJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobLoggingConfigInitParameters ¶
type HadoopJobLoggingConfigInitParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*HadoopJobLoggingConfigInitParameters) DeepCopy ¶
func (in *HadoopJobLoggingConfigInitParameters) DeepCopy() *HadoopJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobLoggingConfigInitParameters.
func (*HadoopJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *HadoopJobLoggingConfigInitParameters) DeepCopyInto(out *HadoopJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobLoggingConfigObservation ¶
type HadoopJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*HadoopJobLoggingConfigObservation) DeepCopy ¶
func (in *HadoopJobLoggingConfigObservation) DeepCopy() *HadoopJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobLoggingConfigObservation.
func (*HadoopJobLoggingConfigObservation) DeepCopyInto ¶
func (in *HadoopJobLoggingConfigObservation) DeepCopyInto(out *HadoopJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobLoggingConfigParameters ¶
type HadoopJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*HadoopJobLoggingConfigParameters) DeepCopy ¶
func (in *HadoopJobLoggingConfigParameters) DeepCopy() *HadoopJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobLoggingConfigParameters.
func (*HadoopJobLoggingConfigParameters) DeepCopyInto ¶
func (in *HadoopJobLoggingConfigParameters) DeepCopyInto(out *HadoopJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobObservation ¶
type HadoopJobObservation struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *HadoopJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopJobObservation) DeepCopy ¶
func (in *HadoopJobObservation) DeepCopy() *HadoopJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobObservation.
func (*HadoopJobObservation) DeepCopyInto ¶
func (in *HadoopJobObservation) DeepCopyInto(out *HadoopJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HadoopJobParameters ¶
type HadoopJobParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *HadoopJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. // +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. // +kubebuilder:validation:Optional MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*HadoopJobParameters) DeepCopy ¶
func (in *HadoopJobParameters) DeepCopy() *HadoopJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HadoopJobParameters.
func (*HadoopJobParameters) DeepCopyInto ¶
func (in *HadoopJobParameters) DeepCopyInto(out *HadoopJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveConfigInitParameters ¶
type HiveConfigInitParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Hive script to execute as the job. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Hive queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveConfigInitParameters) DeepCopy ¶
func (in *HiveConfigInitParameters) DeepCopy() *HiveConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveConfigInitParameters.
func (*HiveConfigInitParameters) DeepCopyInto ¶
func (in *HiveConfigInitParameters) DeepCopyInto(out *HiveConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveConfigObservation ¶
type HiveConfigObservation struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Hive script to execute as the job. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Hive queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveConfigObservation) DeepCopy ¶
func (in *HiveConfigObservation) DeepCopy() *HiveConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveConfigObservation.
func (*HiveConfigObservation) DeepCopyInto ¶
func (in *HiveConfigObservation) DeepCopyInto(out *HiveConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveConfigParameters ¶
type HiveConfigParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Hive script to execute as the job. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Hive queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";). // +kubebuilder:validation:Optional // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveConfigParameters) DeepCopy ¶
func (in *HiveConfigParameters) DeepCopy() *HiveConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveConfigParameters.
func (*HiveConfigParameters) DeepCopyInto ¶
func (in *HiveConfigParameters) DeepCopyInto(out *HiveConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveJobInitParameters ¶
type HiveJobInitParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *QueryListInitParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveJobInitParameters) DeepCopy ¶
func (in *HiveJobInitParameters) DeepCopy() *HiveJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveJobInitParameters.
func (*HiveJobInitParameters) DeepCopyInto ¶
func (in *HiveJobInitParameters) DeepCopyInto(out *HiveJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveJobObservation ¶
type HiveJobObservation struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *QueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveJobObservation) DeepCopy ¶
func (in *HiveJobObservation) DeepCopy() *HiveJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveJobObservation.
func (*HiveJobObservation) DeepCopyInto ¶
func (in *HiveJobObservation) DeepCopyInto(out *HiveJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveJobParameters ¶
type HiveJobParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList *QueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +kubebuilder:validation:Optional // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*HiveJobParameters) DeepCopy ¶
func (in *HiveJobParameters) DeepCopy() *HiveJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveJobParameters.
func (*HiveJobParameters) DeepCopyInto ¶
func (in *HiveJobParameters) DeepCopyInto(out *HiveJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigInitParameters ¶
type HiveMetastoreConfigInitParameters struct { // A mapping of Hive metastore version to the auxiliary version configuration. // When specified, a secondary Hive metastore service is created along with the primary service. // All auxiliary versions must be less than the service's primary version. // The key is the auxiliary service name and it must match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?. // This means that the first character must be a lowercase letter, and all the following characters must be hyphens, lowercase letters, or digits, except the last character, which cannot be a hyphen. // Structure is documented below. AuxiliaryVersions []AuxiliaryVersionsInitParameters `json:"auxiliaryVersions,omitempty" tf:"auxiliary_versions,omitempty"` // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +mapType=granular ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // The protocol to use for the metastore service endpoint. If unspecified, defaults to THRIFT. // Default value is THRIFT. // Possible values are: THRIFT, GRPC. EndpointProtocol *string `json:"endpointProtocol,omitempty" tf:"endpoint_protocol,omitempty"` // Information used to configure the Hive metastore service as a service principal in a Kerberos realm. // Structure is documented below. KerberosConfig *HiveMetastoreConfigKerberosConfigInitParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` // The Hive metastore schema version. Version *string `json:"version,omitempty" tf:"version,omitempty"` }
func (*HiveMetastoreConfigInitParameters) DeepCopy ¶
func (in *HiveMetastoreConfigInitParameters) DeepCopy() *HiveMetastoreConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigInitParameters.
func (*HiveMetastoreConfigInitParameters) DeepCopyInto ¶
func (in *HiveMetastoreConfigInitParameters) DeepCopyInto(out *HiveMetastoreConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigKerberosConfigInitParameters ¶
type HiveMetastoreConfigKerberosConfigInitParameters struct { // A Kerberos keytab file that can be used to authenticate a service principal with a Kerberos Key Distribution Center (KDC). // Structure is documented below. Keytab *KeytabInitParameters `json:"keytab,omitempty" tf:"keytab,omitempty"` // A Cloud Storage URI that specifies the path to a krb5.conf file. It is of the form gs://{bucket_name}/path/to/krb5.conf, although the file does not need to be named krb5.conf explicitly. Krb5ConfigGcsURI *string `json:"krb5ConfigGcsUri,omitempty" tf:"krb5_config_gcs_uri,omitempty"` // A Kerberos principal that exists in both the keytab and the KDC to authenticate as. A typical principal is of the form "primary/instance@REALM", but there is no exact format. Principal *string `json:"principal,omitempty" tf:"principal,omitempty"` }
func (*HiveMetastoreConfigKerberosConfigInitParameters) DeepCopy ¶
func (in *HiveMetastoreConfigKerberosConfigInitParameters) DeepCopy() *HiveMetastoreConfigKerberosConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigKerberosConfigInitParameters.
func (*HiveMetastoreConfigKerberosConfigInitParameters) DeepCopyInto ¶
func (in *HiveMetastoreConfigKerberosConfigInitParameters) DeepCopyInto(out *HiveMetastoreConfigKerberosConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigKerberosConfigObservation ¶
type HiveMetastoreConfigKerberosConfigObservation struct { // A Kerberos keytab file that can be used to authenticate a service principal with a Kerberos Key Distribution Center (KDC). // Structure is documented below. Keytab *KeytabObservation `json:"keytab,omitempty" tf:"keytab,omitempty"` // A Cloud Storage URI that specifies the path to a krb5.conf file. It is of the form gs://{bucket_name}/path/to/krb5.conf, although the file does not need to be named krb5.conf explicitly. Krb5ConfigGcsURI *string `json:"krb5ConfigGcsUri,omitempty" tf:"krb5_config_gcs_uri,omitempty"` // A Kerberos principal that exists in both the keytab and the KDC to authenticate as. A typical principal is of the form "primary/instance@REALM", but there is no exact format. Principal *string `json:"principal,omitempty" tf:"principal,omitempty"` }
func (*HiveMetastoreConfigKerberosConfigObservation) DeepCopy ¶
func (in *HiveMetastoreConfigKerberosConfigObservation) DeepCopy() *HiveMetastoreConfigKerberosConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigKerberosConfigObservation.
func (*HiveMetastoreConfigKerberosConfigObservation) DeepCopyInto ¶
func (in *HiveMetastoreConfigKerberosConfigObservation) DeepCopyInto(out *HiveMetastoreConfigKerberosConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigKerberosConfigParameters ¶
type HiveMetastoreConfigKerberosConfigParameters struct { // A Kerberos keytab file that can be used to authenticate a service principal with a Kerberos Key Distribution Center (KDC). // Structure is documented below. // +kubebuilder:validation:Optional Keytab *KeytabParameters `json:"keytab" tf:"keytab,omitempty"` // A Cloud Storage URI that specifies the path to a krb5.conf file. It is of the form gs://{bucket_name}/path/to/krb5.conf, although the file does not need to be named krb5.conf explicitly. // +kubebuilder:validation:Optional Krb5ConfigGcsURI *string `json:"krb5ConfigGcsUri" tf:"krb5_config_gcs_uri,omitempty"` // A Kerberos principal that exists in both the keytab and the KDC to authenticate as. A typical principal is of the form "primary/instance@REALM", but there is no exact format. // +kubebuilder:validation:Optional Principal *string `json:"principal" tf:"principal,omitempty"` }
func (*HiveMetastoreConfigKerberosConfigParameters) DeepCopy ¶
func (in *HiveMetastoreConfigKerberosConfigParameters) DeepCopy() *HiveMetastoreConfigKerberosConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigKerberosConfigParameters.
func (*HiveMetastoreConfigKerberosConfigParameters) DeepCopyInto ¶
func (in *HiveMetastoreConfigKerberosConfigParameters) DeepCopyInto(out *HiveMetastoreConfigKerberosConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigObservation ¶
type HiveMetastoreConfigObservation struct { // A mapping of Hive metastore version to the auxiliary version configuration. // When specified, a secondary Hive metastore service is created along with the primary service. // All auxiliary versions must be less than the service's primary version. // The key is the auxiliary service name and it must match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?. // This means that the first character must be a lowercase letter, and all the following characters must be hyphens, lowercase letters, or digits, except the last character, which cannot be a hyphen. // Structure is documented below. AuxiliaryVersions []AuxiliaryVersionsObservation `json:"auxiliaryVersions,omitempty" tf:"auxiliary_versions,omitempty"` // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +mapType=granular ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // The protocol to use for the metastore service endpoint. If unspecified, defaults to THRIFT. // Default value is THRIFT. // Possible values are: THRIFT, GRPC. EndpointProtocol *string `json:"endpointProtocol,omitempty" tf:"endpoint_protocol,omitempty"` // Information used to configure the Hive metastore service as a service principal in a Kerberos realm. // Structure is documented below. KerberosConfig *HiveMetastoreConfigKerberosConfigObservation `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` // The Hive metastore schema version. Version *string `json:"version,omitempty" tf:"version,omitempty"` }
func (*HiveMetastoreConfigObservation) DeepCopy ¶
func (in *HiveMetastoreConfigObservation) DeepCopy() *HiveMetastoreConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigObservation.
func (*HiveMetastoreConfigObservation) DeepCopyInto ¶
func (in *HiveMetastoreConfigObservation) DeepCopyInto(out *HiveMetastoreConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type HiveMetastoreConfigParameters ¶
type HiveMetastoreConfigParameters struct { // A mapping of Hive metastore version to the auxiliary version configuration. // When specified, a secondary Hive metastore service is created along with the primary service. // All auxiliary versions must be less than the service's primary version. // The key is the auxiliary service name and it must match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?. // This means that the first character must be a lowercase letter, and all the following characters must be hyphens, lowercase letters, or digits, except the last character, which cannot be a hyphen. // Structure is documented below. // +kubebuilder:validation:Optional AuxiliaryVersions []AuxiliaryVersionsParameters `json:"auxiliaryVersions,omitempty" tf:"auxiliary_versions,omitempty"` // A mapping of Hive metastore configuration key-value pairs to apply to the Hive metastore (configured in hive-site.xml). // The mappings override system defaults (some keys cannot be overridden) // +kubebuilder:validation:Optional // +mapType=granular ConfigOverrides map[string]*string `json:"configOverrides,omitempty" tf:"config_overrides,omitempty"` // The protocol to use for the metastore service endpoint. If unspecified, defaults to THRIFT. // Default value is THRIFT. // Possible values are: THRIFT, GRPC. // +kubebuilder:validation:Optional EndpointProtocol *string `json:"endpointProtocol,omitempty" tf:"endpoint_protocol,omitempty"` // Information used to configure the Hive metastore service as a service principal in a Kerberos realm. // Structure is documented below. // +kubebuilder:validation:Optional KerberosConfig *HiveMetastoreConfigKerberosConfigParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` // The Hive metastore schema version. // +kubebuilder:validation:Optional Version *string `json:"version" tf:"version,omitempty"` }
func (*HiveMetastoreConfigParameters) DeepCopy ¶
func (in *HiveMetastoreConfigParameters) DeepCopy() *HiveMetastoreConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HiveMetastoreConfigParameters.
func (*HiveMetastoreConfigParameters) DeepCopyInto ¶
func (in *HiveMetastoreConfigParameters) DeepCopyInto(out *HiveMetastoreConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionInitParameters ¶
type InitializationActionInitParameters struct { // The script to be executed during initialization of the cluster. // The script must be a GCS file with a gs:// prefix. Script *string `json:"script,omitempty" tf:"script,omitempty"` // The maximum duration (in seconds) which script is // allowed to take to execute its action. GCP will default to a predetermined // computed value if not set (currently 300). TimeoutSec *float64 `json:"timeoutSec,omitempty" tf:"timeout_sec,omitempty"` }
func (*InitializationActionInitParameters) DeepCopy ¶
func (in *InitializationActionInitParameters) DeepCopy() *InitializationActionInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionInitParameters.
func (*InitializationActionInitParameters) DeepCopyInto ¶
func (in *InitializationActionInitParameters) DeepCopyInto(out *InitializationActionInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionObservation ¶
type InitializationActionObservation struct { // The script to be executed during initialization of the cluster. // The script must be a GCS file with a gs:// prefix. Script *string `json:"script,omitempty" tf:"script,omitempty"` // The maximum duration (in seconds) which script is // allowed to take to execute its action. GCP will default to a predetermined // computed value if not set (currently 300). TimeoutSec *float64 `json:"timeoutSec,omitempty" tf:"timeout_sec,omitempty"` }
func (*InitializationActionObservation) DeepCopy ¶
func (in *InitializationActionObservation) DeepCopy() *InitializationActionObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionObservation.
func (*InitializationActionObservation) DeepCopyInto ¶
func (in *InitializationActionObservation) DeepCopyInto(out *InitializationActionObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionParameters ¶
type InitializationActionParameters struct { // The script to be executed during initialization of the cluster. // The script must be a GCS file with a gs:// prefix. // +kubebuilder:validation:Optional Script *string `json:"script" tf:"script,omitempty"` // The maximum duration (in seconds) which script is // allowed to take to execute its action. GCP will default to a predetermined // computed value if not set (currently 300). // +kubebuilder:validation:Optional TimeoutSec *float64 `json:"timeoutSec,omitempty" tf:"timeout_sec,omitempty"` }
func (*InitializationActionParameters) DeepCopy ¶
func (in *InitializationActionParameters) DeepCopy() *InitializationActionParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionParameters.
func (*InitializationActionParameters) DeepCopyInto ¶
func (in *InitializationActionParameters) DeepCopyInto(out *InitializationActionParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionsInitParameters ¶
type InitializationActionsInitParameters struct { // Required. Cloud Storage URI of executable file. ExecutableFile *string `json:"executableFile,omitempty" tf:"executable_file,omitempty"` // Amount of time executable has to complete. Default is 10 minutes (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period. ExecutionTimeout *string `json:"executionTimeout,omitempty" tf:"execution_timeout,omitempty"` }
func (*InitializationActionsInitParameters) DeepCopy ¶
func (in *InitializationActionsInitParameters) DeepCopy() *InitializationActionsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionsInitParameters.
func (*InitializationActionsInitParameters) DeepCopyInto ¶
func (in *InitializationActionsInitParameters) DeepCopyInto(out *InitializationActionsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionsObservation ¶
type InitializationActionsObservation struct { // Required. Cloud Storage URI of executable file. ExecutableFile *string `json:"executableFile,omitempty" tf:"executable_file,omitempty"` // Amount of time executable has to complete. Default is 10 minutes (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period. ExecutionTimeout *string `json:"executionTimeout,omitempty" tf:"execution_timeout,omitempty"` }
func (*InitializationActionsObservation) DeepCopy ¶
func (in *InitializationActionsObservation) DeepCopy() *InitializationActionsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionsObservation.
func (*InitializationActionsObservation) DeepCopyInto ¶
func (in *InitializationActionsObservation) DeepCopyInto(out *InitializationActionsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InitializationActionsParameters ¶
type InitializationActionsParameters struct { // Required. Cloud Storage URI of executable file. // +kubebuilder:validation:Optional ExecutableFile *string `json:"executableFile,omitempty" tf:"executable_file,omitempty"` // Amount of time executable has to complete. Default is 10 minutes (see JSON representation of (https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period. // +kubebuilder:validation:Optional ExecutionTimeout *string `json:"executionTimeout,omitempty" tf:"execution_timeout,omitempty"` }
func (*InitializationActionsParameters) DeepCopy ¶
func (in *InitializationActionsParameters) DeepCopy() *InitializationActionsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InitializationActionsParameters.
func (*InitializationActionsParameters) DeepCopyInto ¶
func (in *InitializationActionsParameters) DeepCopyInto(out *InitializationActionsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceFlexibilityPolicyInitParameters ¶
type InstanceFlexibilityPolicyInitParameters struct { // List of instance selection options that the group will use when creating new VMs. InstanceSelectionList []InstanceSelectionListInitParameters `json:"instanceSelectionList,omitempty" tf:"instance_selection_list,omitempty"` }
func (*InstanceFlexibilityPolicyInitParameters) DeepCopy ¶
func (in *InstanceFlexibilityPolicyInitParameters) DeepCopy() *InstanceFlexibilityPolicyInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceFlexibilityPolicyInitParameters.
func (*InstanceFlexibilityPolicyInitParameters) DeepCopyInto ¶
func (in *InstanceFlexibilityPolicyInitParameters) DeepCopyInto(out *InstanceFlexibilityPolicyInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceFlexibilityPolicyObservation ¶
type InstanceFlexibilityPolicyObservation struct { // List of instance selection options that the group will use when creating new VMs. InstanceSelectionList []InstanceSelectionListObservation `json:"instanceSelectionList,omitempty" tf:"instance_selection_list,omitempty"` InstanceSelectionResults []InstanceSelectionResultsObservation `json:"instanceSelectionResults,omitempty" tf:"instance_selection_results,omitempty"` }
func (*InstanceFlexibilityPolicyObservation) DeepCopy ¶
func (in *InstanceFlexibilityPolicyObservation) DeepCopy() *InstanceFlexibilityPolicyObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceFlexibilityPolicyObservation.
func (*InstanceFlexibilityPolicyObservation) DeepCopyInto ¶
func (in *InstanceFlexibilityPolicyObservation) DeepCopyInto(out *InstanceFlexibilityPolicyObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceFlexibilityPolicyParameters ¶
type InstanceFlexibilityPolicyParameters struct { // List of instance selection options that the group will use when creating new VMs. // +kubebuilder:validation:Optional InstanceSelectionList []InstanceSelectionListParameters `json:"instanceSelectionList,omitempty" tf:"instance_selection_list,omitempty"` }
func (*InstanceFlexibilityPolicyParameters) DeepCopy ¶
func (in *InstanceFlexibilityPolicyParameters) DeepCopy() *InstanceFlexibilityPolicyParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceFlexibilityPolicyParameters.
func (*InstanceFlexibilityPolicyParameters) DeepCopyInto ¶
func (in *InstanceFlexibilityPolicyParameters) DeepCopyInto(out *InstanceFlexibilityPolicyParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceSelectionListInitParameters ¶
type InstanceSelectionListInitParameters struct { // Full machine-type names, e.g. "n1-standard-16". MachineTypes []*string `json:"machineTypes,omitempty" tf:"machine_types,omitempty"` // Preference of this instance selection. A lower number means higher preference. Dataproc will first try to create a VM based on the machine-type with priority rank and fallback to next rank based on availability. Machine types and instance selections with the same priority have the same preference. Rank *float64 `json:"rank,omitempty" tf:"rank,omitempty"` }
func (*InstanceSelectionListInitParameters) DeepCopy ¶
func (in *InstanceSelectionListInitParameters) DeepCopy() *InstanceSelectionListInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceSelectionListInitParameters.
func (*InstanceSelectionListInitParameters) DeepCopyInto ¶
func (in *InstanceSelectionListInitParameters) DeepCopyInto(out *InstanceSelectionListInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceSelectionListObservation ¶
type InstanceSelectionListObservation struct { // Full machine-type names, e.g. "n1-standard-16". MachineTypes []*string `json:"machineTypes,omitempty" tf:"machine_types,omitempty"` // Preference of this instance selection. A lower number means higher preference. Dataproc will first try to create a VM based on the machine-type with priority rank and fallback to next rank based on availability. Machine types and instance selections with the same priority have the same preference. Rank *float64 `json:"rank,omitempty" tf:"rank,omitempty"` }
func (*InstanceSelectionListObservation) DeepCopy ¶
func (in *InstanceSelectionListObservation) DeepCopy() *InstanceSelectionListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceSelectionListObservation.
func (*InstanceSelectionListObservation) DeepCopyInto ¶
func (in *InstanceSelectionListObservation) DeepCopyInto(out *InstanceSelectionListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceSelectionListParameters ¶
type InstanceSelectionListParameters struct { // Full machine-type names, e.g. "n1-standard-16". // +kubebuilder:validation:Optional MachineTypes []*string `json:"machineTypes,omitempty" tf:"machine_types,omitempty"` // Preference of this instance selection. A lower number means higher preference. Dataproc will first try to create a VM based on the machine-type with priority rank and fallback to next rank based on availability. Machine types and instance selections with the same priority have the same preference. // +kubebuilder:validation:Optional Rank *float64 `json:"rank,omitempty" tf:"rank,omitempty"` }
func (*InstanceSelectionListParameters) DeepCopy ¶
func (in *InstanceSelectionListParameters) DeepCopy() *InstanceSelectionListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceSelectionListParameters.
func (*InstanceSelectionListParameters) DeepCopyInto ¶
func (in *InstanceSelectionListParameters) DeepCopyInto(out *InstanceSelectionListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceSelectionResultsInitParameters ¶
type InstanceSelectionResultsInitParameters struct { }
func (*InstanceSelectionResultsInitParameters) DeepCopy ¶
func (in *InstanceSelectionResultsInitParameters) DeepCopy() *InstanceSelectionResultsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceSelectionResultsInitParameters.
func (*InstanceSelectionResultsInitParameters) DeepCopyInto ¶
func (in *InstanceSelectionResultsInitParameters) DeepCopyInto(out *InstanceSelectionResultsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceSelectionResultsObservation ¶
type InstanceSelectionResultsObservation struct { // The name of a Compute Engine machine type. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` VMCount *float64 `json:"vmCount,omitempty" tf:"vm_count,omitempty"` }
func (*InstanceSelectionResultsObservation) DeepCopy ¶
func (in *InstanceSelectionResultsObservation) DeepCopy() *InstanceSelectionResultsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceSelectionResultsObservation.
func (*InstanceSelectionResultsObservation) DeepCopyInto ¶
func (in *InstanceSelectionResultsObservation) DeepCopyInto(out *InstanceSelectionResultsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type InstanceSelectionResultsParameters ¶
type InstanceSelectionResultsParameters struct { }
func (*InstanceSelectionResultsParameters) DeepCopy ¶
func (in *InstanceSelectionResultsParameters) DeepCopy() *InstanceSelectionResultsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InstanceSelectionResultsParameters.
func (*InstanceSelectionResultsParameters) DeepCopyInto ¶
func (in *InstanceSelectionResultsParameters) DeepCopyInto(out *InstanceSelectionResultsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Job ¶
type Job struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.placement) || (has(self.initProvider) && has(self.initProvider.placement))",message="spec.forProvider.placement is a required parameter" Spec JobSpec `json:"spec"` Status JobStatus `json:"status,omitempty"` }
Job is the Schema for the Jobs API. Manages a job resource within a Dataproc cluster. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*Job) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job.
func (*Job) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Job) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*Job) GetCondition ¶
func (mg *Job) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this Job.
func (*Job) GetConnectionDetailsMapping ¶
GetConnectionDetailsMapping for this Job.
func (*Job) GetDeletionPolicy ¶
func (mg *Job) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this Job.
func (*Job) GetInitParameters ¶
GetInitParameters of this Job.
func (*Job) GetManagementPolicies ¶
func (mg *Job) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this Job.
func (*Job) GetMergedParameters ¶
GetMergedParameters of this Job.
func (*Job) GetObservation ¶
GetObservation of this Job.
func (*Job) GetParameters ¶
GetParameters of this Job.
func (*Job) GetProviderConfigReference ¶
GetProviderConfigReference of this Job.
func (*Job) GetPublishConnectionDetailsTo ¶
func (mg *Job) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this Job.
func (*Job) GetTerraformResourceType ¶
GetTerraformResourceType returns the Terraform resource type for this Job.
func (*Job) GetTerraformSchemaVersion ¶
GetTerraformSchemaVersion returns the associated Terraform schema version.
func (*Job) GetWriteConnectionSecretToReference ¶
func (mg *Job) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this Job.
func (*Job) LateInitialize ¶
LateInitialize this Job using its observed tfState. Returns true if there are any spec changes for the resource.
func (*Job) ResolveReferences ¶
ResolveReferences of this Job.
func (*Job) SetConditions ¶
SetConditions of this Job.
func (*Job) SetDeletionPolicy ¶
func (mg *Job) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this Job.
func (*Job) SetManagementPolicies ¶
func (mg *Job) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this Job.
func (*Job) SetObservation ¶
SetObservation for this Job.
func (*Job) SetParameters ¶
SetParameters for this Job.
func (*Job) SetProviderConfigReference ¶
SetProviderConfigReference of this Job.
func (*Job) SetPublishConnectionDetailsTo ¶
func (mg *Job) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this Job.
func (*Job) SetWriteConnectionSecretToReference ¶
func (mg *Job) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this Job.
type JobInitParameters ¶
type JobInitParameters struct { // By default, you can only delete inactive jobs within // Dataproc. Setting this to true, and calling destroy, will ensure that the // job is first cancelled before issuing the delete. ForceDelete *bool `json:"forceDelete,omitempty" tf:"force_delete,omitempty"` HadoopConfig *HadoopConfigInitParameters `json:"hadoopConfig,omitempty" tf:"hadoop_config,omitempty"` HiveConfig *HiveConfigInitParameters `json:"hiveConfig,omitempty" tf:"hive_config,omitempty"` // The list of labels (key/value pairs) to add to the job. // Note: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field 'effective_labels' for all of the labels present on the resource. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` PigConfig *PigConfigInitParameters `json:"pigConfig,omitempty" tf:"pig_config,omitempty"` Placement *PlacementInitParameters `json:"placement,omitempty" tf:"placement,omitempty"` PrestoConfig *PrestoConfigInitParameters `json:"prestoConfig,omitempty" tf:"presto_config,omitempty"` // The project in which the cluster can be found and jobs // subsequently run against. If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` PysparkConfig *PysparkConfigInitParameters `json:"pysparkConfig,omitempty" tf:"pyspark_config,omitempty"` Reference *ReferenceInitParameters `json:"reference,omitempty" tf:"reference,omitempty"` // The Cloud Dataproc region. This essentially determines which clusters are available // for this job to be submitted to. If not specified, defaults to global. 
// +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta2.Cluster // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("region",false) Region *string `json:"region,omitempty" tf:"region,omitempty"` // Reference to a Cluster in dataproc to populate region. // +kubebuilder:validation:Optional RegionRef *v1.Reference `json:"regionRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate region. // +kubebuilder:validation:Optional RegionSelector *v1.Selector `json:"regionSelector,omitempty" tf:"-"` Scheduling *SchedulingInitParameters `json:"scheduling,omitempty" tf:"scheduling,omitempty"` SparkConfig *SparkConfigInitParameters `json:"sparkConfig,omitempty" tf:"spark_config,omitempty"` SparksqlConfig *SparksqlConfigInitParameters `json:"sparksqlConfig,omitempty" tf:"sparksql_config,omitempty"` }
func (*JobInitParameters) DeepCopy ¶
func (in *JobInitParameters) DeepCopy() *JobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobInitParameters.
func (*JobInitParameters) DeepCopyInto ¶
func (in *JobInitParameters) DeepCopyInto(out *JobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobList ¶
type JobList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []Job `json:"items"` }
JobList contains a list of Jobs.
func (*JobList) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList.
func (*JobList) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*JobList) DeepCopyObject ¶
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type JobObservation ¶
type JobObservation struct { // If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri. DriverControlsFilesURI *string `json:"driverControlsFilesUri,omitempty" tf:"driver_controls_files_uri,omitempty"` // A URI pointing to the location of the stdout of the job's driver program. DriverOutputResourceURI *string `json:"driverOutputResourceUri,omitempty" tf:"driver_output_resource_uri,omitempty"` // +mapType=granular EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"` // By default, you can only delete inactive jobs within // Dataproc. Setting this to true, and calling destroy, will ensure that the // job is first cancelled before issuing the delete. ForceDelete *bool `json:"forceDelete,omitempty" tf:"force_delete,omitempty"` HadoopConfig *HadoopConfigObservation `json:"hadoopConfig,omitempty" tf:"hadoop_config,omitempty"` HiveConfig *HiveConfigObservation `json:"hiveConfig,omitempty" tf:"hive_config,omitempty"` ID *string `json:"id,omitempty" tf:"id,omitempty"` // The list of labels (key/value pairs) to add to the job. // Note: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field 'effective_labels' for all of the labels present on the resource. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` PigConfig *PigConfigObservation `json:"pigConfig,omitempty" tf:"pig_config,omitempty"` Placement *PlacementObservation `json:"placement,omitempty" tf:"placement,omitempty"` PrestoConfig *PrestoConfigObservation `json:"prestoConfig,omitempty" tf:"presto_config,omitempty"` // The project in which the cluster can be found and jobs // subsequently run against. If it is not provided, the provider project is used. 
Project *string `json:"project,omitempty" tf:"project,omitempty"` PysparkConfig *PysparkConfigObservation `json:"pysparkConfig,omitempty" tf:"pyspark_config,omitempty"` Reference *ReferenceObservation `json:"reference,omitempty" tf:"reference,omitempty"` // The Cloud Dataproc region. This essentially determines which clusters are available // for this job to be submitted to. If not specified, defaults to global. Region *string `json:"region,omitempty" tf:"region,omitempty"` Scheduling *SchedulingObservation `json:"scheduling,omitempty" tf:"scheduling,omitempty"` SparkConfig *SparkConfigObservation `json:"sparkConfig,omitempty" tf:"spark_config,omitempty"` SparksqlConfig *SparksqlConfigObservation `json:"sparksqlConfig,omitempty" tf:"sparksql_config,omitempty"` Status []StatusObservation `json:"status,omitempty" tf:"status,omitempty"` // The combination of labels configured directly on the resource and default labels configured on the provider. // +mapType=granular TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"` }
func (*JobObservation) DeepCopy ¶
func (in *JobObservation) DeepCopy() *JobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobObservation.
func (*JobObservation) DeepCopyInto ¶
func (in *JobObservation) DeepCopyInto(out *JobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobParameters ¶
type JobParameters struct { // By default, you can only delete inactive jobs within // Dataproc. Setting this to true, and calling destroy, will ensure that the // job is first cancelled before issuing the delete. // +kubebuilder:validation:Optional ForceDelete *bool `json:"forceDelete,omitempty" tf:"force_delete,omitempty"` // +kubebuilder:validation:Optional HadoopConfig *HadoopConfigParameters `json:"hadoopConfig,omitempty" tf:"hadoop_config,omitempty"` // +kubebuilder:validation:Optional HiveConfig *HiveConfigParameters `json:"hiveConfig,omitempty" tf:"hive_config,omitempty"` // The list of labels (key/value pairs) to add to the job. // Note: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field 'effective_labels' for all of the labels present on the resource. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // +kubebuilder:validation:Optional PigConfig *PigConfigParameters `json:"pigConfig,omitempty" tf:"pig_config,omitempty"` // +kubebuilder:validation:Optional Placement *PlacementParameters `json:"placement,omitempty" tf:"placement,omitempty"` // +kubebuilder:validation:Optional PrestoConfig *PrestoConfigParameters `json:"prestoConfig,omitempty" tf:"presto_config,omitempty"` // The project in which the cluster can be found and jobs // subsequently run against. If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // +kubebuilder:validation:Optional PysparkConfig *PysparkConfigParameters `json:"pysparkConfig,omitempty" tf:"pyspark_config,omitempty"` // +kubebuilder:validation:Optional Reference *ReferenceParameters `json:"reference,omitempty" tf:"reference,omitempty"` // The Cloud Dataproc region. This essentially determines which clusters are available // for this job to be submitted to. 
If not specified, defaults to global. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta2.Cluster // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("region",false) // +kubebuilder:validation:Optional Region *string `json:"region,omitempty" tf:"region,omitempty"` // Reference to a Cluster in dataproc to populate region. // +kubebuilder:validation:Optional RegionRef *v1.Reference `json:"regionRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate region. // +kubebuilder:validation:Optional RegionSelector *v1.Selector `json:"regionSelector,omitempty" tf:"-"` // +kubebuilder:validation:Optional Scheduling *SchedulingParameters `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // +kubebuilder:validation:Optional SparkConfig *SparkConfigParameters `json:"sparkConfig,omitempty" tf:"spark_config,omitempty"` // +kubebuilder:validation:Optional SparksqlConfig *SparksqlConfigParameters `json:"sparksqlConfig,omitempty" tf:"sparksql_config,omitempty"` }
func (*JobParameters) DeepCopy ¶
func (in *JobParameters) DeepCopy() *JobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobParameters.
func (*JobParameters) DeepCopyInto ¶
func (in *JobParameters) DeepCopyInto(out *JobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobSpec ¶
type JobSpec struct { v1.ResourceSpec `json:",inline"` ForProvider JobParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider JobInitParameters `json:"initProvider,omitempty"` }
JobSpec defines the desired state of Job
func (*JobSpec) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobSpec.
func (*JobSpec) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobStatus ¶
type JobStatus struct { v1.ResourceStatus `json:",inline"` AtProvider JobObservation `json:"atProvider,omitempty"` }
JobStatus defines the observed state of Job.
func (*JobStatus) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus.
func (*JobStatus) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsInitParameters ¶
type JobsInitParameters struct { // Job is a Hadoop job. HadoopJob *HadoopJobInitParameters `json:"hadoopJob,omitempty" tf:"hadoop_job,omitempty"` // Job is a Hive job. HiveJob *HiveJobInitParameters `json:"hiveJob,omitempty" tf:"hive_job,omitempty"` // The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: {0,63} No more than 32 labels can be associated with a given job. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Job is a Pig job. PigJob *PigJobInitParameters `json:"pigJob,omitempty" tf:"pig_job,omitempty"` // The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow. PrerequisiteStepIds []*string `json:"prerequisiteStepIds,omitempty" tf:"prerequisite_step_ids,omitempty"` // Job is a Presto job. PrestoJob *PrestoJobInitParameters `json:"prestoJob,omitempty" tf:"presto_job,omitempty"` // Job is a PySpark job. PysparkJob *PysparkJobInitParameters `json:"pysparkJob,omitempty" tf:"pyspark_job,omitempty"` // Job scheduling configuration. Scheduling *JobsSchedulingInitParameters `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // Job is a Spark job. SparkJob *SparkJobInitParameters `json:"sparkJob,omitempty" tf:"spark_job,omitempty"` // Job is a SparkR job. SparkRJob *SparkRJobInitParameters `json:"sparkRJob,omitempty" tf:"spark_r_job,omitempty"` // Job is a SparkSql job. SparkSQLJob *SparkSQLJobInitParameters `json:"sparkSqlJob,omitempty" tf:"spark_sql_job,omitempty"` // Required. The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job goog-dataproc-workflow-step-id label, and in field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. 
StepID *string `json:"stepId,omitempty" tf:"step_id,omitempty"` }
func (*JobsInitParameters) DeepCopy ¶
func (in *JobsInitParameters) DeepCopy() *JobsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsInitParameters.
func (*JobsInitParameters) DeepCopyInto ¶
func (in *JobsInitParameters) DeepCopyInto(out *JobsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsObservation ¶
type JobsObservation struct { // Job is a Hadoop job. HadoopJob *HadoopJobObservation `json:"hadoopJob,omitempty" tf:"hadoop_job,omitempty"` // Job is a Hive job. HiveJob *HiveJobObservation `json:"hiveJob,omitempty" tf:"hive_job,omitempty"` // The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: {0,63} No more than 32 labels can be associated with a given job. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Job is a Pig job. PigJob *PigJobObservation `json:"pigJob,omitempty" tf:"pig_job,omitempty"` // The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow. PrerequisiteStepIds []*string `json:"prerequisiteStepIds,omitempty" tf:"prerequisite_step_ids,omitempty"` // Job is a Presto job. PrestoJob *PrestoJobObservation `json:"prestoJob,omitempty" tf:"presto_job,omitempty"` // Job is a PySpark job. PysparkJob *PysparkJobObservation `json:"pysparkJob,omitempty" tf:"pyspark_job,omitempty"` // Job scheduling configuration. Scheduling *JobsSchedulingObservation `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // Job is a Spark job. SparkJob *SparkJobObservation `json:"sparkJob,omitempty" tf:"spark_job,omitempty"` // Job is a SparkR job. SparkRJob *SparkRJobObservation `json:"sparkRJob,omitempty" tf:"spark_r_job,omitempty"` // Job is a SparkSql job. SparkSQLJob *SparkSQLJobObservation `json:"sparkSqlJob,omitempty" tf:"spark_sql_job,omitempty"` // Required. The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job goog-dataproc-workflow-step-id label, and in field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. 
StepID *string `json:"stepId,omitempty" tf:"step_id,omitempty"` }
func (*JobsObservation) DeepCopy ¶
func (in *JobsObservation) DeepCopy() *JobsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsObservation.
func (*JobsObservation) DeepCopyInto ¶
func (in *JobsObservation) DeepCopyInto(out *JobsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsParameters ¶
type JobsParameters struct { // Job is a Hadoop job. // +kubebuilder:validation:Optional HadoopJob *HadoopJobParameters `json:"hadoopJob,omitempty" tf:"hadoop_job,omitempty"` // Job is a Hive job. // +kubebuilder:validation:Optional HiveJob *HiveJobParameters `json:"hiveJob,omitempty" tf:"hive_job,omitempty"` // The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: {0,63} No more than 32 labels can be associated with a given job. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Job is a Pig job. // +kubebuilder:validation:Optional PigJob *PigJobParameters `json:"pigJob,omitempty" tf:"pig_job,omitempty"` // The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow. // +kubebuilder:validation:Optional PrerequisiteStepIds []*string `json:"prerequisiteStepIds,omitempty" tf:"prerequisite_step_ids,omitempty"` // Job is a Presto job. // +kubebuilder:validation:Optional PrestoJob *PrestoJobParameters `json:"prestoJob,omitempty" tf:"presto_job,omitempty"` // Job is a PySpark job. // +kubebuilder:validation:Optional PysparkJob *PysparkJobParameters `json:"pysparkJob,omitempty" tf:"pyspark_job,omitempty"` // Job scheduling configuration. // +kubebuilder:validation:Optional Scheduling *JobsSchedulingParameters `json:"scheduling,omitempty" tf:"scheduling,omitempty"` // Job is a Spark job. // +kubebuilder:validation:Optional SparkJob *SparkJobParameters `json:"sparkJob,omitempty" tf:"spark_job,omitempty"` // Job is a SparkR job. // +kubebuilder:validation:Optional SparkRJob *SparkRJobParameters `json:"sparkRJob,omitempty" tf:"spark_r_job,omitempty"` // Job is a SparkSql job. // +kubebuilder:validation:Optional SparkSQLJob *SparkSQLJobParameters `json:"sparkSqlJob,omitempty" tf:"spark_sql_job,omitempty"` // Required. The step id. 
The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job goog-dataproc-workflow-step-id label, and in field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. // +kubebuilder:validation:Optional StepID *string `json:"stepId" tf:"step_id,omitempty"` }
func (*JobsParameters) DeepCopy ¶
func (in *JobsParameters) DeepCopy() *JobsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsParameters.
func (*JobsParameters) DeepCopyInto ¶
func (in *JobsParameters) DeepCopyInto(out *JobsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsSchedulingInitParameters ¶
type JobsSchedulingInitParameters struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10. MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240 MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*JobsSchedulingInitParameters) DeepCopy ¶
func (in *JobsSchedulingInitParameters) DeepCopy() *JobsSchedulingInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsSchedulingInitParameters.
func (*JobsSchedulingInitParameters) DeepCopyInto ¶
func (in *JobsSchedulingInitParameters) DeepCopyInto(out *JobsSchedulingInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsSchedulingObservation ¶
type JobsSchedulingObservation struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10. MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240 MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*JobsSchedulingObservation) DeepCopy ¶
func (in *JobsSchedulingObservation) DeepCopy() *JobsSchedulingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsSchedulingObservation.
func (*JobsSchedulingObservation) DeepCopyInto ¶
func (in *JobsSchedulingObservation) DeepCopyInto(out *JobsSchedulingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type JobsSchedulingParameters ¶
type JobsSchedulingParameters struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10. // +kubebuilder:validation:Optional MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240 // +kubebuilder:validation:Optional MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*JobsSchedulingParameters) DeepCopy ¶
func (in *JobsSchedulingParameters) DeepCopy() *JobsSchedulingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobsSchedulingParameters.
func (*JobsSchedulingParameters) DeepCopyInto ¶
func (in *JobsSchedulingParameters) DeepCopyInto(out *JobsSchedulingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KerberosConfigInitParameters ¶
type KerberosConfigInitParameters struct { // The admin server (IP or hostname) for the // remote trusted realm in a cross realm trust relationship. CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"` // The KDC (IP or hostname) for the // remote trusted realm in a cross realm trust relationship. CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"` // The remote realm the Dataproc on-cluster KDC will // trust, should the user enable cross realm trust. CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"` // encrypted file containing the shared password between the on-cluster Kerberos realm // and the remote trusted realm, in a cross realm trust relationship. CrossRealmTrustSharedPasswordURI *string `json:"crossRealmTrustSharedPasswordUri,omitempty" tf:"cross_realm_trust_shared_password_uri,omitempty"` // Flag to indicate whether to Kerberize the cluster. EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"` // The URI of the KMS key used to encrypt various sensitive files. KMSKeyURI *string `json:"kmsKeyUri,omitempty" tf:"kms_key_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the master key of the KDC database. KdcDBKeyURI *string `json:"kdcDbKeyUri,omitempty" tf:"kdc_db_key_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the password to the user provided key. For the self-signed certificate, this password // is generated by Dataproc. KeyPasswordURI *string `json:"keyPasswordUri,omitempty" tf:"key_password_uri,omitempty"` // The Cloud Storage URI of a KMS encrypted file containing // the password to the user provided keystore. For the self-signed certificated, the password // is generated by Dataproc. 
KeystorePasswordURI *string `json:"keystorePasswordUri,omitempty" tf:"keystore_password_uri,omitempty"` // The Cloud Storage URI of the keystore file used for SSL encryption. // If not provided, Dataproc will provide a self-signed certificate. KeystoreURI *string `json:"keystoreUri,omitempty" tf:"keystore_uri,omitempty"` // The name of the on-cluster Kerberos realm. If not specified, the // uppercased domain of hostnames will be the realm. Realm *string `json:"realm,omitempty" tf:"realm,omitempty"` // The Cloud Storage URI of a KMS encrypted file // containing the root principal password. RootPrincipalPasswordURI *string `json:"rootPrincipalPasswordUri,omitempty" tf:"root_principal_password_uri,omitempty"` // The lifetime of the ticket granting ticket, in hours. TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"` // The Cloud Storage URI of a KMS encrypted file // containing the password to the user provided truststore. For the self-signed // certificate, this password is generated by Dataproc. TruststorePasswordURI *string `json:"truststorePasswordUri,omitempty" tf:"truststore_password_uri,omitempty"` // The Cloud Storage URI of the truststore file used for // SSL encryption. If not provided, Dataproc will provide a self-signed certificate. TruststoreURI *string `json:"truststoreUri,omitempty" tf:"truststore_uri,omitempty"` }
func (*KerberosConfigInitParameters) DeepCopy ¶
func (in *KerberosConfigInitParameters) DeepCopy() *KerberosConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KerberosConfigInitParameters.
func (*KerberosConfigInitParameters) DeepCopyInto ¶
func (in *KerberosConfigInitParameters) DeepCopyInto(out *KerberosConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KerberosConfigObservation ¶
// KerberosConfigObservation mirrors the Kerberos configuration fields as
// observed on the provider; the field set matches
// KerberosConfigInitParameters since none of the values are server-generated.
type KerberosConfigObservation struct {

	// The admin server (IP or hostname) for the
	// remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// The KDC (IP or hostname) for the
	// remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// The remote realm the Dataproc on-cluster KDC will
	// trust, should the user enable cross realm trust.
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	// The Cloud Storage URI of a KMS
	// encrypted file containing the shared password between the on-cluster Kerberos realm
	// and the remote trusted realm, in a cross realm trust relationship.
	CrossRealmTrustSharedPasswordURI *string `json:"crossRealmTrustSharedPasswordUri,omitempty" tf:"cross_realm_trust_shared_password_uri,omitempty"`

	// Flag to indicate whether to Kerberize the cluster.
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// The URI of the KMS key used to encrypt various sensitive files.
	KMSKeyURI *string `json:"kmsKeyUri,omitempty" tf:"kms_key_uri,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing
	// the master key of the KDC database.
	KdcDBKeyURI *string `json:"kdcDbKeyUri,omitempty" tf:"kdc_db_key_uri,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing
	// the password to the user provided key. For the self-signed certificate, this password
	// is generated by Dataproc.
	KeyPasswordURI *string `json:"keyPasswordUri,omitempty" tf:"key_password_uri,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing
	// the password to the user provided keystore. For the self-signed certificate, the password
	// is generated by Dataproc.
	KeystorePasswordURI *string `json:"keystorePasswordUri,omitempty" tf:"keystore_password_uri,omitempty"`

	// The Cloud Storage URI of the keystore file used for SSL encryption.
	// If not provided, Dataproc will provide a self-signed certificate.
	KeystoreURI *string `json:"keystoreUri,omitempty" tf:"keystore_uri,omitempty"`

	// The name of the on-cluster Kerberos realm. If not specified, the
	// uppercased domain of hostnames will be the realm.
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file
	// containing the root principal password.
	RootPrincipalPasswordURI *string `json:"rootPrincipalPasswordUri,omitempty" tf:"root_principal_password_uri,omitempty"`

	// The lifetime of the ticket granting ticket, in hours.
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file
	// containing the password to the user provided truststore. For the self-signed
	// certificate, this password is generated by Dataproc.
	TruststorePasswordURI *string `json:"truststorePasswordUri,omitempty" tf:"truststore_password_uri,omitempty"`

	// The Cloud Storage URI of the truststore file used for
	// SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	TruststoreURI *string `json:"truststoreUri,omitempty" tf:"truststore_uri,omitempty"`
}
func (*KerberosConfigObservation) DeepCopy ¶
func (in *KerberosConfigObservation) DeepCopy() *KerberosConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KerberosConfigObservation.
func (*KerberosConfigObservation) DeepCopyInto ¶
func (in *KerberosConfigObservation) DeepCopyInto(out *KerberosConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KerberosConfigParameters ¶
// KerberosConfigParameters is the spec.forProvider form of the Kerberos
// configuration. KMSKeyURI and RootPrincipalPasswordURI carry no omitempty
// in their json tags, marking them as required within this sub-schema once
// kerberos_config is set.
type KerberosConfigParameters struct {

	// The admin server (IP or hostname) for the
	// remote trusted realm in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// The KDC (IP or hostname) for the
	// remote trusted realm in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// The remote realm the Dataproc on-cluster KDC will
	// trust, should the user enable cross realm trust.
	// +kubebuilder:validation:Optional
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	// The Cloud Storage URI of a KMS
	// encrypted file containing the shared password between the on-cluster Kerberos realm
	// and the remote trusted realm, in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustSharedPasswordURI *string `json:"crossRealmTrustSharedPasswordUri,omitempty" tf:"cross_realm_trust_shared_password_uri,omitempty"`

	// Flag to indicate whether to Kerberize the cluster.
	// +kubebuilder:validation:Optional
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// The URI of the KMS key used to encrypt various sensitive files.
	// Required in this sub-schema: the json tag intentionally lacks omitempty.
	// +kubebuilder:validation:Optional
	KMSKeyURI *string `json:"kmsKeyUri" tf:"kms_key_uri,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing
	// the master key of the KDC database.
	// +kubebuilder:validation:Optional
	KdcDBKeyURI *string `json:"kdcDbKeyUri,omitempty" tf:"kdc_db_key_uri,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing
	// the password to the user provided key. For the self-signed certificate, this password
	// is generated by Dataproc.
	// +kubebuilder:validation:Optional
	KeyPasswordURI *string `json:"keyPasswordUri,omitempty" tf:"key_password_uri,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing
	// the password to the user provided keystore. For the self-signed certificate, the password
	// is generated by Dataproc.
	// +kubebuilder:validation:Optional
	KeystorePasswordURI *string `json:"keystorePasswordUri,omitempty" tf:"keystore_password_uri,omitempty"`

	// The Cloud Storage URI of the keystore file used for SSL encryption.
	// If not provided, Dataproc will provide a self-signed certificate.
	// +kubebuilder:validation:Optional
	KeystoreURI *string `json:"keystoreUri,omitempty" tf:"keystore_uri,omitempty"`

	// The name of the on-cluster Kerberos realm. If not specified, the
	// uppercased domain of hostnames will be the realm.
	// +kubebuilder:validation:Optional
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file
	// containing the root principal password.
	// Required in this sub-schema: the json tag intentionally lacks omitempty.
	// +kubebuilder:validation:Optional
	RootPrincipalPasswordURI *string `json:"rootPrincipalPasswordUri" tf:"root_principal_password_uri,omitempty"`

	// The lifetime of the ticket granting ticket, in hours.
	// +kubebuilder:validation:Optional
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file
	// containing the password to the user provided truststore. For the self-signed
	// certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	TruststorePasswordURI *string `json:"truststorePasswordUri,omitempty" tf:"truststore_password_uri,omitempty"`

	// The Cloud Storage URI of the truststore file used for
	// SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +kubebuilder:validation:Optional
	TruststoreURI *string `json:"truststoreUri,omitempty" tf:"truststore_uri,omitempty"`
}
func (*KerberosConfigParameters) DeepCopy ¶
func (in *KerberosConfigParameters) DeepCopy() *KerberosConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KerberosConfigParameters.
func (*KerberosConfigParameters) DeepCopyInto ¶
func (in *KerberosConfigParameters) DeepCopyInto(out *KerberosConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KeytabInitParameters ¶
type KeytabInitParameters struct { // The relative resource name of a Secret Manager secret version, in the following form: // "projects/{projectNumber}/secrets/{secret_id}/versions/{version_id}". CloudSecret *string `json:"cloudSecret,omitempty" tf:"cloud_secret,omitempty"` }
func (*KeytabInitParameters) DeepCopy ¶
func (in *KeytabInitParameters) DeepCopy() *KeytabInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KeytabInitParameters.
func (*KeytabInitParameters) DeepCopyInto ¶
func (in *KeytabInitParameters) DeepCopyInto(out *KeytabInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KeytabObservation ¶
type KeytabObservation struct { // The relative resource name of a Secret Manager secret version, in the following form: // "projects/{projectNumber}/secrets/{secret_id}/versions/{version_id}". CloudSecret *string `json:"cloudSecret,omitempty" tf:"cloud_secret,omitempty"` }
func (*KeytabObservation) DeepCopy ¶
func (in *KeytabObservation) DeepCopy() *KeytabObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KeytabObservation.
func (*KeytabObservation) DeepCopyInto ¶
func (in *KeytabObservation) DeepCopyInto(out *KeytabObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KeytabParameters ¶
type KeytabParameters struct { // The relative resource name of a Secret Manager secret version, in the following form: // "projects/{projectNumber}/secrets/{secret_id}/versions/{version_id}". // +kubebuilder:validation:Optional CloudSecret *string `json:"cloudSecret" tf:"cloud_secret,omitempty"` }
func (*KeytabParameters) DeepCopy ¶
func (in *KeytabParameters) DeepCopy() *KeytabParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KeytabParameters.
func (*KeytabParameters) DeepCopyInto ¶
func (in *KeytabParameters) DeepCopyInto(out *KeytabParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesClusterConfigInitParameters ¶
type KubernetesClusterConfigInitParameters struct { // The configuration for running the Dataproc cluster on GKE. GkeClusterConfig *GkeClusterConfigInitParameters `json:"gkeClusterConfig,omitempty" tf:"gke_cluster_config,omitempty"` // A namespace within the Kubernetes cluster to deploy into. // If this namespace does not exist, it is created. // If it exists, Dataproc verifies that another Dataproc VirtualCluster is not installed into it. // If not specified, the name of the Dataproc Cluster is used. KubernetesNamespace *string `json:"kubernetesNamespace,omitempty" tf:"kubernetes_namespace,omitempty"` // The software configuration for this Dataproc cluster running on Kubernetes. KubernetesSoftwareConfig *KubernetesSoftwareConfigInitParameters `json:"kubernetesSoftwareConfig,omitempty" tf:"kubernetes_software_config,omitempty"` }
func (*KubernetesClusterConfigInitParameters) DeepCopy ¶
func (in *KubernetesClusterConfigInitParameters) DeepCopy() *KubernetesClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesClusterConfigInitParameters.
func (*KubernetesClusterConfigInitParameters) DeepCopyInto ¶
func (in *KubernetesClusterConfigInitParameters) DeepCopyInto(out *KubernetesClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesClusterConfigObservation ¶
type KubernetesClusterConfigObservation struct { // The configuration for running the Dataproc cluster on GKE. GkeClusterConfig *GkeClusterConfigObservation `json:"gkeClusterConfig,omitempty" tf:"gke_cluster_config,omitempty"` // A namespace within the Kubernetes cluster to deploy into. // If this namespace does not exist, it is created. // If it exists, Dataproc verifies that another Dataproc VirtualCluster is not installed into it. // If not specified, the name of the Dataproc Cluster is used. KubernetesNamespace *string `json:"kubernetesNamespace,omitempty" tf:"kubernetes_namespace,omitempty"` // The software configuration for this Dataproc cluster running on Kubernetes. KubernetesSoftwareConfig *KubernetesSoftwareConfigObservation `json:"kubernetesSoftwareConfig,omitempty" tf:"kubernetes_software_config,omitempty"` }
func (*KubernetesClusterConfigObservation) DeepCopy ¶
func (in *KubernetesClusterConfigObservation) DeepCopy() *KubernetesClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesClusterConfigObservation.
func (*KubernetesClusterConfigObservation) DeepCopyInto ¶
func (in *KubernetesClusterConfigObservation) DeepCopyInto(out *KubernetesClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesClusterConfigParameters ¶
type KubernetesClusterConfigParameters struct { // The configuration for running the Dataproc cluster on GKE. // +kubebuilder:validation:Optional GkeClusterConfig *GkeClusterConfigParameters `json:"gkeClusterConfig" tf:"gke_cluster_config,omitempty"` // A namespace within the Kubernetes cluster to deploy into. // If this namespace does not exist, it is created. // If it exists, Dataproc verifies that another Dataproc VirtualCluster is not installed into it. // If not specified, the name of the Dataproc Cluster is used. // +kubebuilder:validation:Optional KubernetesNamespace *string `json:"kubernetesNamespace,omitempty" tf:"kubernetes_namespace,omitempty"` // The software configuration for this Dataproc cluster running on Kubernetes. // +kubebuilder:validation:Optional KubernetesSoftwareConfig *KubernetesSoftwareConfigParameters `json:"kubernetesSoftwareConfig" tf:"kubernetes_software_config,omitempty"` }
func (*KubernetesClusterConfigParameters) DeepCopy ¶
func (in *KubernetesClusterConfigParameters) DeepCopy() *KubernetesClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesClusterConfigParameters.
func (*KubernetesClusterConfigParameters) DeepCopyInto ¶
func (in *KubernetesClusterConfigParameters) DeepCopyInto(out *KubernetesClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesSoftwareConfigInitParameters ¶
type KubernetesSoftwareConfigInitParameters struct { // The components that should be installed in this Dataproc cluster. The key must be a string from the // KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. // +mapType=granular ComponentVersion map[string]*string `json:"componentVersion,omitempty" tf:"component_version,omitempty"` // The properties to set on daemon config files. Property keys are specified in prefix:property format, // for example spark:spark.kubernetes.container.image. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*KubernetesSoftwareConfigInitParameters) DeepCopy ¶
func (in *KubernetesSoftwareConfigInitParameters) DeepCopy() *KubernetesSoftwareConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesSoftwareConfigInitParameters.
func (*KubernetesSoftwareConfigInitParameters) DeepCopyInto ¶
func (in *KubernetesSoftwareConfigInitParameters) DeepCopyInto(out *KubernetesSoftwareConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesSoftwareConfigObservation ¶
type KubernetesSoftwareConfigObservation struct { // The components that should be installed in this Dataproc cluster. The key must be a string from the // KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. // +mapType=granular ComponentVersion map[string]*string `json:"componentVersion,omitempty" tf:"component_version,omitempty"` // The properties to set on daemon config files. Property keys are specified in prefix:property format, // for example spark:spark.kubernetes.container.image. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*KubernetesSoftwareConfigObservation) DeepCopy ¶
func (in *KubernetesSoftwareConfigObservation) DeepCopy() *KubernetesSoftwareConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesSoftwareConfigObservation.
func (*KubernetesSoftwareConfigObservation) DeepCopyInto ¶
func (in *KubernetesSoftwareConfigObservation) DeepCopyInto(out *KubernetesSoftwareConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type KubernetesSoftwareConfigParameters ¶
type KubernetesSoftwareConfigParameters struct { // The components that should be installed in this Dataproc cluster. The key must be a string from the // KubernetesComponent enumeration. The value is the version of the software to be installed. At least one entry must be specified. // +kubebuilder:validation:Optional // +mapType=granular ComponentVersion map[string]*string `json:"componentVersion" tf:"component_version,omitempty"` // The properties to set on daemon config files. Property keys are specified in prefix:property format, // for example spark:spark.kubernetes.container.image. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*KubernetesSoftwareConfigParameters) DeepCopy ¶
func (in *KubernetesSoftwareConfigParameters) DeepCopy() *KubernetesSoftwareConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new KubernetesSoftwareConfigParameters.
func (*KubernetesSoftwareConfigParameters) DeepCopyInto ¶
func (in *KubernetesSoftwareConfigParameters) DeepCopyInto(out *KubernetesSoftwareConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LifecycleConfigInitParameters ¶
type LifecycleConfigInitParameters struct { // The time when cluster will be auto-deleted. // A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. // Example: "2014-10-02T15:01:23.045123456Z". AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // The duration to keep the cluster alive while idling // (no jobs running). After this TTL, the cluster will be deleted. Valid range: [10m, 14d]. IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` }
func (*LifecycleConfigInitParameters) DeepCopy ¶
func (in *LifecycleConfigInitParameters) DeepCopy() *LifecycleConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LifecycleConfigInitParameters.
func (*LifecycleConfigInitParameters) DeepCopyInto ¶
func (in *LifecycleConfigInitParameters) DeepCopyInto(out *LifecycleConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LifecycleConfigObservation ¶
type LifecycleConfigObservation struct { // The time when cluster will be auto-deleted. // A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. // Example: "2014-10-02T15:01:23.045123456Z". AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // The duration to keep the cluster alive while idling // (no jobs running). After this TTL, the cluster will be deleted. Valid range: [10m, 14d]. IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` // Time when the cluster became idle // (most recent job finished) and became eligible for deletion due to idleness. IdleStartTime *string `json:"idleStartTime,omitempty" tf:"idle_start_time,omitempty"` }
func (*LifecycleConfigObservation) DeepCopy ¶
func (in *LifecycleConfigObservation) DeepCopy() *LifecycleConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LifecycleConfigObservation.
func (*LifecycleConfigObservation) DeepCopyInto ¶
func (in *LifecycleConfigObservation) DeepCopyInto(out *LifecycleConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LifecycleConfigParameters ¶
type LifecycleConfigParameters struct { // The time when cluster will be auto-deleted. // A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. // Example: "2014-10-02T15:01:23.045123456Z". // +kubebuilder:validation:Optional AutoDeleteTime *string `json:"autoDeleteTime,omitempty" tf:"auto_delete_time,omitempty"` // The duration to keep the cluster alive while idling // (no jobs running). After this TTL, the cluster will be deleted. Valid range: [10m, 14d]. // +kubebuilder:validation:Optional IdleDeleteTTL *string `json:"idleDeleteTtl,omitempty" tf:"idle_delete_ttl,omitempty"` }
func (*LifecycleConfigParameters) DeepCopy ¶
func (in *LifecycleConfigParameters) DeepCopy() *LifecycleConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LifecycleConfigParameters.
func (*LifecycleConfigParameters) DeepCopyInto ¶
func (in *LifecycleConfigParameters) DeepCopyInto(out *LifecycleConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoggingConfigInitParameters ¶
type LoggingConfigInitParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*LoggingConfigInitParameters) DeepCopy ¶
func (in *LoggingConfigInitParameters) DeepCopy() *LoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingConfigInitParameters.
func (*LoggingConfigInitParameters) DeepCopyInto ¶
func (in *LoggingConfigInitParameters) DeepCopyInto(out *LoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoggingConfigObservation ¶
type LoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*LoggingConfigObservation) DeepCopy ¶
func (in *LoggingConfigObservation) DeepCopy() *LoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingConfigObservation.
func (*LoggingConfigObservation) DeepCopyInto ¶
func (in *LoggingConfigObservation) DeepCopyInto(out *LoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LoggingConfigParameters ¶
type LoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*LoggingConfigParameters) DeepCopy ¶
func (in *LoggingConfigParameters) DeepCopy() *LoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoggingConfigParameters.
func (*LoggingConfigParameters) DeepCopyInto ¶
func (in *LoggingConfigParameters) DeepCopyInto(out *LoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaintenanceWindowInitParameters ¶
type MaintenanceWindowInitParameters struct { // The day of week, when the window starts. // Possible values are: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. DayOfWeek *string `json:"dayOfWeek,omitempty" tf:"day_of_week,omitempty"` // The hour of day (0-23) when the window starts. HourOfDay *float64 `json:"hourOfDay,omitempty" tf:"hour_of_day,omitempty"` }
func (*MaintenanceWindowInitParameters) DeepCopy ¶
func (in *MaintenanceWindowInitParameters) DeepCopy() *MaintenanceWindowInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaintenanceWindowInitParameters.
func (*MaintenanceWindowInitParameters) DeepCopyInto ¶
func (in *MaintenanceWindowInitParameters) DeepCopyInto(out *MaintenanceWindowInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaintenanceWindowObservation ¶
type MaintenanceWindowObservation struct { // The day of week, when the window starts. // Possible values are: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. DayOfWeek *string `json:"dayOfWeek,omitempty" tf:"day_of_week,omitempty"` // The hour of day (0-23) when the window starts. HourOfDay *float64 `json:"hourOfDay,omitempty" tf:"hour_of_day,omitempty"` }
func (*MaintenanceWindowObservation) DeepCopy ¶
func (in *MaintenanceWindowObservation) DeepCopy() *MaintenanceWindowObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaintenanceWindowObservation.
func (*MaintenanceWindowObservation) DeepCopyInto ¶
func (in *MaintenanceWindowObservation) DeepCopyInto(out *MaintenanceWindowObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MaintenanceWindowParameters ¶
type MaintenanceWindowParameters struct { // The day of week, when the window starts. // Possible values are: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. // +kubebuilder:validation:Optional DayOfWeek *string `json:"dayOfWeek" tf:"day_of_week,omitempty"` // The hour of day (0-23) when the window starts. // +kubebuilder:validation:Optional HourOfDay *float64 `json:"hourOfDay" tf:"hour_of_day,omitempty"` }
func (*MaintenanceWindowParameters) DeepCopy ¶
func (in *MaintenanceWindowParameters) DeepCopy() *MaintenanceWindowParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaintenanceWindowParameters.
func (*MaintenanceWindowParameters) DeepCopyInto ¶
func (in *MaintenanceWindowParameters) DeepCopyInto(out *MaintenanceWindowParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterConfigInitParameters ¶
// ManagedClusterConfigInitParameters aggregates the full configuration for
// a Dataproc managed cluster created by a workflow template: autoscaling,
// encryption, networking, instance groups, software, and storage buckets.
type ManagedClusterConfigInitParameters struct {

	// Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset.
	AutoscalingConfig *ConfigAutoscalingConfigInitParameters `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"`

	// Encryption settings for the cluster.
	EncryptionConfig *ConfigEncryptionConfigInitParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"`

	// Port/endpoint configuration for this cluster
	EndpointConfig *ConfigEndpointConfigInitParameters `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"`

	// The shared Compute Engine config settings for all instances in a cluster.
	GceClusterConfig *ConfigGceClusterConfigInitParameters `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"`

	// Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if ; then ... master specific actions ... else ... worker specific actions ... fi
	InitializationActions []InitializationActionsInitParameters `json:"initializationActions,omitempty" tf:"initialization_actions,omitempty"`

	// Lifecycle setting for the cluster.
	LifecycleConfig *ConfigLifecycleConfigInitParameters `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"`

	// The Compute Engine config settings for the cluster's master instance(s).
	MasterConfig *ConfigMasterConfigInitParameters `json:"masterConfig,omitempty" tf:"master_config,omitempty"`

	// The Compute Engine config settings for additional worker instances in a cluster.
	SecondaryWorkerConfig *ConfigSecondaryWorkerConfigInitParameters `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"`

	// Security settings for the cluster.
	SecurityConfig *ConfigSecurityConfigInitParameters `json:"securityConfig,omitempty" tf:"security_config,omitempty"`

	// The config settings for software inside the cluster.
	SoftwareConfig *ConfigSoftwareConfigInitParameters `json:"softwareConfig,omitempty" tf:"software_config,omitempty"`

	// A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
	StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"`

	// A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
	TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"`

	// The Compute Engine config settings for the cluster's worker instances.
	WorkerConfig *ConfigWorkerConfigInitParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"`
}
func (*ManagedClusterConfigInitParameters) DeepCopy ¶
func (in *ManagedClusterConfigInitParameters) DeepCopy() *ManagedClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterConfigInitParameters.
func (*ManagedClusterConfigInitParameters) DeepCopyInto ¶
func (in *ManagedClusterConfigInitParameters) DeepCopyInto(out *ManagedClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterConfigObservation ¶
type ManagedClusterConfigObservation struct { // Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset. AutoscalingConfig *ConfigAutoscalingConfigObservation `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // Encryption settings for the cluster. EncryptionConfig *ConfigEncryptionConfigObservation `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Port/endpoint configuration for this cluster EndpointConfig *ConfigEndpointConfigObservation `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // The shared Compute Engine config settings for all instances in a cluster. GceClusterConfig *ConfigGceClusterConfigObservation `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if ; then ... master specific actions ... else ... worker specific actions ... fi InitializationActions []InitializationActionsObservation `json:"initializationActions,omitempty" tf:"initialization_actions,omitempty"` // Lifecycle setting for the cluster. LifecycleConfig *ConfigLifecycleConfigObservation `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // The Compute Engine config settings for additional worker instances in a cluster. MasterConfig *ConfigMasterConfigObservation `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // The Compute Engine config settings for additional worker instances in a cluster. 
SecondaryWorkerConfig *ConfigSecondaryWorkerConfigObservation `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Security settings for the cluster. SecurityConfig *ConfigSecurityConfigObservation `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // The config settings for software inside the cluster. SoftwareConfig *ConfigSoftwareConfigObservation `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // The Compute Engine config settings for additional worker instances in a cluster. WorkerConfig *ConfigWorkerConfigObservation `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*ManagedClusterConfigObservation) DeepCopy ¶
func (in *ManagedClusterConfigObservation) DeepCopy() *ManagedClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterConfigObservation.
func (*ManagedClusterConfigObservation) DeepCopyInto ¶
func (in *ManagedClusterConfigObservation) DeepCopyInto(out *ManagedClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterConfigParameters ¶
type ManagedClusterConfigParameters struct { // Autoscaling config for the policy associated with the cluster. Cluster does not autoscale if this field is unset. // +kubebuilder:validation:Optional AutoscalingConfig *ConfigAutoscalingConfigParameters `json:"autoscalingConfig,omitempty" tf:"autoscaling_config,omitempty"` // Encryption settings for the cluster. // +kubebuilder:validation:Optional EncryptionConfig *ConfigEncryptionConfigParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Port/endpoint configuration for this cluster // +kubebuilder:validation:Optional EndpointConfig *ConfigEndpointConfigParameters `json:"endpointConfig,omitempty" tf:"endpoint_config,omitempty"` // The shared Compute Engine config settings for all instances in a cluster. // +kubebuilder:validation:Optional GceClusterConfig *ConfigGceClusterConfigParameters `json:"gceClusterConfig,omitempty" tf:"gce_cluster_config,omitempty"` // Commands to execute on each node after config is completed. By default, executables are run on master and all worker nodes. You can test a node's role metadata to run an executable on a master or worker node, as shown below using curl (you can also use wget): ROLE=$(curl -H Metadata-Flavor:Google http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) if ; then ... master specific actions ... else ... worker specific actions ... fi // +kubebuilder:validation:Optional InitializationActions []InitializationActionsParameters `json:"initializationActions,omitempty" tf:"initialization_actions,omitempty"` // Lifecycle setting for the cluster. // +kubebuilder:validation:Optional LifecycleConfig *ConfigLifecycleConfigParameters `json:"lifecycleConfig,omitempty" tf:"lifecycle_config,omitempty"` // The Compute Engine config settings for additional worker instances in a cluster. 
// +kubebuilder:validation:Optional MasterConfig *ConfigMasterConfigParameters `json:"masterConfig,omitempty" tf:"master_config,omitempty"` // The Compute Engine config settings for additional worker instances in a cluster. // +kubebuilder:validation:Optional SecondaryWorkerConfig *ConfigSecondaryWorkerConfigParameters `json:"secondaryWorkerConfig,omitempty" tf:"secondary_worker_config,omitempty"` // Security settings for the cluster. // +kubebuilder:validation:Optional SecurityConfig *ConfigSecurityConfigParameters `json:"securityConfig,omitempty" tf:"security_config,omitempty"` // The config settings for software inside the cluster. // +kubebuilder:validation:Optional SoftwareConfig *ConfigSoftwareConfigParameters `json:"softwareConfig,omitempty" tf:"software_config,omitempty"` // A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). // +kubebuilder:validation:Optional StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` // A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. 
// +kubebuilder:validation:Optional TempBucket *string `json:"tempBucket,omitempty" tf:"temp_bucket,omitempty"` // The Compute Engine config settings for additional worker instances in a cluster. // +kubebuilder:validation:Optional WorkerConfig *ConfigWorkerConfigParameters `json:"workerConfig,omitempty" tf:"worker_config,omitempty"` }
func (*ManagedClusterConfigParameters) DeepCopy ¶
func (in *ManagedClusterConfigParameters) DeepCopy() *ManagedClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterConfigParameters.
func (*ManagedClusterConfigParameters) DeepCopyInto ¶
func (in *ManagedClusterConfigParameters) DeepCopyInto(out *ManagedClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterInitParameters ¶
type ManagedClusterInitParameters struct { // Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // Required. The cluster configuration. Config *ManagedClusterConfigInitParameters `json:"config,omitempty" tf:"config,omitempty"` // The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given cluster. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` }
func (*ManagedClusterInitParameters) DeepCopy ¶
func (in *ManagedClusterInitParameters) DeepCopy() *ManagedClusterInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterInitParameters.
func (*ManagedClusterInitParameters) DeepCopyInto ¶
func (in *ManagedClusterInitParameters) DeepCopyInto(out *ManagedClusterInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterObservation ¶
type ManagedClusterObservation struct { // Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // Required. The cluster configuration. Config *ManagedClusterConfigObservation `json:"config,omitempty" tf:"config,omitempty"` // The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given cluster. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` }
func (*ManagedClusterObservation) DeepCopy ¶
func (in *ManagedClusterObservation) DeepCopy() *ManagedClusterObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterObservation.
func (*ManagedClusterObservation) DeepCopyInto ¶
func (in *ManagedClusterObservation) DeepCopyInto(out *ManagedClusterObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedClusterParameters ¶
type ManagedClusterParameters struct { // Required. The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters. // +kubebuilder:validation:Optional ClusterName *string `json:"clusterName" tf:"cluster_name,omitempty"` // Required. The cluster configuration. // +kubebuilder:validation:Optional Config *ManagedClusterConfigParameters `json:"config" tf:"config,omitempty"` // The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}. Label values must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}. No more than 32 labels can be associated with a given cluster. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` }
func (*ManagedClusterParameters) DeepCopy ¶
func (in *ManagedClusterParameters) DeepCopy() *ManagedClusterParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedClusterParameters.
func (*ManagedClusterParameters) DeepCopyInto ¶
func (in *ManagedClusterParameters) DeepCopyInto(out *ManagedClusterParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedGroupConfigInitParameters ¶
type ManagedGroupConfigInitParameters struct { }
func (*ManagedGroupConfigInitParameters) DeepCopy ¶
func (in *ManagedGroupConfigInitParameters) DeepCopy() *ManagedGroupConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedGroupConfigInitParameters.
func (*ManagedGroupConfigInitParameters) DeepCopyInto ¶
func (in *ManagedGroupConfigInitParameters) DeepCopyInto(out *ManagedGroupConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedGroupConfigObservation ¶
type ManagedGroupConfigObservation struct { // Output only. The name of the Instance Group Manager for this group. InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name,omitempty"` // Output only. The name of the Instance Template used for the Managed Instance Group. InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name,omitempty"` }
func (*ManagedGroupConfigObservation) DeepCopy ¶
func (in *ManagedGroupConfigObservation) DeepCopy() *ManagedGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedGroupConfigObservation.
func (*ManagedGroupConfigObservation) DeepCopyInto ¶
func (in *ManagedGroupConfigObservation) DeepCopyInto(out *ManagedGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ManagedGroupConfigParameters ¶
type ManagedGroupConfigParameters struct { }
func (*ManagedGroupConfigParameters) DeepCopy ¶
func (in *ManagedGroupConfigParameters) DeepCopy() *ManagedGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ManagedGroupConfigParameters.
func (*ManagedGroupConfigParameters) DeepCopyInto ¶
func (in *ManagedGroupConfigParameters) DeepCopyInto(out *ManagedGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigAcceleratorsInitParameters ¶
type MasterConfigAcceleratorsInitParameters struct { // The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*MasterConfigAcceleratorsInitParameters) DeepCopy ¶
func (in *MasterConfigAcceleratorsInitParameters) DeepCopy() *MasterConfigAcceleratorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigAcceleratorsInitParameters.
func (*MasterConfigAcceleratorsInitParameters) DeepCopyInto ¶
func (in *MasterConfigAcceleratorsInitParameters) DeepCopyInto(out *MasterConfigAcceleratorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigAcceleratorsObservation ¶
type MasterConfigAcceleratorsObservation struct { // The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*MasterConfigAcceleratorsObservation) DeepCopy ¶
func (in *MasterConfigAcceleratorsObservation) DeepCopy() *MasterConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigAcceleratorsObservation.
func (*MasterConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *MasterConfigAcceleratorsObservation) DeepCopyInto(out *MasterConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigAcceleratorsParameters ¶
type MasterConfigAcceleratorsParameters struct { // The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8. // +kubebuilder:validation:Optional AcceleratorCount *float64 `json:"acceleratorCount" tf:"accelerator_count,omitempty"` // The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80. // +kubebuilder:validation:Optional AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type,omitempty"` }
func (*MasterConfigAcceleratorsParameters) DeepCopy ¶
func (in *MasterConfigAcceleratorsParameters) DeepCopy() *MasterConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigAcceleratorsParameters.
func (*MasterConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *MasterConfigAcceleratorsParameters) DeepCopyInto(out *MasterConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigDiskConfigInitParameters ¶
type MasterConfigDiskConfigInitParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*MasterConfigDiskConfigInitParameters) DeepCopy ¶
func (in *MasterConfigDiskConfigInitParameters) DeepCopy() *MasterConfigDiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigDiskConfigInitParameters.
func (*MasterConfigDiskConfigInitParameters) DeepCopyInto ¶
func (in *MasterConfigDiskConfigInitParameters) DeepCopyInto(out *MasterConfigDiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigDiskConfigObservation ¶
type MasterConfigDiskConfigObservation struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*MasterConfigDiskConfigObservation) DeepCopy ¶
func (in *MasterConfigDiskConfigObservation) DeepCopy() *MasterConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigDiskConfigObservation.
func (*MasterConfigDiskConfigObservation) DeepCopyInto ¶
func (in *MasterConfigDiskConfigObservation) DeepCopyInto(out *MasterConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigDiskConfigParameters ¶
type MasterConfigDiskConfigParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. // +kubebuilder:validation:Optional LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*MasterConfigDiskConfigParameters) DeepCopy ¶
func (in *MasterConfigDiskConfigParameters) DeepCopy() *MasterConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigDiskConfigParameters.
func (*MasterConfigDiskConfigParameters) DeepCopyInto ¶
func (in *MasterConfigDiskConfigParameters) DeepCopyInto(out *MasterConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigInitParameters ¶
type MasterConfigInitParameters struct { // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. Accelerators []MasterConfigAcceleratorsInitParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig *MasterConfigDiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // The name of a Google Compute Engine machine type // to create for the master. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 1). NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*MasterConfigInitParameters) DeepCopy ¶
func (in *MasterConfigInitParameters) DeepCopy() *MasterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigInitParameters.
func (*MasterConfigInitParameters) DeepCopyInto ¶
func (in *MasterConfigInitParameters) DeepCopyInto(out *MasterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigObservation ¶
type MasterConfigObservation struct { // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. Accelerators []MasterConfigAcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig *MasterConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // List of worker instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // The name of a Google Compute Engine machine type // to create for the master. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 1). NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*MasterConfigObservation) DeepCopy ¶
func (in *MasterConfigObservation) DeepCopy() *MasterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigObservation.
func (*MasterConfigObservation) DeepCopyInto ¶
func (in *MasterConfigObservation) DeepCopyInto(out *MasterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MasterConfigParameters ¶
type MasterConfigParameters struct { // The Compute Engine accelerator (GPU) configuration for these instances. Can be specified multiple times. // +kubebuilder:validation:Optional Accelerators []MasterConfigAcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config // +kubebuilder:validation:Optional DiskConfig *MasterConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The URI for the image to use for this worker. See the guide // for more information. // +kubebuilder:validation:Optional ImageURI *string `json:"imageUri,omitempty" tf:"image_uri,omitempty"` // The name of a Google Compute Engine machine type // to create for the master. If not specified, GCP will default to a predetermined // computed value (currently n1-standard-4). // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // The name of a minimum generation of CPU family // for the master. If not specified, GCP will default to a predetermined computed value // for each zone. See the guide // for details about which CPU families are available (and defaulted) for each zone. // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // If not specified, GCP will default to a predetermined computed value (currently 1). // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*MasterConfigParameters) DeepCopy ¶
func (in *MasterConfigParameters) DeepCopy() *MasterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MasterConfigParameters.
func (*MasterConfigParameters) DeepCopyInto ¶
func (in *MasterConfigParameters) DeepCopyInto(out *MasterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetadataIntegrationInitParameters ¶
type MetadataIntegrationInitParameters struct { // The integration config for the Data Catalog service. // Structure is documented below. DataCatalogConfig *DataCatalogConfigInitParameters `json:"dataCatalogConfig,omitempty" tf:"data_catalog_config,omitempty"` }
func (*MetadataIntegrationInitParameters) DeepCopy ¶
func (in *MetadataIntegrationInitParameters) DeepCopy() *MetadataIntegrationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetadataIntegrationInitParameters.
func (*MetadataIntegrationInitParameters) DeepCopyInto ¶
func (in *MetadataIntegrationInitParameters) DeepCopyInto(out *MetadataIntegrationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetadataIntegrationObservation ¶
type MetadataIntegrationObservation struct { // The integration config for the Data Catalog service. // Structure is documented below. DataCatalogConfig *DataCatalogConfigObservation `json:"dataCatalogConfig,omitempty" tf:"data_catalog_config,omitempty"` }
func (*MetadataIntegrationObservation) DeepCopy ¶
func (in *MetadataIntegrationObservation) DeepCopy() *MetadataIntegrationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetadataIntegrationObservation.
func (*MetadataIntegrationObservation) DeepCopyInto ¶
func (in *MetadataIntegrationObservation) DeepCopyInto(out *MetadataIntegrationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetadataIntegrationParameters ¶
type MetadataIntegrationParameters struct { // The integration config for the Data Catalog service. // Structure is documented below. // +kubebuilder:validation:Optional DataCatalogConfig *DataCatalogConfigParameters `json:"dataCatalogConfig" tf:"data_catalog_config,omitempty"` }
func (*MetadataIntegrationParameters) DeepCopy ¶
func (in *MetadataIntegrationParameters) DeepCopy() *MetadataIntegrationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetadataIntegrationParameters.
func (*MetadataIntegrationParameters) DeepCopyInto ¶
func (in *MetadataIntegrationParameters) DeepCopyInto(out *MetadataIntegrationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreConfigInitParameters ¶
type MetastoreConfigInitParameters struct { // Resource name of an existing Dataproc Metastore service. DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"` }
func (*MetastoreConfigInitParameters) DeepCopy ¶
func (in *MetastoreConfigInitParameters) DeepCopy() *MetastoreConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreConfigInitParameters.
func (*MetastoreConfigInitParameters) DeepCopyInto ¶
func (in *MetastoreConfigInitParameters) DeepCopyInto(out *MetastoreConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreConfigObservation ¶
type MetastoreConfigObservation struct { // Resource name of an existing Dataproc Metastore service. DataprocMetastoreService *string `json:"dataprocMetastoreService,omitempty" tf:"dataproc_metastore_service,omitempty"` }
func (*MetastoreConfigObservation) DeepCopy ¶
func (in *MetastoreConfigObservation) DeepCopy() *MetastoreConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreConfigObservation.
func (*MetastoreConfigObservation) DeepCopyInto ¶
func (in *MetastoreConfigObservation) DeepCopyInto(out *MetastoreConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreConfigParameters ¶
type MetastoreConfigParameters struct { // Resource name of an existing Dataproc Metastore service. // +kubebuilder:validation:Optional DataprocMetastoreService *string `json:"dataprocMetastoreService" tf:"dataproc_metastore_service,omitempty"` }
func (*MetastoreConfigParameters) DeepCopy ¶
func (in *MetastoreConfigParameters) DeepCopy() *MetastoreConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreConfigParameters.
func (*MetastoreConfigParameters) DeepCopyInto ¶
func (in *MetastoreConfigParameters) DeepCopyInto(out *MetastoreConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreService ¶
type MetastoreService struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec MetastoreServiceSpec `json:"spec"` Status MetastoreServiceStatus `json:"status,omitempty"` }
MetastoreService is the Schema for the MetastoreServices API. A managed metastore service that serves metadata queries. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*MetastoreService) DeepCopy ¶
func (in *MetastoreService) DeepCopy() *MetastoreService
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreService.
func (*MetastoreService) DeepCopyInto ¶
func (in *MetastoreService) DeepCopyInto(out *MetastoreService)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*MetastoreService) DeepCopyObject ¶
func (in *MetastoreService) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*MetastoreService) GetCondition ¶
func (mg *MetastoreService) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this MetastoreService.
func (*MetastoreService) GetConnectionDetailsMapping ¶
func (tr *MetastoreService) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this MetastoreService
func (*MetastoreService) GetDeletionPolicy ¶
func (mg *MetastoreService) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this MetastoreService.
func (*MetastoreService) GetID ¶
func (tr *MetastoreService) GetID() string
GetID returns ID of underlying Terraform resource of this MetastoreService
func (*MetastoreService) GetInitParameters ¶
func (tr *MetastoreService) GetInitParameters() (map[string]any, error)
GetInitParameters of this MetastoreService
func (*MetastoreService) GetManagementPolicies ¶
func (mg *MetastoreService) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this MetastoreService.
func (*MetastoreService) GetMergedParameters ¶
func (tr *MetastoreService) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this MetastoreService
func (*MetastoreService) GetObservation ¶
func (tr *MetastoreService) GetObservation() (map[string]any, error)
GetObservation of this MetastoreService
func (*MetastoreService) GetParameters ¶
func (tr *MetastoreService) GetParameters() (map[string]any, error)
GetParameters of this MetastoreService
func (*MetastoreService) GetProviderConfigReference ¶
func (mg *MetastoreService) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this MetastoreService.
func (*MetastoreService) GetPublishConnectionDetailsTo ¶
func (mg *MetastoreService) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this MetastoreService.
func (*MetastoreService) GetTerraformResourceType ¶
func (mg *MetastoreService) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this MetastoreService
func (*MetastoreService) GetTerraformSchemaVersion ¶
func (tr *MetastoreService) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*MetastoreService) GetWriteConnectionSecretToReference ¶
func (mg *MetastoreService) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this MetastoreService.
func (*MetastoreService) Hub ¶
func (tr *MetastoreService) Hub()
Hub marks this type as a conversion hub.
func (*MetastoreService) LateInitialize ¶
func (tr *MetastoreService) LateInitialize(attrs []byte) (bool, error)
LateInitialize this MetastoreService using its observed tfState. Returns true if there are any spec changes for the resource.
func (*MetastoreService) ResolveReferences ¶
ResolveReferences of this MetastoreService.
func (*MetastoreService) SetConditions ¶
func (mg *MetastoreService) SetConditions(c ...xpv1.Condition)
SetConditions of this MetastoreService.
func (*MetastoreService) SetDeletionPolicy ¶
func (mg *MetastoreService) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this MetastoreService.
func (*MetastoreService) SetManagementPolicies ¶
func (mg *MetastoreService) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this MetastoreService.
func (*MetastoreService) SetObservation ¶
func (tr *MetastoreService) SetObservation(obs map[string]any) error
SetObservation for this MetastoreService
func (*MetastoreService) SetParameters ¶
func (tr *MetastoreService) SetParameters(params map[string]any) error
SetParameters for this MetastoreService
func (*MetastoreService) SetProviderConfigReference ¶
func (mg *MetastoreService) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this MetastoreService.
func (*MetastoreService) SetPublishConnectionDetailsTo ¶
func (mg *MetastoreService) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this MetastoreService.
func (*MetastoreService) SetWriteConnectionSecretToReference ¶
func (mg *MetastoreService) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this MetastoreService.
type MetastoreServiceEncryptionConfigInitParameters ¶
type MetastoreServiceEncryptionConfigInitParameters struct { // The fully qualified customer provided Cloud KMS key name to use for customer data encryption. // Use the following format: projects/([^/]+)/locations/([^/]+)/keyRings/([^/]+)/cryptoKeys/([^/]+) // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta2.CryptoKey // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"` // Reference to a CryptoKey in kms to populate kmsKey. // +kubebuilder:validation:Optional KMSKeyRef *v1.Reference `json:"kmsKeyRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKey. // +kubebuilder:validation:Optional KMSKeySelector *v1.Selector `json:"kmsKeySelector,omitempty" tf:"-"` }
func (*MetastoreServiceEncryptionConfigInitParameters) DeepCopy ¶
func (in *MetastoreServiceEncryptionConfigInitParameters) DeepCopy() *MetastoreServiceEncryptionConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceEncryptionConfigInitParameters.
func (*MetastoreServiceEncryptionConfigInitParameters) DeepCopyInto ¶
func (in *MetastoreServiceEncryptionConfigInitParameters) DeepCopyInto(out *MetastoreServiceEncryptionConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceEncryptionConfigObservation ¶
type MetastoreServiceEncryptionConfigObservation struct { // The fully qualified customer provided Cloud KMS key name to use for customer data encryption. // Use the following format: projects/([^/]+)/locations/([^/]+)/keyRings/([^/]+)/cryptoKeys/([^/]+) KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"` }
func (*MetastoreServiceEncryptionConfigObservation) DeepCopy ¶
func (in *MetastoreServiceEncryptionConfigObservation) DeepCopy() *MetastoreServiceEncryptionConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceEncryptionConfigObservation.
func (*MetastoreServiceEncryptionConfigObservation) DeepCopyInto ¶
func (in *MetastoreServiceEncryptionConfigObservation) DeepCopyInto(out *MetastoreServiceEncryptionConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceEncryptionConfigParameters ¶
type MetastoreServiceEncryptionConfigParameters struct { // The fully qualified customer provided Cloud KMS key name to use for customer data encryption. // Use the following format: projects/([^/]+)/locations/([^/]+)/keyRings/([^/]+)/cryptoKeys/([^/]+) // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/kms/v1beta2.CryptoKey // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractResourceID() // +kubebuilder:validation:Optional KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"` // Reference to a CryptoKey in kms to populate kmsKey. // +kubebuilder:validation:Optional KMSKeyRef *v1.Reference `json:"kmsKeyRef,omitempty" tf:"-"` // Selector for a CryptoKey in kms to populate kmsKey. // +kubebuilder:validation:Optional KMSKeySelector *v1.Selector `json:"kmsKeySelector,omitempty" tf:"-"` }
func (*MetastoreServiceEncryptionConfigParameters) DeepCopy ¶
func (in *MetastoreServiceEncryptionConfigParameters) DeepCopy() *MetastoreServiceEncryptionConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceEncryptionConfigParameters.
func (*MetastoreServiceEncryptionConfigParameters) DeepCopyInto ¶
func (in *MetastoreServiceEncryptionConfigParameters) DeepCopyInto(out *MetastoreServiceEncryptionConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceInitParameters ¶
type MetastoreServiceInitParameters struct { // The database type that the Metastore service stores its data. // Default value is MYSQL. // Possible values are: MYSQL, SPANNER. DatabaseType *string `json:"databaseType,omitempty" tf:"database_type,omitempty"` // Information used to configure the Dataproc Metastore service to encrypt // customer data at rest. // Structure is documented below. EncryptionConfig *MetastoreServiceEncryptionConfigInitParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Configuration information specific to running Hive metastore software as the metastore service. // Structure is documented below. HiveMetastoreConfig *HiveMetastoreConfigInitParameters `json:"hiveMetastoreConfig,omitempty" tf:"hive_metastore_config,omitempty"` // User-defined labels for the metastore service. // Note: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field effective_labels for all of the labels present on the resource. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The one hour maintenance window of the metastore service. // This specifies when the service can be restarted for maintenance purposes in UTC time. // Maintenance window is not needed for services with the SPANNER database type. // Structure is documented below. MaintenanceWindow *MaintenanceWindowInitParameters `json:"maintenanceWindow,omitempty" tf:"maintenance_window,omitempty"` // The setting that defines how metastore metadata should be integrated with external services and systems. // Structure is documented below. MetadataIntegration *MetadataIntegrationInitParameters `json:"metadataIntegration,omitempty" tf:"metadata_integration,omitempty"` // The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form: // "projects/{projectNumber}/global/networks/{network_id}". 
Network *string `json:"network,omitempty" tf:"network,omitempty"` // The configuration specifying the network settings for the Dataproc Metastore service. // Structure is documented below. NetworkConfig *NetworkConfigInitParameters `json:"networkConfig,omitempty" tf:"network_config,omitempty"` // The TCP port at which the metastore service is reached. Default: 9083. Port *float64 `json:"port,omitempty" tf:"port,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // The release channel of the service. If unspecified, defaults to STABLE. // Default value is STABLE. // Possible values are: CANARY, STABLE. ReleaseChannel *string `json:"releaseChannel,omitempty" tf:"release_channel,omitempty"` // Represents the scaling configuration of a metastore service. // Structure is documented below. ScalingConfig *ScalingConfigInitParameters `json:"scalingConfig,omitempty" tf:"scaling_config,omitempty"` // The configuration of scheduled backup for the metastore service. // Structure is documented below. ScheduledBackup *ScheduledBackupInitParameters `json:"scheduledBackup,omitempty" tf:"scheduled_backup,omitempty"` // The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified defaults to JSON. // Structure is documented below. TelemetryConfig *TelemetryConfigInitParameters `json:"telemetryConfig,omitempty" tf:"telemetry_config,omitempty"` // The tier of the service. // Possible values are: DEVELOPER, ENTERPRISE. Tier *string `json:"tier,omitempty" tf:"tier,omitempty"` }
func (*MetastoreServiceInitParameters) DeepCopy ¶
func (in *MetastoreServiceInitParameters) DeepCopy() *MetastoreServiceInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceInitParameters.
func (*MetastoreServiceInitParameters) DeepCopyInto ¶
func (in *MetastoreServiceInitParameters) DeepCopyInto(out *MetastoreServiceInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceList ¶
type MetastoreServiceList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []MetastoreService `json:"items"` }
MetastoreServiceList contains a list of MetastoreServices
func (*MetastoreServiceList) DeepCopy ¶
func (in *MetastoreServiceList) DeepCopy() *MetastoreServiceList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceList.
func (*MetastoreServiceList) DeepCopyInto ¶
func (in *MetastoreServiceList) DeepCopyInto(out *MetastoreServiceList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*MetastoreServiceList) DeepCopyObject ¶
func (in *MetastoreServiceList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*MetastoreServiceList) GetItems ¶
func (l *MetastoreServiceList) GetItems() []resource.Managed
GetItems of this MetastoreServiceList.
type MetastoreServiceObservation ¶
type MetastoreServiceObservation struct { // A Cloud Storage URI (starting with gs://) that specifies where artifacts related to the metastore service are stored. ArtifactGcsURI *string `json:"artifactGcsUri,omitempty" tf:"artifact_gcs_uri,omitempty"` // The database type that the Metastore service stores its data. // Default value is MYSQL. // Possible values are: MYSQL, SPANNER. DatabaseType *string `json:"databaseType,omitempty" tf:"database_type,omitempty"` // +mapType=granular EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"` // Information used to configure the Dataproc Metastore service to encrypt // customer data at rest. // Structure is documented below. EncryptionConfig *MetastoreServiceEncryptionConfigObservation `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // The URI of the endpoint used to access the metastore service. EndpointURI *string `json:"endpointUri,omitempty" tf:"endpoint_uri,omitempty"` // Configuration information specific to running Hive metastore software as the metastore service. // Structure is documented below. HiveMetastoreConfig *HiveMetastoreConfigObservation `json:"hiveMetastoreConfig,omitempty" tf:"hive_metastore_config,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/services/{{service_id}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // User-defined labels for the metastore service. // Note: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field effective_labels for all of the labels present on the resource. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The location where the metastore service should reside. // The default value is global. Location *string `json:"location,omitempty" tf:"location,omitempty"` // The one hour maintenance window of the metastore service. 
// This specifies when the service can be restarted for maintenance purposes in UTC time. // Maintenance window is not needed for services with the SPANNER database type. // Structure is documented below. MaintenanceWindow *MaintenanceWindowObservation `json:"maintenanceWindow,omitempty" tf:"maintenance_window,omitempty"` // The setting that defines how metastore metadata should be integrated with external services and systems. // Structure is documented below. MetadataIntegration *MetadataIntegrationObservation `json:"metadataIntegration,omitempty" tf:"metadata_integration,omitempty"` // The relative resource name of the metastore service. Name *string `json:"name,omitempty" tf:"name,omitempty"` // The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form: // "projects/{projectNumber}/global/networks/{network_id}". Network *string `json:"network,omitempty" tf:"network,omitempty"` // The configuration specifying the network settings for the Dataproc Metastore service. // Structure is documented below. NetworkConfig *NetworkConfigObservation `json:"networkConfig,omitempty" tf:"network_config,omitempty"` // The TCP port at which the metastore service is reached. Default: 9083. Port *float64 `json:"port,omitempty" tf:"port,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. Project *string `json:"project,omitempty" tf:"project,omitempty"` // The release channel of the service. If unspecified, defaults to STABLE. // Default value is STABLE. // Possible values are: CANARY, STABLE. ReleaseChannel *string `json:"releaseChannel,omitempty" tf:"release_channel,omitempty"` // Represents the scaling configuration of a metastore service. // Structure is documented below. ScalingConfig *ScalingConfigObservation `json:"scalingConfig,omitempty" tf:"scaling_config,omitempty"` // The configuration of scheduled backup for the metastore service. 
// Structure is documented below. ScheduledBackup *ScheduledBackupObservation `json:"scheduledBackup,omitempty" tf:"scheduled_backup,omitempty"` // The current state of the metastore service. State *string `json:"state,omitempty" tf:"state,omitempty"` // Additional information about the current state of the metastore service, if available. StateMessage *string `json:"stateMessage,omitempty" tf:"state_message,omitempty"` // The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified defaults to JSON. // Structure is documented below. TelemetryConfig *TelemetryConfigObservation `json:"telemetryConfig,omitempty" tf:"telemetry_config,omitempty"` // The combination of labels configured directly on the resource // and default labels configured on the provider. // +mapType=granular TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"` // The tier of the service. // Possible values are: DEVELOPER, ENTERPRISE. Tier *string `json:"tier,omitempty" tf:"tier,omitempty"` // The globally unique resource identifier of the metastore service. UID *string `json:"uid,omitempty" tf:"uid,omitempty"` }
func (*MetastoreServiceObservation) DeepCopy ¶
func (in *MetastoreServiceObservation) DeepCopy() *MetastoreServiceObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceObservation.
func (*MetastoreServiceObservation) DeepCopyInto ¶
func (in *MetastoreServiceObservation) DeepCopyInto(out *MetastoreServiceObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceParameters ¶
type MetastoreServiceParameters struct { // The database type that the Metastore service stores its data. // Default value is MYSQL. // Possible values are: MYSQL, SPANNER. // +kubebuilder:validation:Optional DatabaseType *string `json:"databaseType,omitempty" tf:"database_type,omitempty"` // Information used to configure the Dataproc Metastore service to encrypt // customer data at rest. // Structure is documented below. // +kubebuilder:validation:Optional EncryptionConfig *MetastoreServiceEncryptionConfigParameters `json:"encryptionConfig,omitempty" tf:"encryption_config,omitempty"` // Configuration information specific to running Hive metastore software as the metastore service. // Structure is documented below. // +kubebuilder:validation:Optional HiveMetastoreConfig *HiveMetastoreConfigParameters `json:"hiveMetastoreConfig,omitempty" tf:"hive_metastore_config,omitempty"` // User-defined labels for the metastore service. // Note: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field effective_labels for all of the labels present on the resource. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The location where the metastore service should reside. // The default value is global. // +kubebuilder:validation:Optional Location *string `json:"location,omitempty" tf:"location,omitempty"` // The one hour maintenance window of the metastore service. // This specifies when the service can be restarted for maintenance purposes in UTC time. // Maintenance window is not needed for services with the SPANNER database type. // Structure is documented below. // +kubebuilder:validation:Optional MaintenanceWindow *MaintenanceWindowParameters `json:"maintenanceWindow,omitempty" tf:"maintenance_window,omitempty"` // The setting that defines how metastore metadata should be integrated with external services and systems. 
// Structure is documented below. // +kubebuilder:validation:Optional MetadataIntegration *MetadataIntegrationParameters `json:"metadataIntegration,omitempty" tf:"metadata_integration,omitempty"` // The relative resource name of the VPC network on which the instance can be accessed. It is specified in the following form: // "projects/{projectNumber}/global/networks/{network_id}". // +kubebuilder:validation:Optional Network *string `json:"network,omitempty" tf:"network,omitempty"` // The configuration specifying the network settings for the Dataproc Metastore service. // Structure is documented below. // +kubebuilder:validation:Optional NetworkConfig *NetworkConfigParameters `json:"networkConfig,omitempty" tf:"network_config,omitempty"` // The TCP port at which the metastore service is reached. Default: 9083. // +kubebuilder:validation:Optional Port *float64 `json:"port,omitempty" tf:"port,omitempty"` // The ID of the project in which the resource belongs. // If it is not provided, the provider project is used. // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // The release channel of the service. If unspecified, defaults to STABLE. // Default value is STABLE. // Possible values are: CANARY, STABLE. // +kubebuilder:validation:Optional ReleaseChannel *string `json:"releaseChannel,omitempty" tf:"release_channel,omitempty"` // Represents the scaling configuration of a metastore service. // Structure is documented below. // +kubebuilder:validation:Optional ScalingConfig *ScalingConfigParameters `json:"scalingConfig,omitempty" tf:"scaling_config,omitempty"` // The configuration of scheduled backup for the metastore service. // Structure is documented below. // +kubebuilder:validation:Optional ScheduledBackup *ScheduledBackupParameters `json:"scheduledBackup,omitempty" tf:"scheduled_backup,omitempty"` // The configuration specifying telemetry settings for the Dataproc Metastore service. If unspecified defaults to JSON. 
// Structure is documented below. // +kubebuilder:validation:Optional TelemetryConfig *TelemetryConfigParameters `json:"telemetryConfig,omitempty" tf:"telemetry_config,omitempty"` // The tier of the service. // Possible values are: DEVELOPER, ENTERPRISE. // +kubebuilder:validation:Optional Tier *string `json:"tier,omitempty" tf:"tier,omitempty"` }
func (*MetastoreServiceParameters) DeepCopy ¶
func (in *MetastoreServiceParameters) DeepCopy() *MetastoreServiceParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceParameters.
func (*MetastoreServiceParameters) DeepCopyInto ¶
func (in *MetastoreServiceParameters) DeepCopyInto(out *MetastoreServiceParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceSpec ¶
type MetastoreServiceSpec struct { v1.ResourceSpec `json:",inline"` ForProvider MetastoreServiceParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because of an external controller is managing them, like an // autoscaler. InitProvider MetastoreServiceInitParameters `json:"initProvider,omitempty"` }
MetastoreServiceSpec defines the desired state of MetastoreService
func (*MetastoreServiceSpec) DeepCopy ¶
func (in *MetastoreServiceSpec) DeepCopy() *MetastoreServiceSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceSpec.
func (*MetastoreServiceSpec) DeepCopyInto ¶
func (in *MetastoreServiceSpec) DeepCopyInto(out *MetastoreServiceSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetastoreServiceStatus ¶
type MetastoreServiceStatus struct { v1.ResourceStatus `json:",inline"` AtProvider MetastoreServiceObservation `json:"atProvider,omitempty"` }
MetastoreServiceStatus defines the observed state of MetastoreService.
func (*MetastoreServiceStatus) DeepCopy ¶
func (in *MetastoreServiceStatus) DeepCopy() *MetastoreServiceStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetastoreServiceStatus.
func (*MetastoreServiceStatus) DeepCopyInto ¶
func (in *MetastoreServiceStatus) DeepCopyInto(out *MetastoreServiceStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetricsInitParameters ¶
type MetricsInitParameters struct { // One or more [available OSS metrics] (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics) to collect for the metric source. // +listType=set MetricOverrides []*string `json:"metricOverrides,omitempty" tf:"metric_overrides,omitempty"` // A source for the collection of Dataproc OSS metrics (see available OSS metrics). MetricSource *string `json:"metricSource,omitempty" tf:"metric_source,omitempty"` }
func (*MetricsInitParameters) DeepCopy ¶
func (in *MetricsInitParameters) DeepCopy() *MetricsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricsInitParameters.
func (*MetricsInitParameters) DeepCopyInto ¶
func (in *MetricsInitParameters) DeepCopyInto(out *MetricsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetricsObservation ¶
type MetricsObservation struct { // One or more [available OSS metrics] (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics) to collect for the metric source. // +listType=set MetricOverrides []*string `json:"metricOverrides,omitempty" tf:"metric_overrides,omitempty"` // A source for the collection of Dataproc OSS metrics (see available OSS metrics). MetricSource *string `json:"metricSource,omitempty" tf:"metric_source,omitempty"` }
func (*MetricsObservation) DeepCopy ¶
func (in *MetricsObservation) DeepCopy() *MetricsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricsObservation.
func (*MetricsObservation) DeepCopyInto ¶
func (in *MetricsObservation) DeepCopyInto(out *MetricsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type MetricsParameters ¶
type MetricsParameters struct { // One or more [available OSS metrics] (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics) to collect for the metric source. // +kubebuilder:validation:Optional // +listType=set MetricOverrides []*string `json:"metricOverrides,omitempty" tf:"metric_overrides,omitempty"` // A source for the collection of Dataproc OSS metrics (see available OSS metrics). // +kubebuilder:validation:Optional MetricSource *string `json:"metricSource" tf:"metric_source,omitempty"` }
func (*MetricsParameters) DeepCopy ¶
func (in *MetricsParameters) DeepCopy() *MetricsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetricsParameters.
func (*MetricsParameters) DeepCopyInto ¶
func (in *MetricsParameters) DeepCopyInto(out *MetricsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NetworkConfigInitParameters ¶
type NetworkConfigInitParameters struct { // The consumer-side network configuration for the Dataproc Metastore instance. // Structure is documented below. Consumers []ConsumersInitParameters `json:"consumers,omitempty" tf:"consumers,omitempty"` }
func (*NetworkConfigInitParameters) DeepCopy ¶
func (in *NetworkConfigInitParameters) DeepCopy() *NetworkConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NetworkConfigInitParameters.
func (*NetworkConfigInitParameters) DeepCopyInto ¶
func (in *NetworkConfigInitParameters) DeepCopyInto(out *NetworkConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NetworkConfigObservation ¶
type NetworkConfigObservation struct { // The consumer-side network configuration for the Dataproc Metastore instance. // Structure is documented below. Consumers []ConsumersObservation `json:"consumers,omitempty" tf:"consumers,omitempty"` }
func (*NetworkConfigObservation) DeepCopy ¶
func (in *NetworkConfigObservation) DeepCopy() *NetworkConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NetworkConfigObservation.
func (*NetworkConfigObservation) DeepCopyInto ¶
func (in *NetworkConfigObservation) DeepCopyInto(out *NetworkConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NetworkConfigParameters ¶
type NetworkConfigParameters struct { // The consumer-side network configuration for the Dataproc Metastore instance. // Structure is documented below. // +kubebuilder:validation:Optional Consumers []ConsumersParameters `json:"consumers" tf:"consumers,omitempty"` }
func (*NetworkConfigParameters) DeepCopy ¶
func (in *NetworkConfigParameters) DeepCopy() *NetworkConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NetworkConfigParameters.
func (*NetworkConfigParameters) DeepCopyInto ¶
func (in *NetworkConfigParameters) DeepCopyInto(out *NetworkConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupAffinityInitParameters ¶
type NodeGroupAffinityInitParameters struct { // The URI of a sole-tenant node group resource that the cluster will be created on. NodeGroupURI *string `json:"nodeGroupUri,omitempty" tf:"node_group_uri,omitempty"` }
func (*NodeGroupAffinityInitParameters) DeepCopy ¶
func (in *NodeGroupAffinityInitParameters) DeepCopy() *NodeGroupAffinityInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupAffinityInitParameters.
func (*NodeGroupAffinityInitParameters) DeepCopyInto ¶
func (in *NodeGroupAffinityInitParameters) DeepCopyInto(out *NodeGroupAffinityInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupAffinityObservation ¶
type NodeGroupAffinityObservation struct { // The URI of a sole-tenant node group resource that the cluster will be created on. NodeGroupURI *string `json:"nodeGroupUri,omitempty" tf:"node_group_uri,omitempty"` }
func (*NodeGroupAffinityObservation) DeepCopy ¶
func (in *NodeGroupAffinityObservation) DeepCopy() *NodeGroupAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupAffinityObservation.
func (*NodeGroupAffinityObservation) DeepCopyInto ¶
func (in *NodeGroupAffinityObservation) DeepCopyInto(out *NodeGroupAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupAffinityParameters ¶
type NodeGroupAffinityParameters struct { // The URI of a sole-tenant node group resource that the cluster will be created on. // +kubebuilder:validation:Optional NodeGroupURI *string `json:"nodeGroupUri" tf:"node_group_uri,omitempty"` }
func (*NodeGroupAffinityParameters) DeepCopy ¶
func (in *NodeGroupAffinityParameters) DeepCopy() *NodeGroupAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupAffinityParameters.
func (*NodeGroupAffinityParameters) DeepCopyInto ¶
func (in *NodeGroupAffinityParameters) DeepCopyInto(out *NodeGroupAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupConfigInitParameters ¶
type NodeGroupConfigInitParameters struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. Accelerators []AcceleratorsInitParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig *DiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The name of a Compute Engine machine type. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Minimum CPU platform to be used by this instance. // The instance may be scheduled on the specified or a newer CPU platform. // Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge". MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // Please set a number greater than 0. Node Group must have at least 1 instance. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*NodeGroupConfigInitParameters) DeepCopy ¶
func (in *NodeGroupConfigInitParameters) DeepCopy() *NodeGroupConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupConfigInitParameters.
func (*NodeGroupConfigInitParameters) DeepCopyInto ¶
func (in *NodeGroupConfigInitParameters) DeepCopyInto(out *NodeGroupConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupConfigObservation ¶
type NodeGroupConfigObservation struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. Accelerators []AcceleratorsObservation `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config DiskConfig *DiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // List of worker instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // The name of a Compute Engine machine type. MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Minimum CPU platform to be used by this instance. // The instance may be scheduled on the specified or a newer CPU platform. // Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge". MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // Please set a number greater than 0. Node Group must have at least 1 instance. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*NodeGroupConfigObservation) DeepCopy ¶
func (in *NodeGroupConfigObservation) DeepCopy() *NodeGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupConfigObservation.
func (*NodeGroupConfigObservation) DeepCopyInto ¶
func (in *NodeGroupConfigObservation) DeepCopyInto(out *NodeGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupConfigParameters ¶
type NodeGroupConfigParameters struct { // The Compute Engine accelerator configuration for these instances. Can be specified multiple times. // +kubebuilder:validation:Optional Accelerators []AcceleratorsParameters `json:"accelerators,omitempty" tf:"accelerators,omitempty"` // Disk Config // +kubebuilder:validation:Optional DiskConfig *DiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // The name of a Compute Engine machine type. // +kubebuilder:validation:Optional MachineType *string `json:"machineType,omitempty" tf:"machine_type,omitempty"` // Minimum CPU platform to be used by this instance. // The instance may be scheduled on the specified or a newer CPU platform. // Specify the friendly names of CPU platforms, such as "Intel Haswell" or "Intel Sandy Bridge". // +kubebuilder:validation:Optional MinCPUPlatform *string `json:"minCpuPlatform,omitempty" tf:"min_cpu_platform,omitempty"` // Specifies the number of master nodes to create. // Please set a number greater than 0. Node Group must have at least 1 instance. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` }
func (*NodeGroupConfigParameters) DeepCopy ¶
func (in *NodeGroupConfigParameters) DeepCopy() *NodeGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupConfigParameters.
func (*NodeGroupConfigParameters) DeepCopyInto ¶
func (in *NodeGroupConfigParameters) DeepCopyInto(out *NodeGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupInitParameters ¶
type NodeGroupInitParameters struct { // The node group instance group configuration. NodeGroupConfig *NodeGroupConfigInitParameters `json:"nodeGroupConfig,omitempty" tf:"node_group_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". Roles []*string `json:"roles,omitempty" tf:"roles,omitempty"` }
func (*NodeGroupInitParameters) DeepCopy ¶
func (in *NodeGroupInitParameters) DeepCopy() *NodeGroupInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupInitParameters.
func (*NodeGroupInitParameters) DeepCopyInto ¶
func (in *NodeGroupInitParameters) DeepCopyInto(out *NodeGroupInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupObservation ¶
type NodeGroupObservation struct { // The name of the cluster, unique within the project and // zone. Name *string `json:"name,omitempty" tf:"name,omitempty"` // The node group instance group configuration. NodeGroupConfig *NodeGroupConfigObservation `json:"nodeGroupConfig,omitempty" tf:"node_group_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". Roles []*string `json:"roles,omitempty" tf:"roles,omitempty"` }
func (*NodeGroupObservation) DeepCopy ¶
func (in *NodeGroupObservation) DeepCopy() *NodeGroupObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupObservation.
func (*NodeGroupObservation) DeepCopyInto ¶
func (in *NodeGroupObservation) DeepCopyInto(out *NodeGroupObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodeGroupParameters ¶
type NodeGroupParameters struct { // The node group instance group configuration. // +kubebuilder:validation:Optional NodeGroupConfig *NodeGroupConfigParameters `json:"nodeGroupConfig,omitempty" tf:"node_group_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". // +kubebuilder:validation:Optional Roles []*string `json:"roles" tf:"roles,omitempty"` }
func (*NodeGroupParameters) DeepCopy ¶
func (in *NodeGroupParameters) DeepCopy() *NodeGroupParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodeGroupParameters.
func (*NodeGroupParameters) DeepCopyInto ¶
func (in *NodeGroupParameters) DeepCopyInto(out *NodeGroupParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolConfigInitParameters ¶
type NodePoolConfigInitParameters struct { // The autoscaler configuration for this node pool. // The autoscaler is enabled only when a valid configuration is present. Autoscaling *AutoscalingInitParameters `json:"autoscaling,omitempty" tf:"autoscaling,omitempty"` // The node pool configuration. Config *ConfigInitParameters `json:"config,omitempty" tf:"config,omitempty"` // The list of Compute Engine zones where node pool nodes associated // with a Dataproc on GKE virtual cluster will be located. // +listType=set Locations []*string `json:"locations,omitempty" tf:"locations,omitempty"` }
func (*NodePoolConfigInitParameters) DeepCopy ¶
func (in *NodePoolConfigInitParameters) DeepCopy() *NodePoolConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolConfigInitParameters.
func (*NodePoolConfigInitParameters) DeepCopyInto ¶
func (in *NodePoolConfigInitParameters) DeepCopyInto(out *NodePoolConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolConfigObservation ¶
type NodePoolConfigObservation struct { // The autoscaler configuration for this node pool. // The autoscaler is enabled only when a valid configuration is present. Autoscaling *AutoscalingObservation `json:"autoscaling,omitempty" tf:"autoscaling,omitempty"` // The node pool configuration. Config *ConfigObservation `json:"config,omitempty" tf:"config,omitempty"` // The list of Compute Engine zones where node pool nodes associated // with a Dataproc on GKE virtual cluster will be located. // +listType=set Locations []*string `json:"locations,omitempty" tf:"locations,omitempty"` }
func (*NodePoolConfigObservation) DeepCopy ¶
func (in *NodePoolConfigObservation) DeepCopy() *NodePoolConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolConfigObservation.
func (*NodePoolConfigObservation) DeepCopyInto ¶
func (in *NodePoolConfigObservation) DeepCopyInto(out *NodePoolConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolConfigParameters ¶
type NodePoolConfigParameters struct { // The autoscaler configuration for this node pool. // The autoscaler is enabled only when a valid configuration is present. // +kubebuilder:validation:Optional Autoscaling *AutoscalingParameters `json:"autoscaling,omitempty" tf:"autoscaling,omitempty"` // The node pool configuration. // +kubebuilder:validation:Optional Config *ConfigParameters `json:"config,omitempty" tf:"config,omitempty"` // The list of Compute Engine zones where node pool nodes associated // with a Dataproc on GKE virtual cluster will be located. // +kubebuilder:validation:Optional // +listType=set Locations []*string `json:"locations" tf:"locations,omitempty"` }
func (*NodePoolConfigParameters) DeepCopy ¶
func (in *NodePoolConfigParameters) DeepCopy() *NodePoolConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolConfigParameters.
func (*NodePoolConfigParameters) DeepCopyInto ¶
func (in *NodePoolConfigParameters) DeepCopyInto(out *NodePoolConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolTargetInitParameters ¶
type NodePoolTargetInitParameters struct { // The target GKE node pool. NodePool *string `json:"nodePool,omitempty" tf:"node_pool,omitempty"` // (Input only) The configuration for the GKE node pool. // If specified, Dataproc attempts to create a node pool with the specified shape. // If one with the same name already exists, it is verified against all specified fields. // If a field differs, the virtual cluster creation will fail. NodePoolConfig *NodePoolConfigInitParameters `json:"nodePoolConfig,omitempty" tf:"node_pool_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". // +listType=set Roles []*string `json:"roles,omitempty" tf:"roles,omitempty"` }
func (*NodePoolTargetInitParameters) DeepCopy ¶
func (in *NodePoolTargetInitParameters) DeepCopy() *NodePoolTargetInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolTargetInitParameters.
func (*NodePoolTargetInitParameters) DeepCopyInto ¶
func (in *NodePoolTargetInitParameters) DeepCopyInto(out *NodePoolTargetInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolTargetObservation ¶
type NodePoolTargetObservation struct { // The target GKE node pool. NodePool *string `json:"nodePool,omitempty" tf:"node_pool,omitempty"` // (Input only) The configuration for the GKE node pool. // If specified, Dataproc attempts to create a node pool with the specified shape. // If one with the same name already exists, it is verified against all specified fields. // If a field differs, the virtual cluster creation will fail. NodePoolConfig *NodePoolConfigObservation `json:"nodePoolConfig,omitempty" tf:"node_pool_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". // +listType=set Roles []*string `json:"roles,omitempty" tf:"roles,omitempty"` }
func (*NodePoolTargetObservation) DeepCopy ¶
func (in *NodePoolTargetObservation) DeepCopy() *NodePoolTargetObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolTargetObservation.
func (*NodePoolTargetObservation) DeepCopyInto ¶
func (in *NodePoolTargetObservation) DeepCopyInto(out *NodePoolTargetObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type NodePoolTargetParameters ¶
type NodePoolTargetParameters struct { // The target GKE node pool. // +kubebuilder:validation:Optional NodePool *string `json:"nodePool" tf:"node_pool,omitempty"` // (Input only) The configuration for the GKE node pool. // If specified, Dataproc attempts to create a node pool with the specified shape. // If one with the same name already exists, it is verified against all specified fields. // If a field differs, the virtual cluster creation will fail. // +kubebuilder:validation:Optional NodePoolConfig *NodePoolConfigParameters `json:"nodePoolConfig,omitempty" tf:"node_pool_config,omitempty"` // The roles associated with the GKE node pool. // One of "DEFAULT", "CONTROLLER", "SPARK_DRIVER" or "SPARK_EXECUTOR". // +kubebuilder:validation:Optional // +listType=set Roles []*string `json:"roles" tf:"roles,omitempty"` }
func (*NodePoolTargetParameters) DeepCopy ¶
func (in *NodePoolTargetParameters) DeepCopy() *NodePoolTargetParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NodePoolTargetParameters.
func (*NodePoolTargetParameters) DeepCopyInto ¶
func (in *NodePoolTargetParameters) DeepCopyInto(out *NodePoolTargetParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParametersInitParameters ¶
type ParametersInitParameters struct { // Brief description of the parameter. Must not exceed 1024 characters. Description *string `json:"description,omitempty" tf:"description,omitempty"` // Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a .sparkJob.args Fields []*string `json:"fields,omitempty" tf:"fields,omitempty"` // Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters. Name *string `json:"name,omitempty" tf:"name,omitempty"` // Validation rules to be applied to this parameter's value. Validation *ValidationInitParameters `json:"validation,omitempty" tf:"validation,omitempty"` }
func (*ParametersInitParameters) DeepCopy ¶
func (in *ParametersInitParameters) DeepCopy() *ParametersInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParametersInitParameters.
func (*ParametersInitParameters) DeepCopyInto ¶
func (in *ParametersInitParameters) DeepCopyInto(out *ParametersInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParametersObservation ¶
type ParametersObservation struct { // Brief description of the parameter. Must not exceed 1024 characters. Description *string `json:"description,omitempty" tf:"description,omitempty"` // Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a .sparkJob.args Fields []*string `json:"fields,omitempty" tf:"fields,omitempty"` // Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters. Name *string `json:"name,omitempty" tf:"name,omitempty"` // Validation rules to be applied to this parameter's value. Validation *ValidationObservation `json:"validation,omitempty" tf:"validation,omitempty"` }
func (*ParametersObservation) DeepCopy ¶
func (in *ParametersObservation) DeepCopy() *ParametersObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParametersObservation.
func (*ParametersObservation) DeepCopyInto ¶
func (in *ParametersObservation) DeepCopyInto(out *ParametersObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ParametersParameters ¶
type ParametersParameters struct { // Brief description of the parameter. Must not exceed 1024 characters. // +kubebuilder:validation:Optional Description *string `json:"description,omitempty" tf:"description,omitempty"` // Required. Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a .sparkJob.args // +kubebuilder:validation:Optional Fields []*string `json:"fields" tf:"fields,omitempty"` // Required. Parameter name. The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters. // +kubebuilder:validation:Optional Name *string `json:"name" tf:"name,omitempty"` // Validation rules to be applied to this parameter's value. // +kubebuilder:validation:Optional Validation *ValidationParameters `json:"validation,omitempty" tf:"validation,omitempty"` }
func (*ParametersParameters) DeepCopy ¶
func (in *ParametersParameters) DeepCopy() *ParametersParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParametersParameters.
func (*ParametersParameters) DeepCopyInto ¶
func (in *ParametersParameters) DeepCopyInto(out *ParametersParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigInitParameters ¶
type PigConfigInitParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *PigConfigLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Pig script to execute as the job. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Pig queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigConfigInitParameters) DeepCopy ¶
func (in *PigConfigInitParameters) DeepCopy() *PigConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigInitParameters.
func (*PigConfigInitParameters) DeepCopyInto ¶
func (in *PigConfigInitParameters) DeepCopyInto(out *PigConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigLoggingConfigInitParameters ¶
type PigConfigLoggingConfigInitParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigConfigLoggingConfigInitParameters) DeepCopy ¶
func (in *PigConfigLoggingConfigInitParameters) DeepCopy() *PigConfigLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigLoggingConfigInitParameters.
func (*PigConfigLoggingConfigInitParameters) DeepCopyInto ¶
func (in *PigConfigLoggingConfigInitParameters) DeepCopyInto(out *PigConfigLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigLoggingConfigObservation ¶
type PigConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigConfigLoggingConfigObservation) DeepCopy ¶
func (in *PigConfigLoggingConfigObservation) DeepCopy() *PigConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigLoggingConfigObservation.
func (*PigConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *PigConfigLoggingConfigObservation) DeepCopyInto(out *PigConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigLoggingConfigParameters ¶
type PigConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*PigConfigLoggingConfigParameters) DeepCopy ¶
func (in *PigConfigLoggingConfigParameters) DeepCopy() *PigConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigLoggingConfigParameters.
func (*PigConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *PigConfigLoggingConfigParameters) DeepCopyInto(out *PigConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigObservation ¶
type PigConfigObservation struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *PigConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Pig script to execute as the job. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Pig queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigConfigObservation) DeepCopy ¶
func (in *PigConfigObservation) DeepCopy() *PigConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigObservation.
func (*PigConfigObservation) DeepCopyInto ¶
func (in *PigConfigObservation) DeepCopyInto(out *PigConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigConfigParameters ¶
type PigConfigParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig *PigConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS URI of file containing Pig script to execute as the job. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of Pig queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +kubebuilder:validation:Optional // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigConfigParameters) DeepCopy ¶
func (in *PigConfigParameters) DeepCopy() *PigConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigConfigParameters.
func (*PigConfigParameters) DeepCopyInto ¶
func (in *PigConfigParameters) DeepCopyInto(out *PigConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobInitParameters ¶
type PigJobInitParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *PigJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains the Pig queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *PigJobQueryListInitParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigJobInitParameters) DeepCopy ¶
func (in *PigJobInitParameters) DeepCopy() *PigJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobInitParameters.
func (*PigJobInitParameters) DeepCopyInto ¶
func (in *PigJobInitParameters) DeepCopyInto(out *PigJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobLoggingConfigInitParameters ¶
type PigJobLoggingConfigInitParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigJobLoggingConfigInitParameters) DeepCopy ¶
func (in *PigJobLoggingConfigInitParameters) DeepCopy() *PigJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobLoggingConfigInitParameters.
func (*PigJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *PigJobLoggingConfigInitParameters) DeepCopyInto(out *PigJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobLoggingConfigObservation ¶
type PigJobLoggingConfigObservation struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigJobLoggingConfigObservation) DeepCopy ¶
func (in *PigJobLoggingConfigObservation) DeepCopy() *PigJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobLoggingConfigObservation.
func (*PigJobLoggingConfigObservation) DeepCopyInto ¶
func (in *PigJobLoggingConfigObservation) DeepCopyInto(out *PigJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobLoggingConfigParameters ¶
type PigJobLoggingConfigParameters struct { // The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PigJobLoggingConfigParameters) DeepCopy ¶
func (in *PigJobLoggingConfigParameters) DeepCopy() *PigJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobLoggingConfigParameters.
func (*PigJobLoggingConfigParameters) DeepCopyInto ¶
func (in *PigJobLoggingConfigParameters) DeepCopyInto(out *PigJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobObservation ¶
type PigJobObservation struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *PigJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains the Pig queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *PigJobQueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigJobObservation) DeepCopy ¶
func (in *PigJobObservation) DeepCopy() *PigJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobObservation.
func (*PigJobObservation) DeepCopyInto ¶
func (in *PigJobObservation) DeepCopyInto(out *PigJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobParameters ¶
type PigJobParameters struct { // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *PigJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains the Pig queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList *PigJobQueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // +kubebuilder:validation:Optional // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*PigJobParameters) DeepCopy ¶
func (in *PigJobParameters) DeepCopy() *PigJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobParameters.
func (*PigJobParameters) DeepCopyInto ¶
func (in *PigJobParameters) DeepCopyInto(out *PigJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobQueryListInitParameters ¶
type PigJobQueryListInitParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*PigJobQueryListInitParameters) DeepCopy ¶
func (in *PigJobQueryListInitParameters) DeepCopy() *PigJobQueryListInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobQueryListInitParameters.
func (*PigJobQueryListInitParameters) DeepCopyInto ¶
func (in *PigJobQueryListInitParameters) DeepCopyInto(out *PigJobQueryListInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobQueryListObservation ¶
type PigJobQueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*PigJobQueryListObservation) DeepCopy ¶
func (in *PigJobQueryListObservation) DeepCopy() *PigJobQueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobQueryListObservation.
func (*PigJobQueryListObservation) DeepCopyInto ¶
func (in *PigJobQueryListObservation) DeepCopyInto(out *PigJobQueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PigJobQueryListParameters ¶
type PigJobQueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } } // +kubebuilder:validation:Optional Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*PigJobQueryListParameters) DeepCopy ¶
func (in *PigJobQueryListParameters) DeepCopy() *PigJobQueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PigJobQueryListParameters.
func (*PigJobQueryListParameters) DeepCopyInto ¶
func (in *PigJobQueryListParameters) DeepCopyInto(out *PigJobQueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PlacementInitParameters ¶
type PlacementInitParameters struct { // The name of the cluster where the job // will be submitted. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta2.Cluster // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("name",false) ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // Reference to a Cluster in dataproc to populate clusterName. // +kubebuilder:validation:Optional ClusterNameRef *v1.Reference `json:"clusterNameRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate clusterName. // +kubebuilder:validation:Optional ClusterNameSelector *v1.Selector `json:"clusterNameSelector,omitempty" tf:"-"` }
func (*PlacementInitParameters) DeepCopy ¶
func (in *PlacementInitParameters) DeepCopy() *PlacementInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PlacementInitParameters.
func (*PlacementInitParameters) DeepCopyInto ¶
func (in *PlacementInitParameters) DeepCopyInto(out *PlacementInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PlacementObservation ¶
type PlacementObservation struct { // The name of the cluster where the job // will be submitted. ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // A cluster UUID generated by the Cloud Dataproc service when the job is submitted. ClusterUUID *string `json:"clusterUuid,omitempty" tf:"cluster_uuid,omitempty"` }
func (*PlacementObservation) DeepCopy ¶
func (in *PlacementObservation) DeepCopy() *PlacementObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PlacementObservation.
func (*PlacementObservation) DeepCopyInto ¶
func (in *PlacementObservation) DeepCopyInto(out *PlacementObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PlacementParameters ¶
type PlacementParameters struct { // The name of the cluster where the job // will be submitted. // +crossplane:generate:reference:type=github.com/upbound/provider-gcp/apis/dataproc/v1beta2.Cluster // +crossplane:generate:reference:extractor=github.com/crossplane/upjet/pkg/resource.ExtractParamPath("name",false) // +kubebuilder:validation:Optional ClusterName *string `json:"clusterName,omitempty" tf:"cluster_name,omitempty"` // Reference to a Cluster in dataproc to populate clusterName. // +kubebuilder:validation:Optional ClusterNameRef *v1.Reference `json:"clusterNameRef,omitempty" tf:"-"` // Selector for a Cluster in dataproc to populate clusterName. // +kubebuilder:validation:Optional ClusterNameSelector *v1.Selector `json:"clusterNameSelector,omitempty" tf:"-"` }
func (*PlacementParameters) DeepCopy ¶
func (in *PlacementParameters) DeepCopy() *PlacementParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PlacementParameters.
func (*PlacementParameters) DeepCopyInto ¶
func (in *PlacementParameters) DeepCopyInto(out *PlacementParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigDiskConfigInitParameters ¶
type PreemptibleWorkerConfigDiskConfigInitParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*PreemptibleWorkerConfigDiskConfigInitParameters) DeepCopy ¶
func (in *PreemptibleWorkerConfigDiskConfigInitParameters) DeepCopy() *PreemptibleWorkerConfigDiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigDiskConfigInitParameters.
func (*PreemptibleWorkerConfigDiskConfigInitParameters) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigDiskConfigInitParameters) DeepCopyInto(out *PreemptibleWorkerConfigDiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigDiskConfigObservation ¶
type PreemptibleWorkerConfigDiskConfigObservation struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*PreemptibleWorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *PreemptibleWorkerConfigDiskConfigObservation) DeepCopy() *PreemptibleWorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigDiskConfigObservation.
func (*PreemptibleWorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigDiskConfigObservation) DeepCopyInto(out *PreemptibleWorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigDiskConfigParameters ¶
type PreemptibleWorkerConfigDiskConfigParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. // +kubebuilder:validation:Optional LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*PreemptibleWorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *PreemptibleWorkerConfigDiskConfigParameters) DeepCopy() *PreemptibleWorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigDiskConfigParameters.
func (*PreemptibleWorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigDiskConfigParameters) DeepCopyInto(out *PreemptibleWorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigInitParameters ¶
type PreemptibleWorkerConfigInitParameters struct { // Disk Config DiskConfig *PreemptibleWorkerConfigDiskConfigInitParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Instance flexibility Policy allowing a mixture of VM shapes and provisioning models. InstanceFlexibilityPolicy *InstanceFlexibilityPolicyInitParameters `json:"instanceFlexibilityPolicy,omitempty" tf:"instance_flexibility_policy,omitempty"` // Specifies the number of preemptible nodes to create. // Defaults to 0. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the secondary workers. The default value is PREEMPTIBLE // Accepted values are: Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*PreemptibleWorkerConfigInitParameters) DeepCopy ¶
func (in *PreemptibleWorkerConfigInitParameters) DeepCopy() *PreemptibleWorkerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigInitParameters.
func (*PreemptibleWorkerConfigInitParameters) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigInitParameters) DeepCopyInto(out *PreemptibleWorkerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigObservation ¶
type PreemptibleWorkerConfigObservation struct { // Disk Config DiskConfig *PreemptibleWorkerConfigDiskConfigObservation `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Instance flexibility Policy allowing a mixture of VM shapes and provisioning models. InstanceFlexibilityPolicy *InstanceFlexibilityPolicyObservation `json:"instanceFlexibilityPolicy,omitempty" tf:"instance_flexibility_policy,omitempty"` // List of worker instance names which have been assigned // to the cluster. InstanceNames []*string `json:"instanceNames,omitempty" tf:"instance_names,omitempty"` // Specifies the number of preemptible nodes to create. // Defaults to 0. NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the secondary workers. The default value is PREEMPTIBLE // Accepted values are: Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*PreemptibleWorkerConfigObservation) DeepCopy ¶
func (in *PreemptibleWorkerConfigObservation) DeepCopy() *PreemptibleWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigObservation.
func (*PreemptibleWorkerConfigObservation) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigObservation) DeepCopyInto(out *PreemptibleWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PreemptibleWorkerConfigParameters ¶
type PreemptibleWorkerConfigParameters struct { // Disk Config // +kubebuilder:validation:Optional DiskConfig *PreemptibleWorkerConfigDiskConfigParameters `json:"diskConfig,omitempty" tf:"disk_config,omitempty"` // Instance flexibility Policy allowing a mixture of VM shapes and provisioning models. // +kubebuilder:validation:Optional InstanceFlexibilityPolicy *InstanceFlexibilityPolicyParameters `json:"instanceFlexibilityPolicy,omitempty" tf:"instance_flexibility_policy,omitempty"` // Specifies the number of preemptible nodes to create. // Defaults to 0. // +kubebuilder:validation:Optional NumInstances *float64 `json:"numInstances,omitempty" tf:"num_instances,omitempty"` // Specifies the preemptibility of the secondary workers. The default value is PREEMPTIBLE // Accepted values are: // +kubebuilder:validation:Optional Preemptibility *string `json:"preemptibility,omitempty" tf:"preemptibility,omitempty"` }
func (*PreemptibleWorkerConfigParameters) DeepCopy ¶
func (in *PreemptibleWorkerConfigParameters) DeepCopy() *PreemptibleWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PreemptibleWorkerConfigParameters.
func (*PreemptibleWorkerConfigParameters) DeepCopyInto ¶
func (in *PreemptibleWorkerConfigParameters) DeepCopyInto(out *PreemptibleWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigInitParameters ¶
type PrestoConfigInitParameters struct { // Presto client tags to attach to this query. ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` LoggingConfig *PrestoConfigLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats. OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoConfigInitParameters) DeepCopy ¶
func (in *PrestoConfigInitParameters) DeepCopy() *PrestoConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigInitParameters.
func (*PrestoConfigInitParameters) DeepCopyInto ¶
func (in *PrestoConfigInitParameters) DeepCopyInto(out *PrestoConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigLoggingConfigInitParameters ¶
type PrestoConfigLoggingConfigInitParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PrestoConfigLoggingConfigInitParameters) DeepCopy ¶
func (in *PrestoConfigLoggingConfigInitParameters) DeepCopy() *PrestoConfigLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigLoggingConfigInitParameters.
func (*PrestoConfigLoggingConfigInitParameters) DeepCopyInto ¶
func (in *PrestoConfigLoggingConfigInitParameters) DeepCopyInto(out *PrestoConfigLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigLoggingConfigObservation ¶
type PrestoConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*PrestoConfigLoggingConfigObservation) DeepCopy ¶
func (in *PrestoConfigLoggingConfigObservation) DeepCopy() *PrestoConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigLoggingConfigObservation.
func (*PrestoConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *PrestoConfigLoggingConfigObservation) DeepCopyInto(out *PrestoConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigLoggingConfigParameters ¶
type PrestoConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*PrestoConfigLoggingConfigParameters) DeepCopy ¶
func (in *PrestoConfigLoggingConfigParameters) DeepCopy() *PrestoConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigLoggingConfigParameters.
func (*PrestoConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *PrestoConfigLoggingConfigParameters) DeepCopyInto(out *PrestoConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigObservation ¶
type PrestoConfigObservation struct { // Presto client tags to attach to this query. ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` LoggingConfig *PrestoConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats. OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoConfigObservation) DeepCopy ¶
func (in *PrestoConfigObservation) DeepCopy() *PrestoConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigObservation.
func (*PrestoConfigObservation) DeepCopyInto ¶
func (in *PrestoConfigObservation) DeepCopyInto(out *PrestoConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoConfigParameters ¶
type PrestoConfigParameters struct { // Presto client tags to attach to this query. // +kubebuilder:validation:Optional ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // +kubebuilder:validation:Optional LoggingConfig *PrestoConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats. // +kubebuilder:validation:Optional OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoConfigParameters) DeepCopy ¶
func (in *PrestoConfigParameters) DeepCopy() *PrestoConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoConfigParameters.
func (*PrestoConfigParameters) DeepCopyInto ¶
func (in *PrestoConfigParameters) DeepCopyInto(out *PrestoConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobInitParameters ¶
type PrestoJobInitParameters struct { // Presto client tags to attach to this query ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // The runtime log config for job execution. LoggingConfig *PrestoJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *PrestoJobQueryListInitParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoJobInitParameters) DeepCopy ¶
func (in *PrestoJobInitParameters) DeepCopy() *PrestoJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobInitParameters.
func (*PrestoJobInitParameters) DeepCopyInto ¶
func (in *PrestoJobInitParameters) DeepCopyInto(out *PrestoJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobLoggingConfigInitParameters ¶
// PrestoJobLoggingConfigInitParameters declares the late-initializable driver
// logging configuration for a Presto job.
type PrestoJobLoggingConfigInitParameters struct {

	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PrestoJobLoggingConfigInitParameters) DeepCopy ¶
func (in *PrestoJobLoggingConfigInitParameters) DeepCopy() *PrestoJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobLoggingConfigInitParameters.
func (*PrestoJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *PrestoJobLoggingConfigInitParameters) DeepCopyInto(out *PrestoJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobLoggingConfigObservation ¶
// PrestoJobLoggingConfigObservation reports the observed driver logging
// configuration for a Presto job.
type PrestoJobLoggingConfigObservation struct {

	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PrestoJobLoggingConfigObservation) DeepCopy ¶
func (in *PrestoJobLoggingConfigObservation) DeepCopy() *PrestoJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobLoggingConfigObservation.
func (*PrestoJobLoggingConfigObservation) DeepCopyInto ¶
func (in *PrestoJobLoggingConfigObservation) DeepCopyInto(out *PrestoJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobLoggingConfigParameters ¶
// PrestoJobLoggingConfigParameters declares the desired driver logging
// configuration for a Presto job.
type PrestoJobLoggingConfigParameters struct {

	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PrestoJobLoggingConfigParameters) DeepCopy ¶
func (in *PrestoJobLoggingConfigParameters) DeepCopy() *PrestoJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobLoggingConfigParameters.
func (*PrestoJobLoggingConfigParameters) DeepCopyInto ¶
func (in *PrestoJobLoggingConfigParameters) DeepCopyInto(out *PrestoJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobObservation ¶
type PrestoJobObservation struct { // Presto client tags to attach to this query ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // The runtime log config for job execution. LoggingConfig *PrestoJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *PrestoJobQueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoJobObservation) DeepCopy ¶
func (in *PrestoJobObservation) DeepCopy() *PrestoJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobObservation.
func (*PrestoJobObservation) DeepCopyInto ¶
func (in *PrestoJobObservation) DeepCopyInto(out *PrestoJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobParameters ¶
type PrestoJobParameters struct { // Presto client tags to attach to this query // +kubebuilder:validation:Optional ClientTags []*string `json:"clientTags,omitempty" tf:"client_tags,omitempty"` // Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. // +kubebuilder:validation:Optional ContinueOnFailure *bool `json:"continueOnFailure,omitempty" tf:"continue_on_failure,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *PrestoJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The format in which query output will be displayed. See the Presto documentation for supported output formats // +kubebuilder:validation:Optional OutputFormat *string `json:"outputFormat,omitempty" tf:"output_format,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList *PrestoJobQueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` }
func (*PrestoJobParameters) DeepCopy ¶
func (in *PrestoJobParameters) DeepCopy() *PrestoJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobParameters.
func (*PrestoJobParameters) DeepCopyInto ¶
func (in *PrestoJobParameters) DeepCopyInto(out *PrestoJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobQueryListInitParameters ¶
// PrestoJobQueryListInitParameters declares the late-initializable list of
// queries for a Presto job.
type PrestoJobQueryListInitParameters struct {

	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob:
	// "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
	Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"`
}
func (*PrestoJobQueryListInitParameters) DeepCopy ¶
func (in *PrestoJobQueryListInitParameters) DeepCopy() *PrestoJobQueryListInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobQueryListInitParameters.
func (*PrestoJobQueryListInitParameters) DeepCopyInto ¶
func (in *PrestoJobQueryListInitParameters) DeepCopyInto(out *PrestoJobQueryListInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobQueryListObservation ¶
// PrestoJobQueryListObservation reports the observed list of queries for a
// Presto job.
type PrestoJobQueryListObservation struct {

	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob:
	// "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
	Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"`
}
func (*PrestoJobQueryListObservation) DeepCopy ¶
func (in *PrestoJobQueryListObservation) DeepCopy() *PrestoJobQueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobQueryListObservation.
func (*PrestoJobQueryListObservation) DeepCopyInto ¶
func (in *PrestoJobQueryListObservation) DeepCopyInto(out *PrestoJobQueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PrestoJobQueryListParameters ¶
// PrestoJobQueryListParameters declares the desired list of queries for a
// Presto job.
type PrestoJobQueryListParameters struct {

	// Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob:
	// "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4" ] } }
	// +kubebuilder:validation:Optional
	Queries []*string `json:"queries" tf:"queries,omitempty"`
}
func (*PrestoJobQueryListParameters) DeepCopy ¶
func (in *PrestoJobQueryListParameters) DeepCopy() *PrestoJobQueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrestoJobQueryListParameters.
func (*PrestoJobQueryListParameters) DeepCopyInto ¶
func (in *PrestoJobQueryListParameters) DeepCopyInto(out *PrestoJobQueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigInitParameters ¶
type PysparkConfigInitParameters struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *PysparkConfigLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The HCFS URI of the main Python file to use as the driver. Must be a .py file. MainPythonFileURI *string `json:"mainPythonFileUri,omitempty" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkConfigInitParameters) DeepCopy ¶
func (in *PysparkConfigInitParameters) DeepCopy() *PysparkConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigInitParameters.
func (*PysparkConfigInitParameters) DeepCopyInto ¶
func (in *PysparkConfigInitParameters) DeepCopyInto(out *PysparkConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigLoggingConfigInitParameters ¶
// PysparkConfigLoggingConfigInitParameters declares the late-initializable
// driver logging configuration for a PySpark job config.
type PysparkConfigLoggingConfigInitParameters struct {

	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PysparkConfigLoggingConfigInitParameters) DeepCopy ¶
func (in *PysparkConfigLoggingConfigInitParameters) DeepCopy() *PysparkConfigLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigLoggingConfigInitParameters.
func (*PysparkConfigLoggingConfigInitParameters) DeepCopyInto ¶
func (in *PysparkConfigLoggingConfigInitParameters) DeepCopyInto(out *PysparkConfigLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigLoggingConfigObservation ¶
// PysparkConfigLoggingConfigObservation reports the observed driver logging
// configuration for a PySpark job config.
type PysparkConfigLoggingConfigObservation struct {

	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PysparkConfigLoggingConfigObservation) DeepCopy ¶
func (in *PysparkConfigLoggingConfigObservation) DeepCopy() *PysparkConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigLoggingConfigObservation.
func (*PysparkConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *PysparkConfigLoggingConfigObservation) DeepCopyInto(out *PysparkConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigLoggingConfigParameters ¶
// PysparkConfigLoggingConfigParameters declares the desired driver logging
// configuration for a PySpark job config.
type PysparkConfigLoggingConfigParameters struct {

	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"`
}
func (*PysparkConfigLoggingConfigParameters) DeepCopy ¶
func (in *PysparkConfigLoggingConfigParameters) DeepCopy() *PysparkConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigLoggingConfigParameters.
func (*PysparkConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *PysparkConfigLoggingConfigParameters) DeepCopyInto(out *PysparkConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigObservation ¶
type PysparkConfigObservation struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *PysparkConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The HCFS URI of the main Python file to use as the driver. Must be a .py file. MainPythonFileURI *string `json:"mainPythonFileUri,omitempty" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkConfigObservation) DeepCopy ¶
func (in *PysparkConfigObservation) DeepCopy() *PysparkConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigObservation.
func (*PysparkConfigObservation) DeepCopyInto ¶
func (in *PysparkConfigObservation) DeepCopyInto(out *PysparkConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkConfigParameters ¶
type PysparkConfigParameters struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig *PysparkConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The HCFS URI of the main Python file to use as the driver. Must be a .py file. // +kubebuilder:validation:Optional MainPythonFileURI *string `json:"mainPythonFileUri" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. // +kubebuilder:validation:Optional PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkConfigParameters) DeepCopy ¶
func (in *PysparkConfigParameters) DeepCopy() *PysparkConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkConfigParameters.
func (*PysparkConfigParameters) DeepCopyInto ¶
func (in *PysparkConfigParameters) DeepCopyInto(out *PysparkConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobInitParameters ¶
type PysparkJobInitParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *PysparkJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file. MainPythonFileURI *string `json:"mainPythonFileUri,omitempty" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkJobInitParameters) DeepCopy ¶
func (in *PysparkJobInitParameters) DeepCopy() *PysparkJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobInitParameters.
func (*PysparkJobInitParameters) DeepCopyInto ¶
func (in *PysparkJobInitParameters) DeepCopyInto(out *PysparkJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobLoggingConfigInitParameters ¶
// PysparkJobLoggingConfigInitParameters declares the late-initializable driver
// logging configuration for a PySpark job.
type PysparkJobLoggingConfigInitParameters struct {

	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PysparkJobLoggingConfigInitParameters) DeepCopy ¶
func (in *PysparkJobLoggingConfigInitParameters) DeepCopy() *PysparkJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobLoggingConfigInitParameters.
func (*PysparkJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *PysparkJobLoggingConfigInitParameters) DeepCopyInto(out *PysparkJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobLoggingConfigObservation ¶
// PysparkJobLoggingConfigObservation reports the observed driver logging
// configuration for a PySpark job.
type PysparkJobLoggingConfigObservation struct {

	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PysparkJobLoggingConfigObservation) DeepCopy ¶
func (in *PysparkJobLoggingConfigObservation) DeepCopy() *PysparkJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobLoggingConfigObservation.
func (*PysparkJobLoggingConfigObservation) DeepCopyInto ¶
func (in *PysparkJobLoggingConfigObservation) DeepCopyInto(out *PysparkJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobLoggingConfigParameters ¶
// PysparkJobLoggingConfigParameters declares the desired driver logging
// configuration for a PySpark job.
type PysparkJobLoggingConfigParameters struct {

	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*PysparkJobLoggingConfigParameters) DeepCopy ¶
func (in *PysparkJobLoggingConfigParameters) DeepCopy() *PysparkJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobLoggingConfigParameters.
func (*PysparkJobLoggingConfigParameters) DeepCopyInto ¶
func (in *PysparkJobLoggingConfigParameters) DeepCopyInto(out *PysparkJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobObservation ¶
type PysparkJobObservation struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *PysparkJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file. MainPythonFileURI *string `json:"mainPythonFileUri,omitempty" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkJobObservation) DeepCopy ¶
func (in *PysparkJobObservation) DeepCopy() *PysparkJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobObservation.
func (*PysparkJobObservation) DeepCopyInto ¶
func (in *PysparkJobObservation) DeepCopyInto(out *PysparkJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PysparkJobParameters ¶
type PysparkJobParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *PysparkJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file. // +kubebuilder:validation:Optional MainPythonFileURI *string `json:"mainPythonFileUri" tf:"main_python_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip. // +kubebuilder:validation:Optional PythonFileUris []*string `json:"pythonFileUris,omitempty" tf:"python_file_uris,omitempty"` }
func (*PysparkJobParameters) DeepCopy ¶
func (in *PysparkJobParameters) DeepCopy() *PysparkJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PysparkJobParameters.
func (*PysparkJobParameters) DeepCopyInto ¶
func (in *PysparkJobParameters) DeepCopyInto(out *PysparkJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryListInitParameters ¶
type QueryListInitParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*QueryListInitParameters) DeepCopy ¶
func (in *QueryListInitParameters) DeepCopy() *QueryListInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryListInitParameters.
func (*QueryListInitParameters) DeepCopyInto ¶
func (in *QueryListInitParameters) DeepCopyInto(out *QueryListInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryListObservation ¶
type QueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*QueryListObservation) DeepCopy ¶
func (in *QueryListObservation) DeepCopy() *QueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryListObservation.
func (*QueryListObservation) DeepCopyInto ¶
func (in *QueryListObservation) DeepCopyInto(out *QueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type QueryListParameters ¶
type QueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } } // +kubebuilder:validation:Optional Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*QueryListParameters) DeepCopy ¶
func (in *QueryListParameters) DeepCopy() *QueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryListParameters.
func (*QueryListParameters) DeepCopyInto ¶
func (in *QueryListParameters) DeepCopyInto(out *QueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferenceInitParameters ¶
type ReferenceInitParameters struct {
JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"`
}
func (*ReferenceInitParameters) DeepCopy ¶
func (in *ReferenceInitParameters) DeepCopy() *ReferenceInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferenceInitParameters.
func (*ReferenceInitParameters) DeepCopyInto ¶
func (in *ReferenceInitParameters) DeepCopyInto(out *ReferenceInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferenceObservation ¶
type ReferenceObservation struct {
JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"`
}
func (*ReferenceObservation) DeepCopy ¶
func (in *ReferenceObservation) DeepCopy() *ReferenceObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferenceObservation.
func (*ReferenceObservation) DeepCopyInto ¶
func (in *ReferenceObservation) DeepCopyInto(out *ReferenceObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReferenceParameters ¶
type ReferenceParameters struct { // +kubebuilder:validation:Optional JobID *string `json:"jobId,omitempty" tf:"job_id,omitempty"` }
func (*ReferenceParameters) DeepCopy ¶
func (in *ReferenceParameters) DeepCopy() *ReferenceParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReferenceParameters.
func (*ReferenceParameters) DeepCopyInto ¶
func (in *ReferenceParameters) DeepCopyInto(out *ReferenceParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RegexInitParameters ¶
type RegexInitParameters struct { // Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient). Regexes []*string `json:"regexes,omitempty" tf:"regexes,omitempty"` }
func (*RegexInitParameters) DeepCopy ¶
func (in *RegexInitParameters) DeepCopy() *RegexInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RegexInitParameters.
func (*RegexInitParameters) DeepCopyInto ¶
func (in *RegexInitParameters) DeepCopyInto(out *RegexInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RegexObservation ¶
type RegexObservation struct { // Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient). Regexes []*string `json:"regexes,omitempty" tf:"regexes,omitempty"` }
func (*RegexObservation) DeepCopy ¶
func (in *RegexObservation) DeepCopy() *RegexObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RegexObservation.
func (*RegexObservation) DeepCopyInto ¶
func (in *RegexObservation) DeepCopyInto(out *RegexObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RegexParameters ¶
type RegexParameters struct { // Required. RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient). // +kubebuilder:validation:Optional Regexes []*string `json:"regexes" tf:"regexes,omitempty"` }
func (*RegexParameters) DeepCopy ¶
func (in *RegexParameters) DeepCopy() *RegexParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RegexParameters.
func (*RegexParameters) DeepCopyInto ¶
func (in *RegexParameters) DeepCopyInto(out *RegexParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationAffinityInitParameters ¶
type ReservationAffinityInitParameters struct { // Corresponds to the type of reservation consumption. ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. Key *string `json:"key,omitempty" tf:"key,omitempty"` // Corresponds to the label values of reservation resource. // +listType=set Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ReservationAffinityInitParameters) DeepCopy ¶
func (in *ReservationAffinityInitParameters) DeepCopy() *ReservationAffinityInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationAffinityInitParameters.
func (*ReservationAffinityInitParameters) DeepCopyInto ¶
func (in *ReservationAffinityInitParameters) DeepCopyInto(out *ReservationAffinityInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationAffinityObservation ¶
type ReservationAffinityObservation struct { // Corresponds to the type of reservation consumption. ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. Key *string `json:"key,omitempty" tf:"key,omitempty"` // Corresponds to the label values of reservation resource. // +listType=set Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ReservationAffinityObservation) DeepCopy ¶
func (in *ReservationAffinityObservation) DeepCopy() *ReservationAffinityObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationAffinityObservation.
func (*ReservationAffinityObservation) DeepCopyInto ¶
func (in *ReservationAffinityObservation) DeepCopyInto(out *ReservationAffinityObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ReservationAffinityParameters ¶
type ReservationAffinityParameters struct { // Corresponds to the type of reservation consumption. // +kubebuilder:validation:Optional ConsumeReservationType *string `json:"consumeReservationType,omitempty" tf:"consume_reservation_type,omitempty"` // Corresponds to the label key of reservation resource. // +kubebuilder:validation:Optional Key *string `json:"key,omitempty" tf:"key,omitempty"` // Corresponds to the label values of reservation resource. // +kubebuilder:validation:Optional // +listType=set Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ReservationAffinityParameters) DeepCopy ¶
func (in *ReservationAffinityParameters) DeepCopy() *ReservationAffinityParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ReservationAffinityParameters.
func (*ReservationAffinityParameters) DeepCopyInto ¶
func (in *ReservationAffinityParameters) DeepCopyInto(out *ReservationAffinityParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScalingConfigInitParameters ¶
type ScalingConfigInitParameters struct { // Metastore instance sizes. // Possible values are: EXTRA_SMALL, SMALL, MEDIUM, LARGE, EXTRA_LARGE. InstanceSize *string `json:"instanceSize,omitempty" tf:"instance_size,omitempty"` // Scaling factor, in increments of 0.1 for values less than 1.0, and increments of 1.0 for values greater than 1.0. ScalingFactor *float64 `json:"scalingFactor,omitempty" tf:"scaling_factor,omitempty"` }
func (*ScalingConfigInitParameters) DeepCopy ¶
func (in *ScalingConfigInitParameters) DeepCopy() *ScalingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScalingConfigInitParameters.
func (*ScalingConfigInitParameters) DeepCopyInto ¶
func (in *ScalingConfigInitParameters) DeepCopyInto(out *ScalingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScalingConfigObservation ¶
type ScalingConfigObservation struct { // Metastore instance sizes. // Possible values are: EXTRA_SMALL, SMALL, MEDIUM, LARGE, EXTRA_LARGE. InstanceSize *string `json:"instanceSize,omitempty" tf:"instance_size,omitempty"` // Scaling factor, in increments of 0.1 for values less than 1.0, and increments of 1.0 for values greater than 1.0. ScalingFactor *float64 `json:"scalingFactor,omitempty" tf:"scaling_factor,omitempty"` }
func (*ScalingConfigObservation) DeepCopy ¶
func (in *ScalingConfigObservation) DeepCopy() *ScalingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScalingConfigObservation.
func (*ScalingConfigObservation) DeepCopyInto ¶
func (in *ScalingConfigObservation) DeepCopyInto(out *ScalingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScalingConfigParameters ¶
type ScalingConfigParameters struct { // Metastore instance sizes. // Possible values are: EXTRA_SMALL, SMALL, MEDIUM, LARGE, EXTRA_LARGE. // +kubebuilder:validation:Optional InstanceSize *string `json:"instanceSize,omitempty" tf:"instance_size,omitempty"` // Scaling factor, in increments of 0.1 for values less than 1.0, and increments of 1.0 for values greater than 1.0. // +kubebuilder:validation:Optional ScalingFactor *float64 `json:"scalingFactor,omitempty" tf:"scaling_factor,omitempty"` }
func (*ScalingConfigParameters) DeepCopy ¶
func (in *ScalingConfigParameters) DeepCopy() *ScalingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScalingConfigParameters.
func (*ScalingConfigParameters) DeepCopyInto ¶
func (in *ScalingConfigParameters) DeepCopyInto(out *ScalingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScheduledBackupInitParameters ¶ added in v1.5.0
type ScheduledBackupInitParameters struct { // A Cloud Storage URI of a folder, in the format gs://<bucket_name>/<path_inside_bucket>. A sub-folder <backup_folder> containing backup files will be stored below it. BackupLocation *string `json:"backupLocation,omitempty" tf:"backup_location,omitempty"` // The scheduled interval in Cron format, see https://en.wikipedia.org/wiki/Cron The default is empty: scheduled backup is not enabled. Must be specified to enable scheduled backups. CronSchedule *string `json:"cronSchedule,omitempty" tf:"cron_schedule,omitempty"` // Defines whether the scheduled backup is enabled. The default value is false. Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` // Specifies the time zone to be used when interpreting cronSchedule. Must be a time zone name from the time zone database (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. America/Los_Angeles or Africa/Abidjan. If left unspecified, the default is UTC. TimeZone *string `json:"timeZone,omitempty" tf:"time_zone,omitempty"` }
func (*ScheduledBackupInitParameters) DeepCopy ¶ added in v1.5.0
func (in *ScheduledBackupInitParameters) DeepCopy() *ScheduledBackupInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduledBackupInitParameters.
func (*ScheduledBackupInitParameters) DeepCopyInto ¶ added in v1.5.0
func (in *ScheduledBackupInitParameters) DeepCopyInto(out *ScheduledBackupInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScheduledBackupObservation ¶ added in v1.5.0
type ScheduledBackupObservation struct { // A Cloud Storage URI of a folder, in the format gs://<bucket_name>/<path_inside_bucket>. A sub-folder <backup_folder> containing backup files will be stored below it. BackupLocation *string `json:"backupLocation,omitempty" tf:"backup_location,omitempty"` // The scheduled interval in Cron format, see https://en.wikipedia.org/wiki/Cron The default is empty: scheduled backup is not enabled. Must be specified to enable scheduled backups. CronSchedule *string `json:"cronSchedule,omitempty" tf:"cron_schedule,omitempty"` // Defines whether the scheduled backup is enabled. The default value is false. Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` // Specifies the time zone to be used when interpreting cronSchedule. Must be a time zone name from the time zone database (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. America/Los_Angeles or Africa/Abidjan. If left unspecified, the default is UTC. TimeZone *string `json:"timeZone,omitempty" tf:"time_zone,omitempty"` }
func (*ScheduledBackupObservation) DeepCopy ¶ added in v1.5.0
func (in *ScheduledBackupObservation) DeepCopy() *ScheduledBackupObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduledBackupObservation.
func (*ScheduledBackupObservation) DeepCopyInto ¶ added in v1.5.0
func (in *ScheduledBackupObservation) DeepCopyInto(out *ScheduledBackupObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ScheduledBackupParameters ¶ added in v1.5.0
type ScheduledBackupParameters struct { // A Cloud Storage URI of a folder, in the format gs://<bucket_name>/<path_inside_bucket>. A sub-folder <backup_folder> containing backup files will be stored below it. // +kubebuilder:validation:Optional BackupLocation *string `json:"backupLocation" tf:"backup_location,omitempty"` // The scheduled interval in Cron format, see https://en.wikipedia.org/wiki/Cron The default is empty: scheduled backup is not enabled. Must be specified to enable scheduled backups. // +kubebuilder:validation:Optional CronSchedule *string `json:"cronSchedule,omitempty" tf:"cron_schedule,omitempty"` // Defines whether the scheduled backup is enabled. The default value is false. // +kubebuilder:validation:Optional Enabled *bool `json:"enabled,omitempty" tf:"enabled,omitempty"` // Specifies the time zone to be used when interpreting cronSchedule. Must be a time zone name from the time zone database (https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. America/Los_Angeles or Africa/Abidjan. If left unspecified, the default is UTC. // +kubebuilder:validation:Optional TimeZone *string `json:"timeZone,omitempty" tf:"time_zone,omitempty"` }
func (*ScheduledBackupParameters) DeepCopy ¶ added in v1.5.0
func (in *ScheduledBackupParameters) DeepCopy() *ScheduledBackupParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduledBackupParameters.
func (*ScheduledBackupParameters) DeepCopyInto ¶ added in v1.5.0
func (in *ScheduledBackupParameters) DeepCopyInto(out *ScheduledBackupParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SchedulingInitParameters ¶
type SchedulingInitParameters struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*SchedulingInitParameters) DeepCopy ¶
func (in *SchedulingInitParameters) DeepCopy() *SchedulingInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SchedulingInitParameters.
func (*SchedulingInitParameters) DeepCopyInto ¶
func (in *SchedulingInitParameters) DeepCopyInto(out *SchedulingInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SchedulingObservation ¶
type SchedulingObservation struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. MaxFailuresPerHour *float64 `json:"maxFailuresPerHour,omitempty" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. MaxFailuresTotal *float64 `json:"maxFailuresTotal,omitempty" tf:"max_failures_total,omitempty"` }
func (*SchedulingObservation) DeepCopy ¶
func (in *SchedulingObservation) DeepCopy() *SchedulingObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SchedulingObservation.
func (*SchedulingObservation) DeepCopyInto ¶
func (in *SchedulingObservation) DeepCopyInto(out *SchedulingObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SchedulingParameters ¶
type SchedulingParameters struct { // Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. // +kubebuilder:validation:Optional MaxFailuresPerHour *float64 `json:"maxFailuresPerHour" tf:"max_failures_per_hour,omitempty"` // Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. // +kubebuilder:validation:Optional MaxFailuresTotal *float64 `json:"maxFailuresTotal" tf:"max_failures_total,omitempty"` }
func (*SchedulingParameters) DeepCopy ¶
func (in *SchedulingParameters) DeepCopy() *SchedulingParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SchedulingParameters.
func (*SchedulingParameters) DeepCopyInto ¶
func (in *SchedulingParameters) DeepCopyInto(out *SchedulingParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigAcceleratorsInitParameters ¶
type SecondaryWorkerConfigAcceleratorsInitParameters struct { // The number of the accelerator cards of this type exposed to this instance. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*SecondaryWorkerConfigAcceleratorsInitParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigAcceleratorsInitParameters) DeepCopy() *SecondaryWorkerConfigAcceleratorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigAcceleratorsInitParameters.
func (*SecondaryWorkerConfigAcceleratorsInitParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigAcceleratorsInitParameters) DeepCopyInto(out *SecondaryWorkerConfigAcceleratorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigAcceleratorsObservation ¶
type SecondaryWorkerConfigAcceleratorsObservation struct { // The number of the accelerator cards of this type exposed to this instance. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*SecondaryWorkerConfigAcceleratorsObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigAcceleratorsObservation) DeepCopy() *SecondaryWorkerConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigAcceleratorsObservation.
func (*SecondaryWorkerConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigAcceleratorsObservation) DeepCopyInto(out *SecondaryWorkerConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigAcceleratorsParameters ¶
type SecondaryWorkerConfigAcceleratorsParameters struct { // The number of the accelerator cards of this type exposed to this instance. // +kubebuilder:validation:Optional AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. See (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, nvidia-tesla-k80. // +kubebuilder:validation:Optional AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*SecondaryWorkerConfigAcceleratorsParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigAcceleratorsParameters) DeepCopy() *SecondaryWorkerConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigAcceleratorsParameters.
func (*SecondaryWorkerConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigAcceleratorsParameters) DeepCopyInto(out *SecondaryWorkerConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigDiskConfigInitParameters ¶
type SecondaryWorkerConfigDiskConfigInitParameters struct { // Size in GB of the boot disk (default is 500GB). BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*SecondaryWorkerConfigDiskConfigInitParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigDiskConfigInitParameters) DeepCopy() *SecondaryWorkerConfigDiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigDiskConfigInitParameters.
func (*SecondaryWorkerConfigDiskConfigInitParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigDiskConfigInitParameters) DeepCopyInto(out *SecondaryWorkerConfigDiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigDiskConfigObservation ¶
type SecondaryWorkerConfigDiskConfigObservation struct { // Size in GB of the boot disk (default is 500GB). BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*SecondaryWorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigDiskConfigObservation) DeepCopy() *SecondaryWorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigDiskConfigObservation.
func (*SecondaryWorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigDiskConfigObservation) DeepCopyInto(out *SecondaryWorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigDiskConfigParameters ¶
type SecondaryWorkerConfigDiskConfigParameters struct { // Size in GB of the boot disk (default is 500GB). // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // Type of the boot disk (default is "pd-standard"). Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk Hard Disk Drive). // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and (https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*SecondaryWorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigDiskConfigParameters) DeepCopy() *SecondaryWorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigDiskConfigParameters.
func (*SecondaryWorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigDiskConfigParameters) DeepCopyInto(out *SecondaryWorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigInitParameters ¶
type SecondaryWorkerConfigInitParameters struct { // Maximum number of instances for this group. Note that by default, clusters will not use // secondary workers. Required for secondary workers if the minimum secondary instances is set. // Bounds: [minInstances, ). Defaults to 0. MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"` // Minimum number of instances for this group. Bounds: [0, maxInstances]. Defaults to 0. MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"` // Weight for the instance group, which is used to determine the fraction of total workers // in the cluster from this instance group. For example, if primary workers have weight 2, // and secondary workers have weight 1, the cluster will have approximately 2 primary workers // for each secondary worker. // The cluster may not reach the specified balance if constrained by min/max bounds or other // autoscaling settings. For example, if maxInstances for secondary workers is 0, then only // primary workers will be added. The cluster can also be out of balance when created. // If weight is not set on any instance group, the cluster will default to equal weight for // all groups: the cluster will attempt to maintain an equal number of workers in each group // within the configured size bounds for each group. If weight is set for one group only, // the cluster will default to zero weight on the unset group. For example if weight is set // only on primary workers, the cluster will use primary workers only and no secondary workers. Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"` }
func (*SecondaryWorkerConfigInitParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigInitParameters) DeepCopy() *SecondaryWorkerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigInitParameters.
func (*SecondaryWorkerConfigInitParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigInitParameters) DeepCopyInto(out *SecondaryWorkerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigManagedGroupConfigInitParameters ¶
// SecondaryWorkerConfigManagedGroupConfigInitParameters has no
// user-settable fields; the managed group config is output-only (see the
// corresponding Observation type).
type SecondaryWorkerConfigManagedGroupConfigInitParameters struct {
}
func (*SecondaryWorkerConfigManagedGroupConfigInitParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigManagedGroupConfigInitParameters) DeepCopy() *SecondaryWorkerConfigManagedGroupConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigManagedGroupConfigInitParameters.
func (*SecondaryWorkerConfigManagedGroupConfigInitParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigManagedGroupConfigInitParameters) DeepCopyInto(out *SecondaryWorkerConfigManagedGroupConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigManagedGroupConfigObservation ¶
// SecondaryWorkerConfigManagedGroupConfigObservation exposes the output-only
// names of the managed instance group backing the secondary worker group.
// NOTE(review): the generated field comments below describe workflow-template
// resource names and look copy-pasted; confirm against the Dataproc API docs.
type SecondaryWorkerConfigManagedGroupConfigObservation struct {

	// Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
	InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name,omitempty"`

	// Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. * For projects.regions.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/regions/{region}/workflowTemplates/{template_id} * For projects.locations.workflowTemplates, the resource name of the template has the following format: projects/{project_id}/locations/{location}/workflowTemplates/{template_id}
	InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name,omitempty"`
}
func (*SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopy() *SecondaryWorkerConfigManagedGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigManagedGroupConfigObservation.
func (*SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigManagedGroupConfigObservation) DeepCopyInto(out *SecondaryWorkerConfigManagedGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigManagedGroupConfigParameters ¶
// SecondaryWorkerConfigManagedGroupConfigParameters has no user-settable
// fields; the managed group config is output-only (see the corresponding
// Observation type).
type SecondaryWorkerConfigManagedGroupConfigParameters struct {
}
func (*SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopy() *SecondaryWorkerConfigManagedGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigManagedGroupConfigParameters.
func (*SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigManagedGroupConfigParameters) DeepCopyInto(out *SecondaryWorkerConfigManagedGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigObservation ¶
// SecondaryWorkerConfigObservation mirrors the observed sizing fields
// (max/min instances and weight) of the secondary worker group.
type SecondaryWorkerConfigObservation struct {

	// Maximum number of instances for this group. Note that by default, clusters will not use
	// secondary workers. Required for secondary workers if the minimum secondary instances is set.
	// Bounds: [minInstances, ). Defaults to 0.
	MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"`

	// Minimum number of instances for this group. Bounds: [0, maxInstances]. Defaults to 0.
	MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"`

	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"`
}
func (*SecondaryWorkerConfigObservation) DeepCopy ¶
func (in *SecondaryWorkerConfigObservation) DeepCopy() *SecondaryWorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigObservation.
func (*SecondaryWorkerConfigObservation) DeepCopyInto ¶
func (in *SecondaryWorkerConfigObservation) DeepCopyInto(out *SecondaryWorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecondaryWorkerConfigParameters ¶
// SecondaryWorkerConfigParameters holds the desired-state sizing fields
// (max/min instances and weight) for the secondary worker group; all fields
// are optional per the kubebuilder markers.
type SecondaryWorkerConfigParameters struct {

	// Maximum number of instances for this group. Note that by default, clusters will not use
	// secondary workers. Required for secondary workers if the minimum secondary instances is set.
	// Bounds: [minInstances, ). Defaults to 0.
	// +kubebuilder:validation:Optional
	MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"`

	// Minimum number of instances for this group. Bounds: [0, maxInstances]. Defaults to 0.
	// +kubebuilder:validation:Optional
	MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"`

	// Weight for the instance group, which is used to determine the fraction of total workers
	// in the cluster from this instance group. For example, if primary workers have weight 2,
	// and secondary workers have weight 1, the cluster will have approximately 2 primary workers
	// for each secondary worker.
	// The cluster may not reach the specified balance if constrained by min/max bounds or other
	// autoscaling settings. For example, if maxInstances for secondary workers is 0, then only
	// primary workers will be added. The cluster can also be out of balance when created.
	// If weight is not set on any instance group, the cluster will default to equal weight for
	// all groups: the cluster will attempt to maintain an equal number of workers in each group
	// within the configured size bounds for each group. If weight is set for one group only,
	// the cluster will default to zero weight on the unset group. For example if weight is set
	// only on primary workers, the cluster will use primary workers only and no secondary workers.
	// +kubebuilder:validation:Optional
	Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"`
}
func (*SecondaryWorkerConfigParameters) DeepCopy ¶
func (in *SecondaryWorkerConfigParameters) DeepCopy() *SecondaryWorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecondaryWorkerConfigParameters.
func (*SecondaryWorkerConfigParameters) DeepCopyInto ¶
func (in *SecondaryWorkerConfigParameters) DeepCopyInto(out *SecondaryWorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigInitParameters ¶
type SecurityConfigInitParameters struct { // Kerberos Configuration KerberosConfig *KerberosConfigInitParameters `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*SecurityConfigInitParameters) DeepCopy ¶
func (in *SecurityConfigInitParameters) DeepCopy() *SecurityConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigInitParameters.
func (*SecurityConfigInitParameters) DeepCopyInto ¶
func (in *SecurityConfigInitParameters) DeepCopyInto(out *SecurityConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigKerberosConfigInitParameters ¶
// SecurityConfigKerberosConfigInitParameters holds the Kerberos settings for
// a Dataproc cluster: on-cluster KDC realm/keys, SSL keystore/truststore
// URIs, and optional cross-realm trust fields.
type SecurityConfigKerberosConfigInitParameters struct {

	// The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	CrossRealmTrustSharedPassword *string `json:"crossRealmTrustSharedPassword,omitempty" tf:"cross_realm_trust_shared_password,omitempty"`

	// Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// The uri of the KMS key used to encrypt various sensitive files.
	KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	KdcDBKey *string `json:"kdcDbKey,omitempty" tf:"kdc_db_key,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	KeyPassword *string `json:"keyPassword,omitempty" tf:"key_password,omitempty"`

	// The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	Keystore *string `json:"keystore,omitempty" tf:"keystore,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
	KeystorePassword *string `json:"keystorePassword,omitempty" tf:"keystore_password,omitempty"`

	// The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	RootPrincipalPassword *string `json:"rootPrincipalPassword,omitempty" tf:"root_principal_password,omitempty"`

	// The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	Truststore *string `json:"truststore,omitempty" tf:"truststore,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	TruststorePassword *string `json:"truststorePassword,omitempty" tf:"truststore_password,omitempty"`
}
func (*SecurityConfigKerberosConfigInitParameters) DeepCopy ¶
func (in *SecurityConfigKerberosConfigInitParameters) DeepCopy() *SecurityConfigKerberosConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigKerberosConfigInitParameters.
func (*SecurityConfigKerberosConfigInitParameters) DeepCopyInto ¶
func (in *SecurityConfigKerberosConfigInitParameters) DeepCopyInto(out *SecurityConfigKerberosConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigKerberosConfigObservation ¶
// SecurityConfigKerberosConfigObservation mirrors the observed Kerberos
// settings of a Dataproc cluster; fields match the InitParameters variant.
type SecurityConfigKerberosConfigObservation struct {

	// The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	CrossRealmTrustSharedPassword *string `json:"crossRealmTrustSharedPassword,omitempty" tf:"cross_realm_trust_shared_password,omitempty"`

	// Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// The uri of the KMS key used to encrypt various sensitive files.
	KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	KdcDBKey *string `json:"kdcDbKey,omitempty" tf:"kdc_db_key,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	KeyPassword *string `json:"keyPassword,omitempty" tf:"key_password,omitempty"`

	// The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	Keystore *string `json:"keystore,omitempty" tf:"keystore,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
	KeystorePassword *string `json:"keystorePassword,omitempty" tf:"keystore_password,omitempty"`

	// The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	RootPrincipalPassword *string `json:"rootPrincipalPassword,omitempty" tf:"root_principal_password,omitempty"`

	// The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	Truststore *string `json:"truststore,omitempty" tf:"truststore,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	TruststorePassword *string `json:"truststorePassword,omitempty" tf:"truststore_password,omitempty"`
}
func (*SecurityConfigKerberosConfigObservation) DeepCopy ¶
func (in *SecurityConfigKerberosConfigObservation) DeepCopy() *SecurityConfigKerberosConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigKerberosConfigObservation.
func (*SecurityConfigKerberosConfigObservation) DeepCopyInto ¶
func (in *SecurityConfigKerberosConfigObservation) DeepCopyInto(out *SecurityConfigKerberosConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigKerberosConfigParameters ¶
// SecurityConfigKerberosConfigParameters holds the desired-state Kerberos
// settings; all fields are optional per the kubebuilder markers.
type SecurityConfigKerberosConfigParameters struct {

	// The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustAdminServer *string `json:"crossRealmTrustAdminServer,omitempty" tf:"cross_realm_trust_admin_server,omitempty"`

	// The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship.
	// +kubebuilder:validation:Optional
	CrossRealmTrustKdc *string `json:"crossRealmTrustKdc,omitempty" tf:"cross_realm_trust_kdc,omitempty"`

	// The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust.
	// +kubebuilder:validation:Optional
	CrossRealmTrustRealm *string `json:"crossRealmTrustRealm,omitempty" tf:"cross_realm_trust_realm,omitempty"`

	// +kubebuilder:validation:Optional
	CrossRealmTrustSharedPassword *string `json:"crossRealmTrustSharedPassword,omitempty" tf:"cross_realm_trust_shared_password,omitempty"`

	// Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster.
	// +kubebuilder:validation:Optional
	EnableKerberos *bool `json:"enableKerberos,omitempty" tf:"enable_kerberos,omitempty"`

	// The uri of the KMS key used to encrypt various sensitive files.
	// +kubebuilder:validation:Optional
	KMSKey *string `json:"kmsKey,omitempty" tf:"kms_key,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database.
	// +kubebuilder:validation:Optional
	KdcDBKey *string `json:"kdcDbKey,omitempty" tf:"kdc_db_key,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	KeyPassword *string `json:"keyPassword,omitempty" tf:"key_password,omitempty"`

	// The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +kubebuilder:validation:Optional
	Keystore *string `json:"keystore,omitempty" tf:"keystore,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	KeystorePassword *string `json:"keystorePassword,omitempty" tf:"keystore_password,omitempty"`

	// The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm.
	// +kubebuilder:validation:Optional
	Realm *string `json:"realm,omitempty" tf:"realm,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the root principal password.
	// +kubebuilder:validation:Optional
	RootPrincipalPassword *string `json:"rootPrincipalPassword,omitempty" tf:"root_principal_password,omitempty"`

	// The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used.
	// +kubebuilder:validation:Optional
	TgtLifetimeHours *float64 `json:"tgtLifetimeHours,omitempty" tf:"tgt_lifetime_hours,omitempty"`

	// The Cloud Storage URI of the truststore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate.
	// +kubebuilder:validation:Optional
	Truststore *string `json:"truststore,omitempty" tf:"truststore,omitempty"`

	// The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc.
	// +kubebuilder:validation:Optional
	TruststorePassword *string `json:"truststorePassword,omitempty" tf:"truststore_password,omitempty"`
}
func (*SecurityConfigKerberosConfigParameters) DeepCopy ¶
func (in *SecurityConfigKerberosConfigParameters) DeepCopy() *SecurityConfigKerberosConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigKerberosConfigParameters.
func (*SecurityConfigKerberosConfigParameters) DeepCopyInto ¶
func (in *SecurityConfigKerberosConfigParameters) DeepCopyInto(out *SecurityConfigKerberosConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigObservation ¶
type SecurityConfigObservation struct { // Kerberos Configuration KerberosConfig *KerberosConfigObservation `json:"kerberosConfig,omitempty" tf:"kerberos_config,omitempty"` }
func (*SecurityConfigObservation) DeepCopy ¶
func (in *SecurityConfigObservation) DeepCopy() *SecurityConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigObservation.
func (*SecurityConfigObservation) DeepCopyInto ¶
func (in *SecurityConfigObservation) DeepCopyInto(out *SecurityConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecurityConfigParameters ¶
type SecurityConfigParameters struct { // Kerberos Configuration // +kubebuilder:validation:Optional KerberosConfig *KerberosConfigParameters `json:"kerberosConfig" tf:"kerberos_config,omitempty"` }
func (*SecurityConfigParameters) DeepCopy ¶
func (in *SecurityConfigParameters) DeepCopy() *SecurityConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecurityConfigParameters.
func (*SecurityConfigParameters) DeepCopyInto ¶
func (in *SecurityConfigParameters) DeepCopyInto(out *SecurityConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ShieldedInstanceConfigInitParameters ¶
// ShieldedInstanceConfigInitParameters holds the Shielded VM toggles
// (integrity monitoring, Secure Boot, vTPM) for cluster instances.
type ShieldedInstanceConfigInitParameters struct {

	// Defines whether instances have integrity monitoring enabled.
	EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"`

	// Defines whether instances have Secure Boot enabled.
	EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"`

	// Defines whether instances have the vTPM enabled.
	EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"`
}
func (*ShieldedInstanceConfigInitParameters) DeepCopy ¶
func (in *ShieldedInstanceConfigInitParameters) DeepCopy() *ShieldedInstanceConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ShieldedInstanceConfigInitParameters.
func (*ShieldedInstanceConfigInitParameters) DeepCopyInto ¶
func (in *ShieldedInstanceConfigInitParameters) DeepCopyInto(out *ShieldedInstanceConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ShieldedInstanceConfigObservation ¶
// ShieldedInstanceConfigObservation mirrors the observed Shielded VM toggles
// (integrity monitoring, Secure Boot, vTPM) for cluster instances.
type ShieldedInstanceConfigObservation struct {

	// Defines whether instances have integrity monitoring enabled.
	EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"`

	// Defines whether instances have Secure Boot enabled.
	EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"`

	// Defines whether instances have the vTPM enabled.
	EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"`
}
func (*ShieldedInstanceConfigObservation) DeepCopy ¶
func (in *ShieldedInstanceConfigObservation) DeepCopy() *ShieldedInstanceConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ShieldedInstanceConfigObservation.
func (*ShieldedInstanceConfigObservation) DeepCopyInto ¶
func (in *ShieldedInstanceConfigObservation) DeepCopyInto(out *ShieldedInstanceConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ShieldedInstanceConfigParameters ¶
// ShieldedInstanceConfigParameters holds the desired-state Shielded VM
// toggles; all fields are optional per the kubebuilder markers.
type ShieldedInstanceConfigParameters struct {

	// Defines whether instances have integrity monitoring enabled.
	// +kubebuilder:validation:Optional
	EnableIntegrityMonitoring *bool `json:"enableIntegrityMonitoring,omitempty" tf:"enable_integrity_monitoring,omitempty"`

	// Defines whether instances have Secure Boot enabled.
	// +kubebuilder:validation:Optional
	EnableSecureBoot *bool `json:"enableSecureBoot,omitempty" tf:"enable_secure_boot,omitempty"`

	// Defines whether instances have the vTPM enabled.
	// +kubebuilder:validation:Optional
	EnableVtpm *bool `json:"enableVtpm,omitempty" tf:"enable_vtpm,omitempty"`
}
func (*ShieldedInstanceConfigParameters) DeepCopy ¶
func (in *ShieldedInstanceConfigParameters) DeepCopy() *ShieldedInstanceConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ShieldedInstanceConfigParameters.
func (*ShieldedInstanceConfigParameters) DeepCopyInto ¶
func (in *ShieldedInstanceConfigParameters) DeepCopyInto(out *ShieldedInstanceConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SoftwareConfigInitParameters ¶
// SoftwareConfigInitParameters holds the Dataproc image version, optional
// components, and property overrides for a cluster.
type SoftwareConfigInitParameters struct {

	// The Cloud Dataproc image version to use
	// for the cluster - this controls the sets of software versions
	// installed onto the nodes when you create clusters. If not specified, defaults to the
	// latest version. For a list of valid versions see
	// Cloud Dataproc versions
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of optional components to activate on the cluster. See Available Optional Components.
	// +listType=set
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A list of override and additional properties (key/value pairs)
	// used to modify various aspects of the common configuration files used when creating
	// a cluster. For a list of valid properties please see
	// Cluster properties
	// +mapType=granular
	OverrideProperties map[string]*string `json:"overrideProperties,omitempty" tf:"override_properties,omitempty"`
}
func (*SoftwareConfigInitParameters) DeepCopy ¶
func (in *SoftwareConfigInitParameters) DeepCopy() *SoftwareConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SoftwareConfigInitParameters.
func (*SoftwareConfigInitParameters) DeepCopyInto ¶
func (in *SoftwareConfigInitParameters) DeepCopyInto(out *SoftwareConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SoftwareConfigObservation ¶
// SoftwareConfigObservation mirrors the observed software configuration,
// including the server-reported daemon properties map.
type SoftwareConfigObservation struct {

	// The Cloud Dataproc image version to use
	// for the cluster - this controls the sets of software versions
	// installed onto the nodes when you create clusters. If not specified, defaults to the
	// latest version. For a list of valid versions see
	// Cloud Dataproc versions
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of optional components to activate on the cluster. See Available Optional Components.
	// +listType=set
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A list of override and additional properties (key/value pairs)
	// used to modify various aspects of the common configuration files used when creating
	// a cluster. For a list of valid properties please see
	// Cluster properties
	// +mapType=granular
	OverrideProperties map[string]*string `json:"overrideProperties,omitempty" tf:"override_properties,omitempty"`

	// The properties to set on daemon config files. Property keys are specified in prefix:property format,
	// for example spark:spark.kubernetes.container.image.
	Properties map[string]string `json:"properties,omitempty" tf:"properties,omitempty"`
}
func (*SoftwareConfigObservation) DeepCopy ¶
func (in *SoftwareConfigObservation) DeepCopy() *SoftwareConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SoftwareConfigObservation.
func (*SoftwareConfigObservation) DeepCopyInto ¶
func (in *SoftwareConfigObservation) DeepCopyInto(out *SoftwareConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SoftwareConfigParameters ¶
// SoftwareConfigParameters holds the desired-state software configuration;
// all fields are optional per the kubebuilder markers.
type SoftwareConfigParameters struct {

	// The Cloud Dataproc image version to use
	// for the cluster - this controls the sets of software versions
	// installed onto the nodes when you create clusters. If not specified, defaults to the
	// latest version. For a list of valid versions see
	// Cloud Dataproc versions
	// +kubebuilder:validation:Optional
	ImageVersion *string `json:"imageVersion,omitempty" tf:"image_version,omitempty"`

	// The set of optional components to activate on the cluster. See Available Optional Components.
	// +kubebuilder:validation:Optional
	// +listType=set
	OptionalComponents []*string `json:"optionalComponents,omitempty" tf:"optional_components,omitempty"`

	// A list of override and additional properties (key/value pairs)
	// used to modify various aspects of the common configuration files used when creating
	// a cluster. For a list of valid properties please see
	// Cluster properties
	// +kubebuilder:validation:Optional
	// +mapType=granular
	OverrideProperties map[string]*string `json:"overrideProperties,omitempty" tf:"override_properties,omitempty"`
}
func (*SoftwareConfigParameters) DeepCopy ¶
func (in *SoftwareConfigParameters) DeepCopy() *SoftwareConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SoftwareConfigParameters.
func (*SoftwareConfigParameters) DeepCopyInto ¶
func (in *SoftwareConfigParameters) DeepCopyInto(out *SoftwareConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigInitParameters ¶
type SparkConfigInitParameters struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *SparkConfigLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The class containing the main method of the driver. Must be in a // provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of jar file containing // the driver jar. Conflicts with main_class MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkConfigInitParameters) DeepCopy ¶
func (in *SparkConfigInitParameters) DeepCopy() *SparkConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigInitParameters.
func (*SparkConfigInitParameters) DeepCopyInto ¶
func (in *SparkConfigInitParameters) DeepCopyInto(out *SparkConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigLoggingConfigInitParameters ¶
// SparkConfigLoggingConfigInitParameters holds the init-provider values for
// the Spark config logging_config block.
type SparkConfigLoggingConfigInitParameters struct {
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkConfigLoggingConfigInitParameters) DeepCopy ¶
func (in *SparkConfigLoggingConfigInitParameters) DeepCopy() *SparkConfigLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigLoggingConfigInitParameters.
func (*SparkConfigLoggingConfigInitParameters) DeepCopyInto ¶
func (in *SparkConfigLoggingConfigInitParameters) DeepCopyInto(out *SparkConfigLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigLoggingConfigObservation ¶
// SparkConfigLoggingConfigObservation holds the observed state of the Spark
// config logging_config block.
type SparkConfigLoggingConfigObservation struct {
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkConfigLoggingConfigObservation) DeepCopy ¶
func (in *SparkConfigLoggingConfigObservation) DeepCopy() *SparkConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigLoggingConfigObservation.
func (*SparkConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkConfigLoggingConfigObservation) DeepCopyInto(out *SparkConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigLoggingConfigParameters ¶
// SparkConfigLoggingConfigParameters holds the forProvider values for the
// Spark config logging_config block.
// NOTE: the json tag deliberately has no omitempty here, matching the
// generated schema for this field.
type SparkConfigLoggingConfigParameters struct {
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"`
}
func (*SparkConfigLoggingConfigParameters) DeepCopy ¶
func (in *SparkConfigLoggingConfigParameters) DeepCopy() *SparkConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigLoggingConfigParameters.
func (*SparkConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkConfigLoggingConfigParameters) DeepCopyInto(out *SparkConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigObservation ¶
type SparkConfigObservation struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *SparkConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The class containing the main method of the driver. Must be in a // provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of jar file containing // the driver jar. Conflicts with main_class MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkConfigObservation) DeepCopy ¶
func (in *SparkConfigObservation) DeepCopy() *SparkConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigObservation.
func (*SparkConfigObservation) DeepCopyInto ¶
func (in *SparkConfigObservation) DeepCopyInto(out *SparkConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkConfigParameters ¶
type SparkConfigParameters struct { // HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig *SparkConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The class containing the main method of the driver. Must be in a // provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri // +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of jar file containing // the driver jar. Conflicts with main_class // +kubebuilder:validation:Optional MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkConfigParameters) DeepCopy ¶
func (in *SparkConfigParameters) DeepCopy() *SparkConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkConfigParameters.
func (*SparkConfigParameters) DeepCopyInto ¶
func (in *SparkConfigParameters) DeepCopyInto(out *SparkConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigInitParameters ¶
// SparkHistoryServerConfigInitParameters holds the init-provider values for
// the spark_history_server_config block.
type SparkHistoryServerConfigInitParameters struct {
	// Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
	DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"`
}
func (*SparkHistoryServerConfigInitParameters) DeepCopy ¶
func (in *SparkHistoryServerConfigInitParameters) DeepCopy() *SparkHistoryServerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigInitParameters.
func (*SparkHistoryServerConfigInitParameters) DeepCopyInto ¶
func (in *SparkHistoryServerConfigInitParameters) DeepCopyInto(out *SparkHistoryServerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigObservation ¶
// SparkHistoryServerConfigObservation holds the observed state of the
// spark_history_server_config block.
type SparkHistoryServerConfigObservation struct {
	// Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
	DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"`
}
func (*SparkHistoryServerConfigObservation) DeepCopy ¶
func (in *SparkHistoryServerConfigObservation) DeepCopy() *SparkHistoryServerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigObservation.
func (*SparkHistoryServerConfigObservation) DeepCopyInto ¶
func (in *SparkHistoryServerConfigObservation) DeepCopyInto(out *SparkHistoryServerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkHistoryServerConfigParameters ¶
// SparkHistoryServerConfigParameters holds the forProvider values for the
// spark_history_server_config block.
type SparkHistoryServerConfigParameters struct {
	// Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
	// +kubebuilder:validation:Optional
	DataprocCluster *string `json:"dataprocCluster,omitempty" tf:"dataproc_cluster,omitempty"`
}
func (*SparkHistoryServerConfigParameters) DeepCopy ¶
func (in *SparkHistoryServerConfigParameters) DeepCopy() *SparkHistoryServerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkHistoryServerConfigParameters.
func (*SparkHistoryServerConfigParameters) DeepCopyInto ¶
func (in *SparkHistoryServerConfigParameters) DeepCopyInto(out *SparkHistoryServerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobInitParameters ¶
type SparkJobInitParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *SparkJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkJobInitParameters) DeepCopy ¶
func (in *SparkJobInitParameters) DeepCopy() *SparkJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobInitParameters.
func (*SparkJobInitParameters) DeepCopyInto ¶
func (in *SparkJobInitParameters) DeepCopyInto(out *SparkJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobLoggingConfigInitParameters ¶
// SparkJobLoggingConfigInitParameters holds the init-provider values for the
// spark_job logging_config block.
type SparkJobLoggingConfigInitParameters struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkJobLoggingConfigInitParameters) DeepCopy ¶
func (in *SparkJobLoggingConfigInitParameters) DeepCopy() *SparkJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobLoggingConfigInitParameters.
func (*SparkJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *SparkJobLoggingConfigInitParameters) DeepCopyInto(out *SparkJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobLoggingConfigObservation ¶
// SparkJobLoggingConfigObservation holds the observed state of the
// spark_job logging_config block.
type SparkJobLoggingConfigObservation struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkJobLoggingConfigObservation) DeepCopy ¶
func (in *SparkJobLoggingConfigObservation) DeepCopy() *SparkJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobLoggingConfigObservation.
func (*SparkJobLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkJobLoggingConfigObservation) DeepCopyInto(out *SparkJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobLoggingConfigParameters ¶
// SparkJobLoggingConfigParameters holds the forProvider values for the
// spark_job logging_config block.
type SparkJobLoggingConfigParameters struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkJobLoggingConfigParameters) DeepCopy ¶
func (in *SparkJobLoggingConfigParameters) DeepCopy() *SparkJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobLoggingConfigParameters.
func (*SparkJobLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkJobLoggingConfigParameters) DeepCopyInto(out *SparkJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobObservation ¶
type SparkJobObservation struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *SparkJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkJobObservation) DeepCopy ¶
func (in *SparkJobObservation) DeepCopy() *SparkJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobObservation.
func (*SparkJobObservation) DeepCopyInto ¶
func (in *SparkJobObservation) DeepCopyInto(out *SparkJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkJobParameters ¶
type SparkJobParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *SparkJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. // +kubebuilder:validation:Optional MainClass *string `json:"mainClass,omitempty" tf:"main_class,omitempty"` // The HCFS URI of the jar file that contains the main class. // +kubebuilder:validation:Optional MainJarFileURI *string `json:"mainJarFileUri,omitempty" tf:"main_jar_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkJobParameters) DeepCopy ¶
func (in *SparkJobParameters) DeepCopy() *SparkJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkJobParameters.
func (*SparkJobParameters) DeepCopyInto ¶
func (in *SparkJobParameters) DeepCopyInto(out *SparkJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobInitParameters ¶
type SparkRJobInitParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *SparkRJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main R file to use as the driver. Must be a .R file. MainRFileURI *string `json:"mainRFileUri,omitempty" tf:"main_r_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkRJobInitParameters) DeepCopy ¶
func (in *SparkRJobInitParameters) DeepCopy() *SparkRJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobInitParameters.
func (*SparkRJobInitParameters) DeepCopyInto ¶
func (in *SparkRJobInitParameters) DeepCopyInto(out *SparkRJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobLoggingConfigInitParameters ¶
// SparkRJobLoggingConfigInitParameters holds the init-provider values for the
// spark_r_job logging_config block.
type SparkRJobLoggingConfigInitParameters struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkRJobLoggingConfigInitParameters) DeepCopy ¶
func (in *SparkRJobLoggingConfigInitParameters) DeepCopy() *SparkRJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobLoggingConfigInitParameters.
func (*SparkRJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *SparkRJobLoggingConfigInitParameters) DeepCopyInto(out *SparkRJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobLoggingConfigObservation ¶
// SparkRJobLoggingConfigObservation holds the observed state of the
// spark_r_job logging_config block.
type SparkRJobLoggingConfigObservation struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkRJobLoggingConfigObservation) DeepCopy ¶
func (in *SparkRJobLoggingConfigObservation) DeepCopy() *SparkRJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobLoggingConfigObservation.
func (*SparkRJobLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkRJobLoggingConfigObservation) DeepCopyInto(out *SparkRJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobLoggingConfigParameters ¶
// SparkRJobLoggingConfigParameters holds the forProvider values for the
// spark_r_job logging_config block.
type SparkRJobLoggingConfigParameters struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkRJobLoggingConfigParameters) DeepCopy ¶
func (in *SparkRJobLoggingConfigParameters) DeepCopy() *SparkRJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobLoggingConfigParameters.
func (*SparkRJobLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkRJobLoggingConfigParameters) DeepCopyInto(out *SparkRJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobObservation ¶
type SparkRJobObservation struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *SparkRJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main R file to use as the driver. Must be a .R file. MainRFileURI *string `json:"mainRFileUri,omitempty" tf:"main_r_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkRJobObservation) DeepCopy ¶
func (in *SparkRJobObservation) DeepCopy() *SparkRJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobObservation.
func (*SparkRJobObservation) DeepCopyInto ¶
func (in *SparkRJobObservation) DeepCopyInto(out *SparkRJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkRJobParameters ¶
type SparkRJobParameters struct { // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // +kubebuilder:validation:Optional ArchiveUris []*string `json:"archiveUris,omitempty" tf:"archive_uris,omitempty"` // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // +kubebuilder:validation:Optional Args []*string `json:"args,omitempty" tf:"args,omitempty"` // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // +kubebuilder:validation:Optional FileUris []*string `json:"fileUris,omitempty" tf:"file_uris,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *SparkRJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // Required. The HCFS URI of the main R file to use as the driver. Must be a .R file. // +kubebuilder:validation:Optional MainRFileURI *string `json:"mainRFileUri" tf:"main_r_file_uri,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` }
func (*SparkRJobParameters) DeepCopy ¶
func (in *SparkRJobParameters) DeepCopy() *SparkRJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkRJobParameters.
func (*SparkRJobParameters) DeepCopyInto ¶
func (in *SparkRJobParameters) DeepCopyInto(out *SparkRJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobInitParameters ¶
type SparkSQLJobInitParameters struct { // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *SparkSQLJobLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *SparkSQLJobQueryListInitParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*SparkSQLJobInitParameters) DeepCopy ¶
func (in *SparkSQLJobInitParameters) DeepCopy() *SparkSQLJobInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobInitParameters.
func (*SparkSQLJobInitParameters) DeepCopyInto ¶
func (in *SparkSQLJobInitParameters) DeepCopyInto(out *SparkSQLJobInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobLoggingConfigInitParameters ¶
// SparkSQLJobLoggingConfigInitParameters holds the init-provider values for
// the spark_sql_job logging_config block.
type SparkSQLJobLoggingConfigInitParameters struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkSQLJobLoggingConfigInitParameters) DeepCopy ¶
func (in *SparkSQLJobLoggingConfigInitParameters) DeepCopy() *SparkSQLJobLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobLoggingConfigInitParameters.
func (*SparkSQLJobLoggingConfigInitParameters) DeepCopyInto ¶
func (in *SparkSQLJobLoggingConfigInitParameters) DeepCopyInto(out *SparkSQLJobLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobLoggingConfigObservation ¶
// SparkSQLJobLoggingConfigObservation holds the observed state of the
// spark_sql_job logging_config block.
type SparkSQLJobLoggingConfigObservation struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkSQLJobLoggingConfigObservation) DeepCopy ¶
func (in *SparkSQLJobLoggingConfigObservation) DeepCopy() *SparkSQLJobLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobLoggingConfigObservation.
func (*SparkSQLJobLoggingConfigObservation) DeepCopyInto ¶
func (in *SparkSQLJobLoggingConfigObservation) DeepCopyInto(out *SparkSQLJobLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobLoggingConfigParameters ¶
// SparkSQLJobLoggingConfigParameters holds the forProvider values for the
// spark_sql_job logging_config block.
type SparkSQLJobLoggingConfigParameters struct {
	// The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	// +kubebuilder:validation:Optional
	// +mapType=granular
	DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"`
}
func (*SparkSQLJobLoggingConfigParameters) DeepCopy ¶
func (in *SparkSQLJobLoggingConfigParameters) DeepCopy() *SparkSQLJobLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobLoggingConfigParameters.
func (*SparkSQLJobLoggingConfigParameters) DeepCopyInto ¶
func (in *SparkSQLJobLoggingConfigParameters) DeepCopyInto(out *SparkSQLJobLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobObservation ¶
type SparkSQLJobObservation struct { // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. LoggingConfig *SparkSQLJobLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. QueryList *SparkSQLJobQueryListObservation `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*SparkSQLJobObservation) DeepCopy ¶
func (in *SparkSQLJobObservation) DeepCopy() *SparkSQLJobObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobObservation.
func (*SparkSQLJobObservation) DeepCopyInto ¶
func (in *SparkSQLJobObservation) DeepCopyInto(out *SparkSQLJobObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobParameters ¶
type SparkSQLJobParameters struct { // HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // The runtime log config for job execution. // +kubebuilder:validation:Optional LoggingConfig *SparkSQLJobLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // A list of queries. // +kubebuilder:validation:Optional QueryList *SparkSQLJobQueryListParameters `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +kubebuilder:validation:Optional // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*SparkSQLJobParameters) DeepCopy ¶
func (in *SparkSQLJobParameters) DeepCopy() *SparkSQLJobParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobParameters.
func (*SparkSQLJobParameters) DeepCopyInto ¶
func (in *SparkSQLJobParameters) DeepCopyInto(out *SparkSQLJobParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobQueryListInitParameters ¶
type SparkSQLJobQueryListInitParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": ["query1", "query2", "query3;query4"] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*SparkSQLJobQueryListInitParameters) DeepCopy ¶
func (in *SparkSQLJobQueryListInitParameters) DeepCopy() *SparkSQLJobQueryListInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobQueryListInitParameters.
func (*SparkSQLJobQueryListInitParameters) DeepCopyInto ¶
func (in *SparkSQLJobQueryListInitParameters) DeepCopyInto(out *SparkSQLJobQueryListInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobQueryListObservation ¶
type SparkSQLJobQueryListObservation struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": ["query1", "query2", "query3;query4"] } } Queries []*string `json:"queries,omitempty" tf:"queries,omitempty"` }
func (*SparkSQLJobQueryListObservation) DeepCopy ¶
func (in *SparkSQLJobQueryListObservation) DeepCopy() *SparkSQLJobQueryListObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobQueryListObservation.
func (*SparkSQLJobQueryListObservation) DeepCopyInto ¶
func (in *SparkSQLJobQueryListObservation) DeepCopyInto(out *SparkSQLJobQueryListObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparkSQLJobQueryListParameters ¶
type SparkSQLJobQueryListParameters struct { // Required. The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": ["query1", "query2", "query3;query4"] } } // +kubebuilder:validation:Optional Queries []*string `json:"queries" tf:"queries,omitempty"` }
func (*SparkSQLJobQueryListParameters) DeepCopy ¶
func (in *SparkSQLJobQueryListParameters) DeepCopy() *SparkSQLJobQueryListParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkSQLJobQueryListParameters.
func (*SparkSQLJobQueryListParameters) DeepCopyInto ¶
func (in *SparkSQLJobQueryListParameters) DeepCopyInto(out *SparkSQLJobQueryListParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigInitParameters ¶
type SparksqlConfigInitParameters struct { // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *SparksqlConfigLoggingConfigInitParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*SparksqlConfigInitParameters) DeepCopy ¶
func (in *SparksqlConfigInitParameters) DeepCopy() *SparksqlConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigInitParameters.
func (*SparksqlConfigInitParameters) DeepCopyInto ¶
func (in *SparksqlConfigInitParameters) DeepCopyInto(out *SparksqlConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigLoggingConfigInitParameters ¶
type SparksqlConfigLoggingConfigInitParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparksqlConfigLoggingConfigInitParameters) DeepCopy ¶
func (in *SparksqlConfigLoggingConfigInitParameters) DeepCopy() *SparksqlConfigLoggingConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigLoggingConfigInitParameters.
func (*SparksqlConfigLoggingConfigInitParameters) DeepCopyInto ¶
func (in *SparksqlConfigLoggingConfigInitParameters) DeepCopyInto(out *SparksqlConfigLoggingConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigLoggingConfigObservation ¶
type SparksqlConfigLoggingConfigObservation struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels,omitempty" tf:"driver_log_levels,omitempty"` }
func (*SparksqlConfigLoggingConfigObservation) DeepCopy ¶
func (in *SparksqlConfigLoggingConfigObservation) DeepCopy() *SparksqlConfigLoggingConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigLoggingConfigObservation.
func (*SparksqlConfigLoggingConfigObservation) DeepCopyInto ¶
func (in *SparksqlConfigLoggingConfigObservation) DeepCopyInto(out *SparksqlConfigLoggingConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigLoggingConfigParameters ¶
type SparksqlConfigLoggingConfigParameters struct { // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // +kubebuilder:validation:Optional // +mapType=granular DriverLogLevels map[string]*string `json:"driverLogLevels" tf:"driver_log_levels,omitempty"` }
func (*SparksqlConfigLoggingConfigParameters) DeepCopy ¶
func (in *SparksqlConfigLoggingConfigParameters) DeepCopy() *SparksqlConfigLoggingConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigLoggingConfigParameters.
func (*SparksqlConfigLoggingConfigParameters) DeepCopyInto ¶
func (in *SparksqlConfigLoggingConfigParameters) DeepCopyInto(out *SparksqlConfigLoggingConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigObservation ¶
type SparksqlConfigObservation struct { // HCFS URIs of jar files to be added to the Spark CLASSPATH. JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` LoggingConfig *SparksqlConfigLoggingConfigObservation `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*SparksqlConfigObservation) DeepCopy ¶
func (in *SparksqlConfigObservation) DeepCopy() *SparksqlConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigObservation.
func (*SparksqlConfigObservation) DeepCopyInto ¶
func (in *SparksqlConfigObservation) DeepCopyInto(out *SparksqlConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SparksqlConfigParameters ¶
type SparksqlConfigParameters struct { // HCFS URIs of jar files to be added to the Spark CLASSPATH. // +kubebuilder:validation:Optional JarFileUris []*string `json:"jarFileUris,omitempty" tf:"jar_file_uris,omitempty"` // +kubebuilder:validation:Optional LoggingConfig *SparksqlConfigLoggingConfigParameters `json:"loggingConfig,omitempty" tf:"logging_config,omitempty"` // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. // +kubebuilder:validation:Optional // +mapType=granular Properties map[string]*string `json:"properties,omitempty" tf:"properties,omitempty"` // The HCFS URI of the script that contains SQL queries. // Conflicts with query_list // +kubebuilder:validation:Optional QueryFileURI *string `json:"queryFileUri,omitempty" tf:"query_file_uri,omitempty"` // The list of SQL queries or statements to execute as part of the job. // Conflicts with query_file_uri // +kubebuilder:validation:Optional QueryList []*string `json:"queryList,omitempty" tf:"query_list,omitempty"` // Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";). // +kubebuilder:validation:Optional // +mapType=granular ScriptVariables map[string]*string `json:"scriptVariables,omitempty" tf:"script_variables,omitempty"` }
func (*SparksqlConfigParameters) DeepCopy ¶
func (in *SparksqlConfigParameters) DeepCopy() *SparksqlConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparksqlConfigParameters.
func (*SparksqlConfigParameters) DeepCopyInto ¶
func (in *SparksqlConfigParameters) DeepCopyInto(out *SparksqlConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusInitParameters ¶
type StatusInitParameters struct { }
func (*StatusInitParameters) DeepCopy ¶
func (in *StatusInitParameters) DeepCopy() *StatusInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusInitParameters.
func (*StatusInitParameters) DeepCopyInto ¶
func (in *StatusInitParameters) DeepCopyInto(out *StatusInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusObservation ¶
type StatusObservation struct { // Optional job state details, such as an error description if the state is ERROR. Details *string `json:"details,omitempty" tf:"details,omitempty"` // A state message specifying the overall job state. State *string `json:"state,omitempty" tf:"state,omitempty"` // The time when this state was entered. StateStartTime *string `json:"stateStartTime,omitempty" tf:"state_start_time,omitempty"` // Additional state information, which includes status reported by the agent. Substate *string `json:"substate,omitempty" tf:"substate,omitempty"` }
func (*StatusObservation) DeepCopy ¶
func (in *StatusObservation) DeepCopy() *StatusObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusObservation.
func (*StatusObservation) DeepCopyInto ¶
func (in *StatusObservation) DeepCopyInto(out *StatusObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StatusParameters ¶
type StatusParameters struct { }
func (*StatusParameters) DeepCopy ¶
func (in *StatusParameters) DeepCopy() *StatusParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StatusParameters.
func (*StatusParameters) DeepCopyInto ¶
func (in *StatusParameters) DeepCopyInto(out *StatusParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TelemetryConfigInitParameters ¶
type TelemetryConfigInitParameters struct { // The output format of the Dataproc Metastore service's logs. // Default value is JSON. // Possible values are: LEGACY, JSON. LogFormat *string `json:"logFormat,omitempty" tf:"log_format,omitempty"` }
func (*TelemetryConfigInitParameters) DeepCopy ¶
func (in *TelemetryConfigInitParameters) DeepCopy() *TelemetryConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TelemetryConfigInitParameters.
func (*TelemetryConfigInitParameters) DeepCopyInto ¶
func (in *TelemetryConfigInitParameters) DeepCopyInto(out *TelemetryConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TelemetryConfigObservation ¶
type TelemetryConfigObservation struct { // The output format of the Dataproc Metastore service's logs. // Default value is JSON. // Possible values are: LEGACY, JSON. LogFormat *string `json:"logFormat,omitempty" tf:"log_format,omitempty"` }
func (*TelemetryConfigObservation) DeepCopy ¶
func (in *TelemetryConfigObservation) DeepCopy() *TelemetryConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TelemetryConfigObservation.
func (*TelemetryConfigObservation) DeepCopyInto ¶
func (in *TelemetryConfigObservation) DeepCopyInto(out *TelemetryConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TelemetryConfigParameters ¶
type TelemetryConfigParameters struct { // The output format of the Dataproc Metastore service's logs. // Default value is JSON. // Possible values are: LEGACY, JSON. // +kubebuilder:validation:Optional LogFormat *string `json:"logFormat,omitempty" tf:"log_format,omitempty"` }
func (*TelemetryConfigParameters) DeepCopy ¶
func (in *TelemetryConfigParameters) DeepCopy() *TelemetryConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TelemetryConfigParameters.
func (*TelemetryConfigParameters) DeepCopyInto ¶
func (in *TelemetryConfigParameters) DeepCopyInto(out *TelemetryConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValidationInitParameters ¶
type ValidationInitParameters struct { // Validation based on regular expressions. Regex *RegexInitParameters `json:"regex,omitempty" tf:"regex,omitempty"` // Required. List of allowed values for the parameter. Values *ValuesInitParameters `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValidationInitParameters) DeepCopy ¶
func (in *ValidationInitParameters) DeepCopy() *ValidationInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidationInitParameters.
func (*ValidationInitParameters) DeepCopyInto ¶
func (in *ValidationInitParameters) DeepCopyInto(out *ValidationInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValidationObservation ¶
type ValidationObservation struct { // Validation based on regular expressions. Regex *RegexObservation `json:"regex,omitempty" tf:"regex,omitempty"` // Required. List of allowed values for the parameter. Values *ValuesObservation `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValidationObservation) DeepCopy ¶
func (in *ValidationObservation) DeepCopy() *ValidationObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidationObservation.
func (*ValidationObservation) DeepCopyInto ¶
func (in *ValidationObservation) DeepCopyInto(out *ValidationObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValidationParameters ¶
type ValidationParameters struct { // Validation based on regular expressions. // +kubebuilder:validation:Optional Regex *RegexParameters `json:"regex,omitempty" tf:"regex,omitempty"` // Required. List of allowed values for the parameter. // +kubebuilder:validation:Optional Values *ValuesParameters `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValidationParameters) DeepCopy ¶
func (in *ValidationParameters) DeepCopy() *ValidationParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValidationParameters.
func (*ValidationParameters) DeepCopyInto ¶
func (in *ValidationParameters) DeepCopyInto(out *ValidationParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValuesInitParameters ¶
type ValuesInitParameters struct { // Required. List of allowed values for the parameter. Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValuesInitParameters) DeepCopy ¶
func (in *ValuesInitParameters) DeepCopy() *ValuesInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValuesInitParameters.
func (*ValuesInitParameters) DeepCopyInto ¶
func (in *ValuesInitParameters) DeepCopyInto(out *ValuesInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValuesObservation ¶
type ValuesObservation struct { // Required. List of allowed values for the parameter. Values []*string `json:"values,omitempty" tf:"values,omitempty"` }
func (*ValuesObservation) DeepCopy ¶
func (in *ValuesObservation) DeepCopy() *ValuesObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValuesObservation.
func (*ValuesObservation) DeepCopyInto ¶
func (in *ValuesObservation) DeepCopyInto(out *ValuesObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ValuesParameters ¶
type ValuesParameters struct { // Required. List of allowed values for the parameter. // +kubebuilder:validation:Optional Values []*string `json:"values" tf:"values,omitempty"` }
func (*ValuesParameters) DeepCopy ¶
func (in *ValuesParameters) DeepCopy() *ValuesParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ValuesParameters.
func (*ValuesParameters) DeepCopyInto ¶
func (in *ValuesParameters) DeepCopyInto(out *ValuesParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type VirtualClusterConfigInitParameters ¶
type VirtualClusterConfigInitParameters struct { // Configuration of auxiliary services used by this cluster. // Structure defined below. AuxiliaryServicesConfig *AuxiliaryServicesConfigInitParameters `json:"auxiliaryServicesConfig,omitempty" tf:"auxiliary_services_config,omitempty"` // The configuration for running the Dataproc cluster on Kubernetes. // Structure defined below. KubernetesClusterConfig *KubernetesClusterConfigInitParameters `json:"kubernetesClusterConfig,omitempty" tf:"kubernetes_cluster_config,omitempty"` // The Cloud Storage staging bucket used to stage files, // such as Hadoop jars, between client machines and the cluster. // Note: If you don't explicitly specify a staging_bucket // then GCP will auto create / assign one for you. However, you are not guaranteed // an auto generated bucket which is solely dedicated to your cluster; it may be shared // with other clusters in the same region/zone also choosing to use the auto generation // option. StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` }
func (*VirtualClusterConfigInitParameters) DeepCopy ¶
func (in *VirtualClusterConfigInitParameters) DeepCopy() *VirtualClusterConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VirtualClusterConfigInitParameters.
func (*VirtualClusterConfigInitParameters) DeepCopyInto ¶
func (in *VirtualClusterConfigInitParameters) DeepCopyInto(out *VirtualClusterConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type VirtualClusterConfigObservation ¶
type VirtualClusterConfigObservation struct { // Configuration of auxiliary services used by this cluster. // Structure defined below. AuxiliaryServicesConfig *AuxiliaryServicesConfigObservation `json:"auxiliaryServicesConfig,omitempty" tf:"auxiliary_services_config,omitempty"` // The configuration for running the Dataproc cluster on Kubernetes. // Structure defined below. KubernetesClusterConfig *KubernetesClusterConfigObservation `json:"kubernetesClusterConfig,omitempty" tf:"kubernetes_cluster_config,omitempty"` // The Cloud Storage staging bucket used to stage files, // such as Hadoop jars, between client machines and the cluster. // Note: If you don't explicitly specify a staging_bucket // then GCP will auto create / assign one for you. However, you are not guaranteed // an auto generated bucket which is solely dedicated to your cluster; it may be shared // with other clusters in the same region/zone also choosing to use the auto generation // option. StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` }
func (*VirtualClusterConfigObservation) DeepCopy ¶
func (in *VirtualClusterConfigObservation) DeepCopy() *VirtualClusterConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VirtualClusterConfigObservation.
func (*VirtualClusterConfigObservation) DeepCopyInto ¶
func (in *VirtualClusterConfigObservation) DeepCopyInto(out *VirtualClusterConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type VirtualClusterConfigParameters ¶
type VirtualClusterConfigParameters struct { // Configuration of auxiliary services used by this cluster. // Structure defined below. // +kubebuilder:validation:Optional AuxiliaryServicesConfig *AuxiliaryServicesConfigParameters `json:"auxiliaryServicesConfig,omitempty" tf:"auxiliary_services_config,omitempty"` // The configuration for running the Dataproc cluster on Kubernetes. // Structure defined below. // +kubebuilder:validation:Optional KubernetesClusterConfig *KubernetesClusterConfigParameters `json:"kubernetesClusterConfig,omitempty" tf:"kubernetes_cluster_config,omitempty"` // The Cloud Storage staging bucket used to stage files, // such as Hadoop jars, between client machines and the cluster. // Note: If you don't explicitly specify a staging_bucket // then GCP will auto create / assign one for you. However, you are not guaranteed // an auto generated bucket which is solely dedicated to your cluster; it may be shared // with other clusters in the same region/zone also choosing to use the auto generation // option. // +kubebuilder:validation:Optional StagingBucket *string `json:"stagingBucket,omitempty" tf:"staging_bucket,omitempty"` }
func (*VirtualClusterConfigParameters) DeepCopy ¶
func (in *VirtualClusterConfigParameters) DeepCopy() *VirtualClusterConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VirtualClusterConfigParameters.
func (*VirtualClusterConfigParameters) DeepCopyInto ¶
func (in *VirtualClusterConfigParameters) DeepCopyInto(out *VirtualClusterConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigAcceleratorsInitParameters ¶
type WorkerConfigAcceleratorsInitParameters struct { // The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*WorkerConfigAcceleratorsInitParameters) DeepCopy ¶
func (in *WorkerConfigAcceleratorsInitParameters) DeepCopy() *WorkerConfigAcceleratorsInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigAcceleratorsInitParameters.
func (*WorkerConfigAcceleratorsInitParameters) DeepCopyInto ¶
func (in *WorkerConfigAcceleratorsInitParameters) DeepCopyInto(out *WorkerConfigAcceleratorsInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigAcceleratorsObservation ¶
type WorkerConfigAcceleratorsObservation struct { // The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8. AcceleratorCount *float64 `json:"acceleratorCount,omitempty" tf:"accelerator_count,omitempty"` // The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80. AcceleratorType *string `json:"acceleratorType,omitempty" tf:"accelerator_type,omitempty"` }
func (*WorkerConfigAcceleratorsObservation) DeepCopy ¶
func (in *WorkerConfigAcceleratorsObservation) DeepCopy() *WorkerConfigAcceleratorsObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigAcceleratorsObservation.
func (*WorkerConfigAcceleratorsObservation) DeepCopyInto ¶
func (in *WorkerConfigAcceleratorsObservation) DeepCopyInto(out *WorkerConfigAcceleratorsObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigAcceleratorsParameters ¶
type WorkerConfigAcceleratorsParameters struct { // The number of the accelerator cards of this type exposed to this instance. Often restricted to one of 1, 2, 4, or 8. // +kubebuilder:validation:Optional AcceleratorCount *float64 `json:"acceleratorCount" tf:"accelerator_count,omitempty"` // The short name of the accelerator type to expose to this instance. For example, nvidia-tesla-k80. // +kubebuilder:validation:Optional AcceleratorType *string `json:"acceleratorType" tf:"accelerator_type,omitempty"` }
func (*WorkerConfigAcceleratorsParameters) DeepCopy ¶
func (in *WorkerConfigAcceleratorsParameters) DeepCopy() *WorkerConfigAcceleratorsParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigAcceleratorsParameters.
func (*WorkerConfigAcceleratorsParameters) DeepCopyInto ¶
func (in *WorkerConfigAcceleratorsParameters) DeepCopyInto(out *WorkerConfigAcceleratorsParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigDiskConfigInitParameters ¶
type WorkerConfigDiskConfigInitParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each master cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*WorkerConfigDiskConfigInitParameters) DeepCopy ¶
func (in *WorkerConfigDiskConfigInitParameters) DeepCopy() *WorkerConfigDiskConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigDiskConfigInitParameters.
func (*WorkerConfigDiskConfigInitParameters) DeepCopyInto ¶
func (in *WorkerConfigDiskConfigInitParameters) DeepCopyInto(out *WorkerConfigDiskConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigDiskConfigObservation ¶
type WorkerConfigDiskConfigObservation struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each worker cluster node. Defaults to 0. NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*WorkerConfigDiskConfigObservation) DeepCopy ¶
func (in *WorkerConfigDiskConfigObservation) DeepCopy() *WorkerConfigDiskConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigDiskConfigObservation.
func (*WorkerConfigDiskConfigObservation) DeepCopyInto ¶
func (in *WorkerConfigDiskConfigObservation) DeepCopyInto(out *WorkerConfigDiskConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigDiskConfigParameters ¶
type WorkerConfigDiskConfigParameters struct { // Size of the primary disk attached to each node, specified // in GB. The primary disk contains the boot volume and system libraries, and the // smallest allowed disk size is 10GB. GCP will default to a predetermined // computed value if not set (currently 500GB). Note: If SSDs are not // attached, it also contains the HDFS data blocks and Hadoop working directories. // +kubebuilder:validation:Optional BootDiskSizeGb *float64 `json:"bootDiskSizeGb,omitempty" tf:"boot_disk_size_gb,omitempty"` // The disk type of the primary disk attached to each node. // One of "pd-ssd" or "pd-standard". Defaults to "pd-standard". // +kubebuilder:validation:Optional BootDiskType *string `json:"bootDiskType,omitempty" tf:"boot_disk_type,omitempty"` // Optional. Interface type of local SSDs (default is "scsi"). // Valid values: "scsi" (Small Computer System Interface), "nvme" (Non-Volatile // Memory Express). See // local SSD performance. // +kubebuilder:validation:Optional LocalSsdInterface *string `json:"localSsdInterface,omitempty" tf:"local_ssd_interface,omitempty"` // The amount of local SSD disks that will be // attached to each worker cluster node. Defaults to 0. // +kubebuilder:validation:Optional NumLocalSsds *float64 `json:"numLocalSsds,omitempty" tf:"num_local_ssds,omitempty"` }
func (*WorkerConfigDiskConfigParameters) DeepCopy ¶
func (in *WorkerConfigDiskConfigParameters) DeepCopy() *WorkerConfigDiskConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigDiskConfigParameters.
func (*WorkerConfigDiskConfigParameters) DeepCopyInto ¶
func (in *WorkerConfigDiskConfigParameters) DeepCopyInto(out *WorkerConfigDiskConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigInitParameters ¶
type WorkerConfigInitParameters struct { // Maximum number of instances for this group. MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"` // Minimum number of instances for this group. Bounds: [2, maxInstances]. Defaults to 2. MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"` // Weight for the instance group, which is used to determine the fraction of total workers // in the cluster from this instance group. For example, if primary workers have weight 2, // and secondary workers have weight 1, the cluster will have approximately 2 primary workers // for each secondary worker. // The cluster may not reach the specified balance if constrained by min/max bounds or other // autoscaling settings. For example, if maxInstances for secondary workers is 0, then only // primary workers will be added. The cluster can also be out of balance when created. // If weight is not set on any instance group, the cluster will default to equal weight for // all groups: the cluster will attempt to maintain an equal number of workers in each group // within the configured size bounds for each group. If weight is set for one group only, // the cluster will default to zero weight on the unset group. For example if weight is set // only on primary workers, the cluster will use primary workers only and no secondary workers. Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"` }
func (*WorkerConfigInitParameters) DeepCopy ¶
func (in *WorkerConfigInitParameters) DeepCopy() *WorkerConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigInitParameters.
func (*WorkerConfigInitParameters) DeepCopyInto ¶
func (in *WorkerConfigInitParameters) DeepCopyInto(out *WorkerConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigManagedGroupConfigInitParameters ¶
type WorkerConfigManagedGroupConfigInitParameters struct { }
func (*WorkerConfigManagedGroupConfigInitParameters) DeepCopy ¶
func (in *WorkerConfigManagedGroupConfigInitParameters) DeepCopy() *WorkerConfigManagedGroupConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigManagedGroupConfigInitParameters.
func (*WorkerConfigManagedGroupConfigInitParameters) DeepCopyInto ¶
func (in *WorkerConfigManagedGroupConfigInitParameters) DeepCopyInto(out *WorkerConfigManagedGroupConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigManagedGroupConfigObservation ¶
type WorkerConfigManagedGroupConfigObservation struct { // Output only. The name of the Instance Group Manager for this group. InstanceGroupManagerName *string `json:"instanceGroupManagerName,omitempty" tf:"instance_group_manager_name,omitempty"` // Output only. The name of the Instance Template used for the Managed Instance Group. InstanceTemplateName *string `json:"instanceTemplateName,omitempty" tf:"instance_template_name,omitempty"` }
func (*WorkerConfigManagedGroupConfigObservation) DeepCopy ¶
func (in *WorkerConfigManagedGroupConfigObservation) DeepCopy() *WorkerConfigManagedGroupConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigManagedGroupConfigObservation.
func (*WorkerConfigManagedGroupConfigObservation) DeepCopyInto ¶
func (in *WorkerConfigManagedGroupConfigObservation) DeepCopyInto(out *WorkerConfigManagedGroupConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigManagedGroupConfigParameters ¶
type WorkerConfigManagedGroupConfigParameters struct { }
func (*WorkerConfigManagedGroupConfigParameters) DeepCopy ¶
func (in *WorkerConfigManagedGroupConfigParameters) DeepCopy() *WorkerConfigManagedGroupConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigManagedGroupConfigParameters.
func (*WorkerConfigManagedGroupConfigParameters) DeepCopyInto ¶
func (in *WorkerConfigManagedGroupConfigParameters) DeepCopyInto(out *WorkerConfigManagedGroupConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigObservation ¶
type WorkerConfigObservation struct { // Maximum number of instances for this group. MaxInstances *float64 `json:"maxInstances,omitempty" tf:"max_instances,omitempty"` // Minimum number of instances for this group. Bounds: [2, maxInstances]. Defaults to 2. MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"` // Weight for the instance group, which is used to determine the fraction of total workers // in the cluster from this instance group. For example, if primary workers have weight 2, // and secondary workers have weight 1, the cluster will have approximately 2 primary workers // for each secondary worker. // The cluster may not reach the specified balance if constrained by min/max bounds or other // autoscaling settings. For example, if maxInstances for secondary workers is 0, then only // primary workers will be added. The cluster can also be out of balance when created. // If weight is not set on any instance group, the cluster will default to equal weight for // all groups: the cluster will attempt to maintain an equal number of workers in each group // within the configured size bounds for each group. If weight is set for one group only, // the cluster will default to zero weight on the unset group. For example if weight is set // only on primary workers, the cluster will use primary workers only and no secondary workers. Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"` }
func (*WorkerConfigObservation) DeepCopy ¶
func (in *WorkerConfigObservation) DeepCopy() *WorkerConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigObservation.
func (*WorkerConfigObservation) DeepCopyInto ¶
func (in *WorkerConfigObservation) DeepCopyInto(out *WorkerConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkerConfigParameters ¶
type WorkerConfigParameters struct { // Maximum number of instances for this group. // +kubebuilder:validation:Optional MaxInstances *float64 `json:"maxInstances" tf:"max_instances,omitempty"` // Minimum number of instances for this group. Bounds: [2, maxInstances]. Defaults to 2. // +kubebuilder:validation:Optional MinInstances *float64 `json:"minInstances,omitempty" tf:"min_instances,omitempty"` // Weight for the instance group, which is used to determine the fraction of total workers // in the cluster from this instance group. For example, if primary workers have weight 2, // and secondary workers have weight 1, the cluster will have approximately 2 primary workers // for each secondary worker. // The cluster may not reach the specified balance if constrained by min/max bounds or other // autoscaling settings. For example, if maxInstances for secondary workers is 0, then only // primary workers will be added. The cluster can also be out of balance when created. // If weight is not set on any instance group, the cluster will default to equal weight for // all groups: the cluster will attempt to maintain an equal number of workers in each group // within the configured size bounds for each group. If weight is set for one group only, // the cluster will default to zero weight on the unset group. For example if weight is set // only on primary workers, the cluster will use primary workers only and no secondary workers. // +kubebuilder:validation:Optional Weight *float64 `json:"weight,omitempty" tf:"weight,omitempty"` }
func (*WorkerConfigParameters) DeepCopy ¶
func (in *WorkerConfigParameters) DeepCopy() *WorkerConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkerConfigParameters.
func (*WorkerConfigParameters) DeepCopyInto ¶
func (in *WorkerConfigParameters) DeepCopyInto(out *WorkerConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplate ¶
type WorkflowTemplate struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.jobs) || (has(self.initProvider) && has(self.initProvider.jobs))",message="spec.forProvider.jobs is a required parameter" // +kubebuilder:validation:XValidation:rule="!('*' in self.managementPolicies || 'Create' in self.managementPolicies || 'Update' in self.managementPolicies) || has(self.forProvider.placement) || (has(self.initProvider) && has(self.initProvider.placement))",message="spec.forProvider.placement is a required parameter" Spec WorkflowTemplateSpec `json:"spec"` Status WorkflowTemplateStatus `json:"status,omitempty"` }
WorkflowTemplate is the Schema for the WorkflowTemplates API. A Workflow Template is a reusable workflow configuration. +kubebuilder:printcolumn:name="SYNCED",type="string",JSONPath=".status.conditions[?(@.type=='Synced')].status" +kubebuilder:printcolumn:name="READY",type="string",JSONPath=".status.conditions[?(@.type=='Ready')].status" +kubebuilder:printcolumn:name="EXTERNAL-NAME",type="string",JSONPath=".metadata.annotations.crossplane\\.io/external-name" +kubebuilder:printcolumn:name="AGE",type="date",JSONPath=".metadata.creationTimestamp" +kubebuilder:resource:scope=Cluster,categories={crossplane,managed,gcp}
func (*WorkflowTemplate) DeepCopy ¶
func (in *WorkflowTemplate) DeepCopy() *WorkflowTemplate
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplate.
func (*WorkflowTemplate) DeepCopyInto ¶
func (in *WorkflowTemplate) DeepCopyInto(out *WorkflowTemplate)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*WorkflowTemplate) DeepCopyObject ¶
func (in *WorkflowTemplate) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*WorkflowTemplate) GetCondition ¶
func (mg *WorkflowTemplate) GetCondition(ct xpv1.ConditionType) xpv1.Condition
GetCondition of this WorkflowTemplate.
func (*WorkflowTemplate) GetConnectionDetailsMapping ¶
func (tr *WorkflowTemplate) GetConnectionDetailsMapping() map[string]string
GetConnectionDetailsMapping for this WorkflowTemplate
func (*WorkflowTemplate) GetDeletionPolicy ¶
func (mg *WorkflowTemplate) GetDeletionPolicy() xpv1.DeletionPolicy
GetDeletionPolicy of this WorkflowTemplate.
func (*WorkflowTemplate) GetID ¶
func (tr *WorkflowTemplate) GetID() string
GetID returns ID of underlying Terraform resource of this WorkflowTemplate
func (*WorkflowTemplate) GetInitParameters ¶
func (tr *WorkflowTemplate) GetInitParameters() (map[string]any, error)
GetInitParameters of this WorkflowTemplate
func (*WorkflowTemplate) GetManagementPolicies ¶
func (mg *WorkflowTemplate) GetManagementPolicies() xpv1.ManagementPolicies
GetManagementPolicies of this WorkflowTemplate.
func (*WorkflowTemplate) GetMergedParameters ¶
func (tr *WorkflowTemplate) GetMergedParameters(shouldMergeInitProvider bool) (map[string]any, error)
GetMergedParameters of this WorkflowTemplate
func (*WorkflowTemplate) GetObservation ¶
func (tr *WorkflowTemplate) GetObservation() (map[string]any, error)
GetObservation of this WorkflowTemplate
func (*WorkflowTemplate) GetParameters ¶
func (tr *WorkflowTemplate) GetParameters() (map[string]any, error)
GetParameters of this WorkflowTemplate
func (*WorkflowTemplate) GetProviderConfigReference ¶
func (mg *WorkflowTemplate) GetProviderConfigReference() *xpv1.Reference
GetProviderConfigReference of this WorkflowTemplate.
func (*WorkflowTemplate) GetPublishConnectionDetailsTo ¶
func (mg *WorkflowTemplate) GetPublishConnectionDetailsTo() *xpv1.PublishConnectionDetailsTo
GetPublishConnectionDetailsTo of this WorkflowTemplate.
func (*WorkflowTemplate) GetTerraformResourceType ¶
func (mg *WorkflowTemplate) GetTerraformResourceType() string
GetTerraformResourceType returns Terraform resource type for this WorkflowTemplate
func (*WorkflowTemplate) GetTerraformSchemaVersion ¶
func (tr *WorkflowTemplate) GetTerraformSchemaVersion() int
GetTerraformSchemaVersion returns the associated Terraform schema version
func (*WorkflowTemplate) GetWriteConnectionSecretToReference ¶
func (mg *WorkflowTemplate) GetWriteConnectionSecretToReference() *xpv1.SecretReference
GetWriteConnectionSecretToReference of this WorkflowTemplate.
func (*WorkflowTemplate) Hub ¶
func (tr *WorkflowTemplate) Hub()
Hub marks this type as a conversion hub.
func (*WorkflowTemplate) LateInitialize ¶
func (tr *WorkflowTemplate) LateInitialize(attrs []byte) (bool, error)
LateInitialize this WorkflowTemplate using its observed tfState. returns True if there are any spec changes for the resource.
func (*WorkflowTemplate) SetConditions ¶
func (mg *WorkflowTemplate) SetConditions(c ...xpv1.Condition)
SetConditions of this WorkflowTemplate.
func (*WorkflowTemplate) SetDeletionPolicy ¶
func (mg *WorkflowTemplate) SetDeletionPolicy(r xpv1.DeletionPolicy)
SetDeletionPolicy of this WorkflowTemplate.
func (*WorkflowTemplate) SetManagementPolicies ¶
func (mg *WorkflowTemplate) SetManagementPolicies(r xpv1.ManagementPolicies)
SetManagementPolicies of this WorkflowTemplate.
func (*WorkflowTemplate) SetObservation ¶
func (tr *WorkflowTemplate) SetObservation(obs map[string]any) error
SetObservation for this WorkflowTemplate
func (*WorkflowTemplate) SetParameters ¶
func (tr *WorkflowTemplate) SetParameters(params map[string]any) error
SetParameters for this WorkflowTemplate
func (*WorkflowTemplate) SetProviderConfigReference ¶
func (mg *WorkflowTemplate) SetProviderConfigReference(r *xpv1.Reference)
SetProviderConfigReference of this WorkflowTemplate.
func (*WorkflowTemplate) SetPublishConnectionDetailsTo ¶
func (mg *WorkflowTemplate) SetPublishConnectionDetailsTo(r *xpv1.PublishConnectionDetailsTo)
SetPublishConnectionDetailsTo of this WorkflowTemplate.
func (*WorkflowTemplate) SetWriteConnectionSecretToReference ¶
func (mg *WorkflowTemplate) SetWriteConnectionSecretToReference(r *xpv1.SecretReference)
SetWriteConnectionSecretToReference of this WorkflowTemplate.
type WorkflowTemplateInitParameters ¶
type WorkflowTemplateInitParameters struct { // (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted. DagTimeout *string `json:"dagTimeout,omitempty" tf:"dag_timeout,omitempty"` // Required. The Directed Acyclic Graph of Jobs to submit. Jobs []JobsInitParameters `json:"jobs,omitempty" tf:"jobs,omitempty"` // The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: \p{Ll}\p{Lo}{0,62} Label values must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: \p{Ll}\p{Lo}\p{N}_-{0,63} No more than 32 labels can be associated with a given cluster. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated. Parameters []ParametersInitParameters `json:"parameters,omitempty" tf:"parameters,omitempty"` // Required. WorkflowTemplate scheduling information. Placement *WorkflowTemplatePlacementInitParameters `json:"placement,omitempty" tf:"placement,omitempty"` // The project for the resource Project *string `json:"project,omitempty" tf:"project,omitempty"` // Used to perform a consistent read-modify-write. This field should be left blank for a CreateWorkflowTemplate request. It is required for an UpdateWorkflowTemplate request, and must match the current server version. 
A typical update template flow would fetch the current template with a GetWorkflowTemplate request, which will return the current template with the version field filled in with the current server version. The user updates other fields in the template, then returns it as part of the UpdateWorkflowTemplate request. Version *float64 `json:"version,omitempty" tf:"version,omitempty"` }
func (*WorkflowTemplateInitParameters) DeepCopy ¶
func (in *WorkflowTemplateInitParameters) DeepCopy() *WorkflowTemplateInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateInitParameters.
func (*WorkflowTemplateInitParameters) DeepCopyInto ¶
func (in *WorkflowTemplateInitParameters) DeepCopyInto(out *WorkflowTemplateInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateList ¶
type WorkflowTemplateList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []WorkflowTemplate `json:"items"` }
WorkflowTemplateList contains a list of WorkflowTemplates
func (*WorkflowTemplateList) DeepCopy ¶
func (in *WorkflowTemplateList) DeepCopy() *WorkflowTemplateList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateList.
func (*WorkflowTemplateList) DeepCopyInto ¶
func (in *WorkflowTemplateList) DeepCopyInto(out *WorkflowTemplateList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*WorkflowTemplateList) DeepCopyObject ¶
func (in *WorkflowTemplateList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*WorkflowTemplateList) GetItems ¶
func (l *WorkflowTemplateList) GetItems() []resource.Managed
GetItems of this WorkflowTemplateList.
type WorkflowTemplateObservation ¶
type WorkflowTemplateObservation struct { // Output only. The time template was created. CreateTime *string `json:"createTime,omitempty" tf:"create_time,omitempty"` // (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted. DagTimeout *string `json:"dagTimeout,omitempty" tf:"dag_timeout,omitempty"` // +mapType=granular EffectiveLabels map[string]*string `json:"effectiveLabels,omitempty" tf:"effective_labels,omitempty"` // an identifier for the resource with format projects/{{project}}/locations/{{location}}/workflowTemplates/{{name}} ID *string `json:"id,omitempty" tf:"id,omitempty"` // Required. The Directed Acyclic Graph of Jobs to submit. Jobs []JobsObservation `json:"jobs,omitempty" tf:"jobs,omitempty"` // The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster. // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The location for the resource Location *string `json:"location,omitempty" tf:"location,omitempty"` // Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated. Parameters []ParametersObservation `json:"parameters,omitempty" tf:"parameters,omitempty"` // Required. WorkflowTemplate scheduling information. 
Placement *WorkflowTemplatePlacementObservation `json:"placement,omitempty" tf:"placement,omitempty"` // The project for the resource Project *string `json:"project,omitempty" tf:"project,omitempty"` // +mapType=granular TerraformLabels map[string]*string `json:"terraformLabels,omitempty" tf:"terraform_labels,omitempty"` // Output only. The time template was last updated. UpdateTime *string `json:"updateTime,omitempty" tf:"update_time,omitempty"` // Used to perform a consistent read-modify-write. This field should be left blank for a CreateWorkflowTemplate request. It is required for an UpdateWorkflowTemplate request, and must match the current server version. A typical update template flow would fetch the current template with a GetWorkflowTemplate request, which will return the current template with the version field filled in with the current server version. The user updates other fields in the template, then returns it as part of the UpdateWorkflowTemplate request. Version *float64 `json:"version,omitempty" tf:"version,omitempty"` }
func (*WorkflowTemplateObservation) DeepCopy ¶
func (in *WorkflowTemplateObservation) DeepCopy() *WorkflowTemplateObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateObservation.
func (*WorkflowTemplateObservation) DeepCopyInto ¶
func (in *WorkflowTemplateObservation) DeepCopyInto(out *WorkflowTemplateObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateParameters ¶
type WorkflowTemplateParameters struct { // (Beta only) Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h", and "d" suffixes for second, minute, hour, and day duration values, respectively. The timeout duration must be from 10 minutes ("10m") to 24 hours ("24h" or "1d"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a (/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted. // +kubebuilder:validation:Optional DagTimeout *string `json:"dagTimeout,omitempty" tf:"dag_timeout,omitempty"` // Required. The Directed Acyclic Graph of Jobs to submit. // +kubebuilder:validation:Optional Jobs []JobsParameters `json:"jobs,omitempty" tf:"jobs,omitempty"` // The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: {0,63} No more than 32 labels can be associated with a given cluster. // +kubebuilder:validation:Optional // +mapType=granular Labels map[string]*string `json:"labels,omitempty" tf:"labels,omitempty"` // The location for the resource // +kubebuilder:validation:Required Location *string `json:"location" tf:"location,omitempty"` // Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated. // +kubebuilder:validation:Optional Parameters []ParametersParameters `json:"parameters,omitempty" tf:"parameters,omitempty"` // Required. WorkflowTemplate scheduling information. 
// +kubebuilder:validation:Optional Placement *WorkflowTemplatePlacementParameters `json:"placement,omitempty" tf:"placement,omitempty"` // The project for the resource // +kubebuilder:validation:Optional Project *string `json:"project,omitempty" tf:"project,omitempty"` // Used to perform a consistent read-modify-write. This field should be left blank for a CreateWorkflowTemplate request. It is required for an UpdateWorkflowTemplate request, and must match the current server version. A typical update template flow would fetch the current template with a GetWorkflowTemplate request, which will return the current template with the version field filled in with the current server version. The user updates other fields in the template, then returns it as part of the UpdateWorkflowTemplate request. // +kubebuilder:validation:Optional Version *float64 `json:"version,omitempty" tf:"version,omitempty"` }
func (*WorkflowTemplateParameters) DeepCopy ¶
func (in *WorkflowTemplateParameters) DeepCopy() *WorkflowTemplateParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateParameters.
func (*WorkflowTemplateParameters) DeepCopyInto ¶
func (in *WorkflowTemplateParameters) DeepCopyInto(out *WorkflowTemplateParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplatePlacementInitParameters ¶
type WorkflowTemplatePlacementInitParameters struct { // A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted. ClusterSelector *ClusterSelectorInitParameters `json:"clusterSelector,omitempty" tf:"cluster_selector,omitempty"` // A cluster that is managed by the workflow. ManagedCluster *ManagedClusterInitParameters `json:"managedCluster,omitempty" tf:"managed_cluster,omitempty"` }
func (*WorkflowTemplatePlacementInitParameters) DeepCopy ¶
func (in *WorkflowTemplatePlacementInitParameters) DeepCopy() *WorkflowTemplatePlacementInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplatePlacementInitParameters.
func (*WorkflowTemplatePlacementInitParameters) DeepCopyInto ¶
func (in *WorkflowTemplatePlacementInitParameters) DeepCopyInto(out *WorkflowTemplatePlacementInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplatePlacementObservation ¶
type WorkflowTemplatePlacementObservation struct { // A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted. ClusterSelector *ClusterSelectorObservation `json:"clusterSelector,omitempty" tf:"cluster_selector,omitempty"` // A cluster that is managed by the workflow. ManagedCluster *ManagedClusterObservation `json:"managedCluster,omitempty" tf:"managed_cluster,omitempty"` }
func (*WorkflowTemplatePlacementObservation) DeepCopy ¶
func (in *WorkflowTemplatePlacementObservation) DeepCopy() *WorkflowTemplatePlacementObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplatePlacementObservation.
func (*WorkflowTemplatePlacementObservation) DeepCopyInto ¶
func (in *WorkflowTemplatePlacementObservation) DeepCopyInto(out *WorkflowTemplatePlacementObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplatePlacementParameters ¶
type WorkflowTemplatePlacementParameters struct { // A selector that chooses target cluster for jobs based on metadata. The selector is evaluated at the time each job is submitted. // +kubebuilder:validation:Optional ClusterSelector *ClusterSelectorParameters `json:"clusterSelector,omitempty" tf:"cluster_selector,omitempty"` // A cluster that is managed by the workflow. // +kubebuilder:validation:Optional ManagedCluster *ManagedClusterParameters `json:"managedCluster,omitempty" tf:"managed_cluster,omitempty"` }
func (*WorkflowTemplatePlacementParameters) DeepCopy ¶
func (in *WorkflowTemplatePlacementParameters) DeepCopy() *WorkflowTemplatePlacementParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplatePlacementParameters.
func (*WorkflowTemplatePlacementParameters) DeepCopyInto ¶
func (in *WorkflowTemplatePlacementParameters) DeepCopyInto(out *WorkflowTemplatePlacementParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateSpec ¶
type WorkflowTemplateSpec struct { v1.ResourceSpec `json:",inline"` ForProvider WorkflowTemplateParameters `json:"forProvider"` // THIS IS A BETA FIELD. It will be honored // unless the Management Policies feature flag is disabled. // InitProvider holds the same fields as ForProvider, with the exception // of Identifier and other resource reference fields. The fields that are // in InitProvider are merged into ForProvider when the resource is created. // The same fields are also added to the terraform ignore_changes hook, to // avoid updating them after creation. This is useful for fields that are // required on creation, but we do not desire to update them after creation, // for example because an external controller is managing them, like an // autoscaler. InitProvider WorkflowTemplateInitParameters `json:"initProvider,omitempty"` }
WorkflowTemplateSpec defines the desired state of WorkflowTemplate
func (*WorkflowTemplateSpec) DeepCopy ¶
func (in *WorkflowTemplateSpec) DeepCopy() *WorkflowTemplateSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateSpec.
func (*WorkflowTemplateSpec) DeepCopyInto ¶
func (in *WorkflowTemplateSpec) DeepCopyInto(out *WorkflowTemplateSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type WorkflowTemplateStatus ¶
type WorkflowTemplateStatus struct { v1.ResourceStatus `json:",inline"` AtProvider WorkflowTemplateObservation `json:"atProvider,omitempty"` }
WorkflowTemplateStatus defines the observed state of WorkflowTemplate.
func (*WorkflowTemplateStatus) DeepCopy ¶
func (in *WorkflowTemplateStatus) DeepCopy() *WorkflowTemplateStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowTemplateStatus.
func (*WorkflowTemplateStatus) DeepCopyInto ¶
func (in *WorkflowTemplateStatus) DeepCopyInto(out *WorkflowTemplateStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type YarnConfigInitParameters ¶
type YarnConfigInitParameters struct { // Timeout for YARN graceful decommissioning of Node Managers. Specifies the // duration to wait for jobs to complete before forcefully removing workers // (and potentially interrupting jobs). Only applicable to downscaling operations. // Bounds: [0s, 1d]. GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"` // Fraction of average pending memory in the last cooldown period for which to // remove workers. A scale-down factor of 1 will result in scaling down so that there // is no available memory remaining after the update (more aggressive scaling). // A scale-down factor of 0 disables removing workers, which can be beneficial for // autoscaling a single job. // Bounds: [0.0, 1.0]. ScaleDownFactor *float64 `json:"scaleDownFactor,omitempty" tf:"scale_down_factor,omitempty"` // Minimum scale-down threshold as a fraction of total cluster size before scaling occurs. // For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler must // recommend at least a 2 worker scale-down for the cluster to scale. A threshold of 0 // means the autoscaler will scale down on any recommended change. // Bounds: [0.0, 1.0]. Default: 0.0. ScaleDownMinWorkerFraction *float64 `json:"scaleDownMinWorkerFraction,omitempty" tf:"scale_down_min_worker_fraction,omitempty"` // Fraction of average pending memory in the last cooldown period for which to // add workers. A scale-up factor of 1.0 will result in scaling up so that there // is no pending memory remaining after the update (more aggressive scaling). // A scale-up factor closer to 0 will result in a smaller magnitude of scaling up // (less aggressive scaling). // Bounds: [0.0, 1.0]. ScaleUpFactor *float64 `json:"scaleUpFactor,omitempty" tf:"scale_up_factor,omitempty"` // Minimum scale-up threshold as a fraction of total cluster size before scaling // occurs. For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler // must recommend at least a 2-worker scale-up for the cluster to scale. A threshold of // 0 means the autoscaler will scale up on any recommended change. // Bounds: [0.0, 1.0]. Default: 0.0. ScaleUpMinWorkerFraction *float64 `json:"scaleUpMinWorkerFraction,omitempty" tf:"scale_up_min_worker_fraction,omitempty"` }
func (*YarnConfigInitParameters) DeepCopy ¶
func (in *YarnConfigInitParameters) DeepCopy() *YarnConfigInitParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new YarnConfigInitParameters.
func (*YarnConfigInitParameters) DeepCopyInto ¶
func (in *YarnConfigInitParameters) DeepCopyInto(out *YarnConfigInitParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type YarnConfigObservation ¶
type YarnConfigObservation struct { // Timeout for YARN graceful decommissioning of Node Managers. Specifies the // duration to wait for jobs to complete before forcefully removing workers // (and potentially interrupting jobs). Only applicable to downscaling operations. // Bounds: [0s, 1d]. GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout,omitempty" tf:"graceful_decommission_timeout,omitempty"` // Fraction of average pending memory in the last cooldown period for which to // remove workers. A scale-down factor of 1 will result in scaling down so that there // is no available memory remaining after the update (more aggressive scaling). // A scale-down factor of 0 disables removing workers, which can be beneficial for // autoscaling a single job. // Bounds: [0.0, 1.0]. ScaleDownFactor *float64 `json:"scaleDownFactor,omitempty" tf:"scale_down_factor,omitempty"` // Minimum scale-down threshold as a fraction of total cluster size before scaling occurs. // For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler must // recommend at least a 2 worker scale-down for the cluster to scale. A threshold of 0 // means the autoscaler will scale down on any recommended change. // Bounds: [0.0, 1.0]. Default: 0.0. ScaleDownMinWorkerFraction *float64 `json:"scaleDownMinWorkerFraction,omitempty" tf:"scale_down_min_worker_fraction,omitempty"` // Fraction of average pending memory in the last cooldown period for which to // add workers. A scale-up factor of 1.0 will result in scaling up so that there // is no pending memory remaining after the update (more aggressive scaling). // A scale-up factor closer to 0 will result in a smaller magnitude of scaling up // (less aggressive scaling). // Bounds: [0.0, 1.0]. ScaleUpFactor *float64 `json:"scaleUpFactor,omitempty" tf:"scale_up_factor,omitempty"` // Minimum scale-up threshold as a fraction of total cluster size before scaling // occurs. For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler // must recommend at least a 2-worker scale-up for the cluster to scale. A threshold of // 0 means the autoscaler will scale up on any recommended change. // Bounds: [0.0, 1.0]. Default: 0.0. ScaleUpMinWorkerFraction *float64 `json:"scaleUpMinWorkerFraction,omitempty" tf:"scale_up_min_worker_fraction,omitempty"` }
func (*YarnConfigObservation) DeepCopy ¶
func (in *YarnConfigObservation) DeepCopy() *YarnConfigObservation
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new YarnConfigObservation.
func (*YarnConfigObservation) DeepCopyInto ¶
func (in *YarnConfigObservation) DeepCopyInto(out *YarnConfigObservation)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type YarnConfigParameters ¶
type YarnConfigParameters struct { // Timeout for YARN graceful decommissioning of Node Managers. Specifies the // duration to wait for jobs to complete before forcefully removing workers // (and potentially interrupting jobs). Only applicable to downscaling operations. // Bounds: [0s, 1d]. // +kubebuilder:validation:Optional GracefulDecommissionTimeout *string `json:"gracefulDecommissionTimeout" tf:"graceful_decommission_timeout,omitempty"` // Fraction of average pending memory in the last cooldown period for which to // remove workers. A scale-down factor of 1 will result in scaling down so that there // is no available memory remaining after the update (more aggressive scaling). // A scale-down factor of 0 disables removing workers, which can be beneficial for // autoscaling a single job. // Bounds: [0.0, 1.0]. // +kubebuilder:validation:Optional ScaleDownFactor *float64 `json:"scaleDownFactor" tf:"scale_down_factor,omitempty"` // Minimum scale-down threshold as a fraction of total cluster size before scaling occurs. // For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler must // recommend at least a 2 worker scale-down for the cluster to scale. A threshold of 0 // means the autoscaler will scale down on any recommended change. // Bounds: [0.0, 1.0]. Default: 0.0. // +kubebuilder:validation:Optional ScaleDownMinWorkerFraction *float64 `json:"scaleDownMinWorkerFraction,omitempty" tf:"scale_down_min_worker_fraction,omitempty"` // Fraction of average pending memory in the last cooldown period for which to // add workers. A scale-up factor of 1.0 will result in scaling up so that there // is no pending memory remaining after the update (more aggressive scaling). // A scale-up factor closer to 0 will result in a smaller magnitude of scaling up // (less aggressive scaling). // Bounds: [0.0, 1.0]. // +kubebuilder:validation:Optional ScaleUpFactor *float64 `json:"scaleUpFactor" tf:"scale_up_factor,omitempty"` // Minimum scale-up threshold as a fraction of total cluster size before scaling // occurs. For example, in a 20-worker cluster, a threshold of 0.1 means the autoscaler // must recommend at least a 2-worker scale-up for the cluster to scale. A threshold of // 0 means the autoscaler will scale up on any recommended change. // Bounds: [0.0, 1.0]. Default: 0.0. // +kubebuilder:validation:Optional ScaleUpMinWorkerFraction *float64 `json:"scaleUpMinWorkerFraction,omitempty" tf:"scale_up_min_worker_fraction,omitempty"` }
func (*YarnConfigParameters) DeepCopy ¶
func (in *YarnConfigParameters) DeepCopy() *YarnConfigParameters
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new YarnConfigParameters.
func (*YarnConfigParameters) DeepCopyInto ¶
func (in *YarnConfigParameters) DeepCopyInto(out *YarnConfigParameters)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
Source Files ¶
- zz_autoscalingpolicy_terraformed.go
- zz_autoscalingpolicy_types.go
- zz_cluster_terraformed.go
- zz_cluster_types.go
- zz_generated.conversion_hubs.go
- zz_generated.deepcopy.go
- zz_generated.managed.go
- zz_generated.managedlist.go
- zz_generated.resolvers.go
- zz_groupversion_info.go
- zz_job_terraformed.go
- zz_job_types.go
- zz_metastoreservice_terraformed.go
- zz_metastoreservice_types.go
- zz_workflowtemplate_terraformed.go
- zz_workflowtemplate_types.go