Documentation ¶
Index ¶
- func DataprocWorkflowTemplate_IsConstruct(x interface{}) *bool
- func DataprocWorkflowTemplate_IsTerraformElement(x interface{}) *bool
- func DataprocWorkflowTemplate_IsTerraformResource(x interface{}) *bool
- func DataprocWorkflowTemplate_TfResourceType() *string
- func NewDataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsHadoopJobOutputReference_Override(d DataprocWorkflowTemplateJobsHadoopJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsHiveJobOutputReference_Override(d DataprocWorkflowTemplateJobsHiveJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsList_Override(d DataprocWorkflowTemplateJobsList, ...)
- func NewDataprocWorkflowTemplateJobsOutputReference_Override(d DataprocWorkflowTemplateJobsOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPigJobOutputReference_Override(d DataprocWorkflowTemplateJobsPigJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPrestoJobOutputReference_Override(d DataprocWorkflowTemplateJobsPrestoJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsPysparkJobOutputReference_Override(d DataprocWorkflowTemplateJobsPysparkJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSchedulingOutputReference_Override(d DataprocWorkflowTemplateJobsSchedulingOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkJobOutputReference_Override(d DataprocWorkflowTemplateJobsSparkJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkRJobOutputReference_Override(d DataprocWorkflowTemplateJobsSparkRJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkSqlJobOutputReference_Override(d DataprocWorkflowTemplateJobsSparkSqlJobOutputReference, ...)
- func NewDataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference, ...)
- func NewDataprocWorkflowTemplateParametersList_Override(d DataprocWorkflowTemplateParametersList, ...)
- func NewDataprocWorkflowTemplateParametersOutputReference_Override(d DataprocWorkflowTemplateParametersOutputReference, ...)
- func NewDataprocWorkflowTemplateParametersValidationOutputReference_Override(d DataprocWorkflowTemplateParametersValidationOutputReference, ...)
- func NewDataprocWorkflowTemplateParametersValidationRegexOutputReference_Override(d DataprocWorkflowTemplateParametersValidationRegexOutputReference, ...)
- func NewDataprocWorkflowTemplateParametersValidationValuesOutputReference_Override(d DataprocWorkflowTemplateParametersValidationValuesOutputReference, ...)
- func NewDataprocWorkflowTemplatePlacementClusterSelectorOutputReference_Override(d DataprocWorkflowTemplatePlacementClusterSelectorOutputReference, ...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference, ...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference_Override(...)
- func NewDataprocWorkflowTemplatePlacementManagedClusterOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterOutputReference, ...)
- func NewDataprocWorkflowTemplatePlacementOutputReference_Override(d DataprocWorkflowTemplatePlacementOutputReference, ...)
- func NewDataprocWorkflowTemplateTimeoutsOutputReference_Override(d DataprocWorkflowTemplateTimeoutsOutputReference, ...)
- func NewDataprocWorkflowTemplate_Override(d DataprocWorkflowTemplate, scope constructs.Construct, id *string, ...)
- type DataprocWorkflowTemplate
- type DataprocWorkflowTemplateConfig
- type DataprocWorkflowTemplateJobs
- type DataprocWorkflowTemplateJobsHadoopJob
- type DataprocWorkflowTemplateJobsHadoopJobLoggingConfig
- type DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsHadoopJobOutputReference
- type DataprocWorkflowTemplateJobsHiveJob
- type DataprocWorkflowTemplateJobsHiveJobOutputReference
- type DataprocWorkflowTemplateJobsHiveJobQueryListStruct
- type DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference
- type DataprocWorkflowTemplateJobsList
- type DataprocWorkflowTemplateJobsOutputReference
- type DataprocWorkflowTemplateJobsPigJob
- type DataprocWorkflowTemplateJobsPigJobLoggingConfig
- type DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsPigJobOutputReference
- type DataprocWorkflowTemplateJobsPigJobQueryListStruct
- type DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference
- type DataprocWorkflowTemplateJobsPrestoJob
- type DataprocWorkflowTemplateJobsPrestoJobLoggingConfig
- type DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsPrestoJobOutputReference
- type DataprocWorkflowTemplateJobsPrestoJobQueryListStruct
- type DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference
- type DataprocWorkflowTemplateJobsPysparkJob
- type DataprocWorkflowTemplateJobsPysparkJobLoggingConfig
- type DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsPysparkJobOutputReference
- type DataprocWorkflowTemplateJobsScheduling
- type DataprocWorkflowTemplateJobsSchedulingOutputReference
- type DataprocWorkflowTemplateJobsSparkJob
- type DataprocWorkflowTemplateJobsSparkJobLoggingConfig
- type DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsSparkJobOutputReference
- type DataprocWorkflowTemplateJobsSparkRJob
- type DataprocWorkflowTemplateJobsSparkRJobLoggingConfig
- type DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsSparkRJobOutputReference
- type DataprocWorkflowTemplateJobsSparkSqlJob
- type DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig
- type DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference
- type DataprocWorkflowTemplateJobsSparkSqlJobOutputReference
- type DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct
- type DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference
- type DataprocWorkflowTemplateParameters
- type DataprocWorkflowTemplateParametersList
- type DataprocWorkflowTemplateParametersOutputReference
- type DataprocWorkflowTemplateParametersValidation
- type DataprocWorkflowTemplateParametersValidationOutputReference
- type DataprocWorkflowTemplateParametersValidationRegex
- type DataprocWorkflowTemplateParametersValidationRegexOutputReference
- type DataprocWorkflowTemplateParametersValidationValues
- type DataprocWorkflowTemplateParametersValidationValuesOutputReference
- type DataprocWorkflowTemplatePlacement
- type DataprocWorkflowTemplatePlacementClusterSelector
- type DataprocWorkflowTemplatePlacementClusterSelectorOutputReference
- type DataprocWorkflowTemplatePlacementManagedCluster
- type DataprocWorkflowTemplatePlacementManagedClusterConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActions
- type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerators
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerators
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerators
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference
- type DataprocWorkflowTemplatePlacementManagedClusterOutputReference
- type DataprocWorkflowTemplatePlacementOutputReference
- type DataprocWorkflowTemplateTimeouts
- type DataprocWorkflowTemplateTimeoutsOutputReference
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func DataprocWorkflowTemplate_IsConstruct ¶
func DataprocWorkflowTemplate_IsConstruct(x interface{}) *bool
Checks if `x` is a construct.
Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.
Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof`, and to use this type-testing method instead.
Returns: true if `x` is an object created from a class which extends `Construct`.
func DataprocWorkflowTemplate_IsTerraformElement ¶
func DataprocWorkflowTemplate_IsTerraformElement(x interface{}) *bool
Experimental.
func DataprocWorkflowTemplate_IsTerraformResource ¶
func DataprocWorkflowTemplate_IsTerraformResource(x interface{}) *bool
Experimental.
func DataprocWorkflowTemplate_TfResourceType ¶
func DataprocWorkflowTemplate_TfResourceType() *string
func NewDataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsHadoopJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsHadoopJobOutputReference_Override(d DataprocWorkflowTemplateJobsHadoopJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsHiveJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsHiveJobOutputReference_Override(d DataprocWorkflowTemplateJobsHiveJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsList_Override ¶
func NewDataprocWorkflowTemplateJobsList_Override(d DataprocWorkflowTemplateJobsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplateJobsOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsOutputReference_Override(d DataprocWorkflowTemplateJobsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPigJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPigJobOutputReference_Override(d DataprocWorkflowTemplateJobsPigJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPrestoJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPrestoJobOutputReference_Override(d DataprocWorkflowTemplateJobsPrestoJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsPysparkJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsPysparkJobOutputReference_Override(d DataprocWorkflowTemplateJobsPysparkJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSchedulingOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSchedulingOutputReference_Override(d DataprocWorkflowTemplateJobsSchedulingOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkJobOutputReference_Override(d DataprocWorkflowTemplateJobsSparkJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkRJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkRJobOutputReference_Override(d DataprocWorkflowTemplateJobsSparkRJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference_Override(d DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkSqlJobOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkSqlJobOutputReference_Override(d DataprocWorkflowTemplateJobsSparkSqlJobOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference_Override ¶
func NewDataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference_Override(d DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateParametersList_Override ¶
func NewDataprocWorkflowTemplateParametersList_Override(d DataprocWorkflowTemplateParametersList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplateParametersOutputReference_Override ¶
func NewDataprocWorkflowTemplateParametersOutputReference_Override(d DataprocWorkflowTemplateParametersOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplateParametersValidationOutputReference_Override ¶
func NewDataprocWorkflowTemplateParametersValidationOutputReference_Override(d DataprocWorkflowTemplateParametersValidationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateParametersValidationRegexOutputReference_Override ¶
func NewDataprocWorkflowTemplateParametersValidationRegexOutputReference_Override(d DataprocWorkflowTemplateParametersValidationRegexOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateParametersValidationValuesOutputReference_Override ¶
func NewDataprocWorkflowTemplateParametersValidationValuesOutputReference_Override(d DataprocWorkflowTemplateParametersValidationValuesOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementClusterSelectorOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementClusterSelectorOutputReference_Override(d DataprocWorkflowTemplatePlacementClusterSelectorOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementManagedClusterOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterOutputReference_Override(d DataprocWorkflowTemplatePlacementManagedClusterOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplatePlacementOutputReference_Override ¶
func NewDataprocWorkflowTemplatePlacementOutputReference_Override(d DataprocWorkflowTemplatePlacementOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplateTimeoutsOutputReference_Override ¶
func NewDataprocWorkflowTemplateTimeoutsOutputReference_Override(d DataprocWorkflowTemplateTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocWorkflowTemplate_Override ¶
func NewDataprocWorkflowTemplate_Override(d DataprocWorkflowTemplate, scope constructs.Construct, id *string, config *DataprocWorkflowTemplateConfig)
Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template google_dataproc_workflow_template} Resource.
Types ¶
type DataprocWorkflowTemplate ¶
type DataprocWorkflowTemplate interface { cdktf.TerraformResource // Experimental. CdktfStack() cdktf.TerraformStack // Experimental. Connection() interface{} // Experimental. SetConnection(val interface{}) // Experimental. ConstructNodeMetadata() *map[string]interface{} // Experimental. Count() interface{} // Experimental. SetCount(val interface{}) CreateTime() *string DagTimeout() *string SetDagTimeout(val *string) DagTimeoutInput() *string // Experimental. DependsOn() *[]*string // Experimental. SetDependsOn(val *[]*string) // Experimental. ForEach() cdktf.ITerraformIterator // Experimental. SetForEach(val cdktf.ITerraformIterator) // Experimental. Fqn() *string // Experimental. FriendlyUniqueId() *string Id() *string SetId(val *string) IdInput() *string Jobs() DataprocWorkflowTemplateJobsList JobsInput() interface{} Labels() *map[string]*string SetLabels(val *map[string]*string) LabelsInput() *map[string]*string // Experimental. Lifecycle() *cdktf.TerraformResourceLifecycle // Experimental. SetLifecycle(val *cdktf.TerraformResourceLifecycle) Location() *string SetLocation(val *string) LocationInput() *string Name() *string SetName(val *string) NameInput() *string // The tree node. Node() constructs.Node Parameters() DataprocWorkflowTemplateParametersList ParametersInput() interface{} Placement() DataprocWorkflowTemplatePlacementOutputReference PlacementInput() *DataprocWorkflowTemplatePlacement Project() *string SetProject(val *string) ProjectInput() *string // Experimental. Provider() cdktf.TerraformProvider // Experimental. SetProvider(val cdktf.TerraformProvider) // Experimental. Provisioners() *[]interface{} // Experimental. SetProvisioners(val *[]interface{}) // Experimental. RawOverrides() interface{} // Experimental. TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata // Experimental. TerraformMetaArguments() *map[string]interface{} // Experimental. 
TerraformResourceType() *string Timeouts() DataprocWorkflowTemplateTimeoutsOutputReference TimeoutsInput() interface{} UpdateTime() *string Version() *float64 SetVersion(val *float64) VersionInput() *float64 // Experimental. AddOverride(path *string, value interface{}) // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable // Overrides the auto-generated logical ID with a specific ID. // Experimental. OverrideLogicalId(newLogicalId *string) PutJobs(value interface{}) PutParameters(value interface{}) PutPlacement(value *DataprocWorkflowTemplatePlacement) PutTimeouts(value *DataprocWorkflowTemplateTimeouts) ResetDagTimeout() ResetId() ResetLabels() // Resets a previously passed logical Id to use the auto-generated logical id again. // Experimental. ResetOverrideLogicalId() ResetParameters() ResetProject() ResetTimeouts() ResetVersion() SynthesizeAttributes() *map[string]interface{} // Experimental. ToMetadata() interface{} // Returns a string representation of this construct. ToString() *string // Adds this resource to the terraform JSON output. // Experimental. ToTerraform() interface{} }
Represents a {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template google_dataproc_workflow_template}.
func NewDataprocWorkflowTemplate ¶
func NewDataprocWorkflowTemplate(scope constructs.Construct, id *string, config *DataprocWorkflowTemplateConfig) DataprocWorkflowTemplate
Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template google_dataproc_workflow_template} Resource.
type DataprocWorkflowTemplateConfig ¶
type DataprocWorkflowTemplateConfig struct { // Experimental. Connection interface{} `field:"optional" json:"connection" yaml:"connection"` // Experimental. Count interface{} `field:"optional" json:"count" yaml:"count"` // Experimental. DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"` // Experimental. ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"` // Experimental. Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"` // Experimental. Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"` // Experimental. Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"` // jobs block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jobs DataprocWorkflowTemplate#jobs} Jobs interface{} `field:"required" json:"jobs" yaml:"jobs"` // The location for the resource. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#location DataprocWorkflowTemplate#location} Location *string `field:"required" json:"location" yaml:"location"` // Output only. // // The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names. 
* For `projects.regions.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` * For `projects.locations.workflowTemplates`, the resource name of the template has the following format: `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#name DataprocWorkflowTemplate#name} Name *string `field:"required" json:"name" yaml:"name"` // placement block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#placement DataprocWorkflowTemplate#placement} Placement *DataprocWorkflowTemplatePlacement `field:"required" json:"placement" yaml:"placement"` // Optional. // // Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a [managed cluster](/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#dag_timeout DataprocWorkflowTemplate#dag_timeout} DagTimeout *string `field:"optional" json:"dagTimeout" yaml:"dagTimeout"` // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#id DataprocWorkflowTemplate#id}. 
// // Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. // If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable. Id *string `field:"optional" json:"id" yaml:"id"` // Optional. // // The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a template. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#labels DataprocWorkflowTemplate#labels} Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"` // parameters block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#parameters DataprocWorkflowTemplate#parameters} Parameters interface{} `field:"optional" json:"parameters" yaml:"parameters"` // The project for the resource. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#project DataprocWorkflowTemplate#project} Project *string `field:"optional" json:"project" yaml:"project"` // timeouts block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#timeouts DataprocWorkflowTemplate#timeouts} Timeouts *DataprocWorkflowTemplateTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"` // Output only. The current version of this workflow template. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#version DataprocWorkflowTemplate#version} Version *float64 `field:"optional" json:"version" yaml:"version"` }
type DataprocWorkflowTemplateJobs ¶
type DataprocWorkflowTemplateJobs struct { // Required. // // The step id. The id must be unique among all jobs within the template. The step id is used as prefix for job id, as job `goog-dataproc-workflow-step-id` label, and in prerequisiteStepIds field from other steps. The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#step_id DataprocWorkflowTemplate#step_id} StepId *string `field:"required" json:"stepId" yaml:"stepId"` // hadoop_job block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#hadoop_job DataprocWorkflowTemplate#hadoop_job} HadoopJob *DataprocWorkflowTemplateJobsHadoopJob `field:"optional" json:"hadoopJob" yaml:"hadoopJob"` // hive_job block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#hive_job DataprocWorkflowTemplate#hive_job} HiveJob *DataprocWorkflowTemplateJobsHiveJob `field:"optional" json:"hiveJob" yaml:"hiveJob"` // Optional. // // The labels to associate with this job. Label keys must be between 1 and 63 characters long, and must conform to the following regular expression: p{Ll}p{Lo}{0,62} Label values must be between 1 and 63 characters long, and must conform to the following regular expression: [p{Ll}p{Lo}p{N}_-]{0,63} No more than 32 labels can be associated with a given job. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#labels DataprocWorkflowTemplate#labels} Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"` // pig_job block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#pig_job DataprocWorkflowTemplate#pig_job} PigJob *DataprocWorkflowTemplateJobsPigJob `field:"optional" json:"pigJob" yaml:"pigJob"` // Optional. // // The optional list of prerequisite job step_ids. If not specified, the job will start at the beginning of workflow. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#prerequisite_step_ids DataprocWorkflowTemplate#prerequisite_step_ids} PrerequisiteStepIds *[]*string `field:"optional" json:"prerequisiteStepIds" yaml:"prerequisiteStepIds"` // presto_job block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#presto_job DataprocWorkflowTemplate#presto_job} PrestoJob *DataprocWorkflowTemplateJobsPrestoJob `field:"optional" json:"prestoJob" yaml:"prestoJob"` // pyspark_job block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#pyspark_job DataprocWorkflowTemplate#pyspark_job} PysparkJob *DataprocWorkflowTemplateJobsPysparkJob `field:"optional" json:"pysparkJob" yaml:"pysparkJob"` // scheduling block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#scheduling DataprocWorkflowTemplate#scheduling} Scheduling *DataprocWorkflowTemplateJobsScheduling `field:"optional" json:"scheduling" yaml:"scheduling"` // spark_job block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#spark_job DataprocWorkflowTemplate#spark_job} SparkJob *DataprocWorkflowTemplateJobsSparkJob `field:"optional" json:"sparkJob" yaml:"sparkJob"` // spark_r_job block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#spark_r_job DataprocWorkflowTemplate#spark_r_job} SparkRJob *DataprocWorkflowTemplateJobsSparkRJob `field:"optional" json:"sparkRJob" yaml:"sparkRJob"` // spark_sql_job block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#spark_sql_job DataprocWorkflowTemplate#spark_sql_job} SparkSqlJob *DataprocWorkflowTemplateJobsSparkSqlJob `field:"optional" json:"sparkSqlJob" yaml:"sparkSqlJob"` }
type DataprocWorkflowTemplateJobsHadoopJob ¶
type DataprocWorkflowTemplateJobsHadoopJob struct { // Optional. // // HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#archive_uris DataprocWorkflowTemplate#archive_uris} ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"` // Optional. // // The arguments to pass to the driver. Do not include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#args DataprocWorkflowTemplate#args} Args *[]*string `field:"optional" json:"args" yaml:"args"` // Optional. // // HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#file_uris DataprocWorkflowTemplate#file_uris} FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"` // Optional. Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris} JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"` // logging_config block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config} LoggingConfig *DataprocWorkflowTemplateJobsHadoopJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // The name of the driver's main class. // // The jar file containing the class must be in the default CLASSPATH or specified in `jar_file_uris`. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#main_class DataprocWorkflowTemplate#main_class} MainClass *string `field:"optional" json:"mainClass" yaml:"mainClass"` // The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#main_jar_file_uri DataprocWorkflowTemplate#main_jar_file_uri} MainJarFileUri *string `field:"optional" json:"mainJarFileUri" yaml:"mainJarFileUri"` // Optional. // // A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` }
type DataprocWorkflowTemplateJobsHadoopJobLoggingConfig ¶
type DataprocWorkflowTemplateJobsHadoopJobLoggingConfig struct { // The per-package log levels for the driver. // // This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels} DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"` }
type DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference ¶
type DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsHadoopJobLoggingConfig SetInternalValue(val *DataprocWorkflowTemplateJobsHadoopJobLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDriverLogLevels() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference ¶
func NewDataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsHadoopJobOutputReference ¶
// DataprocWorkflowTemplateJobsHadoopJobOutputReference is the generated
// accessor for the jobs.hadoop_job block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsHadoopJobOutputReference interface {
	cdktf.ComplexObject
	ArchiveUris() *[]*string
	SetArchiveUris(val *[]*string)
	ArchiveUrisInput() *[]*string
	Args() *[]*string
	SetArgs(val *[]*string)
	ArgsInput() *[]*string
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	FileUris() *[]*string
	SetFileUris(val *[]*string)
	FileUrisInput() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateJobsHadoopJob
	SetInternalValue(val *DataprocWorkflowTemplateJobsHadoopJob)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference
	LoggingConfigInput() *DataprocWorkflowTemplateJobsHadoopJobLoggingConfig
	MainClass() *string
	SetMainClass(val *string)
	MainClassInput() *string
	MainJarFileUri() *string
	SetMainJarFileUri(val *string)
	MainJarFileUriInput() *string
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *DataprocWorkflowTemplateJobsHadoopJobLoggingConfig)
	ResetArchiveUris()
	ResetArgs()
	ResetFileUris()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetMainClass()
	ResetMainJarFileUri()
	ResetProperties()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsHadoopJobOutputReference ¶
func NewDataprocWorkflowTemplateJobsHadoopJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsHadoopJobOutputReference
type DataprocWorkflowTemplateJobsHiveJob ¶
// DataprocWorkflowTemplateJobsHiveJob configures the jobs.hive_job block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsHiveJob struct {
	// Optional.
	//
	// Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#continue_on_failure DataprocWorkflowTemplate#continue_on_failure}
	ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"`
	// Optional.
	//
	// HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// Optional.
	//
	// A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// The HCFS URI of the script that contains Hive queries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri}
	QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
	// query_list block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list}
	QueryList *DataprocWorkflowTemplateJobsHiveJobQueryListStruct `field:"optional" json:"queryList" yaml:"queryList"`
	// Optional. Mapping of query variable names to values (equivalent to the Hive command: `SET name="value";`).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#script_variables DataprocWorkflowTemplate#script_variables}
	ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"`
}
type DataprocWorkflowTemplateJobsHiveJobOutputReference ¶
// DataprocWorkflowTemplateJobsHiveJobOutputReference is the generated
// accessor for the jobs.hive_job block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsHiveJobOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	ContinueOnFailure() interface{}
	SetContinueOnFailure(val interface{})
	ContinueOnFailureInput() interface{}
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateJobsHiveJob
	SetInternalValue(val *DataprocWorkflowTemplateJobsHiveJob)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	QueryFileUri() *string
	SetQueryFileUri(val *string)
	QueryFileUriInput() *string
	QueryList() DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference
	QueryListInput() *DataprocWorkflowTemplateJobsHiveJobQueryListStruct
	ScriptVariables() *map[string]*string
	SetScriptVariables(val *map[string]*string)
	ScriptVariablesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutQueryList(value *DataprocWorkflowTemplateJobsHiveJobQueryListStruct)
	ResetContinueOnFailure()
	ResetJarFileUris()
	ResetProperties()
	ResetQueryFileUri()
	ResetQueryList()
	ResetScriptVariables()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsHiveJobOutputReference ¶
func NewDataprocWorkflowTemplateJobsHiveJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsHiveJobOutputReference
type DataprocWorkflowTemplateJobsHiveJobQueryListStruct ¶
type DataprocWorkflowTemplateJobsHiveJobQueryListStruct struct { // Required. // // The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } } // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries} Queries *[]*string `field:"required" json:"queries" yaml:"queries"` }
type DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference ¶
// DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference is the
// generated accessor for the hive_job.query_list block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateJobsHiveJobQueryListStruct
	SetInternalValue(val *DataprocWorkflowTemplateJobsHiveJobQueryListStruct)
	Queries() *[]*string
	SetQueries(val *[]*string)
	QueriesInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference ¶
func NewDataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference
type DataprocWorkflowTemplateJobsList ¶
type DataprocWorkflowTemplateJobsList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplateJobsOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsList ¶
func NewDataprocWorkflowTemplateJobsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplateJobsList
type DataprocWorkflowTemplateJobsOutputReference ¶
// DataprocWorkflowTemplateJobsOutputReference is the generated accessor for a
// single jobs block of a google_dataproc_workflow_template resource; exactly
// one of the job-type sub-blocks (hadoop_job, hive_job, pig_job, ...) is
// expected to be set per job step.
type DataprocWorkflowTemplateJobsOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	HadoopJob() DataprocWorkflowTemplateJobsHadoopJobOutputReference
	HadoopJobInput() *DataprocWorkflowTemplateJobsHadoopJob
	HiveJob() DataprocWorkflowTemplateJobsHiveJobOutputReference
	HiveJobInput() *DataprocWorkflowTemplateJobsHiveJob
	InternalValue() interface{}
	SetInternalValue(val interface{})
	Labels() *map[string]*string
	SetLabels(val *map[string]*string)
	LabelsInput() *map[string]*string
	PigJob() DataprocWorkflowTemplateJobsPigJobOutputReference
	PigJobInput() *DataprocWorkflowTemplateJobsPigJob
	PrerequisiteStepIds() *[]*string
	SetPrerequisiteStepIds(val *[]*string)
	PrerequisiteStepIdsInput() *[]*string
	PrestoJob() DataprocWorkflowTemplateJobsPrestoJobOutputReference
	PrestoJobInput() *DataprocWorkflowTemplateJobsPrestoJob
	PysparkJob() DataprocWorkflowTemplateJobsPysparkJobOutputReference
	PysparkJobInput() *DataprocWorkflowTemplateJobsPysparkJob
	Scheduling() DataprocWorkflowTemplateJobsSchedulingOutputReference
	SchedulingInput() *DataprocWorkflowTemplateJobsScheduling
	SparkJob() DataprocWorkflowTemplateJobsSparkJobOutputReference
	SparkJobInput() *DataprocWorkflowTemplateJobsSparkJob
	SparkRJob() DataprocWorkflowTemplateJobsSparkRJobOutputReference
	SparkRJobInput() *DataprocWorkflowTemplateJobsSparkRJob
	SparkSqlJob() DataprocWorkflowTemplateJobsSparkSqlJobOutputReference
	SparkSqlJobInput() *DataprocWorkflowTemplateJobsSparkSqlJob
	StepId() *string
	SetStepId(val *string)
	StepIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutHadoopJob(value *DataprocWorkflowTemplateJobsHadoopJob)
	PutHiveJob(value *DataprocWorkflowTemplateJobsHiveJob)
	PutPigJob(value *DataprocWorkflowTemplateJobsPigJob)
	PutPrestoJob(value *DataprocWorkflowTemplateJobsPrestoJob)
	PutPysparkJob(value *DataprocWorkflowTemplateJobsPysparkJob)
	PutScheduling(value *DataprocWorkflowTemplateJobsScheduling)
	PutSparkJob(value *DataprocWorkflowTemplateJobsSparkJob)
	PutSparkRJob(value *DataprocWorkflowTemplateJobsSparkRJob)
	PutSparkSqlJob(value *DataprocWorkflowTemplateJobsSparkSqlJob)
	ResetHadoopJob()
	ResetHiveJob()
	ResetLabels()
	ResetPigJob()
	ResetPrerequisiteStepIds()
	ResetPrestoJob()
	ResetPysparkJob()
	ResetScheduling()
	ResetSparkJob()
	ResetSparkRJob()
	ResetSparkSqlJob()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsOutputReference ¶
func NewDataprocWorkflowTemplateJobsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplateJobsOutputReference
type DataprocWorkflowTemplateJobsPigJob ¶
// DataprocWorkflowTemplateJobsPigJob configures the jobs.pig_job block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsPigJob struct {
	// Optional.
	//
	// Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#continue_on_failure DataprocWorkflowTemplate#continue_on_failure}
	ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"`
	// Optional.
	//
	// HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
	LoggingConfig *DataprocWorkflowTemplateJobsPigJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// Optional.
	//
	// A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// The HCFS URI of the script that contains the Pig queries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri}
	QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
	// query_list block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list}
	QueryList *DataprocWorkflowTemplateJobsPigJobQueryListStruct `field:"optional" json:"queryList" yaml:"queryList"`
	// Optional. Mapping of query variable names to values (equivalent to the Pig command: `name=[value]`).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#script_variables DataprocWorkflowTemplate#script_variables}
	ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"`
}
type DataprocWorkflowTemplateJobsPigJobLoggingConfig ¶
type DataprocWorkflowTemplateJobsPigJobLoggingConfig struct { // The per-package log levels for the driver. // // This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels} DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"` }
type DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference ¶
// DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference is the
// generated accessor for the pig_job.logging_config block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateJobsPigJobLoggingConfig
	SetInternalValue(val *DataprocWorkflowTemplateJobsPigJobLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetDriverLogLevels()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference ¶
func NewDataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsPigJobOutputReference ¶
// DataprocWorkflowTemplateJobsPigJobOutputReference is the generated accessor
// for the jobs.pig_job block of a google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsPigJobOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	ContinueOnFailure() interface{}
	SetContinueOnFailure(val interface{})
	ContinueOnFailureInput() interface{}
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateJobsPigJob
	SetInternalValue(val *DataprocWorkflowTemplateJobsPigJob)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference
	LoggingConfigInput() *DataprocWorkflowTemplateJobsPigJobLoggingConfig
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	QueryFileUri() *string
	SetQueryFileUri(val *string)
	QueryFileUriInput() *string
	QueryList() DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference
	QueryListInput() *DataprocWorkflowTemplateJobsPigJobQueryListStruct
	ScriptVariables() *map[string]*string
	SetScriptVariables(val *map[string]*string)
	ScriptVariablesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *DataprocWorkflowTemplateJobsPigJobLoggingConfig)
	PutQueryList(value *DataprocWorkflowTemplateJobsPigJobQueryListStruct)
	ResetContinueOnFailure()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetProperties()
	ResetQueryFileUri()
	ResetQueryList()
	ResetScriptVariables()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPigJobOutputReference ¶
func NewDataprocWorkflowTemplateJobsPigJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPigJobOutputReference
type DataprocWorkflowTemplateJobsPigJobQueryListStruct ¶
type DataprocWorkflowTemplateJobsPigJobQueryListStruct struct { // Required. // // The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } } // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries} Queries *[]*string `field:"required" json:"queries" yaml:"queries"` }
type DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference ¶
// DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference is the
// generated accessor for the pig_job.query_list block of a
// google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for
	// accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateJobsPigJobQueryListStruct
	SetInternalValue(val *DataprocWorkflowTemplateJobsPigJobQueryListStruct)
	Queries() *[]*string
	SetQueries(val *[]*string)
	QueriesInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference ¶
// NewDataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference constructs
// an output reference for the pig_job.query_list block, scoped to the given
// parent resource and Terraform attribute path.
func NewDataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference
type DataprocWorkflowTemplateJobsPrestoJob ¶
// DataprocWorkflowTemplateJobsPrestoJob configures the presto_job block of a
// Dataproc workflow template job. All fields are optional; generated binding.
type DataprocWorkflowTemplateJobsPrestoJob struct {
	// Optional. Presto client tags to attach to this query.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#client_tags DataprocWorkflowTemplate#client_tags}
	ClientTags *[]*string `field:"optional" json:"clientTags" yaml:"clientTags"`
	// Optional.
	//
	// Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#continue_on_failure DataprocWorkflowTemplate#continue_on_failure}
	ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
	LoggingConfig *DataprocWorkflowTemplateJobsPrestoJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// Optional. The format in which query output will be displayed. See the Presto documentation for supported output formats.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#output_format DataprocWorkflowTemplate#output_format}
	OutputFormat *string `field:"optional" json:"outputFormat" yaml:"outputFormat"`
	// Optional.
	//
	// A mapping of property names to values. Used to set Presto [session properties](https://prestodb.io/docs/current/sql/set-session.html). Equivalent to using the --session flag in the Presto CLI.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// The HCFS URI of the script that contains SQL queries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri}
	QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
	// query_list block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list}
	QueryList *DataprocWorkflowTemplateJobsPrestoJobQueryListStruct `field:"optional" json:"queryList" yaml:"queryList"`
}
type DataprocWorkflowTemplateJobsPrestoJobLoggingConfig ¶
// DataprocWorkflowTemplateJobsPrestoJobLoggingConfig configures the
// presto_job.logging_config block. Generated binding.
type DataprocWorkflowTemplateJobsPrestoJobLoggingConfig struct {
	// The per-package log levels for the driver.
	//
	// This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference ¶
// DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference is the
// CDKTF complex-object reference for the presto_job.logging_config block.
// Generated binding; do not edit by hand.
type DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// DriverLogLevels mirrors the optional `driver_log_levels` attribute.
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	// InternalValue returns (and SetInternalValue replaces) the plain struct backing this reference.
	InternalValue() *DataprocWorkflowTemplateJobsPrestoJobLoggingConfig
	SetInternalValue(val *DataprocWorkflowTemplateJobsPrestoJobLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// ResetDriverLogLevels clears the optional attribute back to its unset state.
	ResetDriverLogLevels()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference ¶
// NewDataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference constructs
// an output reference for the presto_job.logging_config block, scoped to the
// given parent resource and Terraform attribute path.
func NewDataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsPrestoJobOutputReference ¶
// DataprocWorkflowTemplateJobsPrestoJobOutputReference is the CDKTF
// complex-object reference for the presto_job block of a Dataproc workflow
// template job. Generated binding; do not edit by hand.
type DataprocWorkflowTemplateJobsPrestoJobOutputReference interface {
	cdktf.ComplexObject
	// ClientTags mirrors the optional `client_tags` attribute.
	ClientTags() *[]*string
	SetClientTags(val *[]*string)
	ClientTagsInput() *[]*string
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// ContinueOnFailure mirrors the optional `continue_on_failure` attribute (bool or IResolvable).
	ContinueOnFailure() interface{}
	SetContinueOnFailure(val interface{})
	ContinueOnFailureInput() interface{}
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// InternalValue returns (and SetInternalValue replaces) the plain struct backing this reference.
	InternalValue() *DataprocWorkflowTemplateJobsPrestoJob
	SetInternalValue(val *DataprocWorkflowTemplateJobsPrestoJob)
	// LoggingConfig exposes the nested logging_config block; LoggingConfigInput returns its raw struct.
	LoggingConfig() DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference
	LoggingConfigInput() *DataprocWorkflowTemplateJobsPrestoJobLoggingConfig
	// OutputFormat mirrors the optional `output_format` attribute.
	OutputFormat() *string
	SetOutputFormat(val *string)
	OutputFormatInput() *string
	// Properties mirrors the optional `properties` attribute.
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	// QueryFileUri mirrors the optional `query_file_uri` attribute.
	QueryFileUri() *string
	SetQueryFileUri(val *string)
	QueryFileUriInput() *string
	// QueryList exposes the nested query_list block; QueryListInput returns its raw struct.
	QueryList() DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference
	QueryListInput() *DataprocWorkflowTemplateJobsPrestoJobQueryListStruct
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// PutLoggingConfig/PutQueryList replace the corresponding nested blocks.
	PutLoggingConfig(value *DataprocWorkflowTemplateJobsPrestoJobLoggingConfig)
	PutQueryList(value *DataprocWorkflowTemplateJobsPrestoJobQueryListStruct)
	// Reset* clear the corresponding optional attribute back to its unset state.
	ResetClientTags()
	ResetContinueOnFailure()
	ResetLoggingConfig()
	ResetOutputFormat()
	ResetProperties()
	ResetQueryFileUri()
	ResetQueryList()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPrestoJobOutputReference ¶
// NewDataprocWorkflowTemplateJobsPrestoJobOutputReference constructs an output
// reference for the presto_job block, scoped to the given parent resource and
// Terraform attribute path.
func NewDataprocWorkflowTemplateJobsPrestoJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPrestoJobOutputReference
type DataprocWorkflowTemplateJobsPrestoJobQueryListStruct ¶
// DataprocWorkflowTemplateJobsPrestoJobQueryListStruct configures the
// presto_job.query_list block. Generated binding.
type DataprocWorkflowTemplateJobsPrestoJobQueryListStruct struct {
	// Required.
	//
	// The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } }
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries}
	Queries *[]*string `field:"required" json:"queries" yaml:"queries"`
}
type DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference ¶
// DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference is the
// CDKTF complex-object reference for the presto_job.query_list block.
// Generated binding; do not edit by hand.
type DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// InternalValue returns (and SetInternalValue replaces) the plain struct backing this reference.
	InternalValue() *DataprocWorkflowTemplateJobsPrestoJobQueryListStruct
	SetInternalValue(val *DataprocWorkflowTemplateJobsPrestoJobQueryListStruct)
	// Queries mirrors the required `queries` attribute of the query_list block.
	Queries() *[]*string
	SetQueries(val *[]*string)
	QueriesInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference ¶
// NewDataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference constructs
// an output reference for the presto_job.query_list block, scoped to the given
// parent resource and Terraform attribute path.
func NewDataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference
type DataprocWorkflowTemplateJobsPysparkJob ¶
// DataprocWorkflowTemplateJobsPysparkJob configures the pyspark_job block of a
// Dataproc workflow template job. MainPythonFileUri is required; all other
// fields are optional. Generated binding.
type DataprocWorkflowTemplateJobsPysparkJob struct {
	// Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#main_python_file_uri DataprocWorkflowTemplate#main_python_file_uri}
	MainPythonFileUri *string `field:"required" json:"mainPythonFileUri" yaml:"mainPythonFileUri"`
	// Optional.
	//
	// HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#archive_uris DataprocWorkflowTemplate#archive_uris}
	ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"`
	// Optional.
	//
	// The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#args DataprocWorkflowTemplate#args}
	Args *[]*string `field:"optional" json:"args" yaml:"args"`
	// Optional.
	//
	// HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#file_uris DataprocWorkflowTemplate#file_uris}
	FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
	LoggingConfig *DataprocWorkflowTemplateJobsPysparkJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// Optional.
	//
	// A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// Optional.
	//
	// HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#python_file_uris DataprocWorkflowTemplate#python_file_uris}
	PythonFileUris *[]*string `field:"optional" json:"pythonFileUris" yaml:"pythonFileUris"`
}
type DataprocWorkflowTemplateJobsPysparkJobLoggingConfig ¶
// DataprocWorkflowTemplateJobsPysparkJobLoggingConfig configures the
// pyspark_job.logging_config block. Generated binding.
type DataprocWorkflowTemplateJobsPysparkJobLoggingConfig struct {
	// The per-package log levels for the driver.
	//
	// This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference ¶
// DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference is the
// CDKTF complex-object reference for the pyspark_job.logging_config block.
// Generated binding; do not edit by hand.
type DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// DriverLogLevels mirrors the optional `driver_log_levels` attribute.
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	// InternalValue returns (and SetInternalValue replaces) the plain struct backing this reference.
	InternalValue() *DataprocWorkflowTemplateJobsPysparkJobLoggingConfig
	SetInternalValue(val *DataprocWorkflowTemplateJobsPysparkJobLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// ResetDriverLogLevels clears the optional attribute back to its unset state.
	ResetDriverLogLevels()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference ¶
// NewDataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference constructs
// an output reference for the pyspark_job.logging_config block, scoped to the
// given parent resource and Terraform attribute path.
func NewDataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsPysparkJobOutputReference ¶
// DataprocWorkflowTemplateJobsPysparkJobOutputReference is the CDKTF
// complex-object reference for the pyspark_job block of a Dataproc workflow
// template job. Generated binding; do not edit by hand.
type DataprocWorkflowTemplateJobsPysparkJobOutputReference interface {
	cdktf.ComplexObject
	// ArchiveUris mirrors the optional `archive_uris` attribute.
	ArchiveUris() *[]*string
	SetArchiveUris(val *[]*string)
	ArchiveUrisInput() *[]*string
	// Args mirrors the optional `args` attribute.
	Args() *[]*string
	SetArgs(val *[]*string)
	ArgsInput() *[]*string
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// FileUris mirrors the optional `file_uris` attribute.
	FileUris() *[]*string
	SetFileUris(val *[]*string)
	FileUrisInput() *[]*string
	// Experimental.
	Fqn() *string
	// InternalValue returns (and SetInternalValue replaces) the plain struct backing this reference.
	InternalValue() *DataprocWorkflowTemplateJobsPysparkJob
	SetInternalValue(val *DataprocWorkflowTemplateJobsPysparkJob)
	// JarFileUris mirrors the optional `jar_file_uris` attribute.
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	// LoggingConfig exposes the nested logging_config block; LoggingConfigInput returns its raw struct.
	LoggingConfig() DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference
	LoggingConfigInput() *DataprocWorkflowTemplateJobsPysparkJobLoggingConfig
	// MainPythonFileUri mirrors the required `main_python_file_uri` attribute.
	MainPythonFileUri() *string
	SetMainPythonFileUri(val *string)
	MainPythonFileUriInput() *string
	// Properties mirrors the optional `properties` attribute.
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	// PythonFileUris mirrors the optional `python_file_uris` attribute.
	PythonFileUris() *[]*string
	SetPythonFileUris(val *[]*string)
	PythonFileUrisInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// PutLoggingConfig replaces the nested logging_config block.
	PutLoggingConfig(value *DataprocWorkflowTemplateJobsPysparkJobLoggingConfig)
	// Reset* clear the corresponding optional attribute back to its unset state.
	ResetArchiveUris()
	ResetArgs()
	ResetFileUris()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetProperties()
	ResetPythonFileUris()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsPysparkJobOutputReference ¶
// NewDataprocWorkflowTemplateJobsPysparkJobOutputReference constructs an output
// reference for the pyspark_job block, scoped to the given parent resource and
// Terraform attribute path.
func NewDataprocWorkflowTemplateJobsPysparkJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsPysparkJobOutputReference
type DataprocWorkflowTemplateJobsScheduling ¶
// DataprocWorkflowTemplateJobsScheduling configures the scheduling block of a
// Dataproc workflow template job (driver restart limits). Generated binding.
type DataprocWorkflowTemplateJobsScheduling struct {
	// Optional.
	//
	// Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver exits with non-zero code 4 times within 10 minute window. Maximum value is 10.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#max_failures_per_hour DataprocWorkflowTemplate#max_failures_per_hour}
	MaxFailuresPerHour *float64 `field:"optional" json:"maxFailuresPerHour" yaml:"maxFailuresPerHour"`
	// Optional.
	//
	// Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed. Maximum value is 240.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#max_failures_total DataprocWorkflowTemplate#max_failures_total}
	MaxFailuresTotal *float64 `field:"optional" json:"maxFailuresTotal" yaml:"maxFailuresTotal"`
}
type DataprocWorkflowTemplateJobsSchedulingOutputReference ¶
// DataprocWorkflowTemplateJobsSchedulingOutputReference is the CDKTF
// complex-object reference for the scheduling block of a Dataproc workflow
// template job. Generated binding; do not edit by hand.
type DataprocWorkflowTemplateJobsSchedulingOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// InternalValue returns (and SetInternalValue replaces) the plain struct backing this reference.
	InternalValue() *DataprocWorkflowTemplateJobsScheduling
	SetInternalValue(val *DataprocWorkflowTemplateJobsScheduling)
	// MaxFailuresPerHour mirrors the optional `max_failures_per_hour` attribute.
	MaxFailuresPerHour() *float64
	SetMaxFailuresPerHour(val *float64)
	MaxFailuresPerHourInput() *float64
	// MaxFailuresTotal mirrors the optional `max_failures_total` attribute.
	MaxFailuresTotal() *float64
	SetMaxFailuresTotal(val *float64)
	MaxFailuresTotalInput() *float64
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Reset* clear the corresponding optional attribute back to its unset state.
	ResetMaxFailuresPerHour()
	ResetMaxFailuresTotal()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateJobsSchedulingOutputReference ¶
// NewDataprocWorkflowTemplateJobsSchedulingOutputReference constructs an output
// reference for the scheduling block, scoped to the given parent resource and
// Terraform attribute path.
func NewDataprocWorkflowTemplateJobsSchedulingOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSchedulingOutputReference
type DataprocWorkflowTemplateJobsSparkJob ¶
// DataprocWorkflowTemplateJobsSparkJob configures the spark_job block of a
// Dataproc workflow template job. All fields are optional; generated binding.
type DataprocWorkflowTemplateJobsSparkJob struct {
	// Optional.
	//
	// HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#archive_uris DataprocWorkflowTemplate#archive_uris}
	ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"`
	// Optional.
	//
	// The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#args DataprocWorkflowTemplate#args}
	Args *[]*string `field:"optional" json:"args" yaml:"args"`
	// Optional.
	//
	// HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#file_uris DataprocWorkflowTemplate#file_uris}
	FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
	LoggingConfig *DataprocWorkflowTemplateJobsSparkJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// The name of the driver's main class.
	//
	// The jar file that contains the class must be in the default CLASSPATH or specified in `jar_file_uris`.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#main_class DataprocWorkflowTemplate#main_class}
	MainClass *string `field:"optional" json:"mainClass" yaml:"mainClass"`
	// The HCFS URI of the jar file that contains the main class.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#main_jar_file_uri DataprocWorkflowTemplate#main_jar_file_uri}
	MainJarFileUri *string `field:"optional" json:"mainJarFileUri" yaml:"mainJarFileUri"`
	// Optional.
	//
	// A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
}
type DataprocWorkflowTemplateJobsSparkJobLoggingConfig ¶
// DataprocWorkflowTemplateJobsSparkJobLoggingConfig configures the
// spark_job.logging_config block. Generated binding.
type DataprocWorkflowTemplateJobsSparkJobLoggingConfig struct {
	// The per-package log levels for the driver.
	//
	// This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference ¶
type DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkJobLoggingConfig SetInternalValue(val *DataprocWorkflowTemplateJobsSparkJobLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDriverLogLevels() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsSparkJobOutputReference ¶
type DataprocWorkflowTemplateJobsSparkJobOutputReference interface { cdktf.ComplexObject ArchiveUris() *[]*string SetArchiveUris(val *[]*string) ArchiveUrisInput() *[]*string Args() *[]*string SetArgs(val *[]*string) ArgsInput() *[]*string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string FileUris() *[]*string SetFileUris(val *[]*string) FileUrisInput() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkJob SetInternalValue(val *DataprocWorkflowTemplateJobsSparkJob) JarFileUris() *[]*string SetJarFileUris(val *[]*string) JarFileUrisInput() *[]*string LoggingConfig() DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference LoggingConfigInput() *DataprocWorkflowTemplateJobsSparkJobLoggingConfig MainClass() *string SetMainClass(val *string) MainClassInput() *string MainJarFileUri() *string SetMainJarFileUri(val *string) MainJarFileUriInput() *string Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. 
GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutLoggingConfig(value *DataprocWorkflowTemplateJobsSparkJobLoggingConfig) ResetArchiveUris() ResetArgs() ResetFileUris() ResetJarFileUris() ResetLoggingConfig() ResetMainClass() ResetMainJarFileUri() ResetProperties() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkJobOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkJobOutputReference
type DataprocWorkflowTemplateJobsSparkRJob ¶
type DataprocWorkflowTemplateJobsSparkRJob struct { // Required. The HCFS URI of the main R file to use as the driver. Must be a .R file. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#main_r_file_uri DataprocWorkflowTemplate#main_r_file_uri} MainRFileUri *string `field:"required" json:"mainRFileUri" yaml:"mainRFileUri"` // Optional. // // HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#archive_uris DataprocWorkflowTemplate#archive_uris} ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"` // Optional. // // The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#args DataprocWorkflowTemplate#args} Args *[]*string `field:"optional" json:"args" yaml:"args"` // Optional. // // HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#file_uris DataprocWorkflowTemplate#file_uris} FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"` // logging_config block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config} LoggingConfig *DataprocWorkflowTemplateJobsSparkRJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // Optional. // // A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` }
type DataprocWorkflowTemplateJobsSparkRJobLoggingConfig ¶
type DataprocWorkflowTemplateJobsSparkRJobLoggingConfig struct { // The per-package log levels for the driver. // // This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels} DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"` }
type DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference ¶
type DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkRJobLoggingConfig SetInternalValue(val *DataprocWorkflowTemplateJobsSparkRJobLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDriverLogLevels() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsSparkRJobOutputReference ¶
type DataprocWorkflowTemplateJobsSparkRJobOutputReference interface { cdktf.ComplexObject ArchiveUris() *[]*string SetArchiveUris(val *[]*string) ArchiveUrisInput() *[]*string Args() *[]*string SetArgs(val *[]*string) ArgsInput() *[]*string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string FileUris() *[]*string SetFileUris(val *[]*string) FileUrisInput() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkRJob SetInternalValue(val *DataprocWorkflowTemplateJobsSparkRJob) LoggingConfig() DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference LoggingConfigInput() *DataprocWorkflowTemplateJobsSparkRJobLoggingConfig MainRFileUri() *string SetMainRFileUri(val *string) MainRFileUriInput() *string Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. 
GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutLoggingConfig(value *DataprocWorkflowTemplateJobsSparkRJobLoggingConfig) ResetArchiveUris() ResetArgs() ResetFileUris() ResetLoggingConfig() ResetProperties() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkRJobOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkRJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkRJobOutputReference
type DataprocWorkflowTemplateJobsSparkSqlJob ¶
type DataprocWorkflowTemplateJobsSparkSqlJob struct { // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris} JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"` // logging_config block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config} LoggingConfig *DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // Optional. // // A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` // The HCFS URI of the script that contains SQL queries. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri} QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"` // query_list block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list} QueryList *DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct `field:"optional" json:"queryList" yaml:"queryList"` // Optional. 
Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#script_variables DataprocWorkflowTemplate#script_variables} ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"` }
type DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig ¶
type DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig struct { // The per-package log levels for the driver. // // This may include "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels} DriverLogLevels *map[string]*string `field:"optional" json:"driverLogLevels" yaml:"driverLogLevels"` }
type DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference ¶
type DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig SetInternalValue(val *DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDriverLogLevels() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference
type DataprocWorkflowTemplateJobsSparkSqlJobOutputReference ¶
type DataprocWorkflowTemplateJobsSparkSqlJobOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkSqlJob SetInternalValue(val *DataprocWorkflowTemplateJobsSparkSqlJob) JarFileUris() *[]*string SetJarFileUris(val *[]*string) JarFileUrisInput() *[]*string LoggingConfig() DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference LoggingConfigInput() *DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string QueryFileUri() *string SetQueryFileUri(val *string) QueryFileUriInput() *string QueryList() DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference QueryListInput() *DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct ScriptVariables() *map[string]*string SetScriptVariables(val *map[string]*string) ScriptVariablesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. 
GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutLoggingConfig(value *DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig) PutQueryList(value *DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct) ResetJarFileUris() ResetLoggingConfig() ResetProperties() ResetQueryFileUri() ResetQueryList() ResetScriptVariables() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkSqlJobOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkSqlJobOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkSqlJobOutputReference
type DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct ¶
type DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct struct { // Required. // // The queries to execute. You do not need to end a query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": [ "query1", "query2", "query3;query4", ] } } // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries} Queries *[]*string `field:"required" json:"queries" yaml:"queries"` }
type DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference ¶
type DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct SetInternalValue(val *DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct) Queries() *[]*string SetQueries(val *[]*string) QueriesInput() *[]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference ¶
func NewDataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference
type DataprocWorkflowTemplateParameters ¶
type DataprocWorkflowTemplateParameters struct { // Required. // // Paths to all fields that the parameter replaces. A field is allowed to appear in at most one parameter's list of field paths. A field path is similar in syntax to a google.protobuf.FieldMask. For example, a field path that references the zone field of a workflow template's cluster selector would be specified as `placement.clusterSelector.zone`. Also, field paths can reference fields using the following syntax: * Values in maps can be referenced by key: * labels['key'] * placement.clusterSelector.clusterLabels['key'] * placement.managedCluster.labels['key'] * placement.clusterSelector.clusterLabels['key'] * jobs['step-id'].labels['key'] * Jobs in the jobs list can be referenced by step-id: * jobs['step-id'].hadoopJob.mainJarFileUri * jobs['step-id'].hiveJob.queryFileUri * jobs['step-id'].pySparkJob.mainPythonFileUri * jobs['step-id'].hadoopJob.jarFileUris[0] * jobs['step-id'].hadoopJob.archiveUris[0] * jobs['step-id'].hadoopJob.fileUris[0] * jobs['step-id'].pySparkJob.pythonFileUris[0] * Items in repeated fields can be referenced by a zero-based index: * jobs['step-id'].sparkJob.args[0] * Other examples: * jobs['step-id'].hadoopJob.properties['key'] * jobs['step-id'].hadoopJob.args[0] * jobs['step-id'].hiveJob.scriptVariables['key'] * jobs['step-id'].hadoopJob.mainJarFileUri * placement.clusterSelector.zone It may not be possible to parameterize maps and repeated fields in their entirety since only individual map values and individual items in repeated fields can be referenced. For example, the following field paths are invalid: - placement.clusterSelector.clusterLabels - jobs['step-id'].sparkJob.args // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#fields DataprocWorkflowTemplate#fields} Fields *[]*string `field:"required" json:"fields" yaml:"fields"` // Required. // // Parameter name. 
The parameter name is used as the key, and paired with the parameter value, which are passed to the template when the template is instantiated. The name must contain only capital letters (A-Z), numbers (0-9), and underscores (_), and must not start with a number. The maximum length is 40 characters. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#name DataprocWorkflowTemplate#name} Name *string `field:"required" json:"name" yaml:"name"` // Optional. Brief description of the parameter. Must not exceed 1024 characters. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#description DataprocWorkflowTemplate#description} Description *string `field:"optional" json:"description" yaml:"description"` // validation block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#validation DataprocWorkflowTemplate#validation} Validation *DataprocWorkflowTemplateParametersValidation `field:"optional" json:"validation" yaml:"validation"` }
type DataprocWorkflowTemplateParametersList ¶
type DataprocWorkflowTemplateParametersList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplateParametersOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateParametersList ¶
// NewDataprocWorkflowTemplateParametersList constructs a
// DataprocWorkflowTemplateParametersList bound to the given parent resource
// and attribute; wrapsSet indicates the underlying Terraform value is a set.
func NewDataprocWorkflowTemplateParametersList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplateParametersList
type DataprocWorkflowTemplateParametersOutputReference ¶
// DataprocWorkflowTemplateParametersOutputReference exposes a single
// `parameters` block (name, fields, optional description and validation)
// of a google_dataproc_workflow_template resource.
type DataprocWorkflowTemplateParametersOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Description() *string
	SetDescription(val *string)
	DescriptionInput() *string
	Fields() *[]*string
	SetFields(val *[]*string)
	FieldsInput() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	Name() *string
	SetName(val *string)
	NameInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	Validation() DataprocWorkflowTemplateParametersValidationOutputReference
	ValidationInput() *DataprocWorkflowTemplateParametersValidation
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutValidation(value *DataprocWorkflowTemplateParametersValidation)
	ResetDescription()
	ResetValidation()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateParametersOutputReference ¶
// NewDataprocWorkflowTemplateParametersOutputReference constructs an output
// reference for the `parameters` block at complexObjectIndex under the given
// parent resource and attribute.
func NewDataprocWorkflowTemplateParametersOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplateParametersOutputReference
type DataprocWorkflowTemplateParametersValidation ¶
// DataprocWorkflowTemplateParametersValidation holds the optional `regex`
// and `values` validation blocks for a template parameter.
type DataprocWorkflowTemplateParametersValidation struct {
	// regex block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#regex DataprocWorkflowTemplate#regex}
	Regex *DataprocWorkflowTemplateParametersValidationRegex `field:"optional" json:"regex" yaml:"regex"`
	// values block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#values DataprocWorkflowTemplate#values}
	Values *DataprocWorkflowTemplateParametersValidationValues `field:"optional" json:"values" yaml:"values"`
}
type DataprocWorkflowTemplateParametersValidationOutputReference ¶
// DataprocWorkflowTemplateParametersValidationOutputReference exposes the
// `validation` block of a template parameter, with nested regex/values
// sub-blocks settable via PutRegex/PutValues.
type DataprocWorkflowTemplateParametersValidationOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateParametersValidation
	SetInternalValue(val *DataprocWorkflowTemplateParametersValidation)
	Regex() DataprocWorkflowTemplateParametersValidationRegexOutputReference
	RegexInput() *DataprocWorkflowTemplateParametersValidationRegex
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	Values() DataprocWorkflowTemplateParametersValidationValuesOutputReference
	ValuesInput() *DataprocWorkflowTemplateParametersValidationValues
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutRegex(value *DataprocWorkflowTemplateParametersValidationRegex)
	PutValues(value *DataprocWorkflowTemplateParametersValidationValues)
	ResetRegex()
	ResetValues()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateParametersValidationOutputReference ¶
// NewDataprocWorkflowTemplateParametersValidationOutputReference constructs
// an output reference for the `validation` block under the given parent
// resource and attribute.
func NewDataprocWorkflowTemplateParametersValidationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateParametersValidationOutputReference
type DataprocWorkflowTemplateParametersValidationRegex ¶
// DataprocWorkflowTemplateParametersValidationRegex configures regex-based
// validation for a template parameter value.
type DataprocWorkflowTemplateParametersValidationRegex struct {
	// Required.
	//
	// RE2 regular expressions used to validate the parameter's value. The value must match the regex in its entirety (substring matches are not sufficient).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#regexes DataprocWorkflowTemplate#regexes}
	Regexes *[]*string `field:"required" json:"regexes" yaml:"regexes"`
}
type DataprocWorkflowTemplateParametersValidationRegexOutputReference ¶
// DataprocWorkflowTemplateParametersValidationRegexOutputReference exposes
// the `regex` validation block (its required Regexes list) of a template
// parameter.
type DataprocWorkflowTemplateParametersValidationRegexOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateParametersValidationRegex
	SetInternalValue(val *DataprocWorkflowTemplateParametersValidationRegex)
	Regexes() *[]*string
	SetRegexes(val *[]*string)
	RegexesInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateParametersValidationRegexOutputReference ¶
// NewDataprocWorkflowTemplateParametersValidationRegexOutputReference
// constructs an output reference for the `regex` validation block under the
// given parent resource and attribute.
func NewDataprocWorkflowTemplateParametersValidationRegexOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateParametersValidationRegexOutputReference
type DataprocWorkflowTemplateParametersValidationValues ¶
// DataprocWorkflowTemplateParametersValidationValues configures allowed-value
// validation for a template parameter.
type DataprocWorkflowTemplateParametersValidationValues struct {
	// Required. List of allowed values for the parameter.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#values DataprocWorkflowTemplate#values}
	Values *[]*string `field:"required" json:"values" yaml:"values"`
}
type DataprocWorkflowTemplateParametersValidationValuesOutputReference ¶
// DataprocWorkflowTemplateParametersValidationValuesOutputReference exposes
// the `values` validation block (its required Values list) of a template
// parameter.
type DataprocWorkflowTemplateParametersValidationValuesOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplateParametersValidationValues
	SetInternalValue(val *DataprocWorkflowTemplateParametersValidationValues)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	Values() *[]*string
	SetValues(val *[]*string)
	ValuesInput() *[]*string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplateParametersValidationValuesOutputReference ¶
// NewDataprocWorkflowTemplateParametersValidationValuesOutputReference
// constructs an output reference for the `values` validation block under the
// given parent resource and attribute.
func NewDataprocWorkflowTemplateParametersValidationValuesOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateParametersValidationValuesOutputReference
type DataprocWorkflowTemplatePlacement ¶
// DataprocWorkflowTemplatePlacement holds the workflow's `placement`
// configuration: either an existing-cluster selector or a managed cluster.
type DataprocWorkflowTemplatePlacement struct {
	// cluster_selector block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cluster_selector DataprocWorkflowTemplate#cluster_selector}
	ClusterSelector *DataprocWorkflowTemplatePlacementClusterSelector `field:"optional" json:"clusterSelector" yaml:"clusterSelector"`
	// managed_cluster block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#managed_cluster DataprocWorkflowTemplate#managed_cluster}
	ManagedCluster *DataprocWorkflowTemplatePlacementManagedCluster `field:"optional" json:"managedCluster" yaml:"managedCluster"`
}
type DataprocWorkflowTemplatePlacementClusterSelector ¶
// DataprocWorkflowTemplatePlacementClusterSelector selects an existing
// cluster for the workflow by labels, optionally pinned to a zone.
type DataprocWorkflowTemplatePlacementClusterSelector struct {
	// Required. The cluster labels. Cluster must have all labels to match.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cluster_labels DataprocWorkflowTemplate#cluster_labels}
	ClusterLabels *map[string]*string `field:"required" json:"clusterLabels" yaml:"clusterLabels"`
	// Optional.
	//
	// The zone where workflow process executes. This parameter does not affect the selection of the cluster. If unspecified, the zone of the first cluster matching the selector is used.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#zone DataprocWorkflowTemplate#zone}
	Zone *string `field:"optional" json:"zone" yaml:"zone"`
}
type DataprocWorkflowTemplatePlacementClusterSelectorOutputReference ¶
// DataprocWorkflowTemplatePlacementClusterSelectorOutputReference exposes the
// `cluster_selector` placement block (cluster labels and optional zone).
type DataprocWorkflowTemplatePlacementClusterSelectorOutputReference interface {
	cdktf.ComplexObject
	ClusterLabels() *map[string]*string
	SetClusterLabels(val *map[string]*string)
	ClusterLabelsInput() *map[string]*string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplatePlacementClusterSelector
	SetInternalValue(val *DataprocWorkflowTemplatePlacementClusterSelector)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	Zone() *string
	SetZone(val *string)
	ZoneInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetZone()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementClusterSelectorOutputReference ¶
// NewDataprocWorkflowTemplatePlacementClusterSelectorOutputReference
// constructs an output reference for the `cluster_selector` placement block
// under the given parent resource and attribute.
func NewDataprocWorkflowTemplatePlacementClusterSelectorOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementClusterSelectorOutputReference
type DataprocWorkflowTemplatePlacementManagedCluster ¶
// DataprocWorkflowTemplatePlacementManagedCluster describes an ephemeral
// cluster the workflow creates: a name prefix, the cluster config, and
// optional labels.
type DataprocWorkflowTemplatePlacementManagedCluster struct {
	// Required.
	//
	// The cluster name prefix. A unique cluster name will be formed by appending a random suffix. The name must contain only lower-case letters (a-z), numbers (0-9), and hyphens (-). Must begin with a letter. Cannot begin or end with hyphen. Must consist of between 2 and 35 characters.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cluster_name DataprocWorkflowTemplate#cluster_name}
	ClusterName *string `field:"required" json:"clusterName" yaml:"clusterName"`
	// config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#config DataprocWorkflowTemplate#config}
	Config *DataprocWorkflowTemplatePlacementManagedClusterConfig `field:"required" json:"config" yaml:"config"`
	// Optional.
	//
	// The labels to associate with this cluster. Label keys must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: p{Ll}p{Lo}{0,62} Label values must be between 1 and 63 characters long, and must conform to the following PCRE regular expression: [p{Ll}p{Lo}p{N}_-]{0,63} No more than 32 labels can be associated with a given cluster.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#labels DataprocWorkflowTemplate#labels}
	Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfig is the full `config`
// block of a managed cluster: per-feature sub-blocks plus staging/temp
// bucket names.
type DataprocWorkflowTemplatePlacementManagedClusterConfig struct {
	// autoscaling_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#autoscaling_config DataprocWorkflowTemplate#autoscaling_config}
	AutoscalingConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig `field:"optional" json:"autoscalingConfig" yaml:"autoscalingConfig"`
	// encryption_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#encryption_config DataprocWorkflowTemplate#encryption_config}
	EncryptionConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig `field:"optional" json:"encryptionConfig" yaml:"encryptionConfig"`
	// endpoint_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#endpoint_config DataprocWorkflowTemplate#endpoint_config}
	EndpointConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig `field:"optional" json:"endpointConfig" yaml:"endpointConfig"`
	// gce_cluster_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#gce_cluster_config DataprocWorkflowTemplate#gce_cluster_config}
	GceClusterConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig `field:"optional" json:"gceClusterConfig" yaml:"gceClusterConfig"`
	// initialization_actions block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#initialization_actions DataprocWorkflowTemplate#initialization_actions}
	InitializationActions interface{} `field:"optional" json:"initializationActions" yaml:"initializationActions"`
	// lifecycle_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#lifecycle_config DataprocWorkflowTemplate#lifecycle_config}
	LifecycleConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig `field:"optional" json:"lifecycleConfig" yaml:"lifecycleConfig"`
	// master_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#master_config DataprocWorkflowTemplate#master_config}
	MasterConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig `field:"optional" json:"masterConfig" yaml:"masterConfig"`
	// secondary_worker_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#secondary_worker_config DataprocWorkflowTemplate#secondary_worker_config}
	SecondaryWorkerConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig `field:"optional" json:"secondaryWorkerConfig" yaml:"secondaryWorkerConfig"`
	// security_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#security_config DataprocWorkflowTemplate#security_config}
	SecurityConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig `field:"optional" json:"securityConfig" yaml:"securityConfig"`
	// software_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#software_config DataprocWorkflowTemplate#software_config}
	SoftwareConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig `field:"optional" json:"softwareConfig" yaml:"softwareConfig"`
	// Optional.
	//
	// A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's staging bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket (see [Dataproc staging bucket](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). **This field requires a Cloud Storage bucket name, not a URI to a Cloud Storage bucket.**
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#staging_bucket DataprocWorkflowTemplate#staging_bucket}
	StagingBucket *string `field:"optional" json:"stagingBucket" yaml:"stagingBucket"`
	// Optional.
	//
	// A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.
	// **This field requires a Cloud Storage bucket name, not a URI to a Cloud Storage bucket.**
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#temp_bucket DataprocWorkflowTemplate#temp_bucket}
	TempBucket *string `field:"optional" json:"tempBucket" yaml:"tempBucket"`
	// worker_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#worker_config DataprocWorkflowTemplate#worker_config}
	WorkerConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig `field:"optional" json:"workerConfig" yaml:"workerConfig"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
// selects the autoscaling policy applied to the managed cluster.
type DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig struct {
	// Optional.
	//
	// The autoscaling policy used by the cluster. Only resource names including projectid and location (region) are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` Note that the policy must be in the same project and Dataproc region.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#policy DataprocWorkflowTemplate#policy}
	Policy *string `field:"optional" json:"policy" yaml:"policy"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference
// exposes the `autoscaling_config` block (its optional Policy) of a managed
// cluster config.
type DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
	SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig)
	Policy() *string
	SetPolicy(val *string)
	PolicyInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetPolicy()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference
// constructs an output reference for the `autoscaling_config` block under the
// given parent resource and attribute.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
// configures disk encryption for the managed cluster.
type DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig struct {
	// Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#gce_pd_kms_key_name DataprocWorkflowTemplate#gce_pd_kms_key_name}
	GcePdKmsKeyName *string `field:"optional" json:"gcePdKmsKeyName" yaml:"gcePdKmsKeyName"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference
// exposes the `encryption_config` block (its optional GcePdKmsKeyName) of a
// managed cluster config.
type DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	GcePdKmsKeyName() *string
	SetGcePdKmsKeyName(val *string)
	GcePdKmsKeyNameInput() *string
	InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
	SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetGcePdKmsKeyName()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig models the
// endpoint_config block of a managed cluster.
type DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig struct {
	// Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#enable_http_port_access DataprocWorkflowTemplate#enable_http_port_access}
	EnableHttpPortAccess interface{} `field:"optional" json:"enableHttpPortAccess" yaml:"enableHttpPortAccess"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string EnableHttpPortAccess() interface{} SetEnableHttpPortAccess(val interface{}) EnableHttpPortAccessInput() interface{} // Experimental. Fqn() *string HttpPorts() cdktf.StringMap InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetEnableHttpPortAccess() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig struct { // Optional. // // If true, all instances in the cluster will only have internal IP addresses. By default, clusters are not restricted to internal IP addresses, and will have ephemeral external IP addresses assigned to each instance. This `internal_ip_only` restriction can only be enabled for subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#internal_ip_only DataprocWorkflowTemplate#internal_ip_only} InternalIpOnly interface{} `field:"optional" json:"internalIpOnly" yaml:"internalIpOnly"` // The Compute Engine metadata entries to add to all instances (see [Project and instance metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#metadata DataprocWorkflowTemplate#metadata} Metadata *map[string]*string `field:"optional" json:"metadata" yaml:"metadata"` // Optional. // // The Compute Engine network to be used for machine communications. Cannot be specified with subnetwork_uri. If neither `network_uri` nor `subnetwork_uri` is specified, the "default" network of the project is used, if it exists. Cannot be a "Custom Subnet Network" (see [Using Subnetworks](https://cloud.google.com/compute/docs/subnetworks) for more information). A full URL, partial URI, or short name are valid. 
Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` * `projects/[project_id]/regions/global/default` * `default` // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#network DataprocWorkflowTemplate#network} Network *string `field:"optional" json:"network" yaml:"network"` // node_group_affinity block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#node_group_affinity DataprocWorkflowTemplate#node_group_affinity} NodeGroupAffinity *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity `field:"optional" json:"nodeGroupAffinity" yaml:"nodeGroupAffinity"` // Optional. The type of IPv6 access for a cluster. Possible values: PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, INHERIT_FROM_SUBNETWORK, OUTBOUND, BIDIRECTIONAL. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#private_ipv6_google_access DataprocWorkflowTemplate#private_ipv6_google_access} PrivateIpv6GoogleAccess *string `field:"optional" json:"privateIpv6GoogleAccess" yaml:"privateIpv6GoogleAccess"` // reservation_affinity block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#reservation_affinity DataprocWorkflowTemplate#reservation_affinity} ReservationAffinity *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity `field:"optional" json:"reservationAffinity" yaml:"reservationAffinity"` // Optional. 
// // The [Dataproc service account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc) (also see [VM Data Plane identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) used by Dataproc cluster VM instances to access Google Cloud Platform services. If not specified, the [Compute Engine default service account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) is used. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#service_account DataprocWorkflowTemplate#service_account} ServiceAccount *string `field:"optional" json:"serviceAccount" yaml:"serviceAccount"` // Optional. // // The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is always included: * https://www.googleapis.com/auth/cloud.useraccounts.readonly * https://www.googleapis.com/auth/devstorage.read_write * https://www.googleapis.com/auth/logging.write If no scopes are specified, the following defaults are also provided: * https://www.googleapis.com/auth/bigquery * https://www.googleapis.com/auth/bigtable.admin.table * https://www.googleapis.com/auth/bigtable.data * https://www.googleapis.com/auth/devstorage.full_control // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#service_account_scopes DataprocWorkflowTemplate#service_account_scopes} ServiceAccountScopes *[]*string `field:"optional" json:"serviceAccountScopes" yaml:"serviceAccountScopes"` // shielded_instance_config block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#shielded_instance_config DataprocWorkflowTemplate#shielded_instance_config} ShieldedInstanceConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig `field:"optional" json:"shieldedInstanceConfig" yaml:"shieldedInstanceConfig"` // Optional. // // The Compute Engine subnetwork to be used for machine communications. Cannot be specified with network_uri. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` * `projects/[project_id]/regions/us-east1/subnetworks/sub0` * `sub0` // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#subnetwork DataprocWorkflowTemplate#subnetwork} Subnetwork *string `field:"optional" json:"subnetwork" yaml:"subnetwork"` // The Compute Engine tags to add to all instances (see [Tagging instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#tags DataprocWorkflowTemplate#tags} Tags *[]*string `field:"optional" json:"tags" yaml:"tags"` // Optional. // // The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. 
Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` * `projects/[project_id]/zones/[zone]` * `us-central1-f` // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#zone DataprocWorkflowTemplate#zone} Zone *string `field:"optional" json:"zone" yaml:"zone"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity
// models the node_group_affinity block (sole-tenant node group placement).
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity struct {
	// Required.
	//
	// The URI of a sole-tenant [node group resource](https://cloud.google.com/compute/docs/reference/rest/v1/nodeGroups) that the cluster will be created on. A full URL, partial URI, or node group name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` * `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` * `node-group-1`
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#node_group DataprocWorkflowTemplate#node_group}
	NodeGroup *string `field:"required" json:"nodeGroup" yaml:"nodeGroup"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity) NodeGroup() *string SetNodeGroup(val *string) NodeGroupInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalIpOnly() interface{} SetInternalIpOnly(val interface{}) InternalIpOnlyInput() interface{} InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig) Metadata() *map[string]*string SetMetadata(val *map[string]*string) MetadataInput() *map[string]*string Network() *string SetNetwork(val *string) NetworkInput() *string NodeGroupAffinity() DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference NodeGroupAffinityInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity PrivateIpv6GoogleAccess() *string SetPrivateIpv6GoogleAccess(val *string) PrivateIpv6GoogleAccessInput() *string ReservationAffinity() DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference ReservationAffinityInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity ServiceAccount() *string SetServiceAccount(val *string) ServiceAccountInput() *string ServiceAccountScopes() *[]*string SetServiceAccountScopes(val *[]*string) ServiceAccountScopesInput() *[]*string 
ShieldedInstanceConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference ShieldedInstanceConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig Subnetwork() *string SetSubnetwork(val *string) SubnetworkInput() *string Tags() *[]*string SetTags(val *[]*string) TagsInput() *[]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) Zone() *string SetZone(val *string) ZoneInput() *string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable PutNodeGroupAffinity(value *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity) PutReservationAffinity(value *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity) PutShieldedInstanceConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig) ResetInternalIpOnly() ResetMetadata() ResetNetwork() ResetNodeGroupAffinity() ResetPrivateIpv6GoogleAccess() ResetReservationAffinity() ResetServiceAccount() ResetServiceAccountScopes() ResetShieldedInstanceConfig() ResetSubnetwork() ResetTags() ResetZone() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity
// models the reservation_affinity block (how the cluster consumes zonal reservations).
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity struct {
	// Optional. Type of reservation to consume Possible values: TYPE_UNSPECIFIED, NO_RESERVATION, ANY_RESERVATION, SPECIFIC_RESERVATION.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#consume_reservation_type DataprocWorkflowTemplate#consume_reservation_type}
	ConsumeReservationType *string `field:"optional" json:"consumeReservationType" yaml:"consumeReservationType"`
	// Optional. Corresponds to the label key of reservation resource.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#key DataprocWorkflowTemplate#key}
	Key *string `field:"optional" json:"key" yaml:"key"`
	// Optional. Corresponds to the label values of reservation resource.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#values DataprocWorkflowTemplate#values}
	Values *[]*string `field:"optional" json:"values" yaml:"values"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) ConsumeReservationType() *string SetConsumeReservationType(val *string) ConsumeReservationTypeInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity) Key() *string SetKey(val *string) KeyInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) Values() *[]*string SetValues(val *[]*string) ValuesInput() *[]*string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetConsumeReservationType() ResetKey() ResetValues() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig
// models the shielded_instance_config block (Shielded VM options).
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig struct {
	// Optional.
	//
	// Defines whether instances have integrity monitoring enabled. Integrity monitoring compares the most recent boot measurements to the integrity policy baseline and returns a pair of pass/fail results depending on whether they match or not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#enable_integrity_monitoring DataprocWorkflowTemplate#enable_integrity_monitoring}
	EnableIntegrityMonitoring interface{} `field:"optional" json:"enableIntegrityMonitoring" yaml:"enableIntegrityMonitoring"`
	// Optional.
	//
	// Defines whether the instances have Secure Boot enabled. Secure Boot helps ensure that the system only runs authentic software by verifying the digital signature of all boot components, and halting the boot process if signature verification fails.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#enable_secure_boot DataprocWorkflowTemplate#enable_secure_boot}
	EnableSecureBoot interface{} `field:"optional" json:"enableSecureBoot" yaml:"enableSecureBoot"`
	// Optional.
	//
	// Defines whether the instance have the vTPM enabled. Virtual Trusted Platform Module protects objects like keys, certificates and enables Measured Boot by performing the measurements needed to create a known good boot baseline, called the integrity policy baseline.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#enable_vtpm DataprocWorkflowTemplate#enable_vtpm}
	EnableVtpm interface{} `field:"optional" json:"enableVtpm" yaml:"enableVtpm"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string EnableIntegrityMonitoring() interface{} SetEnableIntegrityMonitoring(val interface{}) EnableIntegrityMonitoringInput() interface{} EnableSecureBoot() interface{} SetEnableSecureBoot(val interface{}) EnableSecureBootInput() interface{} EnableVtpm() interface{} SetEnableVtpm(val interface{}) EnableVtpmInput() interface{} // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. 
GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetEnableIntegrityMonitoring() ResetEnableSecureBoot() ResetEnableVtpm() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActions ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActions models
// one initialization_actions entry (a startup executable run on cluster creation).
type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActions struct {
	// Required. Cloud Storage URI of executable file.
	//
	// NOTE(review): the upstream API documents this as required, but the generated
	// tag marks it `field:"optional"` — confirm against the provider schema.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#executable_file DataprocWorkflowTemplate#executable_file}
	ExecutableFile *string `field:"optional" json:"executableFile" yaml:"executableFile"`
	// Optional.
	//
	// Amount of time executable has to complete. Default is 10 minutes (see JSON representation of [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). Cluster creation fails with an explanatory error message (the name of the executable that caused the error and the exceeded timeout period) if the executable is not completed at end of the timeout period.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#execution_timeout DataprocWorkflowTemplate#execution_timeout}
	ExecutionTimeout *string `field:"optional" json:"executionTimeout" yaml:"executionTimeout"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList
// constructs the generated list wrapper for the given parent resource and
// attribute (constructor signature from the generated bindings; body not
// shown in this documentation excerpt).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList
type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string ExecutableFile() *string SetExecutableFile(val *string) ExecutableFileInput() *string ExecutionTimeout() *string SetExecutionTimeout(val *string) ExecutionTimeoutInput() *string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetExecutableFile() ResetExecutionTimeout() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference
// constructs the generated output reference for one list element (constructor
// signature from the generated bindings; body not shown in this excerpt).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig struct { // Optional. The time when cluster will be auto-deleted (see JSON representation of [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#auto_delete_time DataprocWorkflowTemplate#auto_delete_time} AutoDeleteTime *string `field:"optional" json:"autoDeleteTime" yaml:"autoDeleteTime"` // Optional. // // The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#auto_delete_ttl DataprocWorkflowTemplate#auto_delete_ttl} AutoDeleteTtl *string `field:"optional" json:"autoDeleteTtl" yaml:"autoDeleteTtl"` // Optional. // // The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#idle_delete_ttl DataprocWorkflowTemplate#idle_delete_ttl} IdleDeleteTtl *string `field:"optional" json:"idleDeleteTtl" yaml:"idleDeleteTtl"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference interface { cdktf.ComplexObject AutoDeleteTime() *string SetAutoDeleteTime(val *string) AutoDeleteTimeInput() *string AutoDeleteTtl() *string SetAutoDeleteTtl(val *string) AutoDeleteTtlInput() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string IdleDeleteTtl() *string SetIdleDeleteTtl(val *string) IdleDeleteTtlInput() *string IdleStartTime() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetAutoDeleteTime() ResetAutoDeleteTtl() ResetIdleDeleteTtl() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference
// constructs the generated output reference for the lifecycle_config block
// (constructor signature from the generated bindings; body not shown here).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig struct { // accelerators block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerators DataprocWorkflowTemplate#accelerators} Accelerators interface{} `field:"optional" json:"accelerators" yaml:"accelerators"` // disk_config block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#disk_config DataprocWorkflowTemplate#disk_config} DiskConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig `field:"optional" json:"diskConfig" yaml:"diskConfig"` // Optional. // // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` * `projects/[project_id]/global/images/[image-id]` * `image-id` Image family examples. Dataproc will use the most recent image from the family: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` * `projects/[project_id]/global/images/family/[custom-image-family-name]` If the URI is unspecified, it will be inferred from `SoftwareConfig.image_version` or the system default. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#image DataprocWorkflowTemplate#image} Image *string `field:"optional" json:"image" yaml:"image"` // Optional. // // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. 
Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `n1-standard-2` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, `n1-standard-2`. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#machine_type DataprocWorkflowTemplate#machine_type} MachineType *string `field:"optional" json:"machineType" yaml:"machineType"` // Optional. Specifies the minimum cpu platform for the Instance Group. See [Dataproc -> Minimum CPU Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#min_cpu_platform DataprocWorkflowTemplate#min_cpu_platform} MinCpuPlatform *string `field:"optional" json:"minCpuPlatform" yaml:"minCpuPlatform"` // Optional. // // The number of VM instances in the instance group. For [HA cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) [master_config](#FIELDS.master_config) groups, **must be set to 3**. For standard cluster [master_config](#FIELDS.master_config) groups, **must be set to 1**. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#num_instances DataprocWorkflowTemplate#num_instances} NumInstances *float64 `field:"optional" json:"numInstances" yaml:"numInstances"` // Optional. // // Specifies the preemptibility of the instance group. The default value for master and worker groups is `NON_PREEMPTIBLE`. 
This default cannot be changed. The default value for secondary instances is `PREEMPTIBLE`. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#preemptibility DataprocWorkflowTemplate#preemptibility} Preemptibility *string `field:"optional" json:"preemptibility" yaml:"preemptibility"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerators ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerators struct { // The number of the accelerator cards of this type exposed to this instance. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerator_count DataprocWorkflowTemplate#accelerator_count} AcceleratorCount *float64 `field:"optional" json:"acceleratorCount" yaml:"acceleratorCount"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. // // See [Compute Engine AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). Examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `nvidia-tesla-k80` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, `nvidia-tesla-k80`. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerator_type DataprocWorkflowTemplate#accelerator_type} AcceleratorType *string `field:"optional" json:"acceleratorType" yaml:"acceleratorType"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList
// constructs the generated list wrapper for the accelerators attribute
// (constructor signature from the generated bindings; body not shown here).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference interface { cdktf.ComplexObject AcceleratorCount() *float64 SetAcceleratorCount(val *float64) AcceleratorCountInput() *float64 AcceleratorType() *string SetAcceleratorType(val *string) AcceleratorTypeInput() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetAcceleratorCount() ResetAcceleratorType() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference
// constructs the generated output reference for one accelerators element
// (constructor signature from the generated bindings; body not shown here).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig struct { // Optional. Size in GB of the boot disk (default is 500GB). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#boot_disk_size_gb DataprocWorkflowTemplate#boot_disk_size_gb} BootDiskSizeGb *float64 `field:"optional" json:"bootDiskSizeGb" yaml:"bootDiskSizeGb"` // Optional. // // Type of the boot disk (default is "pd-standard"). Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), "pd-ssd" (Persistent Disk Solid State Drive), or "pd-standard" (Persistent Disk Hard Disk Drive). See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#boot_disk_type DataprocWorkflowTemplate#boot_disk_type} BootDiskType *string `field:"optional" json:"bootDiskType" yaml:"bootDiskType"` // Optional. // // Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#num_local_ssds DataprocWorkflowTemplate#num_local_ssds} NumLocalSsds *float64 `field:"optional" json:"numLocalSsds" yaml:"numLocalSsds"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference interface { cdktf.ComplexObject BootDiskSizeGb() *float64 SetBootDiskSizeGb(val *float64) BootDiskSizeGbInput() *float64 BootDiskType() *string SetBootDiskType(val *string) BootDiskTypeInput() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig) NumLocalSsds() *float64 SetNumLocalSsds(val *float64) NumLocalSsdsInput() *float64 // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetBootDiskSizeGb() ResetBootDiskType() ResetNumLocalSsds() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference
// constructs the generated output reference for the disk_config block
// (constructor signature from the generated bindings; body not shown here).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig struct { }
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList
// constructs the generated list wrapper for the computed managed_group_config
// attribute (constructor signature from the generated bindings; body not shown
// here).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InstanceGroupManagerName() *string InstanceTemplateName() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference
// constructs the generated output reference for one computed
// managed_group_config element (constructor signature from the generated
// bindings; body not shown here).
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference interface { cdktf.ComplexObject Accelerators() DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList AcceleratorsInput() interface{} // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DiskConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference DiskConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig // Experimental. Fqn() *string Image() *string SetImage(val *string) ImageInput() *string InstanceNames() *[]*string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig) IsPreemptible() cdktf.IResolvable MachineType() *string SetMachineType(val *string) MachineTypeInput() *string ManagedGroupConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList MinCpuPlatform() *string SetMinCpuPlatform(val *string) MinCpuPlatformInput() *string NumInstances() *float64 SetNumInstances(val *float64) NumInstancesInput() *float64 Preemptibility() *string SetPreemptibility(val *string) PreemptibilityInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. 
SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutAccelerators(value interface{}) PutDiskConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig) ResetAccelerators() ResetDiskConfig() ResetImage() ResetMachineType() ResetMinCpuPlatform() ResetNumInstances() ResetPreemptibility() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference returns an
// output reference bound to the given parent resource at the given attribute path.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference is the cdktf
// complex-object reference for the managed cluster's config block, covering every nested
// sub-config (autoscaling, encryption, endpoint, GCE cluster, lifecycle, master/worker groups,
// security, software) plus the staging/temp bucket attributes.
type DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference interface {
	cdktf.ComplexObject
	AutoscalingConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference
	AutoscalingConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	EncryptionConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference
	EncryptionConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig
	EndpointConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference
	EndpointConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig
	// Experimental.
	Fqn() *string
	GceClusterConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference
	GceClusterConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig
	InitializationActions() DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList
	InitializationActionsInput() interface{}
	InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfig
	SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfig)
	LifecycleConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference
	LifecycleConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig
	MasterConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference
	MasterConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig
	SecondaryWorkerConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference
	SecondaryWorkerConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
	SecurityConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference
	SecurityConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig
	SoftwareConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference
	SoftwareConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig
	StagingBucket() *string
	SetStagingBucket(val *string)
	StagingBucketInput() *string
	TempBucket() *string
	SetTempBucket(val *string)
	TempBucketInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	WorkerConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference
	WorkerConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutAutoscalingConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig)
	PutEncryptionConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig)
	PutEndpointConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig)
	PutGceClusterConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig)
	PutInitializationActions(value interface{})
	PutLifecycleConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig)
	PutMasterConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig)
	PutSecondaryWorkerConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig)
	PutSecurityConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig)
	PutSoftwareConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig)
	PutWorkerConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig)
	ResetAutoscalingConfig()
	ResetEncryptionConfig()
	ResetEndpointConfig()
	ResetGceClusterConfig()
	ResetInitializationActions()
	ResetLifecycleConfig()
	ResetMasterConfig()
	ResetSecondaryWorkerConfig()
	ResetSecurityConfig()
	ResetSoftwareConfig()
	ResetStagingBucket()
	ResetTempBucket()
	ResetWorkerConfig()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference returns an output
// reference bound to the given parent resource at the given attribute path.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig models the
// secondary_worker_config block of the managed cluster; all fields are optional.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig struct {
	// accelerators block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerators DataprocWorkflowTemplate#accelerators}
	Accelerators interface{} `field:"optional" json:"accelerators" yaml:"accelerators"`
	// disk_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#disk_config DataprocWorkflowTemplate#disk_config}
	DiskConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig `field:"optional" json:"diskConfig" yaml:"diskConfig"`
	// Optional.
	//
	// The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` * `projects/[project_id]/global/images/[image-id]` * `image-id` Image family examples. Dataproc will use the most recent image from the family: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` * `projects/[project_id]/global/images/family/[custom-image-family-name]` If the URI is unspecified, it will be inferred from `SoftwareConfig.image_version` or the system default.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#image DataprocWorkflowTemplate#image}
	Image *string `field:"optional" json:"image" yaml:"image"`
	// Optional.
	//
	// The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `n1-standard-2` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, `n1-standard-2`.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#machine_type DataprocWorkflowTemplate#machine_type}
	MachineType *string `field:"optional" json:"machineType" yaml:"machineType"`
	// Optional. Specifies the minimum cpu platform for the Instance Group. See [Dataproc -> Minimum CPU Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#min_cpu_platform DataprocWorkflowTemplate#min_cpu_platform}
	MinCpuPlatform *string `field:"optional" json:"minCpuPlatform" yaml:"minCpuPlatform"`
	// Optional.
	//
	// The number of VM instances in the instance group. For [HA cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) [master_config](#FIELDS.master_config) groups, **must be set to 3**. For standard cluster [master_config](#FIELDS.master_config) groups, **must be set to 1**.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#num_instances DataprocWorkflowTemplate#num_instances}
	NumInstances *float64 `field:"optional" json:"numInstances" yaml:"numInstances"`
	// Optional.
	//
	// Specifies the preemptibility of the instance group. The default value for master and worker groups is `NON_PREEMPTIBLE`. This default cannot be changed. The default value for secondary instances is `PREEMPTIBLE`. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#preemptibility DataprocWorkflowTemplate#preemptibility}
	Preemptibility *string `field:"optional" json:"preemptibility" yaml:"preemptibility"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerators ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerators models one
// accelerators entry of the secondary worker config.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerators struct {
	// The number of the accelerator cards of this type exposed to this instance.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerator_count DataprocWorkflowTemplate#accelerator_count}
	AcceleratorCount *float64 `field:"optional" json:"acceleratorCount" yaml:"acceleratorCount"`
	// Full URL, partial URI, or short name of the accelerator type resource to expose to this instance.
	//
	// See [Compute Engine AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). Examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `nvidia-tesla-k80` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, `nvidia-tesla-k80`.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerator_type DataprocWorkflowTemplate#accelerator_type}
	AcceleratorType *string `field:"optional" json:"acceleratorType" yaml:"acceleratorType"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList is the
// cdktf list wrapper over the accelerators entries; Get returns the per-index output reference.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList
// returns a list wrapper bound to the given parent resource and attribute; wrapsSet indicates the
// underlying collection is a set.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference
// is the cdktf complex-object reference for a single accelerators entry of the secondary worker config.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference interface {
	cdktf.ComplexObject
	AcceleratorCount() *float64
	SetAcceleratorCount(val *float64)
	AcceleratorCountInput() *float64
	AcceleratorType() *string
	SetAcceleratorType(val *string)
	AcceleratorTypeInput() *string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetAcceleratorCount()
	ResetAcceleratorType()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference
// returns an output reference for the list element at complexObjectIndex under the given parent
// resource and attribute; complexObjectIsFromSet indicates the element comes from a set.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig models the
// disk_config block of the secondary worker config; all fields are optional.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig struct {
	// Optional. Size in GB of the boot disk (default is 500GB).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#boot_disk_size_gb DataprocWorkflowTemplate#boot_disk_size_gb}
	BootDiskSizeGb *float64 `field:"optional" json:"bootDiskSizeGb" yaml:"bootDiskSizeGb"`
	// Optional.
	//
	// Type of the boot disk (default is "pd-standard"). Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), "pd-ssd" (Persistent Disk Solid State Drive), or "pd-standard" (Persistent Disk Hard Disk Drive). See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#boot_disk_type DataprocWorkflowTemplate#boot_disk_type}
	BootDiskType *string `field:"optional" json:"bootDiskType" yaml:"bootDiskType"`
	// Optional.
	//
	// Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#num_local_ssds DataprocWorkflowTemplate#num_local_ssds}
	NumLocalSsds *float64 `field:"optional" json:"numLocalSsds" yaml:"numLocalSsds"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference
// is the cdktf complex-object reference for the secondary worker's disk_config block.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference interface {
	cdktf.ComplexObject
	BootDiskSizeGb() *float64
	SetBootDiskSizeGb(val *float64)
	BootDiskSizeGbInput() *float64
	BootDiskType() *string
	SetBootDiskType(val *string)
	BootDiskTypeInput() *string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
	SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig)
	NumLocalSsds() *float64
	SetNumLocalSsds(val *float64)
	NumLocalSsdsInput() *float64
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetBootDiskSizeGb()
	ResetBootDiskType()
	ResetNumLocalSsds()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference
// returns an output reference bound to the given parent resource at the given attribute path.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig
// is an empty marker struct: managed_group_config carries no settable fields here; its values
// are surfaced read-only via the corresponding OutputReference.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig struct {
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList
// is the cdktf list wrapper over managed_group_config entries; Get returns the per-index
// output reference. Note it exposes no InternalValue accessors (the entries are computed).
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList
// returns a list wrapper bound to the given parent resource and attribute; wrapsSet indicates the
// underlying collection is a set.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference
// is the cdktf complex-object reference for one managed_group_config entry; its attributes
// (instance group manager / template names) are read-only — there are no setters or Reset methods.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InstanceGroupManagerName() *string
	InstanceTemplateName() *string
	InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig
	SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference
// returns an output reference for the list element at complexObjectIndex under the given parent
// resource and attribute; complexObjectIsFromSet indicates the element comes from a set.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference is the
// cdktf complex-object reference for the managed cluster's secondary_worker_config block; it
// mirrors the master/worker config references (typed accessors, Put*/Reset* mutators, generic
// cdktf attribute getters).
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference interface {
	cdktf.ComplexObject
	Accelerators() DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList
	AcceleratorsInput() interface{}
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DiskConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference
	DiskConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig
	// Experimental.
	Fqn() *string
	Image() *string
	SetImage(val *string)
	ImageInput() *string
	InstanceNames() *[]*string
	InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig
	SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig)
	IsPreemptible() cdktf.IResolvable
	MachineType() *string
	SetMachineType(val *string)
	MachineTypeInput() *string
	ManagedGroupConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList
	MinCpuPlatform() *string
	SetMinCpuPlatform(val *string)
	MinCpuPlatformInput() *string
	NumInstances() *float64
	SetNumInstances(val *float64)
	NumInstancesInput() *float64
	Preemptibility() *string
	SetPreemptibility(val *string)
	PreemptibilityInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutAccelerators(value interface{})
	PutDiskConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig)
	ResetAccelerators()
	ResetDiskConfig()
	ResetImage()
	ResetMachineType()
	ResetMinCpuPlatform()
	ResetNumInstances()
	ResetPreemptibility()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference ¶
// NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference
// returns an output reference bound to the given parent resource at the given attribute path.
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig ¶
// DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig models the security_config
// block of the managed cluster; its only field is the optional kerberos_config sub-block.
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig struct {
	// kerberos_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#kerberos_config DataprocWorkflowTemplate#kerberos_config}
	KerberosConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig `field:"optional" json:"kerberosConfig" yaml:"kerberosConfig"`
}
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig struct { // Optional. The admin server (IP or hostname) for the remote trusted realm in a cross realm trust relationship. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cross_realm_trust_admin_server DataprocWorkflowTemplate#cross_realm_trust_admin_server} CrossRealmTrustAdminServer *string `field:"optional" json:"crossRealmTrustAdminServer" yaml:"crossRealmTrustAdminServer"` // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross realm trust relationship. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cross_realm_trust_kdc DataprocWorkflowTemplate#cross_realm_trust_kdc} CrossRealmTrustKdc *string `field:"optional" json:"crossRealmTrustKdc" yaml:"crossRealmTrustKdc"` // Optional. The remote realm the Dataproc on-cluster KDC will trust, should the user enable cross realm trust. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cross_realm_trust_realm DataprocWorkflowTemplate#cross_realm_trust_realm} CrossRealmTrustRealm *string `field:"optional" json:"crossRealmTrustRealm" yaml:"crossRealmTrustRealm"` // // The Cloud Storage URI of a KMS encrypted file containing the shared password between the on-cluster Kerberos realm and the remote trusted realm, in a cross realm trust relationship. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#cross_realm_trust_shared_password DataprocWorkflowTemplate#cross_realm_trust_shared_password} CrossRealmTrustSharedPassword *string `field:"optional" json:"crossRealmTrustSharedPassword" yaml:"crossRealmTrustSharedPassword"` // Optional. 
// // Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#enable_kerberos DataprocWorkflowTemplate#enable_kerberos} EnableKerberos interface{} `field:"optional" json:"enableKerberos" yaml:"enableKerberos"` // Optional. The Cloud Storage URI of a KMS encrypted file containing the master key of the KDC database. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#kdc_db_key DataprocWorkflowTemplate#kdc_db_key} KdcDbKey *string `field:"optional" json:"kdcDbKey" yaml:"kdcDbKey"` // Optional. // // The Cloud Storage URI of a KMS encrypted file containing the password to the user provided key. For the self-signed certificate, this password is generated by Dataproc. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#key_password DataprocWorkflowTemplate#key_password} KeyPassword *string `field:"optional" json:"keyPassword" yaml:"keyPassword"` // Optional. // // The Cloud Storage URI of the keystore file used for SSL encryption. If not provided, Dataproc will provide a self-signed certificate. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#keystore DataprocWorkflowTemplate#keystore} Keystore *string `field:"optional" json:"keystore" yaml:"keystore"` // Optional. // // The Cloud Storage URI of a KMS encrypted file containing the password to the user provided keystore. For the self-signed certificate, this password is generated by Dataproc. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#keystore_password DataprocWorkflowTemplate#keystore_password} KeystorePassword *string `field:"optional" json:"keystorePassword" yaml:"keystorePassword"` // Optional. The uri of the KMS key used to encrypt various sensitive files. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#kms_key DataprocWorkflowTemplate#kms_key} KmsKey *string `field:"optional" json:"kmsKey" yaml:"kmsKey"` // Optional. // // The name of the on-cluster Kerberos realm. If not specified, the uppercased domain of hostnames will be the realm. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#realm DataprocWorkflowTemplate#realm} Realm *string `field:"optional" json:"realm" yaml:"realm"` // Optional. The Cloud Storage URI of a KMS encrypted file containing the root principal password. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#root_principal_password DataprocWorkflowTemplate#root_principal_password} RootPrincipalPassword *string `field:"optional" json:"rootPrincipalPassword" yaml:"rootPrincipalPassword"` // Optional. // // The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#tgt_lifetime_hours DataprocWorkflowTemplate#tgt_lifetime_hours} TgtLifetimeHours *float64 `field:"optional" json:"tgtLifetimeHours" yaml:"tgtLifetimeHours"` // Optional. // // The Cloud Storage URI of the truststore file used for SSL encryption. 
// If not provided, Dataproc will provide a self-signed certificate. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#truststore DataprocWorkflowTemplate#truststore} Truststore *string `field:"optional" json:"truststore" yaml:"truststore"` // Optional. // // The Cloud Storage URI of a KMS encrypted file containing the password to the user provided truststore. For the self-signed certificate, this password is generated by Dataproc. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#truststore_password DataprocWorkflowTemplate#truststore_password} TruststorePassword *string `field:"optional" json:"truststorePassword" yaml:"truststorePassword"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string CrossRealmTrustAdminServer() *string SetCrossRealmTrustAdminServer(val *string) CrossRealmTrustAdminServerInput() *string CrossRealmTrustKdc() *string SetCrossRealmTrustKdc(val *string) CrossRealmTrustKdcInput() *string CrossRealmTrustRealm() *string SetCrossRealmTrustRealm(val *string) CrossRealmTrustRealmInput() *string EnableKerberos() interface{} SetEnableKerberos(val interface{}) EnableKerberosInput() interface{} // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig) KdcDbKey() *string SetKdcDbKey(val *string) KdcDbKeyInput() *string KeyPassword() *string SetKeyPassword(val *string) KeyPasswordInput() *string Keystore() *string SetKeystore(val *string) KeystoreInput() *string KeystorePassword() *string SetKeystorePassword(val *string) KeystorePasswordInput() *string KmsKey() *string SetKmsKey(val *string) KmsKeyInput() *string Realm() *string SetRealm(val *string) RealmInput() *string RootPrincipalPassword() *string SetRootPrincipalPassword(val *string) RootPrincipalPasswordInput() *string // Experimental. TerraformAttribute() *string // Experimental. 
SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) TgtLifetimeHours() *float64 SetTgtLifetimeHours(val *float64) TgtLifetimeHoursInput() *float64 Truststore() *string SetTruststore(val *string) TruststoreInput() *string TruststorePassword() *string SetTruststorePassword(val *string) TruststorePasswordInput() *string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetCrossRealmTrustAdminServer() ResetCrossRealmTrustKdc() ResetCrossRealmTrustRealm() ResetEnableKerberos() ResetKdcDbKey() ResetKeyPassword() ResetKeystore() ResetKeystorePassword() ResetKmsKey() ResetRealm() ResetRootPrincipalPassword() ResetTgtLifetimeHours() ResetTruststore() ResetTruststorePassword() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig) KerberosConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference KerberosConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. 
GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutKerberosConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig) ResetKerberosConfig() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig struct { // Optional. // // The version of software inside the cluster. It must be one of the supported [Dataproc Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), such as "1.2" (including a subminor version, such as "1.2.29"), or the ["preview" version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). If unspecified, it defaults to the latest Debian version. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#image_version DataprocWorkflowTemplate#image_version} ImageVersion *string `field:"optional" json:"imageVersion" yaml:"imageVersion"` // Optional. The set of components to activate on the cluster. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#optional_components DataprocWorkflowTemplate#optional_components} OptionalComponents *[]*string `field:"optional" json:"optionalComponents" yaml:"optionalComponents"` // Optional. // // The properties to set on daemon config files. Property keys are specified in `prefix:property` format, for example `core:hadoop.tmp.dir`. The following are supported prefixes and their mappings: * capacity-scheduler: `capacity-scheduler.xml` * core: `core-site.xml` * distcp: `distcp-default.xml` * hdfs: `hdfs-site.xml` * hive: `hive-site.xml` * mapred: `mapred-site.xml` * pig: `pig.properties` * spark: `spark-defaults.conf` * yarn: `yarn-site.xml` For more information, see [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties). 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string ImageVersion() *string SetImageVersion(val *string) ImageVersionInput() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig) OptionalComponents() *[]*string SetOptionalComponents(val *[]*string) OptionalComponentsInput() *[]*string Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetImageVersion() ResetOptionalComponents() ResetProperties() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig struct { // accelerators block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerators DataprocWorkflowTemplate#accelerators} Accelerators interface{} `field:"optional" json:"accelerators" yaml:"accelerators"` // disk_config block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#disk_config DataprocWorkflowTemplate#disk_config} DiskConfig *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig `field:"optional" json:"diskConfig" yaml:"diskConfig"` // Optional. // // The Compute Engine image resource used for cluster instances. The URI can represent an image or image family. Image examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` * `projects/[project_id]/global/images/[image-id]` * `image-id` Image family examples. Dataproc will use the most recent image from the family: * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` * `projects/[project_id]/global/images/family/[custom-image-family-name]` If the URI is unspecified, it will be inferred from `SoftwareConfig.image_version` or the system default. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#image DataprocWorkflowTemplate#image} Image *string `field:"optional" json:"image" yaml:"image"` // Optional. // // The Compute Engine machine type used for cluster instances. A full URL, partial URI, or short name are valid. 
// Examples: * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` * `n1-standard-2` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the machine type resource, for example, `n1-standard-2`. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#machine_type DataprocWorkflowTemplate#machine_type} MachineType *string `field:"optional" json:"machineType" yaml:"machineType"` // Optional. Specifies the minimum cpu platform for the Instance Group. See [Dataproc -> Minimum CPU Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#min_cpu_platform DataprocWorkflowTemplate#min_cpu_platform} MinCpuPlatform *string `field:"optional" json:"minCpuPlatform" yaml:"minCpuPlatform"` // Optional. // // The number of VM instances in the instance group. For [HA cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) [master_config](#FIELDS.master_config) groups, **must be set to 3**. For standard cluster [master_config](#FIELDS.master_config) groups, **must be set to 1**. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#num_instances DataprocWorkflowTemplate#num_instances} NumInstances *float64 `field:"optional" json:"numInstances" yaml:"numInstances"` // Optional. // // Specifies the preemptibility of the instance group. The default value for master and worker groups is `NON_PREEMPTIBLE`. 
// This default cannot be changed. The default value for secondary instances is `PREEMPTIBLE`. Possible values: PREEMPTIBILITY_UNSPECIFIED, NON_PREEMPTIBLE, PREEMPTIBLE // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#preemptibility DataprocWorkflowTemplate#preemptibility} Preemptibility *string `field:"optional" json:"preemptibility" yaml:"preemptibility"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerators ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerators struct { // The number of the accelerator cards of this type exposed to this instance. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerator_count DataprocWorkflowTemplate#accelerator_count} AcceleratorCount *float64 `field:"optional" json:"acceleratorCount" yaml:"acceleratorCount"` // Full URL, partial URI, or short name of the accelerator type resource to expose to this instance. // // See [Compute Engine AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). Examples: * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` * `nvidia-tesla-k80` **Auto Zone Exception**: If you are using the Dataproc [Auto Zone Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) feature, you must use the short name of the accelerator type resource, for example, `nvidia-tesla-k80`. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#accelerator_type DataprocWorkflowTemplate#accelerator_type} AcceleratorType *string `field:"optional" json:"acceleratorType" yaml:"acceleratorType"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference interface { cdktf.ComplexObject AcceleratorCount() *float64 SetAcceleratorCount(val *float64) AcceleratorCountInput() *float64 AcceleratorType() *string SetAcceleratorType(val *string) AcceleratorTypeInput() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetAcceleratorCount() ResetAcceleratorType() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig struct { // Optional. Size in GB of the boot disk (default is 500GB). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#boot_disk_size_gb DataprocWorkflowTemplate#boot_disk_size_gb} BootDiskSizeGb *float64 `field:"optional" json:"bootDiskSizeGb" yaml:"bootDiskSizeGb"` // Optional. // // Type of the boot disk (default is "pd-standard"). Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), "pd-ssd" (Persistent Disk Solid State Drive), or "pd-standard" (Persistent Disk Hard Disk Drive). See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#boot_disk_type DataprocWorkflowTemplate#boot_disk_type} BootDiskType *string `field:"optional" json:"bootDiskType" yaml:"bootDiskType"` // Optional. // // Number of attached SSDs, from 0 to 4 (default is 0). If SSDs are not attached, the boot disk is used to store runtime logs and [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. If one or more SSDs are attached, this runtime bulk data is spread across them, and the boot disk contains only basic config and installed binaries. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#num_local_ssds DataprocWorkflowTemplate#num_local_ssds} NumLocalSsds *float64 `field:"optional" json:"numLocalSsds" yaml:"numLocalSsds"` }
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference interface { cdktf.ComplexObject BootDiskSizeGb() *float64 SetBootDiskSizeGb(val *float64) BootDiskSizeGbInput() *float64 BootDiskType() *string SetBootDiskType(val *string) BootDiskTypeInput() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig) NumLocalSsds() *float64 SetNumLocalSsds(val *float64) NumLocalSsdsInput() *float64 // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetBootDiskSizeGb() ResetBootDiskType() ResetNumLocalSsds() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig struct { }
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InstanceGroupManagerName() *string InstanceTemplateName() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference interface { cdktf.ComplexObject Accelerators() DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList AcceleratorsInput() interface{} // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DiskConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference DiskConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig // Experimental. Fqn() *string Image() *string SetImage(val *string) ImageInput() *string InstanceNames() *[]*string InternalValue() *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig) IsPreemptible() cdktf.IResolvable MachineType() *string SetMachineType(val *string) MachineTypeInput() *string ManagedGroupConfig() DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList MinCpuPlatform() *string SetMinCpuPlatform(val *string) MinCpuPlatformInput() *string NumInstances() *float64 SetNumInstances(val *float64) NumInstancesInput() *float64 Preemptibility() *string SetPreemptibility(val *string) PreemptibilityInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. 
SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutAccelerators(value interface{}) PutDiskConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig) ResetAccelerators() ResetDiskConfig() ResetImage() ResetMachineType() ResetMinCpuPlatform() ResetNumInstances() ResetPreemptibility() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference
type DataprocWorkflowTemplatePlacementManagedClusterOutputReference ¶
type DataprocWorkflowTemplatePlacementManagedClusterOutputReference interface { cdktf.ComplexObject ClusterName() *string SetClusterName(val *string) ClusterNameInput() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) Config() DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference ConfigInput() *DataprocWorkflowTemplatePlacementManagedClusterConfig // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacementManagedCluster SetInternalValue(val *DataprocWorkflowTemplatePlacementManagedCluster) Labels() *map[string]*string SetLabels(val *map[string]*string) LabelsInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutConfig(value *DataprocWorkflowTemplatePlacementManagedClusterConfig) ResetLabels() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementManagedClusterOutputReference ¶
func NewDataprocWorkflowTemplatePlacementManagedClusterOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementManagedClusterOutputReference
type DataprocWorkflowTemplatePlacementOutputReference ¶
type DataprocWorkflowTemplatePlacementOutputReference interface { cdktf.ComplexObject ClusterSelector() DataprocWorkflowTemplatePlacementClusterSelectorOutputReference ClusterSelectorInput() *DataprocWorkflowTemplatePlacementClusterSelector // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *DataprocWorkflowTemplatePlacement SetInternalValue(val *DataprocWorkflowTemplatePlacement) ManagedCluster() DataprocWorkflowTemplatePlacementManagedClusterOutputReference ManagedClusterInput() *DataprocWorkflowTemplatePlacementManagedCluster // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. 
GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutClusterSelector(value *DataprocWorkflowTemplatePlacementClusterSelector) PutManagedCluster(value *DataprocWorkflowTemplatePlacementManagedCluster) ResetClusterSelector() ResetManagedCluster() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplatePlacementOutputReference ¶
func NewDataprocWorkflowTemplatePlacementOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplatePlacementOutputReference
type DataprocWorkflowTemplateTimeouts ¶
type DataprocWorkflowTemplateTimeouts struct { // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#create DataprocWorkflowTemplate#create}. Create *string `field:"optional" json:"create" yaml:"create"` // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.79.0/docs/resources/dataproc_workflow_template#delete DataprocWorkflowTemplate#delete}. Delete *string `field:"optional" json:"delete" yaml:"delete"` }
type DataprocWorkflowTemplateTimeoutsOutputReference ¶
type DataprocWorkflowTemplateTimeoutsOutputReference interface { cdktf.ComplexObject // The index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) Create() *string SetCreate(val *string) CreateInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string Delete() *string SetDelete(val *string) DeleteInput() *string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable ResetCreate() ResetDelete() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewDataprocWorkflowTemplateTimeoutsOutputReference ¶
func NewDataprocWorkflowTemplateTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocWorkflowTemplateTimeoutsOutputReference
Source Files ¶
- DataprocWorkflowTemplate.go
- DataprocWorkflowTemplateConfig.go
- DataprocWorkflowTemplateJobs.go
- DataprocWorkflowTemplateJobsHadoopJob.go
- DataprocWorkflowTemplateJobsHadoopJobLoggingConfig.go
- DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsHadoopJobOutputReference.go
- DataprocWorkflowTemplateJobsHadoopJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsHiveJob.go
- DataprocWorkflowTemplateJobsHiveJobOutputReference.go
- DataprocWorkflowTemplateJobsHiveJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsHiveJobQueryListStruct.go
- DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference.go
- DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference__checks.go
- DataprocWorkflowTemplateJobsList.go
- DataprocWorkflowTemplateJobsList__checks.go
- DataprocWorkflowTemplateJobsOutputReference.go
- DataprocWorkflowTemplateJobsOutputReference__checks.go
- DataprocWorkflowTemplateJobsPigJob.go
- DataprocWorkflowTemplateJobsPigJobLoggingConfig.go
- DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsPigJobOutputReference.go
- DataprocWorkflowTemplateJobsPigJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsPigJobQueryListStruct.go
- DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference.go
- DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference__checks.go
- DataprocWorkflowTemplateJobsPrestoJob.go
- DataprocWorkflowTemplateJobsPrestoJobLoggingConfig.go
- DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsPrestoJobOutputReference.go
- DataprocWorkflowTemplateJobsPrestoJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsPrestoJobQueryListStruct.go
- DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference.go
- DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference__checks.go
- DataprocWorkflowTemplateJobsPysparkJob.go
- DataprocWorkflowTemplateJobsPysparkJobLoggingConfig.go
- DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsPysparkJobOutputReference.go
- DataprocWorkflowTemplateJobsPysparkJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsScheduling.go
- DataprocWorkflowTemplateJobsSchedulingOutputReference.go
- DataprocWorkflowTemplateJobsSchedulingOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkJob.go
- DataprocWorkflowTemplateJobsSparkJobLoggingConfig.go
- DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkJobOutputReference.go
- DataprocWorkflowTemplateJobsSparkJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkRJob.go
- DataprocWorkflowTemplateJobsSparkRJobLoggingConfig.go
- DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsSparkRJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkRJobOutputReference.go
- DataprocWorkflowTemplateJobsSparkRJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkSqlJob.go
- DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfig.go
- DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference.go
- DataprocWorkflowTemplateJobsSparkSqlJobLoggingConfigOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkSqlJobOutputReference.go
- DataprocWorkflowTemplateJobsSparkSqlJobOutputReference__checks.go
- DataprocWorkflowTemplateJobsSparkSqlJobQueryListStruct.go
- DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference.go
- DataprocWorkflowTemplateJobsSparkSqlJobQueryListStructOutputReference__checks.go
- DataprocWorkflowTemplateParameters.go
- DataprocWorkflowTemplateParametersList.go
- DataprocWorkflowTemplateParametersList__checks.go
- DataprocWorkflowTemplateParametersOutputReference.go
- DataprocWorkflowTemplateParametersOutputReference__checks.go
- DataprocWorkflowTemplateParametersValidation.go
- DataprocWorkflowTemplateParametersValidationOutputReference.go
- DataprocWorkflowTemplateParametersValidationOutputReference__checks.go
- DataprocWorkflowTemplateParametersValidationRegex.go
- DataprocWorkflowTemplateParametersValidationRegexOutputReference.go
- DataprocWorkflowTemplateParametersValidationRegexOutputReference__checks.go
- DataprocWorkflowTemplateParametersValidationValues.go
- DataprocWorkflowTemplateParametersValidationValuesOutputReference.go
- DataprocWorkflowTemplateParametersValidationValuesOutputReference__checks.go
- DataprocWorkflowTemplatePlacement.go
- DataprocWorkflowTemplatePlacementClusterSelector.go
- DataprocWorkflowTemplatePlacementClusterSelectorOutputReference.go
- DataprocWorkflowTemplatePlacementClusterSelectorOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedCluster.go
- DataprocWorkflowTemplatePlacementManagedClusterConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigAutoscalingConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigEncryptionConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigEndpointConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinity.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigNodeGroupAffinityOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinity.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigReservationAffinityOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigGceClusterConfigShieldedInstanceConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActions.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigInitializationActionsOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigLifecycleConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAccelerators.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigAcceleratorsOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigDiskConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigManagedGroupConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigMasterConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAccelerators.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigAcceleratorsOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigDiskConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigManagedGroupConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecondaryWorkerConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigKerberosConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSecurityConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigSoftwareConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAccelerators.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigAcceleratorsOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigDiskConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfig.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigList__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigManagedGroupConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterConfigWorkerConfigOutputReference__checks.go
- DataprocWorkflowTemplatePlacementManagedClusterOutputReference.go
- DataprocWorkflowTemplatePlacementManagedClusterOutputReference__checks.go
- DataprocWorkflowTemplatePlacementOutputReference.go
- DataprocWorkflowTemplatePlacementOutputReference__checks.go
- DataprocWorkflowTemplateTimeouts.go
- DataprocWorkflowTemplateTimeoutsOutputReference.go
- DataprocWorkflowTemplateTimeoutsOutputReference__checks.go
- DataprocWorkflowTemplate__checks.go
- main.go