Documentation ¶
Index ¶
- func DataprocJob_IsConstruct(x interface{}) *bool
- func DataprocJob_IsTerraformElement(x interface{}) *bool
- func DataprocJob_IsTerraformResource(x interface{}) *bool
- func DataprocJob_TfResourceType() *string
- func NewDataprocJobHadoopConfigLoggingConfigOutputReference_Override(d DataprocJobHadoopConfigLoggingConfigOutputReference, ...)
- func NewDataprocJobHadoopConfigOutputReference_Override(d DataprocJobHadoopConfigOutputReference, ...)
- func NewDataprocJobHiveConfigOutputReference_Override(d DataprocJobHiveConfigOutputReference, ...)
- func NewDataprocJobPigConfigLoggingConfigOutputReference_Override(d DataprocJobPigConfigLoggingConfigOutputReference, ...)
- func NewDataprocJobPigConfigOutputReference_Override(d DataprocJobPigConfigOutputReference, ...)
- func NewDataprocJobPlacementOutputReference_Override(d DataprocJobPlacementOutputReference, ...)
- func NewDataprocJobPrestoConfigLoggingConfigOutputReference_Override(d DataprocJobPrestoConfigLoggingConfigOutputReference, ...)
- func NewDataprocJobPrestoConfigOutputReference_Override(d DataprocJobPrestoConfigOutputReference, ...)
- func NewDataprocJobPysparkConfigLoggingConfigOutputReference_Override(d DataprocJobPysparkConfigLoggingConfigOutputReference, ...)
- func NewDataprocJobPysparkConfigOutputReference_Override(d DataprocJobPysparkConfigOutputReference, ...)
- func NewDataprocJobReferenceOutputReference_Override(d DataprocJobReferenceOutputReference, ...)
- func NewDataprocJobSchedulingOutputReference_Override(d DataprocJobSchedulingOutputReference, ...)
- func NewDataprocJobSparkConfigLoggingConfigOutputReference_Override(d DataprocJobSparkConfigLoggingConfigOutputReference, ...)
- func NewDataprocJobSparkConfigOutputReference_Override(d DataprocJobSparkConfigOutputReference, ...)
- func NewDataprocJobSparksqlConfigLoggingConfigOutputReference_Override(d DataprocJobSparksqlConfigLoggingConfigOutputReference, ...)
- func NewDataprocJobSparksqlConfigOutputReference_Override(d DataprocJobSparksqlConfigOutputReference, ...)
- func NewDataprocJobStatusList_Override(d DataprocJobStatusList, terraformResource cdktf.IInterpolatingParent, ...)
- func NewDataprocJobStatusOutputReference_Override(d DataprocJobStatusOutputReference, ...)
- func NewDataprocJobTimeoutsOutputReference_Override(d DataprocJobTimeoutsOutputReference, ...)
- func NewDataprocJob_Override(d DataprocJob, scope constructs.Construct, id *string, ...)
- type DataprocJob
- type DataprocJobConfig
- type DataprocJobHadoopConfig
- type DataprocJobHadoopConfigLoggingConfig
- type DataprocJobHadoopConfigLoggingConfigOutputReference
- type DataprocJobHadoopConfigOutputReference
- type DataprocJobHiveConfig
- type DataprocJobHiveConfigOutputReference
- type DataprocJobPigConfig
- type DataprocJobPigConfigLoggingConfig
- type DataprocJobPigConfigLoggingConfigOutputReference
- type DataprocJobPigConfigOutputReference
- type DataprocJobPlacement
- type DataprocJobPlacementOutputReference
- type DataprocJobPrestoConfig
- type DataprocJobPrestoConfigLoggingConfig
- type DataprocJobPrestoConfigLoggingConfigOutputReference
- type DataprocJobPrestoConfigOutputReference
- type DataprocJobPysparkConfig
- type DataprocJobPysparkConfigLoggingConfig
- type DataprocJobPysparkConfigLoggingConfigOutputReference
- type DataprocJobPysparkConfigOutputReference
- type DataprocJobReference
- type DataprocJobReferenceOutputReference
- type DataprocJobScheduling
- type DataprocJobSchedulingOutputReference
- type DataprocJobSparkConfig
- type DataprocJobSparkConfigLoggingConfig
- type DataprocJobSparkConfigLoggingConfigOutputReference
- type DataprocJobSparkConfigOutputReference
- type DataprocJobSparksqlConfig
- type DataprocJobSparksqlConfigLoggingConfig
- type DataprocJobSparksqlConfigLoggingConfigOutputReference
- type DataprocJobSparksqlConfigOutputReference
- type DataprocJobStatus
- type DataprocJobStatusList
- type DataprocJobStatusOutputReference
- type DataprocJobTimeouts
- type DataprocJobTimeoutsOutputReference
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func DataprocJob_IsConstruct ¶
func DataprocJob_IsConstruct(x interface{}) *bool
Checks if `x` is a construct.
Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.
Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof` and to use this type-testing method instead.
Returns: true if `x` is an object created from a class which extends `Construct`.
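A minimal sketch of the check from Go, assuming the package is imported as `dataprocjob`; the value `x` is illustrative:

// x is an arbitrary value, e.g. returned from an untyped jsii call.
if ok := dataprocjob.DataprocJob_IsConstruct(x); ok != nil && *ok {
    // x was created from a class that extends Construct.
}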
func DataprocJob_IsTerraformElement ¶
func DataprocJob_IsTerraformElement(x interface{}) *bool
Experimental.
func DataprocJob_IsTerraformResource ¶
func DataprocJob_IsTerraformResource(x interface{}) *bool
Experimental.
func DataprocJob_TfResourceType ¶
func DataprocJob_TfResourceType() *string
func NewDataprocJobHadoopConfigLoggingConfigOutputReference_Override ¶
func NewDataprocJobHadoopConfigLoggingConfigOutputReference_Override(d DataprocJobHadoopConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobHadoopConfigOutputReference_Override ¶
func NewDataprocJobHadoopConfigOutputReference_Override(d DataprocJobHadoopConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobHiveConfigOutputReference_Override ¶
func NewDataprocJobHiveConfigOutputReference_Override(d DataprocJobHiveConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPigConfigLoggingConfigOutputReference_Override ¶
func NewDataprocJobPigConfigLoggingConfigOutputReference_Override(d DataprocJobPigConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPigConfigOutputReference_Override ¶
func NewDataprocJobPigConfigOutputReference_Override(d DataprocJobPigConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPlacementOutputReference_Override ¶
func NewDataprocJobPlacementOutputReference_Override(d DataprocJobPlacementOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPrestoConfigLoggingConfigOutputReference_Override ¶
func NewDataprocJobPrestoConfigLoggingConfigOutputReference_Override(d DataprocJobPrestoConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPrestoConfigOutputReference_Override ¶
func NewDataprocJobPrestoConfigOutputReference_Override(d DataprocJobPrestoConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPysparkConfigLoggingConfigOutputReference_Override ¶
func NewDataprocJobPysparkConfigLoggingConfigOutputReference_Override(d DataprocJobPysparkConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobPysparkConfigOutputReference_Override ¶
func NewDataprocJobPysparkConfigOutputReference_Override(d DataprocJobPysparkConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobReferenceOutputReference_Override ¶
func NewDataprocJobReferenceOutputReference_Override(d DataprocJobReferenceOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobSchedulingOutputReference_Override ¶
func NewDataprocJobSchedulingOutputReference_Override(d DataprocJobSchedulingOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobSparkConfigLoggingConfigOutputReference_Override ¶
func NewDataprocJobSparkConfigLoggingConfigOutputReference_Override(d DataprocJobSparkConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobSparkConfigOutputReference_Override ¶
func NewDataprocJobSparkConfigOutputReference_Override(d DataprocJobSparkConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobSparksqlConfigLoggingConfigOutputReference_Override ¶
func NewDataprocJobSparksqlConfigLoggingConfigOutputReference_Override(d DataprocJobSparksqlConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobSparksqlConfigOutputReference_Override ¶
func NewDataprocJobSparksqlConfigOutputReference_Override(d DataprocJobSparksqlConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJobStatusList_Override ¶
func NewDataprocJobStatusList_Override(d DataprocJobStatusList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewDataprocJobStatusOutputReference_Override ¶
func NewDataprocJobStatusOutputReference_Override(d DataprocJobStatusOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewDataprocJobTimeoutsOutputReference_Override ¶
func NewDataprocJobTimeoutsOutputReference_Override(d DataprocJobTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewDataprocJob_Override ¶
func NewDataprocJob_Override(d DataprocJob, scope constructs.Construct, id *string, config *DataprocJobConfig)
Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job google_dataproc_job} Resource.
Types ¶
type DataprocJob ¶
type DataprocJob interface {
    cdktf.TerraformResource
    // Experimental.
    CdktfStack() cdktf.TerraformStack
    // Experimental.
    Connection() interface{}
    // Experimental.
    SetConnection(val interface{})
    // Experimental.
    ConstructNodeMetadata() *map[string]interface{}
    // Experimental.
    Count() interface{}
    // Experimental.
    SetCount(val interface{})
    // Experimental.
    DependsOn() *[]*string
    // Experimental.
    SetDependsOn(val *[]*string)
    DriverControlsFilesUri() *string
    DriverOutputResourceUri() *string
    ForceDelete() interface{}
    SetForceDelete(val interface{})
    ForceDeleteInput() interface{}
    // Experimental.
    ForEach() cdktf.ITerraformIterator
    // Experimental.
    SetForEach(val cdktf.ITerraformIterator)
    // Experimental.
    Fqn() *string
    // Experimental.
    FriendlyUniqueId() *string
    HadoopConfig() DataprocJobHadoopConfigOutputReference
    HadoopConfigInput() *DataprocJobHadoopConfig
    HiveConfig() DataprocJobHiveConfigOutputReference
    HiveConfigInput() *DataprocJobHiveConfig
    Id() *string
    SetId(val *string)
    IdInput() *string
    Labels() *map[string]*string
    SetLabels(val *map[string]*string)
    LabelsInput() *map[string]*string
    // Experimental.
    Lifecycle() *cdktf.TerraformResourceLifecycle
    // Experimental.
    SetLifecycle(val *cdktf.TerraformResourceLifecycle)
    // The tree node.
    Node() constructs.Node
    PigConfig() DataprocJobPigConfigOutputReference
    PigConfigInput() *DataprocJobPigConfig
    Placement() DataprocJobPlacementOutputReference
    PlacementInput() *DataprocJobPlacement
    PrestoConfig() DataprocJobPrestoConfigOutputReference
    PrestoConfigInput() *DataprocJobPrestoConfig
    Project() *string
    SetProject(val *string)
    ProjectInput() *string
    // Experimental.
    Provider() cdktf.TerraformProvider
    // Experimental.
    SetProvider(val cdktf.TerraformProvider)
    // Experimental.
    Provisioners() *[]interface{}
    // Experimental.
    SetProvisioners(val *[]interface{})
    PysparkConfig() DataprocJobPysparkConfigOutputReference
    PysparkConfigInput() *DataprocJobPysparkConfig
    // Experimental.
    RawOverrides() interface{}
    Reference() DataprocJobReferenceOutputReference
    ReferenceInput() *DataprocJobReference
    Region() *string
    SetRegion(val *string)
    RegionInput() *string
    Scheduling() DataprocJobSchedulingOutputReference
    SchedulingInput() *DataprocJobScheduling
    SparkConfig() DataprocJobSparkConfigOutputReference
    SparkConfigInput() *DataprocJobSparkConfig
    SparksqlConfig() DataprocJobSparksqlConfigOutputReference
    SparksqlConfigInput() *DataprocJobSparksqlConfig
    Status() DataprocJobStatusList
    // Experimental.
    TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata
    // Experimental.
    TerraformMetaArguments() *map[string]interface{}
    // Experimental.
    TerraformResourceType() *string
    Timeouts() DataprocJobTimeoutsOutputReference
    TimeoutsInput() interface{}
    // Experimental.
    AddOverride(path *string, value interface{})
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable
    // Overrides the auto-generated logical ID with a specific ID.
    // Experimental.
    OverrideLogicalId(newLogicalId *string)
    PutHadoopConfig(value *DataprocJobHadoopConfig)
    PutHiveConfig(value *DataprocJobHiveConfig)
    PutPigConfig(value *DataprocJobPigConfig)
    PutPlacement(value *DataprocJobPlacement)
    PutPrestoConfig(value *DataprocJobPrestoConfig)
    PutPysparkConfig(value *DataprocJobPysparkConfig)
    PutReference(value *DataprocJobReference)
    PutScheduling(value *DataprocJobScheduling)
    PutSparkConfig(value *DataprocJobSparkConfig)
    PutSparksqlConfig(value *DataprocJobSparksqlConfig)
    PutTimeouts(value *DataprocJobTimeouts)
    ResetForceDelete()
    ResetHadoopConfig()
    ResetHiveConfig()
    ResetId()
    ResetLabels()
    // Resets a previously passed logical Id to use the auto-generated logical id again.
    // Experimental.
    ResetOverrideLogicalId()
    ResetPigConfig()
    ResetPrestoConfig()
    ResetProject()
    ResetPysparkConfig()
    ResetReference()
    ResetRegion()
    ResetScheduling()
    ResetSparkConfig()
    ResetSparksqlConfig()
    ResetTimeouts()
    SynthesizeAttributes() *map[string]interface{}
    // Experimental.
    ToMetadata() interface{}
    // Returns a string representation of this construct.
    ToString() *string
    // Adds this resource to the terraform JSON output.
    // Experimental.
    ToTerraform() interface{}
}
Represents a {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job google_dataproc_job}.
func NewDataprocJob ¶
func NewDataprocJob(scope constructs.Construct, id *string, config *DataprocJobConfig) DataprocJob
Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job google_dataproc_job} Resource.
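A minimal sketch of wiring the resource into a CDKTF stack. The module import path, region, and cluster name are assumptions; per DataprocJobConfig below, placement is the only required block.

package main

import (
    "github.com/aws/constructs-go/constructs/v10"
    "github.com/aws/jsii-runtime-go"
    "github.com/hashicorp/terraform-cdk-go/cdktf"

    // Assumed import path; match the major version pinned in your go.mod.
    "github.com/cdktf/cdktf-provider-google-go/google/v9/dataprocjob"
)

func newJobStack(scope constructs.Construct, id string) cdktf.TerraformStack {
    stack := cdktf.NewTerraformStack(scope, &id)

    dataprocjob.NewDataprocJob(stack, jsii.String("job"), &dataprocjob.DataprocJobConfig{
        Region: jsii.String("us-central1"), // hypothetical region
        // placement is the only required block.
        Placement: &dataprocjob.DataprocJobPlacement{
            ClusterName: jsii.String("my-cluster"), // hypothetical cluster
        },
        // Cancel a still-running job before destroy deletes it.
        ForceDelete: true,
        Labels:      &map[string]*string{"env": jsii.String("dev")},
    })

    return stack
}

One of the job-type blocks (hadoop_config, hive_config, pyspark_config, and so on) would normally be set as well; sketches of several of them follow their struct definitions below.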
type DataprocJobConfig ¶
type DataprocJobConfig struct {
    // Experimental.
    Connection interface{} `field:"optional" json:"connection" yaml:"connection"`
    // Experimental.
    Count interface{} `field:"optional" json:"count" yaml:"count"`
    // Experimental.
    DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"`
    // Experimental.
    ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"`
    // Experimental.
    Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"`
    // Experimental.
    Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"`
    // Experimental.
    Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"`
    // placement block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#placement DataprocJob#placement}
    Placement *DataprocJobPlacement `field:"required" json:"placement" yaml:"placement"`
    // By default, you can only delete inactive jobs within Dataproc.
    //
    // Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#force_delete DataprocJob#force_delete}
    ForceDelete interface{} `field:"optional" json:"forceDelete" yaml:"forceDelete"`
    // hadoop_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#hadoop_config DataprocJob#hadoop_config}
    HadoopConfig *DataprocJobHadoopConfig `field:"optional" json:"hadoopConfig" yaml:"hadoopConfig"`
    // hive_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#hive_config DataprocJob#hive_config}
    HiveConfig *DataprocJobHiveConfig `field:"optional" json:"hiveConfig" yaml:"hiveConfig"`
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#id DataprocJob#id}.
    //
    // Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
    // If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
    Id *string `field:"optional" json:"id" yaml:"id"`
    // Optional. The labels to associate with this job.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#labels DataprocJob#labels}
    Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"`
    // pig_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#pig_config DataprocJob#pig_config}
    PigConfig *DataprocJobPigConfig `field:"optional" json:"pigConfig" yaml:"pigConfig"`
    // presto_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#presto_config DataprocJob#presto_config}
    PrestoConfig *DataprocJobPrestoConfig `field:"optional" json:"prestoConfig" yaml:"prestoConfig"`
    // The project in which the cluster can be found and jobs subsequently run against.
    //
    // If it is not provided, the provider project is used.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#project DataprocJob#project}
    Project *string `field:"optional" json:"project" yaml:"project"`
    // pyspark_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#pyspark_config DataprocJob#pyspark_config}
    PysparkConfig *DataprocJobPysparkConfig `field:"optional" json:"pysparkConfig" yaml:"pysparkConfig"`
    // reference block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#reference DataprocJob#reference}
    Reference *DataprocJobReference `field:"optional" json:"reference" yaml:"reference"`
    // The Cloud Dataproc region.
    //
    // This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#region DataprocJob#region}
    Region *string `field:"optional" json:"region" yaml:"region"`
    // scheduling block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#scheduling DataprocJob#scheduling}
    Scheduling *DataprocJobScheduling `field:"optional" json:"scheduling" yaml:"scheduling"`
    // spark_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#spark_config DataprocJob#spark_config}
    SparkConfig *DataprocJobSparkConfig `field:"optional" json:"sparkConfig" yaml:"sparkConfig"`
    // sparksql_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#sparksql_config DataprocJob#sparksql_config}
    SparksqlConfig *DataprocJobSparksqlConfig `field:"optional" json:"sparksqlConfig" yaml:"sparksqlConfig"`
    // timeouts block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#timeouts DataprocJob#timeouts}
    Timeouts *DataprocJobTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"`
}
type DataprocJobHadoopConfig ¶
type DataprocJobHadoopConfig struct {
    // HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#archive_uris DataprocJob#archive_uris}
    ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"`
    // The arguments to pass to the driver.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#args DataprocJob#args}
    Args *[]*string `field:"optional" json:"args" yaml:"args"`
    // HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks.
    //
    // Useful for naively parallel tasks.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#file_uris DataprocJob#file_uris}
    FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"`
    // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
    JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
    // logging_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
    LoggingConfig *DataprocJobHadoopConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
    // The class containing the main method of the driver.
    //
    // Must be in a provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#main_class DataprocJob#main_class}
    MainClass *string `field:"optional" json:"mainClass" yaml:"mainClass"`
    // The HCFS URI of the jar file containing the driver jar. Conflicts with main_class.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#main_jar_file_uri DataprocJob#main_jar_file_uri}
    MainJarFileUri *string `field:"optional" json:"mainJarFileUri" yaml:"mainJarFileUri"`
    // A mapping of property names to values, used to configure Spark.
    //
    // Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
    Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
}
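As a sketch (reusing the imports from the stack example above), a DataprocJobHadoopConfig literal that could be passed as the HadoopConfig field of DataprocJobConfig or to PutHadoopConfig; the jar URI, arguments, and property are placeholders:

hadoopConfig := &dataprocjob.DataprocJobHadoopConfig{
    // main_jar_file_uri conflicts with main_class; set exactly one of them.
    MainJarFileUri: jsii.String("file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar"),
    Args:           jsii.Strings("wordcount", "gs://my-bucket/input", "gs://my-bucket/output"),
    Properties:     &map[string]*string{"mapreduce.job.maps": jsii.String("8")},
    LoggingConfig: &dataprocjob.DataprocJobHadoopConfigLoggingConfig{
        DriverLogLevels: &map[string]*string{"root": jsii.String("INFO")},
    },
}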
type DataprocJobHadoopConfigLoggingConfig ¶
type DataprocJobHadoopConfigLoggingConfig struct {
    // Optional.
    //
    // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
    DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocJobHadoopConfigLoggingConfigOutputReference ¶
type DataprocJobHadoopConfigLoggingConfigOutputReference interface {
    cdktf.ComplexObject
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    DriverLogLevels() *map[string]*string
    SetDriverLogLevels(val *map[string]*string)
    DriverLogLevelsInput() *map[string]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobHadoopConfigLoggingConfig
    SetInternalValue(val *DataprocJobHadoopConfigLoggingConfig)
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobHadoopConfigLoggingConfigOutputReference ¶
func NewDataprocJobHadoopConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobHadoopConfigLoggingConfigOutputReference
type DataprocJobHadoopConfigOutputReference ¶
type DataprocJobHadoopConfigOutputReference interface {
    cdktf.ComplexObject
    ArchiveUris() *[]*string
    SetArchiveUris(val *[]*string)
    ArchiveUrisInput() *[]*string
    Args() *[]*string
    SetArgs(val *[]*string)
    ArgsInput() *[]*string
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    FileUris() *[]*string
    SetFileUris(val *[]*string)
    FileUrisInput() *[]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobHadoopConfig
    SetInternalValue(val *DataprocJobHadoopConfig)
    JarFileUris() *[]*string
    SetJarFileUris(val *[]*string)
    JarFileUrisInput() *[]*string
    LoggingConfig() DataprocJobHadoopConfigLoggingConfigOutputReference
    LoggingConfigInput() *DataprocJobHadoopConfigLoggingConfig
    MainClass() *string
    SetMainClass(val *string)
    MainClassInput() *string
    MainJarFileUri() *string
    SetMainJarFileUri(val *string)
    MainJarFileUriInput() *string
    Properties() *map[string]*string
    SetProperties(val *map[string]*string)
    PropertiesInput() *map[string]*string
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    PutLoggingConfig(value *DataprocJobHadoopConfigLoggingConfig)
    ResetArchiveUris()
    ResetArgs()
    ResetFileUris()
    ResetJarFileUris()
    ResetLoggingConfig()
    ResetMainClass()
    ResetMainJarFileUri()
    ResetProperties()
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobHadoopConfigOutputReference ¶
func NewDataprocJobHadoopConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobHadoopConfigOutputReference
type DataprocJobHiveConfig ¶
type DataprocJobHiveConfig struct {
    // Whether to continue executing queries if a query fails.
    //
    // The default value is false. Setting to true can be useful when executing independent parallel queries.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#continue_on_failure DataprocJob#continue_on_failure}
    ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"`
    // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.
    //
    // Can contain Hive SerDes and UDFs.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
    JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
    // A mapping of property names and values, used to configure Hive.
    //
    // Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
    Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
    // HCFS URI of the file containing the Hive script to execute as the job. Conflicts with query_list.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
    QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
    // The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
    QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"`
    // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#script_variables DataprocJob#script_variables}
    ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"`
}
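A sketch of an inline Hive job under the same assumptions as the earlier examples; query_list conflicts with query_file_uri, so only one is set, and the statements are placeholders:

hiveConfig := &dataprocjob.DataprocJobHiveConfig{
    QueryList: jsii.Strings(
        "CREATE TABLE IF NOT EXISTS logs (line STRING)",
        "SELECT count(*) FROM logs",
    ),
    // Equivalent to the Hive command: SET db="default";
    ScriptVariables:   &map[string]*string{"db": jsii.String("default")},
    ContinueOnFailure: false,
}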
type DataprocJobHiveConfigOutputReference ¶
type DataprocJobHiveConfigOutputReference interface {
    cdktf.ComplexObject
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    ContinueOnFailure() interface{}
    SetContinueOnFailure(val interface{})
    ContinueOnFailureInput() interface{}
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobHiveConfig
    SetInternalValue(val *DataprocJobHiveConfig)
    JarFileUris() *[]*string
    SetJarFileUris(val *[]*string)
    JarFileUrisInput() *[]*string
    Properties() *map[string]*string
    SetProperties(val *map[string]*string)
    PropertiesInput() *map[string]*string
    QueryFileUri() *string
    SetQueryFileUri(val *string)
    QueryFileUriInput() *string
    QueryList() *[]*string
    SetQueryList(val *[]*string)
    QueryListInput() *[]*string
    ScriptVariables() *map[string]*string
    SetScriptVariables(val *map[string]*string)
    ScriptVariablesInput() *map[string]*string
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    ResetContinueOnFailure()
    ResetJarFileUris()
    ResetProperties()
    ResetQueryFileUri()
    ResetQueryList()
    ResetScriptVariables()
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobHiveConfigOutputReference ¶
func NewDataprocJobHiveConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobHiveConfigOutputReference
type DataprocJobPigConfig ¶
type DataprocJobPigConfig struct {
    // Whether to continue executing queries if a query fails.
    //
    // The default value is false. Setting to true can be useful when executing independent parallel queries.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#continue_on_failure DataprocJob#continue_on_failure}
    ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"`
    // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.
    //
    // Can contain Pig UDFs.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
    JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
    // logging_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
    LoggingConfig *DataprocJobPigConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
    // A mapping of property names to values, used to configure Pig.
    //
    // Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
    Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
    // HCFS URI of the file containing the Pig script to execute as the job. Conflicts with query_list.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
    QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
    // The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
    QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"`
    // Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#script_variables DataprocJob#script_variables}
    ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"`
}
type DataprocJobPigConfigLoggingConfig ¶
type DataprocJobPigConfigLoggingConfig struct {
    // Optional.
    //
    // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
    DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocJobPigConfigLoggingConfigOutputReference ¶
type DataprocJobPigConfigLoggingConfigOutputReference interface {
    cdktf.ComplexObject
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    DriverLogLevels() *map[string]*string
    SetDriverLogLevels(val *map[string]*string)
    DriverLogLevelsInput() *map[string]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobPigConfigLoggingConfig
    SetInternalValue(val *DataprocJobPigConfigLoggingConfig)
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobPigConfigLoggingConfigOutputReference ¶
func NewDataprocJobPigConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPigConfigLoggingConfigOutputReference
type DataprocJobPigConfigOutputReference ¶
type DataprocJobPigConfigOutputReference interface {
    cdktf.ComplexObject
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    ContinueOnFailure() interface{}
    SetContinueOnFailure(val interface{})
    ContinueOnFailureInput() interface{}
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobPigConfig
    SetInternalValue(val *DataprocJobPigConfig)
    JarFileUris() *[]*string
    SetJarFileUris(val *[]*string)
    JarFileUrisInput() *[]*string
    LoggingConfig() DataprocJobPigConfigLoggingConfigOutputReference
    LoggingConfigInput() *DataprocJobPigConfigLoggingConfig
    Properties() *map[string]*string
    SetProperties(val *map[string]*string)
    PropertiesInput() *map[string]*string
    QueryFileUri() *string
    SetQueryFileUri(val *string)
    QueryFileUriInput() *string
    QueryList() *[]*string
    SetQueryList(val *[]*string)
    QueryListInput() *[]*string
    ScriptVariables() *map[string]*string
    SetScriptVariables(val *map[string]*string)
    ScriptVariablesInput() *map[string]*string
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    PutLoggingConfig(value *DataprocJobPigConfigLoggingConfig)
    ResetContinueOnFailure()
    ResetJarFileUris()
    ResetLoggingConfig()
    ResetProperties()
    ResetQueryFileUri()
    ResetQueryList()
    ResetScriptVariables()
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobPigConfigOutputReference ¶
func NewDataprocJobPigConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPigConfigOutputReference
type DataprocJobPlacement ¶
type DataprocJobPlacement struct {
    // The name of the cluster where the job will be submitted.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#cluster_name DataprocJob#cluster_name}
    ClusterName *string `field:"required" json:"clusterName" yaml:"clusterName"`
}
type DataprocJobPlacementOutputReference ¶
type DataprocJobPlacementOutputReference interface {
    cdktf.ComplexObject
    ClusterName() *string
    SetClusterName(val *string)
    ClusterNameInput() *string
    ClusterUuid() *string
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobPlacement
    SetInternalValue(val *DataprocJobPlacement)
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobPlacementOutputReference ¶
func NewDataprocJobPlacementOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPlacementOutputReference
type DataprocJobPrestoConfig ¶
type DataprocJobPrestoConfig struct {
    // Presto client tags to attach to this query.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#client_tags DataprocJob#client_tags}
    ClientTags *[]*string `field:"optional" json:"clientTags" yaml:"clientTags"`
    // Whether to continue executing queries if a query fails.
    //
    // Setting to true can be useful when executing independent parallel queries. Defaults to false.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#continue_on_failure DataprocJob#continue_on_failure}
    ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"`
    // logging_config block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
    LoggingConfig *DataprocJobPrestoConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
    // The format in which query output will be displayed. See the Presto documentation for supported output formats.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#output_format DataprocJob#output_format}
    OutputFormat *string `field:"optional" json:"outputFormat" yaml:"outputFormat"`
    // A mapping of property names to values.
    //
    // Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
    Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
    // The HCFS URI of the script that contains SQL queries. Conflicts with query_list.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
    QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
    // The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
    QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"`
}
type DataprocJobPrestoConfigLoggingConfig ¶
type DataprocJobPrestoConfigLoggingConfig struct {
    // Optional.
    //
    // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
    DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocJobPrestoConfigLoggingConfigOutputReference ¶
type DataprocJobPrestoConfigLoggingConfigOutputReference interface {
    cdktf.ComplexObject
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    DriverLogLevels() *map[string]*string
    SetDriverLogLevels(val *map[string]*string)
    DriverLogLevelsInput() *map[string]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobPrestoConfigLoggingConfig
    SetInternalValue(val *DataprocJobPrestoConfigLoggingConfig)
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobPrestoConfigLoggingConfigOutputReference ¶
func NewDataprocJobPrestoConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPrestoConfigLoggingConfigOutputReference
type DataprocJobPrestoConfigOutputReference ¶
type DataprocJobPrestoConfigOutputReference interface {
    cdktf.ComplexObject
    ClientTags() *[]*string
    SetClientTags(val *[]*string)
    ClientTagsInput() *[]*string
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    ContinueOnFailure() interface{}
    SetContinueOnFailure(val interface{})
    ContinueOnFailureInput() interface{}
    // The creation stack of this resolvable which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    // Experimental.
    Fqn() *string
    InternalValue() *DataprocJobPrestoConfig
    SetInternalValue(val *DataprocJobPrestoConfig)
    LoggingConfig() DataprocJobPrestoConfigLoggingConfigOutputReference
    LoggingConfigInput() *DataprocJobPrestoConfigLoggingConfig
    OutputFormat() *string
    SetOutputFormat(val *string)
    OutputFormatInput() *string
    Properties() *map[string]*string
    SetProperties(val *map[string]*string)
    PropertiesInput() *map[string]*string
    QueryFileUri() *string
    SetQueryFileUri(val *string)
    QueryFileUriInput() *string
    QueryList() *[]*string
    SetQueryList(val *[]*string)
    QueryListInput() *[]*string
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    PutLoggingConfig(value *DataprocJobPrestoConfigLoggingConfig)
    ResetClientTags()
    ResetContinueOnFailure()
    ResetLoggingConfig()
    ResetOutputFormat()
    ResetProperties()
    ResetQueryFileUri()
    ResetQueryList()
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
func NewDataprocJobPrestoConfigOutputReference ¶
func NewDataprocJobPrestoConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPrestoConfigOutputReference
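A hedged sketch of working with this output reference follows; it assumes a DataprocJob resource named job already exists in the stack and exposes a PrestoConfig() getter (as the generated resources in this package typically do), and that jsii.String is the helper from github.com/aws/jsii-runtime-go:

	// Hypothetical illustration; job is an existing DataprocJob.
	prestoConfig := job.PrestoConfig()

	// Replace the nested logging_config block in one call.
	prestoConfig.PutLoggingConfig(&dataprocjob.DataprocJobPrestoConfigLoggingConfig{
		DriverLogLevels: &map[string]*string{
			"root": jsii.String("INFO"),
		},
	})

	// Unset an optional attribute so the provider default applies.
	prestoConfig.ResetClientTags()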
type DataprocJobPysparkConfig ¶
type DataprocJobPysparkConfig struct {
	// Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#main_python_file_uri DataprocJob#main_python_file_uri}
	MainPythonFileUri *string `field:"required" json:"mainPythonFileUri" yaml:"mainPythonFileUri"`
	// Optional. HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#archive_uris DataprocJob#archive_uris}
	ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"`
	// Optional.
	//
	// The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#args DataprocJob#args}
	Args *[]*string `field:"optional" json:"args" yaml:"args"`
	// Optional.
	//
	// HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#file_uris DataprocJob#file_uris}
	FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
	LoggingConfig *DataprocJobPysparkConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// Optional.
	//
	// A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// Optional.
	//
	// HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#python_file_uris DataprocJob#python_file_uris}
	PythonFileUris *[]*string `field:"optional" json:"pythonFileUris" yaml:"pythonFileUris"`
}
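As an illustrative sketch (the dataprocjob package qualifier and the gs:// paths are placeholders; jsii.String is the github.com/aws/jsii-runtime-go helper), a minimal config might look like:

	pysparkConfig := &dataprocjob.DataprocJobPysparkConfig{
		// MainPythonFileUri is the only required field.
		MainPythonFileUri: jsii.String("gs://my-bucket/jobs/wordcount.py"),
		Args: &[]*string{
			jsii.String("--input=gs://my-bucket/data/"),
		},
		Properties: &map[string]*string{
			"spark.executor.memory": jsii.String("4g"),
		},
	}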
type DataprocJobPysparkConfigLoggingConfig ¶
type DataprocJobPysparkConfigLoggingConfig struct {
	// Optional.
	//
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
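Keys are package names ('root' for the root logger) and values are log levels, mirroring the examples in the field comment; a sketch:

	loggingConfig := &dataprocjob.DataprocJobPysparkConfigLoggingConfig{
		DriverLogLevels: &map[string]*string{
			"root":       jsii.String("INFO"),
			"org.apache": jsii.String("DEBUG"),
			"com.google": jsii.String("FATAL"),
		},
	}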
type DataprocJobPysparkConfigLoggingConfigOutputReference ¶
type DataprocJobPysparkConfigLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobPysparkConfigLoggingConfig
	SetInternalValue(val *DataprocJobPysparkConfigLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobPysparkConfigLoggingConfigOutputReference ¶
func NewDataprocJobPysparkConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPysparkConfigLoggingConfigOutputReference
type DataprocJobPysparkConfigOutputReference ¶
type DataprocJobPysparkConfigOutputReference interface {
	cdktf.ComplexObject
	ArchiveUris() *[]*string
	SetArchiveUris(val *[]*string)
	ArchiveUrisInput() *[]*string
	Args() *[]*string
	SetArgs(val *[]*string)
	ArgsInput() *[]*string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	FileUris() *[]*string
	SetFileUris(val *[]*string)
	FileUrisInput() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobPysparkConfig
	SetInternalValue(val *DataprocJobPysparkConfig)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() DataprocJobPysparkConfigLoggingConfigOutputReference
	LoggingConfigInput() *DataprocJobPysparkConfigLoggingConfig
	MainPythonFileUri() *string
	SetMainPythonFileUri(val *string)
	MainPythonFileUriInput() *string
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	PythonFileUris() *[]*string
	SetPythonFileUris(val *[]*string)
	PythonFileUrisInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *DataprocJobPysparkConfigLoggingConfig)
	ResetArchiveUris()
	ResetArgs()
	ResetFileUris()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetProperties()
	ResetPythonFileUris()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobPysparkConfigOutputReference ¶
func NewDataprocJobPysparkConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobPysparkConfigOutputReference
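After the resource exists, the same block can be mutated through the output reference instead of rebuilding the struct; a sketch under the assumption that the generated DataprocJob exposes a PysparkConfig() getter:

	cfg := job.PysparkConfig()
	cfg.SetArchiveUris(&[]*string{jsii.String("gs://my-bucket/deps.zip")})
	cfg.ResetArgs() // return the optional args to their default (unset)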
type DataprocJobReference ¶
type DataprocJobReference struct {
	// The job ID, which must be unique within the project.
	//
	// The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#job_id DataprocJob#job_id}
	JobId *string `field:"optional" json:"jobId" yaml:"jobId"`
}
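Leaving JobId unset lets the server generate one; supplying an ID makes retries idempotent, as noted above. A sketch with a placeholder ID:

	reference := &dataprocjob.DataprocJobReference{
		JobId: jsii.String("nightly-etl-20240101"), // placeholder
	}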
type DataprocJobReferenceOutputReference ¶
type DataprocJobReferenceOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobReference
	SetInternalValue(val *DataprocJobReference)
	JobId() *string
	SetJobId(val *string)
	JobIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetJobId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobReferenceOutputReference ¶
func NewDataprocJobReferenceOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobReferenceOutputReference
type DataprocJobScheduling ¶
type DataprocJobScheduling struct {
	// Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#max_failures_per_hour DataprocJob#max_failures_per_hour}
	MaxFailuresPerHour *float64 `field:"required" json:"maxFailuresPerHour" yaml:"maxFailuresPerHour"`
	// Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#max_failures_total DataprocJob#max_failures_total}
	MaxFailuresTotal *float64 `field:"required" json:"maxFailuresTotal" yaml:"maxFailuresTotal"`
}
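Both limits are required whenever the block is present; a sketch with placeholder values (jsii.Number is the github.com/aws/jsii-runtime-go helper):

	scheduling := &dataprocjob.DataprocJobScheduling{
		MaxFailuresPerHour: jsii.Number(1),
		MaxFailuresTotal:   jsii.Number(4),
	}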
type DataprocJobSchedulingOutputReference ¶
type DataprocJobSchedulingOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobScheduling
	SetInternalValue(val *DataprocJobScheduling)
	MaxFailuresPerHour() *float64
	SetMaxFailuresPerHour(val *float64)
	MaxFailuresPerHourInput() *float64
	MaxFailuresTotal() *float64
	SetMaxFailuresTotal(val *float64)
	MaxFailuresTotalInput() *float64
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobSchedulingOutputReference ¶
func NewDataprocJobSchedulingOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobSchedulingOutputReference
type DataprocJobSparkConfig ¶
type DataprocJobSparkConfig struct {
	// HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#archive_uris DataprocJob#archive_uris}
	ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"`
	// The arguments to pass to the driver.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#args DataprocJob#args}
	Args *[]*string `field:"optional" json:"args" yaml:"args"`
	// HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks.
	//
	// Useful for naively parallel tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#file_uris DataprocJob#file_uris}
	FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"`
	// HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
	LoggingConfig *DataprocJobSparkConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// The class containing the main method of the driver.
	//
	// Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#main_class DataprocJob#main_class}
	MainClass *string `field:"optional" json:"mainClass" yaml:"mainClass"`
	// The HCFS URI of the jar file containing the driver jar. Conflicts with main_class.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#main_jar_file_uri DataprocJob#main_jar_file_uri}
	MainJarFileUri *string `field:"optional" json:"mainJarFileUri" yaml:"mainJarFileUri"`
	// A mapping of property names to values, used to configure Spark.
	//
	// Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
}
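Since main_class and main_jar_file_uri conflict, set exactly one of them; a sketch using MainClass with the stock Spark examples jar (paths are placeholders):

	sparkConfig := &dataprocjob.DataprocJobSparkConfig{
		MainClass: jsii.String("org.apache.spark.examples.SparkPi"),
		JarFileUris: &[]*string{
			jsii.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
		},
		// MainJarFileUri must stay unset when MainClass is used.
	}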
type DataprocJobSparkConfigLoggingConfig ¶
type DataprocJobSparkConfigLoggingConfig struct {
	// Optional.
	//
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocJobSparkConfigLoggingConfigOutputReference ¶
type DataprocJobSparkConfigLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobSparkConfigLoggingConfig
	SetInternalValue(val *DataprocJobSparkConfigLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobSparkConfigLoggingConfigOutputReference ¶
func NewDataprocJobSparkConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobSparkConfigLoggingConfigOutputReference
type DataprocJobSparkConfigOutputReference ¶
type DataprocJobSparkConfigOutputReference interface {
	cdktf.ComplexObject
	ArchiveUris() *[]*string
	SetArchiveUris(val *[]*string)
	ArchiveUrisInput() *[]*string
	Args() *[]*string
	SetArgs(val *[]*string)
	ArgsInput() *[]*string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	FileUris() *[]*string
	SetFileUris(val *[]*string)
	FileUrisInput() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobSparkConfig
	SetInternalValue(val *DataprocJobSparkConfig)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() DataprocJobSparkConfigLoggingConfigOutputReference
	LoggingConfigInput() *DataprocJobSparkConfigLoggingConfig
	MainClass() *string
	SetMainClass(val *string)
	MainClassInput() *string
	MainJarFileUri() *string
	SetMainJarFileUri(val *string)
	MainJarFileUriInput() *string
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *DataprocJobSparkConfigLoggingConfig)
	ResetArchiveUris()
	ResetArgs()
	ResetFileUris()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetMainClass()
	ResetMainJarFileUri()
	ResetProperties()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobSparkConfigOutputReference ¶
func NewDataprocJobSparkConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobSparkConfigOutputReference
type DataprocJobSparksqlConfig ¶
type DataprocJobSparksqlConfig struct {
	// HCFS URIs of jar files to be added to the Spark CLASSPATH.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
	LoggingConfig *DataprocJobSparksqlConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// A mapping of property names to values, used to configure Spark SQL's SparkConf.
	//
	// Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#properties DataprocJob#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// The HCFS URI of the script that contains SQL queries. Conflicts with query_list.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
	QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
	// The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
	QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"`
	// Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#script_variables DataprocJob#script_variables}
	ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"`
}
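query_file_uri and query_list likewise conflict, so choose one; a sketch with an inline query list and a script variable:

	sparksqlConfig := &dataprocjob.DataprocJobSparksqlConfig{
		QueryList: &[]*string{
			jsii.String("SHOW DATABASES;"),
		},
		ScriptVariables: &map[string]*string{
			"env": jsii.String("prod"), // equivalent to: SET env="prod";
		},
	}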
type DataprocJobSparksqlConfigLoggingConfig ¶
type DataprocJobSparksqlConfigLoggingConfig struct {
	// Optional.
	//
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type DataprocJobSparksqlConfigLoggingConfigOutputReference ¶
type DataprocJobSparksqlConfigLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobSparksqlConfigLoggingConfig
	SetInternalValue(val *DataprocJobSparksqlConfigLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobSparksqlConfigLoggingConfigOutputReference ¶
func NewDataprocJobSparksqlConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobSparksqlConfigLoggingConfigOutputReference
type DataprocJobSparksqlConfigOutputReference ¶
type DataprocJobSparksqlConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobSparksqlConfig
	SetInternalValue(val *DataprocJobSparksqlConfig)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() DataprocJobSparksqlConfigLoggingConfigOutputReference
	LoggingConfigInput() *DataprocJobSparksqlConfigLoggingConfig
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	QueryFileUri() *string
	SetQueryFileUri(val *string)
	QueryFileUriInput() *string
	QueryList() *[]*string
	SetQueryList(val *[]*string)
	QueryListInput() *[]*string
	ScriptVariables() *map[string]*string
	SetScriptVariables(val *map[string]*string)
	ScriptVariablesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *DataprocJobSparksqlConfigLoggingConfig)
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetProperties()
	ResetQueryFileUri()
	ResetQueryList()
	ResetScriptVariables()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobSparksqlConfigOutputReference ¶
func NewDataprocJobSparksqlConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobSparksqlConfigOutputReference
type DataprocJobStatus ¶
type DataprocJobStatus struct { }
type DataprocJobStatusList ¶
type DataprocJobStatusList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) DataprocJobStatusOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobStatusList ¶
func NewDataprocJobStatusList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataprocJobStatusList
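The status attribute is computed, so it is read through this list type rather than set; a hedged sketch, assuming the generated DataprocJob exposes it via a Status() getter:

	// Hypothetical: job is an existing DataprocJob in the stack.
	firstStatus := job.Status().Get(jsii.Number(0))
	state := firstStatus.State() // a Terraform token, resolved at deploy time
	_ = state                    // e.g. feed it into a cdktf.NewTerraformOutput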
type DataprocJobStatusOutputReference ¶
type DataprocJobStatusOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Details() *string
	// Experimental.
	Fqn() *string
	InternalValue() *DataprocJobStatus
	SetInternalValue(val *DataprocJobStatus)
	State() *string
	StateStartTime() *string
	Substate() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobStatusOutputReference ¶
func NewDataprocJobStatusOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DataprocJobStatusOutputReference
type DataprocJobTimeouts ¶
type DataprocJobTimeouts struct {
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#create DataprocJob#create}.
	Create *string `field:"optional" json:"create" yaml:"create"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.84.0/docs/resources/dataproc_job#delete DataprocJob#delete}.
	Delete *string `field:"optional" json:"delete" yaml:"delete"`
}
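Both fields take Terraform duration strings such as "10m"; a sketch with placeholder values:

	timeouts := &dataprocjob.DataprocJobTimeouts{
		Create: jsii.String("10m"),
		Delete: jsii.String("10m"),
	}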
type DataprocJobTimeoutsOutputReference ¶
type DataprocJobTimeoutsOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	Create() *string
	SetCreate(val *string)
	CreateInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Delete() *string
	SetDelete(val *string)
	DeleteInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetCreate()
	ResetDelete()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewDataprocJobTimeoutsOutputReference ¶
func NewDataprocJobTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataprocJobTimeoutsOutputReference
Source Files ¶
- DataprocJob.go
- DataprocJobConfig.go
- DataprocJobHadoopConfig.go
- DataprocJobHadoopConfigLoggingConfig.go
- DataprocJobHadoopConfigLoggingConfigOutputReference.go
- DataprocJobHadoopConfigLoggingConfigOutputReference__checks.go
- DataprocJobHadoopConfigOutputReference.go
- DataprocJobHadoopConfigOutputReference__checks.go
- DataprocJobHiveConfig.go
- DataprocJobHiveConfigOutputReference.go
- DataprocJobHiveConfigOutputReference__checks.go
- DataprocJobPigConfig.go
- DataprocJobPigConfigLoggingConfig.go
- DataprocJobPigConfigLoggingConfigOutputReference.go
- DataprocJobPigConfigLoggingConfigOutputReference__checks.go
- DataprocJobPigConfigOutputReference.go
- DataprocJobPigConfigOutputReference__checks.go
- DataprocJobPlacement.go
- DataprocJobPlacementOutputReference.go
- DataprocJobPlacementOutputReference__checks.go
- DataprocJobPrestoConfig.go
- DataprocJobPrestoConfigLoggingConfig.go
- DataprocJobPrestoConfigLoggingConfigOutputReference.go
- DataprocJobPrestoConfigLoggingConfigOutputReference__checks.go
- DataprocJobPrestoConfigOutputReference.go
- DataprocJobPrestoConfigOutputReference__checks.go
- DataprocJobPysparkConfig.go
- DataprocJobPysparkConfigLoggingConfig.go
- DataprocJobPysparkConfigLoggingConfigOutputReference.go
- DataprocJobPysparkConfigLoggingConfigOutputReference__checks.go
- DataprocJobPysparkConfigOutputReference.go
- DataprocJobPysparkConfigOutputReference__checks.go
- DataprocJobReference.go
- DataprocJobReferenceOutputReference.go
- DataprocJobReferenceOutputReference__checks.go
- DataprocJobScheduling.go
- DataprocJobSchedulingOutputReference.go
- DataprocJobSchedulingOutputReference__checks.go
- DataprocJobSparkConfig.go
- DataprocJobSparkConfigLoggingConfig.go
- DataprocJobSparkConfigLoggingConfigOutputReference.go
- DataprocJobSparkConfigLoggingConfigOutputReference__checks.go
- DataprocJobSparkConfigOutputReference.go
- DataprocJobSparkConfigOutputReference__checks.go
- DataprocJobSparksqlConfig.go
- DataprocJobSparksqlConfigLoggingConfig.go
- DataprocJobSparksqlConfigLoggingConfigOutputReference.go
- DataprocJobSparksqlConfigLoggingConfigOutputReference__checks.go
- DataprocJobSparksqlConfigOutputReference.go
- DataprocJobSparksqlConfigOutputReference__checks.go
- DataprocJobStatus.go
- DataprocJobStatusList.go
- DataprocJobStatusList__checks.go
- DataprocJobStatusOutputReference.go
- DataprocJobStatusOutputReference__checks.go
- DataprocJobTimeouts.go
- DataprocJobTimeoutsOutputReference.go
- DataprocJobTimeoutsOutputReference__checks.go
- DataprocJob__checks.go
- main.go