Documentation
¶
Index ¶
- func GoogleDataprocJob_IsConstruct(x interface{}) *bool
- func GoogleDataprocJob_IsTerraformElement(x interface{}) *bool
- func GoogleDataprocJob_IsTerraformResource(x interface{}) *bool
- func GoogleDataprocJob_TfResourceType() *string
- func NewGoogleDataprocJobHadoopConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobHadoopConfigLoggingConfigOutputReference, ...)
- func NewGoogleDataprocJobHadoopConfigOutputReference_Override(g GoogleDataprocJobHadoopConfigOutputReference, ...)
- func NewGoogleDataprocJobHiveConfigOutputReference_Override(g GoogleDataprocJobHiveConfigOutputReference, ...)
- func NewGoogleDataprocJobPigConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobPigConfigLoggingConfigOutputReference, ...)
- func NewGoogleDataprocJobPigConfigOutputReference_Override(g GoogleDataprocJobPigConfigOutputReference, ...)
- func NewGoogleDataprocJobPlacementOutputReference_Override(g GoogleDataprocJobPlacementOutputReference, ...)
- func NewGoogleDataprocJobPrestoConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobPrestoConfigLoggingConfigOutputReference, ...)
- func NewGoogleDataprocJobPrestoConfigOutputReference_Override(g GoogleDataprocJobPrestoConfigOutputReference, ...)
- func NewGoogleDataprocJobPysparkConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobPysparkConfigLoggingConfigOutputReference, ...)
- func NewGoogleDataprocJobPysparkConfigOutputReference_Override(g GoogleDataprocJobPysparkConfigOutputReference, ...)
- func NewGoogleDataprocJobReferenceOutputReference_Override(g GoogleDataprocJobReferenceOutputReference, ...)
- func NewGoogleDataprocJobSchedulingOutputReference_Override(g GoogleDataprocJobSchedulingOutputReference, ...)
- func NewGoogleDataprocJobSparkConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobSparkConfigLoggingConfigOutputReference, ...)
- func NewGoogleDataprocJobSparkConfigOutputReference_Override(g GoogleDataprocJobSparkConfigOutputReference, ...)
- func NewGoogleDataprocJobSparksqlConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference, ...)
- func NewGoogleDataprocJobSparksqlConfigOutputReference_Override(g GoogleDataprocJobSparksqlConfigOutputReference, ...)
- func NewGoogleDataprocJobStatusList_Override(g GoogleDataprocJobStatusList, terraformResource cdktf.IInterpolatingParent, ...)
- func NewGoogleDataprocJobStatusOutputReference_Override(g GoogleDataprocJobStatusOutputReference, ...)
- func NewGoogleDataprocJobTimeoutsOutputReference_Override(g GoogleDataprocJobTimeoutsOutputReference, ...)
- func NewGoogleDataprocJob_Override(g GoogleDataprocJob, scope constructs.Construct, id *string, ...)
- type GoogleDataprocJob
- type GoogleDataprocJobConfig
- type GoogleDataprocJobHadoopConfig
- type GoogleDataprocJobHadoopConfigLoggingConfig
- type GoogleDataprocJobHadoopConfigLoggingConfigOutputReference
- type GoogleDataprocJobHadoopConfigOutputReference
- type GoogleDataprocJobHiveConfig
- type GoogleDataprocJobHiveConfigOutputReference
- type GoogleDataprocJobPigConfig
- type GoogleDataprocJobPigConfigLoggingConfig
- type GoogleDataprocJobPigConfigLoggingConfigOutputReference
- type GoogleDataprocJobPigConfigOutputReference
- type GoogleDataprocJobPlacement
- type GoogleDataprocJobPlacementOutputReference
- type GoogleDataprocJobPrestoConfig
- type GoogleDataprocJobPrestoConfigLoggingConfig
- type GoogleDataprocJobPrestoConfigLoggingConfigOutputReference
- type GoogleDataprocJobPrestoConfigOutputReference
- type GoogleDataprocJobPysparkConfig
- type GoogleDataprocJobPysparkConfigLoggingConfig
- type GoogleDataprocJobPysparkConfigLoggingConfigOutputReference
- type GoogleDataprocJobPysparkConfigOutputReference
- type GoogleDataprocJobReference
- type GoogleDataprocJobReferenceOutputReference
- type GoogleDataprocJobScheduling
- type GoogleDataprocJobSchedulingOutputReference
- type GoogleDataprocJobSparkConfig
- type GoogleDataprocJobSparkConfigLoggingConfig
- type GoogleDataprocJobSparkConfigLoggingConfigOutputReference
- type GoogleDataprocJobSparkConfigOutputReference
- type GoogleDataprocJobSparksqlConfig
- type GoogleDataprocJobSparksqlConfigLoggingConfig
- type GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference
- type GoogleDataprocJobSparksqlConfigOutputReference
- type GoogleDataprocJobStatus
- type GoogleDataprocJobStatusList
- type GoogleDataprocJobStatusOutputReference
- type GoogleDataprocJobTimeouts
- type GoogleDataprocJobTimeoutsOutputReference
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func GoogleDataprocJob_IsConstruct ¶
func GoogleDataprocJob_IsConstruct(x interface{}) *bool
Checks if `x` is a construct.
Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.
Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof`, and using this type-testing method instead.
Returns: true if `x` is an object created from a class which extends `Construct`.
func GoogleDataprocJob_IsTerraformElement ¶
func GoogleDataprocJob_IsTerraformElement(x interface{}) *bool
Experimental.
func GoogleDataprocJob_IsTerraformResource ¶
func GoogleDataprocJob_IsTerraformResource(x interface{}) *bool
Experimental.
func GoogleDataprocJob_TfResourceType ¶
func GoogleDataprocJob_TfResourceType() *string
func NewGoogleDataprocJobHadoopConfigLoggingConfigOutputReference_Override ¶
func NewGoogleDataprocJobHadoopConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobHadoopConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobHadoopConfigOutputReference_Override ¶
func NewGoogleDataprocJobHadoopConfigOutputReference_Override(g GoogleDataprocJobHadoopConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobHiveConfigOutputReference_Override ¶
func NewGoogleDataprocJobHiveConfigOutputReference_Override(g GoogleDataprocJobHiveConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPigConfigLoggingConfigOutputReference_Override ¶
func NewGoogleDataprocJobPigConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobPigConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPigConfigOutputReference_Override ¶
func NewGoogleDataprocJobPigConfigOutputReference_Override(g GoogleDataprocJobPigConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPlacementOutputReference_Override ¶
func NewGoogleDataprocJobPlacementOutputReference_Override(g GoogleDataprocJobPlacementOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPrestoConfigLoggingConfigOutputReference_Override ¶
func NewGoogleDataprocJobPrestoConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobPrestoConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPrestoConfigOutputReference_Override ¶
func NewGoogleDataprocJobPrestoConfigOutputReference_Override(g GoogleDataprocJobPrestoConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPysparkConfigLoggingConfigOutputReference_Override ¶
func NewGoogleDataprocJobPysparkConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobPysparkConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobPysparkConfigOutputReference_Override ¶
func NewGoogleDataprocJobPysparkConfigOutputReference_Override(g GoogleDataprocJobPysparkConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobReferenceOutputReference_Override ¶
func NewGoogleDataprocJobReferenceOutputReference_Override(g GoogleDataprocJobReferenceOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobSchedulingOutputReference_Override ¶
func NewGoogleDataprocJobSchedulingOutputReference_Override(g GoogleDataprocJobSchedulingOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobSparkConfigLoggingConfigOutputReference_Override ¶
func NewGoogleDataprocJobSparkConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobSparkConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobSparkConfigOutputReference_Override ¶
func NewGoogleDataprocJobSparkConfigOutputReference_Override(g GoogleDataprocJobSparkConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobSparksqlConfigLoggingConfigOutputReference_Override ¶
func NewGoogleDataprocJobSparksqlConfigLoggingConfigOutputReference_Override(g GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobSparksqlConfigOutputReference_Override ¶
func NewGoogleDataprocJobSparksqlConfigOutputReference_Override(g GoogleDataprocJobSparksqlConfigOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJobStatusList_Override ¶
func NewGoogleDataprocJobStatusList_Override(g GoogleDataprocJobStatusList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewGoogleDataprocJobStatusOutputReference_Override ¶
func NewGoogleDataprocJobStatusOutputReference_Override(g GoogleDataprocJobStatusOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewGoogleDataprocJobTimeoutsOutputReference_Override ¶
func NewGoogleDataprocJobTimeoutsOutputReference_Override(g GoogleDataprocJobTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleDataprocJob_Override ¶
func NewGoogleDataprocJob_Override(g GoogleDataprocJob, scope constructs.Construct, id *string, config *GoogleDataprocJobConfig)
Create a new {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job google_dataproc_job} Resource.
Types ¶
type GoogleDataprocJob ¶
type GoogleDataprocJob interface { cdktf.TerraformResource // Experimental. CdktfStack() cdktf.TerraformStack // Experimental. Connection() interface{} // Experimental. SetConnection(val interface{}) // Experimental. ConstructNodeMetadata() *map[string]interface{} // Experimental. Count() *float64 // Experimental. SetCount(val *float64) // Experimental. DependsOn() *[]*string // Experimental. SetDependsOn(val *[]*string) DriverControlsFilesUri() *string DriverOutputResourceUri() *string ForceDelete() interface{} SetForceDelete(val interface{}) ForceDeleteInput() interface{} // Experimental. ForEach() cdktf.ITerraformIterator // Experimental. SetForEach(val cdktf.ITerraformIterator) // Experimental. Fqn() *string // Experimental. FriendlyUniqueId() *string HadoopConfig() GoogleDataprocJobHadoopConfigOutputReference HadoopConfigInput() *GoogleDataprocJobHadoopConfig HiveConfig() GoogleDataprocJobHiveConfigOutputReference HiveConfigInput() *GoogleDataprocJobHiveConfig Id() *string SetId(val *string) IdInput() *string Labels() *map[string]*string SetLabels(val *map[string]*string) LabelsInput() *map[string]*string // Experimental. Lifecycle() *cdktf.TerraformResourceLifecycle // Experimental. SetLifecycle(val *cdktf.TerraformResourceLifecycle) // The tree node. Node() constructs.Node PigConfig() GoogleDataprocJobPigConfigOutputReference PigConfigInput() *GoogleDataprocJobPigConfig Placement() GoogleDataprocJobPlacementOutputReference PlacementInput() *GoogleDataprocJobPlacement PrestoConfig() GoogleDataprocJobPrestoConfigOutputReference PrestoConfigInput() *GoogleDataprocJobPrestoConfig Project() *string SetProject(val *string) ProjectInput() *string // Experimental. Provider() cdktf.TerraformProvider // Experimental. SetProvider(val cdktf.TerraformProvider) // Experimental. Provisioners() *[]interface{} // Experimental. 
SetProvisioners(val *[]interface{}) PysparkConfig() GoogleDataprocJobPysparkConfigOutputReference PysparkConfigInput() *GoogleDataprocJobPysparkConfig // Experimental. RawOverrides() interface{} Reference() GoogleDataprocJobReferenceOutputReference ReferenceInput() *GoogleDataprocJobReference Region() *string SetRegion(val *string) RegionInput() *string Scheduling() GoogleDataprocJobSchedulingOutputReference SchedulingInput() *GoogleDataprocJobScheduling SparkConfig() GoogleDataprocJobSparkConfigOutputReference SparkConfigInput() *GoogleDataprocJobSparkConfig SparksqlConfig() GoogleDataprocJobSparksqlConfigOutputReference SparksqlConfigInput() *GoogleDataprocJobSparksqlConfig Status() GoogleDataprocJobStatusList // Experimental. TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata // Experimental. TerraformMetaArguments() *map[string]interface{} // Experimental. TerraformResourceType() *string Timeouts() GoogleDataprocJobTimeoutsOutputReference TimeoutsInput() interface{} // Experimental. AddOverride(path *string, value interface{}) // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable // Overrides the auto-generated logical ID with a specific ID. // Experimental. 
OverrideLogicalId(newLogicalId *string) PutHadoopConfig(value *GoogleDataprocJobHadoopConfig) PutHiveConfig(value *GoogleDataprocJobHiveConfig) PutPigConfig(value *GoogleDataprocJobPigConfig) PutPlacement(value *GoogleDataprocJobPlacement) PutPrestoConfig(value *GoogleDataprocJobPrestoConfig) PutPysparkConfig(value *GoogleDataprocJobPysparkConfig) PutReference(value *GoogleDataprocJobReference) PutScheduling(value *GoogleDataprocJobScheduling) PutSparkConfig(value *GoogleDataprocJobSparkConfig) PutSparksqlConfig(value *GoogleDataprocJobSparksqlConfig) PutTimeouts(value *GoogleDataprocJobTimeouts) ResetForceDelete() ResetHadoopConfig() ResetHiveConfig() ResetId() ResetLabels() // Resets a previously passed logical Id to use the auto-generated logical id again. // Experimental. ResetOverrideLogicalId() ResetPigConfig() ResetPrestoConfig() ResetProject() ResetPysparkConfig() ResetReference() ResetRegion() ResetScheduling() ResetSparkConfig() ResetSparksqlConfig() ResetTimeouts() SynthesizeAttributes() *map[string]interface{} // Experimental. ToMetadata() interface{} // Returns a string representation of this construct. ToString() *string // Adds this resource to the terraform JSON output. // Experimental. ToTerraform() interface{} }
Represents a {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job google_dataproc_job}.
func NewGoogleDataprocJob ¶
func NewGoogleDataprocJob(scope constructs.Construct, id *string, config *GoogleDataprocJobConfig) GoogleDataprocJob
Create a new {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job google_dataproc_job} Resource.
type GoogleDataprocJobConfig ¶
type GoogleDataprocJobConfig struct { // Experimental. Connection interface{} `field:"optional" json:"connection" yaml:"connection"` // Experimental. Count *float64 `field:"optional" json:"count" yaml:"count"` // Experimental. DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"` // Experimental. ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"` // Experimental. Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"` // Experimental. Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"` // Experimental. Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"` // placement block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#placement GoogleDataprocJob#placement} Placement *GoogleDataprocJobPlacement `field:"required" json:"placement" yaml:"placement"` // By default, you can only delete inactive jobs within Dataproc. // // Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#force_delete GoogleDataprocJob#force_delete} ForceDelete interface{} `field:"optional" json:"forceDelete" yaml:"forceDelete"` // hadoop_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#hadoop_config GoogleDataprocJob#hadoop_config} HadoopConfig *GoogleDataprocJobHadoopConfig `field:"optional" json:"hadoopConfig" yaml:"hadoopConfig"` // hive_config block. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#hive_config GoogleDataprocJob#hive_config} HiveConfig *GoogleDataprocJobHiveConfig `field:"optional" json:"hiveConfig" yaml:"hiveConfig"` // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#id GoogleDataprocJob#id}. // // Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. // If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable. Id *string `field:"optional" json:"id" yaml:"id"` // Optional. The labels to associate with this job. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#labels GoogleDataprocJob#labels} Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"` // pig_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#pig_config GoogleDataprocJob#pig_config} PigConfig *GoogleDataprocJobPigConfig `field:"optional" json:"pigConfig" yaml:"pigConfig"` // presto_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#presto_config GoogleDataprocJob#presto_config} PrestoConfig *GoogleDataprocJobPrestoConfig `field:"optional" json:"prestoConfig" yaml:"prestoConfig"` // The project in which the cluster can be found and jobs subsequently run against. // // If it is not provided, the provider project is used. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#project GoogleDataprocJob#project} Project *string `field:"optional" json:"project" yaml:"project"` // pyspark_config block. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#pyspark_config GoogleDataprocJob#pyspark_config} PysparkConfig *GoogleDataprocJobPysparkConfig `field:"optional" json:"pysparkConfig" yaml:"pysparkConfig"` // reference block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#reference GoogleDataprocJob#reference} Reference *GoogleDataprocJobReference `field:"optional" json:"reference" yaml:"reference"` // The Cloud Dataproc region. // // This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#region GoogleDataprocJob#region} Region *string `field:"optional" json:"region" yaml:"region"` // scheduling block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#scheduling GoogleDataprocJob#scheduling} Scheduling *GoogleDataprocJobScheduling `field:"optional" json:"scheduling" yaml:"scheduling"` // spark_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#spark_config GoogleDataprocJob#spark_config} SparkConfig *GoogleDataprocJobSparkConfig `field:"optional" json:"sparkConfig" yaml:"sparkConfig"` // sparksql_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#sparksql_config GoogleDataprocJob#sparksql_config} SparksqlConfig *GoogleDataprocJobSparksqlConfig `field:"optional" json:"sparksqlConfig" yaml:"sparksqlConfig"` // timeouts block. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#timeouts GoogleDataprocJob#timeouts} Timeouts *GoogleDataprocJobTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"` }
type GoogleDataprocJobHadoopConfig ¶
type GoogleDataprocJobHadoopConfig struct { // HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#archive_uris GoogleDataprocJob#archive_uris} ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"` // The arguments to pass to the driver. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#args GoogleDataprocJob#args} Args *[]*string `field:"optional" json:"args" yaml:"args"` // HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. // // Useful for naively parallel tasks. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#file_uris GoogleDataprocJob#file_uris} FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"` // HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#jar_file_uris GoogleDataprocJob#jar_file_uris} JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"` // logging_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#logging_config GoogleDataprocJob#logging_config} LoggingConfig *GoogleDataprocJobHadoopConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // The class containing the main method of the driver. // // Must be in a provided jar or jar that is already on the classpath. 
Conflicts with main_jar_file_uri // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#main_class GoogleDataprocJob#main_class} MainClass *string `field:"optional" json:"mainClass" yaml:"mainClass"` // The HCFS URI of jar file containing the driver jar. Conflicts with main_class. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#main_jar_file_uri GoogleDataprocJob#main_jar_file_uri} MainJarFileUri *string `field:"optional" json:"mainJarFileUri" yaml:"mainJarFileUri"` // A mapping of property names to values, used to configure Spark. // // Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` }
type GoogleDataprocJobHadoopConfigLoggingConfig ¶
type GoogleDataprocJobHadoopConfigLoggingConfig struct { // Optional. // // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#driver_log_levels GoogleDataprocJob#driver_log_levels} DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"` }
type GoogleDataprocJobHadoopConfigLoggingConfigOutputReference ¶
type GoogleDataprocJobHadoopConfigLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobHadoopConfigLoggingConfig SetInternalValue(val *GoogleDataprocJobHadoopConfigLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobHadoopConfigLoggingConfigOutputReference ¶
func NewGoogleDataprocJobHadoopConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobHadoopConfigLoggingConfigOutputReference
type GoogleDataprocJobHadoopConfigOutputReference ¶
type GoogleDataprocJobHadoopConfigOutputReference interface { cdktf.ComplexObject ArchiveUris() *[]*string SetArchiveUris(val *[]*string) ArchiveUrisInput() *[]*string Args() *[]*string SetArgs(val *[]*string) ArgsInput() *[]*string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string FileUris() *[]*string SetFileUris(val *[]*string) FileUrisInput() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobHadoopConfig SetInternalValue(val *GoogleDataprocJobHadoopConfig) JarFileUris() *[]*string SetJarFileUris(val *[]*string) JarFileUrisInput() *[]*string LoggingConfig() GoogleDataprocJobHadoopConfigLoggingConfigOutputReference LoggingConfigInput() *GoogleDataprocJobHadoopConfigLoggingConfig MainClass() *string SetMainClass(val *string) MainClassInput() *string MainJarFileUri() *string SetMainJarFileUri(val *string) MainJarFileUriInput() *string Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. 
GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutLoggingConfig(value *GoogleDataprocJobHadoopConfigLoggingConfig) ResetArchiveUris() ResetArgs() ResetFileUris() ResetJarFileUris() ResetLoggingConfig() ResetMainClass() ResetMainJarFileUri() ResetProperties() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobHadoopConfigOutputReference ¶
func NewGoogleDataprocJobHadoopConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobHadoopConfigOutputReference
type GoogleDataprocJobHiveConfig ¶
type GoogleDataprocJobHiveConfig struct { // Whether to continue executing queries if a query fails. // // The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#continue_on_failure GoogleDataprocJob#continue_on_failure} ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"` // HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. // // Can contain Hive SerDes and UDFs. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#jar_file_uris GoogleDataprocJob#jar_file_uris} JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"` // A mapping of property names and values, used to configure Hive. // // Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` // HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_file_uri GoogleDataprocJob#query_file_uri} QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"` // The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_list GoogleDataprocJob#query_list} QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"` // Mapping of query variable names to values (equivalent to the Hive command: SET name="value";). // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#script_variables GoogleDataprocJob#script_variables} ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"` }
type GoogleDataprocJobHiveConfigOutputReference ¶
type GoogleDataprocJobHiveConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) ContinueOnFailure() interface{} SetContinueOnFailure(val interface{}) ContinueOnFailureInput() interface{} // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobHiveConfig SetInternalValue(val *GoogleDataprocJobHiveConfig) JarFileUris() *[]*string SetJarFileUris(val *[]*string) JarFileUrisInput() *[]*string Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string QueryFileUri() *string SetQueryFileUri(val *string) QueryFileUriInput() *string QueryList() *[]*string SetQueryList(val *[]*string) QueryListInput() *[]*string ScriptVariables() *map[string]*string SetScriptVariables(val *map[string]*string) ScriptVariablesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. 
GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetContinueOnFailure() ResetJarFileUris() ResetProperties() ResetQueryFileUri() ResetQueryList() ResetScriptVariables() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobHiveConfigOutputReference ¶
func NewGoogleDataprocJobHiveConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobHiveConfigOutputReference
type GoogleDataprocJobPigConfig ¶
type GoogleDataprocJobPigConfig struct { // Whether to continue executing queries if a query fails. // // The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#continue_on_failure GoogleDataprocJob#continue_on_failure} ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"` // HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. // // Can contain Pig UDFs. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#jar_file_uris GoogleDataprocJob#jar_file_uris} JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"` // logging_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#logging_config GoogleDataprocJob#logging_config} LoggingConfig *GoogleDataprocJobPigConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // A mapping of property names to values, used to configure Pig. // // Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` // HCFS URI of file containing Pig script to execute as the job. Conflicts with query_list. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_file_uri GoogleDataprocJob#query_file_uri} QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"` // The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_list GoogleDataprocJob#query_list} QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"` // Mapping of query variable names to values (equivalent to the Pig command: name=[value]). // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#script_variables GoogleDataprocJob#script_variables} ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"` }
type GoogleDataprocJobPigConfigLoggingConfig ¶
type GoogleDataprocJobPigConfigLoggingConfig struct { // Optional. // // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#driver_log_levels GoogleDataprocJob#driver_log_levels} DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"` }
type GoogleDataprocJobPigConfigLoggingConfigOutputReference ¶
type GoogleDataprocJobPigConfigLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobPigConfigLoggingConfig SetInternalValue(val *GoogleDataprocJobPigConfigLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobPigConfigLoggingConfigOutputReference ¶
func NewGoogleDataprocJobPigConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPigConfigLoggingConfigOutputReference
type GoogleDataprocJobPigConfigOutputReference ¶
type GoogleDataprocJobPigConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) ContinueOnFailure() interface{} SetContinueOnFailure(val interface{}) ContinueOnFailureInput() interface{} // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobPigConfig SetInternalValue(val *GoogleDataprocJobPigConfig) JarFileUris() *[]*string SetJarFileUris(val *[]*string) JarFileUrisInput() *[]*string LoggingConfig() GoogleDataprocJobPigConfigLoggingConfigOutputReference LoggingConfigInput() *GoogleDataprocJobPigConfigLoggingConfig Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string QueryFileUri() *string SetQueryFileUri(val *string) QueryFileUriInput() *string QueryList() *[]*string SetQueryList(val *[]*string) QueryListInput() *[]*string ScriptVariables() *map[string]*string SetScriptVariables(val *map[string]*string) ScriptVariablesInput() *map[string]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. 
GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutLoggingConfig(value *GoogleDataprocJobPigConfigLoggingConfig) ResetContinueOnFailure() ResetJarFileUris() ResetLoggingConfig() ResetProperties() ResetQueryFileUri() ResetQueryList() ResetScriptVariables() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobPigConfigOutputReference ¶
func NewGoogleDataprocJobPigConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPigConfigOutputReference
type GoogleDataprocJobPlacement ¶
type GoogleDataprocJobPlacement struct { // The name of the cluster where the job will be submitted. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#cluster_name GoogleDataprocJob#cluster_name} ClusterName *string `field:"required" json:"clusterName" yaml:"clusterName"` }
type GoogleDataprocJobPlacementOutputReference ¶
type GoogleDataprocJobPlacementOutputReference interface { cdktf.ComplexObject ClusterName() *string SetClusterName(val *string) ClusterNameInput() *string ClusterUuid() *string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobPlacement SetInternalValue(val *GoogleDataprocJobPlacement) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobPlacementOutputReference ¶
func NewGoogleDataprocJobPlacementOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPlacementOutputReference
type GoogleDataprocJobPrestoConfig ¶
type GoogleDataprocJobPrestoConfig struct { // Presto client tags to attach to this query. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#client_tags GoogleDataprocJob#client_tags} ClientTags *[]*string `field:"optional" json:"clientTags" yaml:"clientTags"` // Whether to continue executing queries if a query fails. // // Setting to true can be useful when executing independent parallel queries. Defaults to false. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#continue_on_failure GoogleDataprocJob#continue_on_failure} ContinueOnFailure interface{} `field:"optional" json:"continueOnFailure" yaml:"continueOnFailure"` // logging_config block. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#logging_config GoogleDataprocJob#logging_config} LoggingConfig *GoogleDataprocJobPrestoConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // The format in which query output will be displayed. See the Presto documentation for supported output formats. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#output_format GoogleDataprocJob#output_format} OutputFormat *string `field:"optional" json:"outputFormat" yaml:"outputFormat"` // A mapping of property names to values. // // Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` // The HCFS URI of the script that contains SQL queries. Conflicts with query_list. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_file_uri GoogleDataprocJob#query_file_uri} QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"` // The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_list GoogleDataprocJob#query_list} QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"` }
type GoogleDataprocJobPrestoConfigLoggingConfig ¶
type GoogleDataprocJobPrestoConfigLoggingConfig struct { // Optional. // // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#driver_log_levels GoogleDataprocJob#driver_log_levels} DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"` }
type GoogleDataprocJobPrestoConfigLoggingConfigOutputReference ¶
type GoogleDataprocJobPrestoConfigLoggingConfigOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DriverLogLevels() *map[string]*string SetDriverLogLevels(val *map[string]*string) DriverLogLevelsInput() *map[string]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobPrestoConfigLoggingConfig SetInternalValue(val *GoogleDataprocJobPrestoConfigLoggingConfig) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobPrestoConfigLoggingConfigOutputReference ¶
func NewGoogleDataprocJobPrestoConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPrestoConfigLoggingConfigOutputReference
type GoogleDataprocJobPrestoConfigOutputReference ¶
type GoogleDataprocJobPrestoConfigOutputReference interface { cdktf.ComplexObject ClientTags() *[]*string SetClientTags(val *[]*string) ClientTagsInput() *[]*string // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) ContinueOnFailure() interface{} SetContinueOnFailure(val interface{}) ContinueOnFailureInput() interface{} // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleDataprocJobPrestoConfig SetInternalValue(val *GoogleDataprocJobPrestoConfig) LoggingConfig() GoogleDataprocJobPrestoConfigLoggingConfigOutputReference LoggingConfigInput() *GoogleDataprocJobPrestoConfigLoggingConfig OutputFormat() *string SetOutputFormat(val *string) OutputFormatInput() *string Properties() *map[string]*string SetProperties(val *map[string]*string) PropertiesInput() *map[string]*string QueryFileUri() *string SetQueryFileUri(val *string) QueryFileUriInput() *string QueryList() *[]*string SetQueryList(val *[]*string) QueryListInput() *[]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. 
GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutLoggingConfig(value *GoogleDataprocJobPrestoConfigLoggingConfig) ResetClientTags() ResetContinueOnFailure() ResetLoggingConfig() ResetOutputFormat() ResetProperties() ResetQueryFileUri() ResetQueryList() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleDataprocJobPrestoConfigOutputReference ¶
func NewGoogleDataprocJobPrestoConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPrestoConfigOutputReference
type GoogleDataprocJobPysparkConfig ¶
type GoogleDataprocJobPysparkConfig struct { // Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#main_python_file_uri GoogleDataprocJob#main_python_file_uri} MainPythonFileUri *string `field:"required" json:"mainPythonFileUri" yaml:"mainPythonFileUri"` // Optional. HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#archive_uris GoogleDataprocJob#archive_uris} ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"` // Optional. // // The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#args GoogleDataprocJob#args} Args *[]*string `field:"optional" json:"args" yaml:"args"` // Optional. // // HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#file_uris GoogleDataprocJob#file_uris} FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"` // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#jar_file_uris GoogleDataprocJob#jar_file_uris} JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"` // logging_config block. 
// // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#logging_config GoogleDataprocJob#logging_config} LoggingConfig *GoogleDataprocJobPysparkConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"` // Optional. // // A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties} Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"` // Optional. // // HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#python_file_uris GoogleDataprocJob#python_file_uris} PythonFileUris *[]*string `field:"optional" json:"pythonFileUris" yaml:"pythonFileUris"` }
type GoogleDataprocJobPysparkConfigLoggingConfig ¶
type GoogleDataprocJobPysparkConfigLoggingConfig struct { // Optional. // // The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'. // // Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#driver_log_levels GoogleDataprocJob#driver_log_levels} DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"` }
type GoogleDataprocJobPysparkConfigLoggingConfigOutputReference ¶
// GoogleDataprocJobPysparkConfigLoggingConfigOutputReference provides typed
// attribute access (getters, setters, and *Input views) for the synthesized
// `pyspark_config.logging_config` block.
type GoogleDataprocJobPysparkConfigLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobPysparkConfigLoggingConfig
	SetInternalValue(val *GoogleDataprocJobPysparkConfigLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobPysparkConfigLoggingConfigOutputReference ¶
// NewGoogleDataprocJobPysparkConfigLoggingConfigOutputReference constructs an
// output reference bound to the given parent resource and attribute name.
func NewGoogleDataprocJobPysparkConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPysparkConfigLoggingConfigOutputReference
type GoogleDataprocJobPysparkConfigOutputReference ¶
// GoogleDataprocJobPysparkConfigOutputReference provides typed attribute access
// (getters, setters, *Input views, Put*/Reset* mutators) for the synthesized
// `pyspark_config` block.
type GoogleDataprocJobPysparkConfigOutputReference interface {
	cdktf.ComplexObject
	ArchiveUris() *[]*string
	SetArchiveUris(val *[]*string)
	ArchiveUrisInput() *[]*string
	Args() *[]*string
	SetArgs(val *[]*string)
	ArgsInput() *[]*string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	FileUris() *[]*string
	SetFileUris(val *[]*string)
	FileUrisInput() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobPysparkConfig
	SetInternalValue(val *GoogleDataprocJobPysparkConfig)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() GoogleDataprocJobPysparkConfigLoggingConfigOutputReference
	LoggingConfigInput() *GoogleDataprocJobPysparkConfigLoggingConfig
	MainPythonFileUri() *string
	SetMainPythonFileUri(val *string)
	MainPythonFileUriInput() *string
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	PythonFileUris() *[]*string
	SetPythonFileUris(val *[]*string)
	PythonFileUrisInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *GoogleDataprocJobPysparkConfigLoggingConfig)
	ResetArchiveUris()
	ResetArgs()
	ResetFileUris()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetProperties()
	ResetPythonFileUris()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobPysparkConfigOutputReference ¶
// NewGoogleDataprocJobPysparkConfigOutputReference constructs an output
// reference bound to the given parent resource and attribute name.
func NewGoogleDataprocJobPysparkConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobPysparkConfigOutputReference
type GoogleDataprocJobReference ¶
// GoogleDataprocJobReference models the `reference` block of the
// google_dataproc_job Terraform resource.
type GoogleDataprocJobReference struct {
	// The job ID, which must be unique within the project.
	//
	// The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#job_id GoogleDataprocJob#job_id}
	JobId *string `field:"optional" json:"jobId" yaml:"jobId"`
}
type GoogleDataprocJobReferenceOutputReference ¶
// GoogleDataprocJobReferenceOutputReference provides typed attribute access
// for the synthesized `reference` block.
type GoogleDataprocJobReferenceOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobReference
	SetInternalValue(val *GoogleDataprocJobReference)
	JobId() *string
	SetJobId(val *string)
	JobIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetJobId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobReferenceOutputReference ¶
// NewGoogleDataprocJobReferenceOutputReference constructs an output reference
// bound to the given parent resource and attribute name.
func NewGoogleDataprocJobReferenceOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobReferenceOutputReference
type GoogleDataprocJobScheduling ¶
// GoogleDataprocJobScheduling models the `scheduling` block of the
// google_dataproc_job Terraform resource.
type GoogleDataprocJobScheduling struct {
	// Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#max_failures_per_hour GoogleDataprocJob#max_failures_per_hour}
	MaxFailuresPerHour *float64 `field:"required" json:"maxFailuresPerHour" yaml:"maxFailuresPerHour"`
	// Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#max_failures_total GoogleDataprocJob#max_failures_total}
	MaxFailuresTotal *float64 `field:"required" json:"maxFailuresTotal" yaml:"maxFailuresTotal"`
}
type GoogleDataprocJobSchedulingOutputReference ¶
// GoogleDataprocJobSchedulingOutputReference provides typed attribute access
// for the synthesized `scheduling` block.
type GoogleDataprocJobSchedulingOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobScheduling
	SetInternalValue(val *GoogleDataprocJobScheduling)
	MaxFailuresPerHour() *float64
	SetMaxFailuresPerHour(val *float64)
	MaxFailuresPerHourInput() *float64
	MaxFailuresTotal() *float64
	SetMaxFailuresTotal(val *float64)
	MaxFailuresTotalInput() *float64
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobSchedulingOutputReference ¶
// NewGoogleDataprocJobSchedulingOutputReference constructs an output reference
// bound to the given parent resource and attribute name.
func NewGoogleDataprocJobSchedulingOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobSchedulingOutputReference
type GoogleDataprocJobSparkConfig ¶
// GoogleDataprocJobSparkConfig models the `spark_config` block of the
// google_dataproc_job Terraform resource.
type GoogleDataprocJobSparkConfig struct {
	// HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#archive_uris GoogleDataprocJob#archive_uris}
	ArchiveUris *[]*string `field:"optional" json:"archiveUris" yaml:"archiveUris"`
	// The arguments to pass to the driver.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#args GoogleDataprocJob#args}
	Args *[]*string `field:"optional" json:"args" yaml:"args"`
	// HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks.
	//
	// Useful for naively parallel tasks.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#file_uris GoogleDataprocJob#file_uris}
	FileUris *[]*string `field:"optional" json:"fileUris" yaml:"fileUris"`
	// HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#jar_file_uris GoogleDataprocJob#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#logging_config GoogleDataprocJob#logging_config}
	LoggingConfig *GoogleDataprocJobSparkConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// The class containing the main method of the driver.
	//
	// Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#main_class GoogleDataprocJob#main_class}
	MainClass *string `field:"optional" json:"mainClass" yaml:"mainClass"`
	// The HCFS URI of jar file containing the driver jar. Conflicts with main_class.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#main_jar_file_uri GoogleDataprocJob#main_jar_file_uri}
	MainJarFileUri *string `field:"optional" json:"mainJarFileUri" yaml:"mainJarFileUri"`
	// A mapping of property names to values, used to configure Spark.
	//
	// Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
}
type GoogleDataprocJobSparkConfigLoggingConfig ¶
// GoogleDataprocJobSparkConfigLoggingConfig models the nested `logging_config`
// block of `spark_config` on the google_dataproc_job resource.
type GoogleDataprocJobSparkConfigLoggingConfig struct {
	// Optional.
	//
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#driver_log_levels GoogleDataprocJob#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type GoogleDataprocJobSparkConfigLoggingConfigOutputReference ¶
// GoogleDataprocJobSparkConfigLoggingConfigOutputReference provides typed
// attribute access for the synthesized `spark_config.logging_config` block.
type GoogleDataprocJobSparkConfigLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobSparkConfigLoggingConfig
	SetInternalValue(val *GoogleDataprocJobSparkConfigLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobSparkConfigLoggingConfigOutputReference ¶
// NewGoogleDataprocJobSparkConfigLoggingConfigOutputReference constructs an
// output reference bound to the given parent resource and attribute name.
func NewGoogleDataprocJobSparkConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobSparkConfigLoggingConfigOutputReference
type GoogleDataprocJobSparkConfigOutputReference ¶
// GoogleDataprocJobSparkConfigOutputReference provides typed attribute access
// (getters, setters, *Input views, Put*/Reset* mutators) for the synthesized
// `spark_config` block.
type GoogleDataprocJobSparkConfigOutputReference interface {
	cdktf.ComplexObject
	ArchiveUris() *[]*string
	SetArchiveUris(val *[]*string)
	ArchiveUrisInput() *[]*string
	Args() *[]*string
	SetArgs(val *[]*string)
	ArgsInput() *[]*string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	FileUris() *[]*string
	SetFileUris(val *[]*string)
	FileUrisInput() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobSparkConfig
	SetInternalValue(val *GoogleDataprocJobSparkConfig)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() GoogleDataprocJobSparkConfigLoggingConfigOutputReference
	LoggingConfigInput() *GoogleDataprocJobSparkConfigLoggingConfig
	MainClass() *string
	SetMainClass(val *string)
	MainClassInput() *string
	MainJarFileUri() *string
	SetMainJarFileUri(val *string)
	MainJarFileUriInput() *string
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *GoogleDataprocJobSparkConfigLoggingConfig)
	ResetArchiveUris()
	ResetArgs()
	ResetFileUris()
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetMainClass()
	ResetMainJarFileUri()
	ResetProperties()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobSparkConfigOutputReference ¶
// NewGoogleDataprocJobSparkConfigOutputReference constructs an output
// reference bound to the given parent resource and attribute name.
func NewGoogleDataprocJobSparkConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobSparkConfigOutputReference
type GoogleDataprocJobSparksqlConfig ¶
// GoogleDataprocJobSparksqlConfig models the `sparksql_config` block of the
// google_dataproc_job Terraform resource.
type GoogleDataprocJobSparksqlConfig struct {
	// HCFS URIs of jar files to be added to the Spark CLASSPATH.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#jar_file_uris GoogleDataprocJob#jar_file_uris}
	JarFileUris *[]*string `field:"optional" json:"jarFileUris" yaml:"jarFileUris"`
	// logging_config block.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#logging_config GoogleDataprocJob#logging_config}
	LoggingConfig *GoogleDataprocJobSparksqlConfigLoggingConfig `field:"optional" json:"loggingConfig" yaml:"loggingConfig"`
	// A mapping of property names to values, used to configure Spark SQL's SparkConf.
	//
	// Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#properties GoogleDataprocJob#properties}
	Properties *map[string]*string `field:"optional" json:"properties" yaml:"properties"`
	// The HCFS URI of the script that contains SQL queries. Conflicts with query_list.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_file_uri GoogleDataprocJob#query_file_uri}
	QueryFileUri *string `field:"optional" json:"queryFileUri" yaml:"queryFileUri"`
	// The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#query_list GoogleDataprocJob#query_list}
	QueryList *[]*string `field:"optional" json:"queryList" yaml:"queryList"`
	// Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#script_variables GoogleDataprocJob#script_variables}
	ScriptVariables *map[string]*string `field:"optional" json:"scriptVariables" yaml:"scriptVariables"`
}
type GoogleDataprocJobSparksqlConfigLoggingConfig ¶
// GoogleDataprocJobSparksqlConfigLoggingConfig models the nested
// `logging_config` block of `sparksql_config` on the google_dataproc_job resource.
type GoogleDataprocJobSparksqlConfigLoggingConfig struct {
	// Optional.
	//
	// The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
	//
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#driver_log_levels GoogleDataprocJob#driver_log_levels}
	DriverLogLevels *map[string]*string `field:"required" json:"driverLogLevels" yaml:"driverLogLevels"`
}
type GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference ¶
// GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference provides typed
// attribute access for the synthesized `sparksql_config.logging_config` block.
type GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DriverLogLevels() *map[string]*string
	SetDriverLogLevels(val *map[string]*string)
	DriverLogLevelsInput() *map[string]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobSparksqlConfigLoggingConfig
	SetInternalValue(val *GoogleDataprocJobSparksqlConfigLoggingConfig)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobSparksqlConfigLoggingConfigOutputReference ¶
// NewGoogleDataprocJobSparksqlConfigLoggingConfigOutputReference constructs an
// output reference bound to the given parent resource and attribute name.
func NewGoogleDataprocJobSparksqlConfigLoggingConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference
type GoogleDataprocJobSparksqlConfigOutputReference ¶
// GoogleDataprocJobSparksqlConfigOutputReference provides typed attribute
// access (getters, setters, *Input views, Put*/Reset* mutators) for the
// synthesized `sparksql_config` block.
type GoogleDataprocJobSparksqlConfigOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobSparksqlConfig
	SetInternalValue(val *GoogleDataprocJobSparksqlConfig)
	JarFileUris() *[]*string
	SetJarFileUris(val *[]*string)
	JarFileUrisInput() *[]*string
	LoggingConfig() GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference
	LoggingConfigInput() *GoogleDataprocJobSparksqlConfigLoggingConfig
	Properties() *map[string]*string
	SetProperties(val *map[string]*string)
	PropertiesInput() *map[string]*string
	QueryFileUri() *string
	SetQueryFileUri(val *string)
	QueryFileUriInput() *string
	QueryList() *[]*string
	SetQueryList(val *[]*string)
	QueryListInput() *[]*string
	ScriptVariables() *map[string]*string
	SetScriptVariables(val *map[string]*string)
	ScriptVariablesInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLoggingConfig(value *GoogleDataprocJobSparksqlConfigLoggingConfig)
	ResetJarFileUris()
	ResetLoggingConfig()
	ResetProperties()
	ResetQueryFileUri()
	ResetQueryList()
	ResetScriptVariables()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobSparksqlConfigOutputReference ¶
// NewGoogleDataprocJobSparksqlConfigOutputReference constructs an output
// reference bound to the given parent resource and attribute name.
func NewGoogleDataprocJobSparksqlConfigOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobSparksqlConfigOutputReference
type GoogleDataprocJobStatus ¶
// GoogleDataprocJobStatus models the `status` attribute of the
// google_dataproc_job resource. It has no configurable fields; its values
// (state, details, etc.) are read via GoogleDataprocJobStatusOutputReference.
type GoogleDataprocJobStatus struct {
}
type GoogleDataprocJobStatusList ¶
// GoogleDataprocJobStatusList is a typed list view over the resource's
// `status` entries; individual items are fetched with Get.
type GoogleDataprocJobStatusList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) GoogleDataprocJobStatusOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobStatusList ¶
func NewGoogleDataprocJobStatusList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GoogleDataprocJobStatusList
type GoogleDataprocJobStatusOutputReference ¶
type GoogleDataprocJobStatusOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Details() *string
	// Experimental.
	Fqn() *string
	InternalValue() *GoogleDataprocJobStatus
	SetInternalValue(val *GoogleDataprocJobStatus)
	State() *string
	StateStartTime() *string
	Substate() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobStatusOutputReference ¶
func NewGoogleDataprocJobStatusOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) GoogleDataprocJobStatusOutputReference
type GoogleDataprocJobTimeouts ¶
type GoogleDataprocJobTimeouts struct {
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#create GoogleDataprocJob#create}.
	Create *string `field:"optional" json:"create" yaml:"create"`
	// Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_dataproc_job#delete GoogleDataprocJob#delete}.
	Delete *string `field:"optional" json:"delete" yaml:"delete"`
}
type GoogleDataprocJobTimeoutsOutputReference ¶
type GoogleDataprocJobTimeoutsOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	Create() *string
	SetCreate(val *string)
	CreateInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Delete() *string
	SetDelete(val *string)
	DeleteInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetCreate()
	ResetDelete()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
func NewGoogleDataprocJobTimeoutsOutputReference ¶
func NewGoogleDataprocJobTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleDataprocJobTimeoutsOutputReference
Source Files
¶
- GoogleDataprocJob.go
- GoogleDataprocJobConfig.go
- GoogleDataprocJobHadoopConfig.go
- GoogleDataprocJobHadoopConfigLoggingConfig.go
- GoogleDataprocJobHadoopConfigLoggingConfigOutputReference.go
- GoogleDataprocJobHadoopConfigLoggingConfigOutputReference__checks.go
- GoogleDataprocJobHadoopConfigOutputReference.go
- GoogleDataprocJobHadoopConfigOutputReference__checks.go
- GoogleDataprocJobHiveConfig.go
- GoogleDataprocJobHiveConfigOutputReference.go
- GoogleDataprocJobHiveConfigOutputReference__checks.go
- GoogleDataprocJobPigConfig.go
- GoogleDataprocJobPigConfigLoggingConfig.go
- GoogleDataprocJobPigConfigLoggingConfigOutputReference.go
- GoogleDataprocJobPigConfigLoggingConfigOutputReference__checks.go
- GoogleDataprocJobPigConfigOutputReference.go
- GoogleDataprocJobPigConfigOutputReference__checks.go
- GoogleDataprocJobPlacement.go
- GoogleDataprocJobPlacementOutputReference.go
- GoogleDataprocJobPlacementOutputReference__checks.go
- GoogleDataprocJobPrestoConfig.go
- GoogleDataprocJobPrestoConfigLoggingConfig.go
- GoogleDataprocJobPrestoConfigLoggingConfigOutputReference.go
- GoogleDataprocJobPrestoConfigLoggingConfigOutputReference__checks.go
- GoogleDataprocJobPrestoConfigOutputReference.go
- GoogleDataprocJobPrestoConfigOutputReference__checks.go
- GoogleDataprocJobPysparkConfig.go
- GoogleDataprocJobPysparkConfigLoggingConfig.go
- GoogleDataprocJobPysparkConfigLoggingConfigOutputReference.go
- GoogleDataprocJobPysparkConfigLoggingConfigOutputReference__checks.go
- GoogleDataprocJobPysparkConfigOutputReference.go
- GoogleDataprocJobPysparkConfigOutputReference__checks.go
- GoogleDataprocJobReference.go
- GoogleDataprocJobReferenceOutputReference.go
- GoogleDataprocJobReferenceOutputReference__checks.go
- GoogleDataprocJobScheduling.go
- GoogleDataprocJobSchedulingOutputReference.go
- GoogleDataprocJobSchedulingOutputReference__checks.go
- GoogleDataprocJobSparkConfig.go
- GoogleDataprocJobSparkConfigLoggingConfig.go
- GoogleDataprocJobSparkConfigLoggingConfigOutputReference.go
- GoogleDataprocJobSparkConfigLoggingConfigOutputReference__checks.go
- GoogleDataprocJobSparkConfigOutputReference.go
- GoogleDataprocJobSparkConfigOutputReference__checks.go
- GoogleDataprocJobSparksqlConfig.go
- GoogleDataprocJobSparksqlConfigLoggingConfig.go
- GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference.go
- GoogleDataprocJobSparksqlConfigLoggingConfigOutputReference__checks.go
- GoogleDataprocJobSparksqlConfigOutputReference.go
- GoogleDataprocJobSparksqlConfigOutputReference__checks.go
- GoogleDataprocJobStatus.go
- GoogleDataprocJobStatusList.go
- GoogleDataprocJobStatusList__checks.go
- GoogleDataprocJobStatusOutputReference.go
- GoogleDataprocJobStatusOutputReference__checks.go
- GoogleDataprocJobTimeouts.go
- GoogleDataprocJobTimeoutsOutputReference.go
- GoogleDataprocJobTimeoutsOutputReference__checks.go
- GoogleDataprocJob__checks.go
- main.go