datapipelinepipeline

package
v14.13.1
Published: Dec 19, 2024 License: MPL-2.0 Imports: 7 Imported by: 0

README

google_data_pipeline_pipeline

Refer to the Terraform Registry for docs: google_data_pipeline_pipeline.

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func DataPipelinePipeline_GenerateConfigForImport

func DataPipelinePipeline_GenerateConfigForImport(scope constructs.Construct, importToId *string, importFromId *string, provider cdktf.TerraformProvider) cdktf.ImportableResource

Generates CDKTF code for importing a DataPipelinePipeline resource upon running "cdktf plan <stack-name>".
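
For example, a minimal sketch of registering the import inside a stack constructor. The package import path, the jsii helper, and the pipeline ID format are assumptions based on the usual cdktf-provider-google-go module layout, not confirmed by this page:

package main

import (
	"github.com/aws/jsii-runtime-go"
	"github.com/hashicorp/terraform-cdk-go/cdktf"

	"github.com/cdktf/cdktf-provider-google-go/google/v14/datapipelinepipeline"
)

// registerImport wires up the import so that "cdktf plan <stack-name>"
// emits generated configuration for the existing pipeline.
func registerImport(stack cdktf.TerraformStack) {
	datapipelinepipeline.DataPipelinePipeline_GenerateConfigForImport(
		stack,
		// importToId: logical ID for the generated resource block.
		jsii.String("imported_pipeline"),
		// importFromId: assumed projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID format.
		jsii.String("projects/my-project/locations/us-central1/pipelines/my-pipeline"),
		// provider: nil falls back to the stack's default google provider.
		nil,
	)
}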

func DataPipelinePipeline_IsConstruct

func DataPipelinePipeline_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.

Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof` and to use this type-testing method instead.

Returns: true if `x` is an object created from a class which extends `Construct`.
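
A small sketch of the guard this enables (assuming the package is imported as datapipelinepipeline, as in the earlier sketch):

// isPipelineConstruct prefers the type-testing helper over a Go type
// assertion, since x may originate from a symlinked copy of constructs.
func isPipelineConstruct(x interface{}) bool {
	ok := datapipelinepipeline.DataPipelinePipeline_IsConstruct(x)
	return ok != nil && *ok
}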

func DataPipelinePipeline_IsTerraformElement

func DataPipelinePipeline_IsTerraformElement(x interface{}) *bool

Experimental.

func DataPipelinePipeline_IsTerraformResource

func DataPipelinePipeline_IsTerraformResource(x interface{}) *bool

Experimental.

func DataPipelinePipeline_TfResourceType

func DataPipelinePipeline_TfResourceType() *string

func NewDataPipelinePipelineScheduleInfoOutputReference_Override

func NewDataPipelinePipelineScheduleInfoOutputReference_Override(d DataPipelinePipelineScheduleInfoOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineTimeoutsOutputReference_Override

func NewDataPipelinePipelineTimeoutsOutputReference_Override(d DataPipelinePipelineTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference_Override

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference_Override(d DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference_Override

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference_Override(d DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference_Override

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference_Override(d DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference_Override

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference_Override(d DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference_Override

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference_Override(d DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference_Override

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference_Override(d DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipelineWorkloadOutputReference_Override

func NewDataPipelinePipelineWorkloadOutputReference_Override(d DataPipelinePipelineWorkloadOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewDataPipelinePipeline_Override

func NewDataPipelinePipeline_Override(d DataPipelinePipeline, scope constructs.Construct, id *string, config *DataPipelinePipelineConfig)

Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline google_data_pipeline_pipeline} Resource.

Types

type DataPipelinePipeline

type DataPipelinePipeline interface {
	cdktf.TerraformResource
	// Experimental.
	CdktfStack() cdktf.TerraformStack
	// Experimental.
	Connection() interface{}
	// Experimental.
	SetConnection(val interface{})
	// Experimental.
	ConstructNodeMetadata() *map[string]interface{}
	// Experimental.
	Count() interface{}
	// Experimental.
	SetCount(val interface{})
	CreateTime() *string
	// Experimental.
	DependsOn() *[]*string
	// Experimental.
	SetDependsOn(val *[]*string)
	DisplayName() *string
	SetDisplayName(val *string)
	DisplayNameInput() *string
	// Experimental.
	ForEach() cdktf.ITerraformIterator
	// Experimental.
	SetForEach(val cdktf.ITerraformIterator)
	// Experimental.
	Fqn() *string
	// Experimental.
	FriendlyUniqueId() *string
	Id() *string
	SetId(val *string)
	IdInput() *string
	JobCount() *float64
	LastUpdateTime() *string
	// Experimental.
	Lifecycle() *cdktf.TerraformResourceLifecycle
	// Experimental.
	SetLifecycle(val *cdktf.TerraformResourceLifecycle)
	Name() *string
	SetName(val *string)
	NameInput() *string
	// The tree node.
	Node() constructs.Node
	PipelineSources() *map[string]*string
	SetPipelineSources(val *map[string]*string)
	PipelineSourcesInput() *map[string]*string
	Project() *string
	SetProject(val *string)
	ProjectInput() *string
	// Experimental.
	Provider() cdktf.TerraformProvider
	// Experimental.
	SetProvider(val cdktf.TerraformProvider)
	// Experimental.
	Provisioners() *[]interface{}
	// Experimental.
	SetProvisioners(val *[]interface{})
	// Experimental.
	RawOverrides() interface{}
	Region() *string
	SetRegion(val *string)
	RegionInput() *string
	ScheduleInfo() DataPipelinePipelineScheduleInfoOutputReference
	ScheduleInfoInput() *DataPipelinePipelineScheduleInfo
	SchedulerServiceAccountEmail() *string
	SetSchedulerServiceAccountEmail(val *string)
	SchedulerServiceAccountEmailInput() *string
	State() *string
	SetState(val *string)
	StateInput() *string
	// Experimental.
	TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata
	// Experimental.
	TerraformMetaArguments() *map[string]interface{}
	// Experimental.
	TerraformResourceType() *string
	Timeouts() DataPipelinePipelineTimeoutsOutputReference
	TimeoutsInput() interface{}
	Type() *string
	SetType(val *string)
	TypeInput() *string
	Workload() DataPipelinePipelineWorkloadOutputReference
	WorkloadInput() *DataPipelinePipelineWorkload
	// Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move.
	// Experimental.
	AddMoveTarget(moveTarget *string)
	// Experimental.
	AddOverride(path *string, value interface{})
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	HasResourceMove() interface{}
	// Experimental.
	ImportFrom(id *string, provider cdktf.TerraformProvider)
	// Experimental.
	InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable
	// Move the resource corresponding to "id" to this resource.
	//
	// Note that the resource being moved from must be marked as moved using its instance function.
	// Experimental.
	MoveFromId(id *string)
	// Moves this resource to the target resource given by moveTarget.
	// Experimental.
	MoveTo(moveTarget *string, index interface{})
	// Moves this resource to the resource corresponding to "id".
	// Experimental.
	MoveToId(id *string)
	// Overrides the auto-generated logical ID with a specific ID.
	// Experimental.
	OverrideLogicalId(newLogicalId *string)
	PutScheduleInfo(value *DataPipelinePipelineScheduleInfo)
	PutTimeouts(value *DataPipelinePipelineTimeouts)
	PutWorkload(value *DataPipelinePipelineWorkload)
	ResetDisplayName()
	ResetId()
	// Resets a previously passed logical Id to use the auto-generated logical id again.
	// Experimental.
	ResetOverrideLogicalId()
	ResetPipelineSources()
	ResetProject()
	ResetRegion()
	ResetScheduleInfo()
	ResetSchedulerServiceAccountEmail()
	ResetTimeouts()
	ResetWorkload()
	SynthesizeAttributes() *map[string]interface{}
	SynthesizeHclAttributes() *map[string]interface{}
	// Experimental.
	ToHclTerraform() interface{}
	// Experimental.
	ToMetadata() interface{}
	// Returns a string representation of this construct.
	ToString() *string
	// Adds this resource to the terraform JSON output.
	// Experimental.
	ToTerraform() interface{}
}

Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline google_data_pipeline_pipeline}.

func NewDataPipelinePipeline

func NewDataPipelinePipeline(scope constructs.Construct, id *string, config *DataPipelinePipelineConfig) DataPipelinePipeline

Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline google_data_pipeline_pipeline} Resource.
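
A minimal creation sketch; every value below is a placeholder, and the imports and jsii helper are assumed as in the earlier import sketch:

func newPipeline(stack cdktf.TerraformStack) datapipelinepipeline.DataPipelinePipeline {
	return datapipelinepipeline.NewDataPipelinePipeline(stack, jsii.String("pipeline"), &datapipelinepipeline.DataPipelinePipelineConfig{
		// Required: name, state, and type.
		Name:  jsii.String("projects/my-project/locations/us-central1/pipelines/my-pipeline"),
		State: jsii.String("STATE_ACTIVE"),
		Type:  jsii.String("PIPELINE_TYPE_BATCH"),
		// Optional.
		DisplayName: jsii.String("my-pipeline"),
		Region:      jsii.String("us-central1"),
	})
}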

type DataPipelinePipelineConfig

type DataPipelinePipelineConfig struct {
	// Experimental.
	Connection interface{} `field:"optional" json:"connection" yaml:"connection"`
	// Experimental.
	Count interface{} `field:"optional" json:"count" yaml:"count"`
	// Experimental.
	DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"`
	// Experimental.
	ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"`
	// Experimental.
	Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"`
	// Experimental.
	Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"`
	// Experimental.
	Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"`
	// "The pipeline name.
	//
	// For example': 'projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID."
	// "- PROJECT_ID can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), and periods (.). For more information, see Identifying projects."
	// "LOCATION_ID is the canonical ID for the pipeline's location. The list of available locations can be obtained by calling google.cloud.location.Locations.ListLocations. Note that the Data Pipelines service is not available in all regions. It depends on Cloud Scheduler, an App Engine application, so it's only available in App Engine regions."
	// "PIPELINE_ID is the ID of the pipeline. Must be unique for the selected project and location."
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#name DataPipelinePipeline#name}
	Name *string `field:"required" json:"name" yaml:"name"`
	// The state of the pipeline.
	//
	// When the pipeline is created, the state is set to 'PIPELINE_STATE_ACTIVE' by default. State changes can be requested by setting the state to stopping, paused, or resuming. State cannot be changed through pipelines.patch requests.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#state Possible values: ["STATE_UNSPECIFIED", "STATE_RESUMING", "STATE_ACTIVE", "STATE_STOPPING", "STATE_ARCHIVED", "STATE_PAUSED"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#state DataPipelinePipeline#state}
	State *string `field:"required" json:"state" yaml:"state"`
	// The type of the pipeline.
	//
	// This field affects the scheduling of the pipeline and the type of metrics to show for the pipeline.
	// https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#pipelinetype Possible values: ["PIPELINE_TYPE_UNSPECIFIED", "PIPELINE_TYPE_BATCH", "PIPELINE_TYPE_STREAMING"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#type DataPipelinePipeline#type}
	Type *string `field:"required" json:"type" yaml:"type"`
	// The display name of the pipeline. It can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), and underscores (_).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#display_name DataPipelinePipeline#display_name}
	DisplayName *string `field:"optional" json:"displayName" yaml:"displayName"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#id DataPipelinePipeline#id}.
	//
	// Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
	// If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
	Id *string `field:"optional" json:"id" yaml:"id"`
	// The sources of the pipeline (for example, Dataplex).
	//
	// The keys and values are set by the corresponding sources during pipeline creation.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#pipeline_sources DataPipelinePipeline#pipeline_sources}
	PipelineSources *map[string]*string `field:"optional" json:"pipelineSources" yaml:"pipelineSources"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#project DataPipelinePipeline#project}.
	Project *string `field:"optional" json:"project" yaml:"project"`
	// A reference to the region.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#region DataPipelinePipeline#region}
	Region *string `field:"optional" json:"region" yaml:"region"`
	// schedule_info block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#schedule_info DataPipelinePipeline#schedule_info}
	ScheduleInfo *DataPipelinePipelineScheduleInfo `field:"optional" json:"scheduleInfo" yaml:"scheduleInfo"`
	// Optional.
	//
	// A service account email to be used with the Cloud Scheduler job. If not specified, the default Compute Engine service account will be used.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#scheduler_service_account_email DataPipelinePipeline#scheduler_service_account_email}
	SchedulerServiceAccountEmail *string `field:"optional" json:"schedulerServiceAccountEmail" yaml:"schedulerServiceAccountEmail"`
	// timeouts block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#timeouts DataPipelinePipeline#timeouts}
	Timeouts *DataPipelinePipelineTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"`
	// workload block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#workload DataPipelinePipeline#workload}
	Workload *DataPipelinePipelineWorkload `field:"optional" json:"workload" yaml:"workload"`
}

type DataPipelinePipelineScheduleInfo

type DataPipelinePipelineScheduleInfo struct {
	// Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#schedule DataPipelinePipeline#schedule}
	Schedule *string `field:"optional" json:"schedule" yaml:"schedule"`
	// Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#time_zone DataPipelinePipeline#time_zone}
	TimeZone *string `field:"optional" json:"timeZone" yaml:"timeZone"`
}
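
A sketch of attaching a schedule_info block to an existing pipeline via PutScheduleInfo; the cron expression and timezone are illustrative:

func addSchedule(pipeline datapipelinepipeline.DataPipelinePipeline) {
	pipeline.PutScheduleInfo(&datapipelinepipeline.DataPipelinePipelineScheduleInfo{
		Schedule: jsii.String("0 */6 * * *"), // Unix-cron: every six hours
		TimeZone: jsii.String("UTC"),
	})
}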

type DataPipelinePipelineScheduleInfoOutputReference

type DataPipelinePipelineScheduleInfoOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineScheduleInfo
	SetInternalValue(val *DataPipelinePipelineScheduleInfo)
	NextJobTime() *string
	Schedule() *string
	SetSchedule(val *string)
	ScheduleInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	TimeZone() *string
	SetTimeZone(val *string)
	TimeZoneInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetSchedule()
	ResetTimeZone()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineScheduleInfoOutputReference

func NewDataPipelinePipelineScheduleInfoOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineScheduleInfoOutputReference

type DataPipelinePipelineTimeouts

type DataPipelinePipelineTimeouts struct {
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#create DataPipelinePipeline#create}.
	Create *string `field:"optional" json:"create" yaml:"create"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#delete DataPipelinePipeline#delete}.
	Delete *string `field:"optional" json:"delete" yaml:"delete"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#update DataPipelinePipeline#update}.
	Update *string `field:"optional" json:"update" yaml:"update"`
}
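
A sketch of custom operation timeouts via PutTimeouts; the duration strings follow Terraform's usual "30m"/"1h" convention:

func addTimeouts(pipeline datapipelinepipeline.DataPipelinePipeline) {
	pipeline.PutTimeouts(&datapipelinepipeline.DataPipelinePipelineTimeouts{
		Create: jsii.String("20m"),
		Update: jsii.String("20m"),
		Delete: jsii.String("20m"),
	})
}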

type DataPipelinePipelineTimeoutsOutputReference

type DataPipelinePipelineTimeoutsOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	Create() *string
	SetCreate(val *string)
	CreateInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Delete() *string
	SetDelete(val *string)
	DeleteInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	Update() *string
	SetUpdate(val *string)
	UpdateInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetCreate()
	ResetDelete()
	ResetUpdate()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineTimeoutsOutputReference

func NewDataPipelinePipelineTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineTimeoutsOutputReference

type DataPipelinePipelineWorkload

type DataPipelinePipelineWorkload struct {
	// dataflow_flex_template_request block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#dataflow_flex_template_request DataPipelinePipeline#dataflow_flex_template_request}
	DataflowFlexTemplateRequest *DataPipelinePipelineWorkloadDataflowFlexTemplateRequest `field:"optional" json:"dataflowFlexTemplateRequest" yaml:"dataflowFlexTemplateRequest"`
	// dataflow_launch_template_request block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#dataflow_launch_template_request DataPipelinePipeline#dataflow_launch_template_request}
	DataflowLaunchTemplateRequest *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest `field:"optional" json:"dataflowLaunchTemplateRequest" yaml:"dataflowLaunchTemplateRequest"`
}

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequest

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequest struct {
	// launch_parameter block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#launch_parameter DataPipelinePipeline#launch_parameter}
	LaunchParameter *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter `field:"required" json:"launchParameter" yaml:"launchParameter"`
	// The regional endpoint to which to direct the request. For example, us-central1, us-west1.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#location DataPipelinePipeline#location}
	Location *string `field:"required" json:"location" yaml:"location"`
	// The ID of the Cloud Platform project that the job belongs to.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#project_id DataPipelinePipeline#project_id}
	ProjectId *string `field:"required" json:"projectId" yaml:"projectId"`
	// If true, the request is validated but not actually executed. Defaults to false.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#validate_only DataPipelinePipeline#validate_only}
	ValidateOnly interface{} `field:"optional" json:"validateOnly" yaml:"validateOnly"`
}

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter struct {
	// The job name to use for the created job.
	//
	// For an update job request, the job name should be the same as the existing running job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#job_name DataPipelinePipeline#job_name}
	JobName *string `field:"required" json:"jobName" yaml:"jobName"`
	// Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#container_spec_gcs_path DataPipelinePipeline#container_spec_gcs_path}
	ContainerSpecGcsPath *string `field:"optional" json:"containerSpecGcsPath" yaml:"containerSpecGcsPath"`
	// environment block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#environment DataPipelinePipeline#environment}
	Environment *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment `field:"optional" json:"environment" yaml:"environment"`
	// Launch options for this Flex Template job.
	//
	// This is a common set of options across languages and templates. This should not be used to pass job parameters.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#launch_options DataPipelinePipeline#launch_options}
	LaunchOptions *map[string]*string `field:"optional" json:"launchOptions" yaml:"launchOptions"`
	// The parameters for the Flex Template.
	//
	// Example: {"numWorkers":"5"}
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#parameters DataPipelinePipeline#parameters}
	Parameters *map[string]*string `field:"optional" json:"parameters" yaml:"parameters"`
	// Use this to pass transform name mappings for streaming update jobs.
	//
	// Example: {"oldTransformName":"newTransformName",...}
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#transform_name_mappings DataPipelinePipeline#transform_name_mappings}
	TransformNameMappings *map[string]*string `field:"optional" json:"transformNameMappings" yaml:"transformNameMappings"`
	// Set this to true if you are sending a request to update a running streaming job.
	//
	// When set, the job name should be the same as the running job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#update DataPipelinePipeline#update}
	Update interface{} `field:"optional" json:"update" yaml:"update"`
}

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment struct {
	// Additional experiment flags for the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#additional_experiments DataPipelinePipeline#additional_experiments}
	AdditionalExperiments *[]*string `field:"optional" json:"additionalExperiments" yaml:"additionalExperiments"`
	// Additional user labels to be specified for the job.
	//
	// Keys and values should follow the restrictions specified in the labeling restrictions page.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#additional_user_labels DataPipelinePipeline#additional_user_labels}
	AdditionalUserLabels *map[string]*string `field:"optional" json:"additionalUserLabels" yaml:"additionalUserLabels"`
	// Whether to enable Streaming Engine for the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#enable_streaming_engine DataPipelinePipeline#enable_streaming_engine}
	EnableStreamingEngine interface{} `field:"optional" json:"enableStreamingEngine" yaml:"enableStreamingEngine"`
	// Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexResourceSchedulingGoal Possible values: ["FLEXRS_UNSPECIFIED", "FLEXRS_SPEED_OPTIMIZED", "FLEXRS_COST_OPTIMIZED"].
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#flexrs_goal DataPipelinePipeline#flexrs_goal}
	FlexrsGoal *string `field:"optional" json:"flexrsGoal" yaml:"flexrsGoal"`
	// Configuration for VM IPs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values: ["WORKER_IP_UNSPECIFIED", "WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE"].
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#ip_configuration DataPipelinePipeline#ip_configuration}
	IpConfiguration *string `field:"optional" json:"ipConfiguration" yaml:"ipConfiguration"`
	// Name for the Cloud KMS key for the job. The key format is: projects//locations//keyRings//cryptoKeys/.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#kms_key_name DataPipelinePipeline#kms_key_name}
	KmsKeyName *string `field:"optional" json:"kmsKeyName" yaml:"kmsKeyName"`
	// The machine type to use for the job. Defaults to the value from the template if not specified.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#machine_type DataPipelinePipeline#machine_type}
	MachineType *string `field:"optional" json:"machineType" yaml:"machineType"`
	// The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#max_workers DataPipelinePipeline#max_workers}
	MaxWorkers *float64 `field:"optional" json:"maxWorkers" yaml:"maxWorkers"`
	// Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#network DataPipelinePipeline#network}
	Network *string `field:"optional" json:"network" yaml:"network"`
	// The initial number of Compute Engine instances for the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#num_workers DataPipelinePipeline#num_workers}
	NumWorkers *float64 `field:"optional" json:"numWorkers" yaml:"numWorkers"`
	// The email address of the service account to run the job as.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#service_account_email DataPipelinePipeline#service_account_email}
	ServiceAccountEmail *string `field:"optional" json:"serviceAccountEmail" yaml:"serviceAccountEmail"`
	// Subnetwork to which VMs will be assigned, if desired.
	//
	// You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#subnetwork DataPipelinePipeline#subnetwork}
	Subnetwork *string `field:"optional" json:"subnetwork" yaml:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#temp_location DataPipelinePipeline#temp_location}
	TempLocation *string `field:"optional" json:"tempLocation" yaml:"tempLocation"`
	// The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, defaults to the control plane's region.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#worker_region DataPipelinePipeline#worker_region}
	WorkerRegion *string `field:"optional" json:"workerRegion" yaml:"workerRegion"`
	// The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#worker_zone DataPipelinePipeline#worker_zone}
	WorkerZone *string `field:"optional" json:"workerZone" yaml:"workerZone"`
	// The Compute Engine availability zone for launching worker instances to run your pipeline.
	//
	// In the future, workerZone will take precedence.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#zone DataPipelinePipeline#zone}
	Zone *string `field:"optional" json:"zone" yaml:"zone"`
}
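
Tying the blocks above together, a sketch that launches a Dataflow Flex Template job through PutWorkload; project, bucket paths, and parameter values are placeholders:

func addFlexWorkload(pipeline datapipelinepipeline.DataPipelinePipeline) {
	pipeline.PutWorkload(&datapipelinepipeline.DataPipelinePipelineWorkload{
		DataflowFlexTemplateRequest: &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowFlexTemplateRequest{
			ProjectId: jsii.String("my-project"),
			Location:  jsii.String("us-central1"),
			LaunchParameter: &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter{
				JobName:              jsii.String("my-flex-job"),
				ContainerSpecGcsPath: jsii.String("gs://my-bucket/templates/spec.json"),
				Parameters: &map[string]*string{
					"numWorkers": jsii.String("5"),
				},
				Environment: &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment{
					MaxWorkers:   jsii.Number(10),
					TempLocation: jsii.String("gs://my-bucket/tmp"),
				},
			},
		},
	})
}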

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference interface {
	cdktf.ComplexObject
	AdditionalExperiments() *[]*string
	SetAdditionalExperiments(val *[]*string)
	AdditionalExperimentsInput() *[]*string
	AdditionalUserLabels() *map[string]*string
	SetAdditionalUserLabels(val *map[string]*string)
	AdditionalUserLabelsInput() *map[string]*string
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	EnableStreamingEngine() interface{}
	SetEnableStreamingEngine(val interface{})
	EnableStreamingEngineInput() interface{}
	FlexrsGoal() *string
	SetFlexrsGoal(val *string)
	FlexrsGoalInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment
	SetInternalValue(val *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment)
	IpConfiguration() *string
	SetIpConfiguration(val *string)
	IpConfigurationInput() *string
	KmsKeyName() *string
	SetKmsKeyName(val *string)
	KmsKeyNameInput() *string
	MachineType() *string
	SetMachineType(val *string)
	MachineTypeInput() *string
	MaxWorkers() *float64
	SetMaxWorkers(val *float64)
	MaxWorkersInput() *float64
	Network() *string
	SetNetwork(val *string)
	NetworkInput() *string
	NumWorkers() *float64
	SetNumWorkers(val *float64)
	NumWorkersInput() *float64
	ServiceAccountEmail() *string
	SetServiceAccountEmail(val *string)
	ServiceAccountEmailInput() *string
	Subnetwork() *string
	SetSubnetwork(val *string)
	SubnetworkInput() *string
	TempLocation() *string
	SetTempLocation(val *string)
	TempLocationInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	WorkerRegion() *string
	SetWorkerRegion(val *string)
	WorkerRegionInput() *string
	WorkerZone() *string
	SetWorkerZone(val *string)
	WorkerZoneInput() *string
	Zone() *string
	SetZone(val *string)
	ZoneInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetAdditionalExperiments()
	ResetAdditionalUserLabels()
	ResetEnableStreamingEngine()
	ResetFlexrsGoal()
	ResetIpConfiguration()
	ResetKmsKeyName()
	ResetMachineType()
	ResetMaxWorkers()
	ResetNetwork()
	ResetNumWorkers()
	ResetServiceAccountEmail()
	ResetSubnetwork()
	ResetTempLocation()
	ResetWorkerRegion()
	ResetWorkerZone()
	ResetZone()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	ContainerSpecGcsPath() *string
	SetContainerSpecGcsPath(val *string)
	ContainerSpecGcsPathInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Environment() DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference
	EnvironmentInput() *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter
	SetInternalValue(val *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter)
	JobName() *string
	SetJobName(val *string)
	JobNameInput() *string
	LaunchOptions() *map[string]*string
	SetLaunchOptions(val *map[string]*string)
	LaunchOptionsInput() *map[string]*string
	Parameters() *map[string]*string
	SetParameters(val *map[string]*string)
	ParametersInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	TransformNameMappings() *map[string]*string
	SetTransformNameMappings(val *map[string]*string)
	TransformNameMappingsInput() *map[string]*string
	Update() interface{}
	SetUpdate(val interface{})
	UpdateInput() interface{}
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutEnvironment(value *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment)
	ResetContainerSpecGcsPath()
	ResetEnvironment()
	ResetLaunchOptions()
	ResetParameters()
	ResetTransformNameMappings()
	ResetUpdate()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference

type DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineWorkloadDataflowFlexTemplateRequest
	SetInternalValue(val *DataPipelinePipelineWorkloadDataflowFlexTemplateRequest)
	LaunchParameter() DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference
	LaunchParameterInput() *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter
	Location() *string
	SetLocation(val *string)
	LocationInput() *string
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	ValidateOnly() interface{}
	SetValidateOnly(val interface{})
	ValidateOnlyInput() interface{}
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLaunchParameter(value *DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter)
	ResetValidateOnly()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference

func NewDataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest struct {
	// The ID of the Cloud Platform project that the job belongs to.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#project_id DataPipelinePipeline#project_id}
	ProjectId *string `field:"required" json:"projectId" yaml:"projectId"`
	// A Cloud Storage path to the template from which to create the job.
	//
	// Must be a valid Cloud Storage URL, beginning with 'gs://'.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#gcs_path DataPipelinePipeline#gcs_path}
	GcsPath *string `field:"optional" json:"gcsPath" yaml:"gcsPath"`
	// launch_parameters block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#launch_parameters DataPipelinePipeline#launch_parameters}
	LaunchParameters *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters `field:"optional" json:"launchParameters" yaml:"launchParameters"`
	// The regional endpoint to which to direct the request.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#location DataPipelinePipeline#location}
	Location *string `field:"optional" json:"location" yaml:"location"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#validate_only DataPipelinePipeline#validate_only}.
	ValidateOnly interface{} `field:"optional" json:"validateOnly" yaml:"validateOnly"`
}

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters struct {
	// The job name to use for the created job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#job_name DataPipelinePipeline#job_name}
	JobName *string `field:"required" json:"jobName" yaml:"jobName"`
	// environment block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#environment DataPipelinePipeline#environment}
	Environment *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment `field:"optional" json:"environment" yaml:"environment"`
	// The runtime parameters to pass to the job.
	//
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#parameters DataPipelinePipeline#parameters}
	Parameters *map[string]*string `field:"optional" json:"parameters" yaml:"parameters"`
	// Map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job.
	//
	// Only applicable when updating a pipeline.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#transform_name_mapping DataPipelinePipeline#transform_name_mapping}
	TransformNameMapping *map[string]*string `field:"optional" json:"transformNameMapping" yaml:"transformNameMapping"`
	// If set, replace the existing pipeline with the name specified by jobName with this pipeline, preserving state.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#update DataPipelinePipeline#update}
	Update interface{} `field:"optional" json:"update" yaml:"update"`
}
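
The classic-template counterpart: a sketch that launches from a template stored in Cloud Storage; gcsPath, job name, and parameters are placeholders:

func addClassicWorkload(pipeline datapipelinepipeline.DataPipelinePipeline) {
	pipeline.PutWorkload(&datapipelinepipeline.DataPipelinePipelineWorkload{
		DataflowLaunchTemplateRequest: &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest{
			ProjectId: jsii.String("my-project"),
			Location:  jsii.String("us-central1"),
			GcsPath:   jsii.String("gs://my-bucket/templates/my-classic-template"),
			LaunchParameters: &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters{
				JobName: jsii.String("my-template-job"),
				Parameters: &map[string]*string{
					"inputFile": jsii.String("gs://my-bucket/input.txt"),
					"output":    jsii.String("gs://my-bucket/output"),
				},
			},
		},
	})
}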

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment struct {
	// Additional experiment flags for the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#additional_experiments DataPipelinePipeline#additional_experiments}
	AdditionalExperiments *[]*string `field:"optional" json:"additionalExperiments" yaml:"additionalExperiments"`
	// Additional user labels to be specified for the job.
	//
	// Keys and values should follow the restrictions specified in the labeling restrictions page.
	// An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#additional_user_labels DataPipelinePipeline#additional_user_labels}
	AdditionalUserLabels *map[string]*string `field:"optional" json:"additionalUserLabels" yaml:"additionalUserLabels"`
	// Whether to bypass the safety checks for the job's temporary directory. Use with caution.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#bypass_temp_dir_validation DataPipelinePipeline#bypass_temp_dir_validation}
	BypassTempDirValidation interface{} `field:"optional" json:"bypassTempDirValidation" yaml:"bypassTempDirValidation"`
	// Whether to enable Streaming Engine for the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#enable_streaming_engine DataPipelinePipeline#enable_streaming_engine}
	EnableStreamingEngine interface{} `field:"optional" json:"enableStreamingEngine" yaml:"enableStreamingEngine"`
	// Configuration for VM IPs. https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values: ["WORKER_IP_UNSPECIFIED", "WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE"].
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#ip_configuration DataPipelinePipeline#ip_configuration}
	IpConfiguration *string `field:"optional" json:"ipConfiguration" yaml:"ipConfiguration"`
	// Name for the Cloud KMS key for the job. The key format is: projects//locations//keyRings//cryptoKeys/.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#kms_key_name DataPipelinePipeline#kms_key_name}
	KmsKeyName *string `field:"optional" json:"kmsKeyName" yaml:"kmsKeyName"`
	// The machine type to use for the job. Defaults to the value from the template if not specified.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#machine_type DataPipelinePipeline#machine_type}
	MachineType *string `field:"optional" json:"machineType" yaml:"machineType"`
	// The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#max_workers DataPipelinePipeline#max_workers}
	MaxWorkers *float64 `field:"optional" json:"maxWorkers" yaml:"maxWorkers"`
	// Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#network DataPipelinePipeline#network}
	Network *string `field:"optional" json:"network" yaml:"network"`
	// The initial number of Compute Engine instances for the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#num_workers DataPipelinePipeline#num_workers}
	NumWorkers *float64 `field:"optional" json:"numWorkers" yaml:"numWorkers"`
	// The email address of the service account to run the job as.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#service_account_email DataPipelinePipeline#service_account_email}
	ServiceAccountEmail *string `field:"optional" json:"serviceAccountEmail" yaml:"serviceAccountEmail"`
	// Subnetwork to which VMs will be assigned, if desired.
	//
	// You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#subnetwork DataPipelinePipeline#subnetwork}
	Subnetwork *string `field:"optional" json:"subnetwork" yaml:"subnetwork"`
	// The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#temp_location DataPipelinePipeline#temp_location}
	TempLocation *string `field:"optional" json:"tempLocation" yaml:"tempLocation"`
	// The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, default to the control plane's region.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#worker_region DataPipelinePipeline#worker_region}
	WorkerRegion *string `field:"optional" json:"workerRegion" yaml:"workerRegion"`
	// The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#worker_zone DataPipelinePipeline#worker_zone}
	WorkerZone *string `field:"optional" json:"workerZone" yaml:"workerZone"`
	// The Compute Engine availability zone for launching worker instances to run your pipeline.
	//
	// In the future, workerZone will take precedence.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.14.1/docs/resources/data_pipeline_pipeline#zone DataPipelinePipeline#zone}
	Zone *string `field:"optional" json:"zone" yaml:"zone"`
}
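
As a hedged sketch (reusing the imports from the example above; the bucket, sizing, and region values are placeholders), a typical environment block pins worker counts, a temp location, and private IPs:

env := &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment{
	// Start with two workers and let autoscaling grow to ten (valid range 1 to 1000).
	NumWorkers: jsii.Number(2),
	MaxWorkers: jsii.Number(10),
	// Temporary files must live under a gs:// path.
	TempLocation: jsii.String("gs://example-bucket/tmp"),
	// Keep workers off public IPs.
	IpConfiguration: jsii.String("WORKER_IP_PRIVATE"),
	// interface{} fields accept a plain bool or a cdktf.IResolvable.
	EnableStreamingEngine: true,
	WorkerRegion:          jsii.String("us-west1"),
}

Note that workerRegion and workerZone are mutually exclusive, so only one of the two should be set.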

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference interface {
	cdktf.ComplexObject
	AdditionalExperiments() *[]*string
	SetAdditionalExperiments(val *[]*string)
	AdditionalExperimentsInput() *[]*string
	AdditionalUserLabels() *map[string]*string
	SetAdditionalUserLabels(val *map[string]*string)
	AdditionalUserLabelsInput() *map[string]*string
	BypassTempDirValidation() interface{}
	SetBypassTempDirValidation(val interface{})
	BypassTempDirValidationInput() interface{}
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	EnableStreamingEngine() interface{}
	SetEnableStreamingEngine(val interface{})
	EnableStreamingEngineInput() interface{}
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment
	SetInternalValue(val *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment)
	IpConfiguration() *string
	SetIpConfiguration(val *string)
	IpConfigurationInput() *string
	KmsKeyName() *string
	SetKmsKeyName(val *string)
	KmsKeyNameInput() *string
	MachineType() *string
	SetMachineType(val *string)
	MachineTypeInput() *string
	MaxWorkers() *float64
	SetMaxWorkers(val *float64)
	MaxWorkersInput() *float64
	Network() *string
	SetNetwork(val *string)
	NetworkInput() *string
	NumWorkers() *float64
	SetNumWorkers(val *float64)
	NumWorkersInput() *float64
	ServiceAccountEmail() *string
	SetServiceAccountEmail(val *string)
	ServiceAccountEmailInput() *string
	Subnetwork() *string
	SetSubnetwork(val *string)
	SubnetworkInput() *string
	TempLocation() *string
	SetTempLocation(val *string)
	TempLocationInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	WorkerRegion() *string
	SetWorkerRegion(val *string)
	WorkerRegionInput() *string
	WorkerZone() *string
	SetWorkerZone(val *string)
	WorkerZoneInput() *string
	Zone() *string
	SetZone(val *string)
	ZoneInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetAdditionalExperiments()
	ResetAdditionalUserLabels()
	ResetBypassTempDirValidation()
	ResetEnableStreamingEngine()
	ResetIpConfiguration()
	ResetKmsKeyName()
	ResetMachineType()
	ResetMaxWorkers()
	ResetNetwork()
	ResetNumWorkers()
	ResetServiceAccountEmail()
	ResetSubnetwork()
	ResetTempLocation()
	ResetWorkerRegion()
	ResetWorkerZone()
	ResetZone()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference
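
This constructor is rarely called directly; the reference is normally reached from the resource and mutated through its setters. A sketch, assuming a DataPipelinePipeline resource named pipeline created elsewhere and the imports from the first example:

// Navigate from the resource down to the nested environment block.
env := pipeline.Workload().DataflowLaunchTemplateRequest().LaunchParameters().Environment()

// Setters stage changes on the synthesized configuration.
env.SetMaxWorkers(jsii.Number(20))
env.SetWorkerZone(jsii.String("us-west1-a"))

// Reset* clears an optional attribute back to the provider default.
env.ResetAdditionalExperiments()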

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Environment() DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference
	EnvironmentInput() *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters
	SetInternalValue(val *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters)
	JobName() *string
	SetJobName(val *string)
	JobNameInput() *string
	Parameters() *map[string]*string
	SetParameters(val *map[string]*string)
	ParametersInput() *map[string]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	TransformNameMapping() *map[string]*string
	SetTransformNameMapping(val *map[string]*string)
	TransformNameMappingInput() *map[string]*string
	Update() interface{}
	SetUpdate(val interface{})
	UpdateInput() interface{}
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutEnvironment(value *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment)
	ResetEnvironment()
	ResetParameters()
	ResetTransformNameMapping()
	ResetUpdate()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference
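
PutEnvironment replaces the entire nested environment block in one call, which is convenient when several fields change together. Another sketch under the same assumptions as above:

lp := pipeline.Workload().DataflowLaunchTemplateRequest().LaunchParameters()

// Swap in a fresh environment block wholesale.
lp.PutEnvironment(&datapipelinepipeline.DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment{
	TempLocation: jsii.String("gs://example-bucket/tmp"),
	MaxWorkers:   jsii.Number(5),
})

// Clear optional attributes that no longer apply.
lp.ResetTransformNameMapping()
lp.ResetUpdate()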

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference

type DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	GcsPath() *string
	SetGcsPath(val *string)
	GcsPathInput() *string
	InternalValue() *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest
	SetInternalValue(val *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest)
	LaunchParameters() DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference
	LaunchParametersInput() *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters
	Location() *string
	SetLocation(val *string)
	LocationInput() *string
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	ValidateOnly() interface{}
	SetValidateOnly(val interface{})
	ValidateOnlyInput() interface{}
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutLaunchParameters(value *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters)
	ResetGcsPath()
	ResetLaunchParameters()
	ResetLocation()
	ResetValidateOnly()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference

func NewDataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference

type DataPipelinePipelineWorkloadOutputReference

type DataPipelinePipelineWorkloadOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DataflowFlexTemplateRequest() DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference
	DataflowFlexTemplateRequestInput() *DataPipelinePipelineWorkloadDataflowFlexTemplateRequest
	DataflowLaunchTemplateRequest() DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference
	DataflowLaunchTemplateRequestInput() *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest
	// Experimental.
	Fqn() *string
	InternalValue() *DataPipelinePipelineWorkload
	SetInternalValue(val *DataPipelinePipelineWorkload)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutDataflowFlexTemplateRequest(value *DataPipelinePipelineWorkloadDataflowFlexTemplateRequest)
	PutDataflowLaunchTemplateRequest(value *DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest)
	ResetDataflowFlexTemplateRequest()
	ResetDataflowLaunchTemplateRequest()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDataPipelinePipelineWorkloadOutputReference

func NewDataPipelinePipelineWorkloadOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) DataPipelinePipelineWorkloadOutputReference
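
The two request blocks are alternatives in practice, so switching launch styles pairs a Reset with a Put. A final sketch, assuming the same pipeline resource and imports; the project, bucket, and template path below are placeholders:

w := pipeline.Workload()

// Move from a flex-template launch to a classic template launch.
w.ResetDataflowFlexTemplateRequest()
w.PutDataflowLaunchTemplateRequest(&datapipelinepipeline.DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest{
	// ProjectId is the only field here without a Reset method, i.e. the only required one.
	ProjectId: jsii.String("example-project"),
	GcsPath:   jsii.String("gs://example-bucket/templates/word-count"),
	LaunchParameters: &datapipelinepipeline.DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters{
		JobName: jsii.String("example-job"),
	},
	// ValidateOnly is interface{}; true asks the service to validate without launching.
	ValidateOnly: true,
})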

Source Files

Directories

