bigqueryjob

package
v8.0.1 Latest
Warning

This package is not in the latest version of its module.

Published: Jun 21, 2023 License: MPL-2.0 Imports: 7 Imported by: 0

README

google_bigquery_job

Refer to the Terraform Registry for docs: google_bigquery_job.

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func BigqueryJob_IsConstruct

func BigqueryJob_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.

Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof` and to use this type-testing method instead.

Returns: true if `x` is an object created from a class which extends `Construct`.
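
For example, a minimal sketch of using this check in place of a type assertion (the module import path is an assumption based on the package layout):

package main

// Assumed import path for this generated package.
import "github.com/cdktf/cdktf-provider-google-go/google/v8/bigqueryjob"

// isConstruct dereferences the *bool returned by the jsii binding.
func isConstruct(x interface{}) bool {
	return *bigqueryjob.BigqueryJob_IsConstruct(x)
}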

func BigqueryJob_IsTerraformElement

func BigqueryJob_IsTerraformElement(x interface{}) *bool

Experimental.

func BigqueryJob_IsTerraformResource

func BigqueryJob_IsTerraformResource(x interface{}) *bool

Experimental.

func BigqueryJob_TfResourceType

func BigqueryJob_TfResourceType() *string

func NewBigqueryJobCopyDestinationEncryptionConfigurationOutputReference_Override

func NewBigqueryJobCopyDestinationEncryptionConfigurationOutputReference_Override(b BigqueryJobCopyDestinationEncryptionConfigurationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobCopyDestinationTableOutputReference_Override

func NewBigqueryJobCopyDestinationTableOutputReference_Override(b BigqueryJobCopyDestinationTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobCopyOutputReference_Override

func NewBigqueryJobCopyOutputReference_Override(b BigqueryJobCopyOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobCopySourceTablesList_Override

func NewBigqueryJobCopySourceTablesList_Override(b BigqueryJobCopySourceTablesList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)

func NewBigqueryJobCopySourceTablesOutputReference_Override

func NewBigqueryJobCopySourceTablesOutputReference_Override(b BigqueryJobCopySourceTablesOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)

func NewBigqueryJobExtractOutputReference_Override

func NewBigqueryJobExtractOutputReference_Override(b BigqueryJobExtractOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobExtractSourceModelOutputReference_Override

func NewBigqueryJobExtractSourceModelOutputReference_Override(b BigqueryJobExtractSourceModelOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobExtractSourceTableOutputReference_Override

func NewBigqueryJobExtractSourceTableOutputReference_Override(b BigqueryJobExtractSourceTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobLoadDestinationEncryptionConfigurationOutputReference_Override

func NewBigqueryJobLoadDestinationEncryptionConfigurationOutputReference_Override(b BigqueryJobLoadDestinationEncryptionConfigurationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobLoadDestinationTableOutputReference_Override

func NewBigqueryJobLoadDestinationTableOutputReference_Override(b BigqueryJobLoadDestinationTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobLoadOutputReference_Override

func NewBigqueryJobLoadOutputReference_Override(b BigqueryJobLoadOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobLoadParquetOptionsOutputReference_Override

func NewBigqueryJobLoadParquetOptionsOutputReference_Override(b BigqueryJobLoadParquetOptionsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobLoadTimePartitioningOutputReference_Override

func NewBigqueryJobLoadTimePartitioningOutputReference_Override(b BigqueryJobLoadTimePartitioningOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobQueryDefaultDatasetOutputReference_Override

func NewBigqueryJobQueryDefaultDatasetOutputReference_Override(b BigqueryJobQueryDefaultDatasetOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobQueryDestinationEncryptionConfigurationOutputReference_Override

func NewBigqueryJobQueryDestinationEncryptionConfigurationOutputReference_Override(b BigqueryJobQueryDestinationEncryptionConfigurationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobQueryDestinationTableOutputReference_Override

func NewBigqueryJobQueryDestinationTableOutputReference_Override(b BigqueryJobQueryDestinationTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobQueryOutputReference_Override

func NewBigqueryJobQueryOutputReference_Override(b BigqueryJobQueryOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobQueryScriptOptionsOutputReference_Override

func NewBigqueryJobQueryScriptOptionsOutputReference_Override(b BigqueryJobQueryScriptOptionsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJobQueryUserDefinedFunctionResourcesList_Override

func NewBigqueryJobQueryUserDefinedFunctionResourcesList_Override(b BigqueryJobQueryUserDefinedFunctionResourcesList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)

func NewBigqueryJobQueryUserDefinedFunctionResourcesOutputReference_Override

func NewBigqueryJobQueryUserDefinedFunctionResourcesOutputReference_Override(b BigqueryJobQueryUserDefinedFunctionResourcesOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)

func NewBigqueryJobStatusErrorResultList_Override

func NewBigqueryJobStatusErrorResultList_Override(b BigqueryJobStatusErrorResultList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)

func NewBigqueryJobStatusErrorResultOutputReference_Override

func NewBigqueryJobStatusErrorResultOutputReference_Override(b BigqueryJobStatusErrorResultOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)

func NewBigqueryJobStatusErrorsList_Override

func NewBigqueryJobStatusErrorsList_Override(b BigqueryJobStatusErrorsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)

func NewBigqueryJobStatusErrorsOutputReference_Override

func NewBigqueryJobStatusErrorsOutputReference_Override(b BigqueryJobStatusErrorsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)

func NewBigqueryJobStatusList_Override

func NewBigqueryJobStatusList_Override(b BigqueryJobStatusList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)

func NewBigqueryJobStatusOutputReference_Override

func NewBigqueryJobStatusOutputReference_Override(b BigqueryJobStatusOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)

func NewBigqueryJobTimeoutsOutputReference_Override

func NewBigqueryJobTimeoutsOutputReference_Override(b BigqueryJobTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)

func NewBigqueryJob_Override

func NewBigqueryJob_Override(b BigqueryJob, scope constructs.Construct, id *string, config *BigqueryJobConfig)

Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job google_bigquery_job} Resource.

Types

type BigqueryJob

type BigqueryJob interface {
	cdktf.TerraformResource
	// Experimental.
	CdktfStack() cdktf.TerraformStack
	// Experimental.
	Connection() interface{}
	// Experimental.
	SetConnection(val interface{})
	// Experimental.
	ConstructNodeMetadata() *map[string]interface{}
	Copy() BigqueryJobCopyOutputReference
	CopyInput() *BigqueryJobCopy
	// Experimental.
	Count() interface{}
	// Experimental.
	SetCount(val interface{})
	// Experimental.
	DependsOn() *[]*string
	// Experimental.
	SetDependsOn(val *[]*string)
	Extract() BigqueryJobExtractOutputReference
	ExtractInput() *BigqueryJobExtract
	// Experimental.
	ForEach() cdktf.ITerraformIterator
	// Experimental.
	SetForEach(val cdktf.ITerraformIterator)
	// Experimental.
	Fqn() *string
	// Experimental.
	FriendlyUniqueId() *string
	Id() *string
	SetId(val *string)
	IdInput() *string
	JobId() *string
	SetJobId(val *string)
	JobIdInput() *string
	JobTimeoutMs() *string
	SetJobTimeoutMs(val *string)
	JobTimeoutMsInput() *string
	JobType() *string
	Labels() *map[string]*string
	SetLabels(val *map[string]*string)
	LabelsInput() *map[string]*string
	// Experimental.
	Lifecycle() *cdktf.TerraformResourceLifecycle
	// Experimental.
	SetLifecycle(val *cdktf.TerraformResourceLifecycle)
	Load() BigqueryJobLoadOutputReference
	LoadInput() *BigqueryJobLoad
	Location() *string
	SetLocation(val *string)
	LocationInput() *string
	// The tree node.
	Node() constructs.Node
	Project() *string
	SetProject(val *string)
	ProjectInput() *string
	// Experimental.
	Provider() cdktf.TerraformProvider
	// Experimental.
	SetProvider(val cdktf.TerraformProvider)
	// Experimental.
	Provisioners() *[]interface{}
	// Experimental.
	SetProvisioners(val *[]interface{})
	Query() BigqueryJobQueryOutputReference
	QueryInput() *BigqueryJobQuery
	// Experimental.
	RawOverrides() interface{}
	Status() BigqueryJobStatusList
	// Experimental.
	TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata
	// Experimental.
	TerraformMetaArguments() *map[string]interface{}
	// Experimental.
	TerraformResourceType() *string
	Timeouts() BigqueryJobTimeoutsOutputReference
	TimeoutsInput() interface{}
	UserEmail() *string
	// Experimental.
	AddOverride(path *string, value interface{})
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable
	// Overrides the auto-generated logical ID with a specific ID.
	// Experimental.
	OverrideLogicalId(newLogicalId *string)
	PutCopy(value *BigqueryJobCopy)
	PutExtract(value *BigqueryJobExtract)
	PutLoad(value *BigqueryJobLoad)
	PutQuery(value *BigqueryJobQuery)
	PutTimeouts(value *BigqueryJobTimeouts)
	ResetCopy()
	ResetExtract()
	ResetId()
	ResetJobTimeoutMs()
	ResetLabels()
	ResetLoad()
	ResetLocation()
	// Resets a previously passed logical Id to use the auto-generated logical id again.
	// Experimental.
	ResetOverrideLogicalId()
	ResetProject()
	ResetQuery()
	ResetTimeouts()
	SynthesizeAttributes() *map[string]interface{}
	// Experimental.
	ToMetadata() interface{}
	// Returns a string representation of this construct.
	ToString() *string
	// Adds this resource to the terraform JSON output.
	// Experimental.
	ToTerraform() interface{}
}

Represents a {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job google_bigquery_job}.

func NewBigqueryJob

func NewBigqueryJob(scope constructs.Construct, id *string, config *BigqueryJobConfig) BigqueryJob

Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job google_bigquery_job} Resource.
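
As a hedged sketch, the resource is typically instantiated inside a CDKTF stack. The import path and the shape of the BigqueryJobQuery block (its fields are documented later in the generated bindings, outside this excerpt) are assumptions; a Google provider block would normally be configured on the stack as well:

package main

import (
	"github.com/aws/jsii-runtime-go"
	"github.com/hashicorp/terraform-cdk-go/cdktf"

	// Assumed import path for this generated package.
	"github.com/cdktf/cdktf-provider-google-go/google/v8/bigqueryjob"
)

func main() {
	app := cdktf.NewApp(nil)
	stack := cdktf.NewTerraformStack(app, jsii.String("bq-job-stack"))

	bigqueryjob.NewBigqueryJob(stack, jsii.String("example_query_job"), &bigqueryjob.BigqueryJobConfig{
		JobId:    jsii.String("job_query_example"),
		Location: jsii.String("US"),
		Query: &bigqueryjob.BigqueryJobQuery{
			// Assumed field: mirrors the provider's required query.query argument.
			Query: jsii.String("SELECT 1"),
		},
	})

	app.Synth()
}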

type BigqueryJobConfig

type BigqueryJobConfig struct {
	// Experimental.
	Connection interface{} `field:"optional" json:"connection" yaml:"connection"`
	// Experimental.
	Count interface{} `field:"optional" json:"count" yaml:"count"`
	// Experimental.
	DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"`
	// Experimental.
	ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"`
	// Experimental.
	Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"`
	// Experimental.
	Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"`
	// Experimental.
	Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"`
	// The ID of the job.
	//
	// The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#job_id BigqueryJob#job_id}
	JobId *string `field:"required" json:"jobId" yaml:"jobId"`
	// copy block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#copy BigqueryJob#copy}
	Copy *BigqueryJobCopy `field:"optional" json:"copy" yaml:"copy"`
	// extract block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#extract BigqueryJob#extract}
	Extract *BigqueryJobExtract `field:"optional" json:"extract" yaml:"extract"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#id BigqueryJob#id}.
	//
	// Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
	// If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
	Id *string `field:"optional" json:"id" yaml:"id"`
	// Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#job_timeout_ms BigqueryJob#job_timeout_ms}
	JobTimeoutMs *string `field:"optional" json:"jobTimeoutMs" yaml:"jobTimeoutMs"`
	// The labels associated with this job. You can use these to organize and group your jobs.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#labels BigqueryJob#labels}
	Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"`
	// load block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#load BigqueryJob#load}
	Load *BigqueryJobLoad `field:"optional" json:"load" yaml:"load"`
	// The geographic location of the job. The default value is US.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#location BigqueryJob#location}
	Location *string `field:"optional" json:"location" yaml:"location"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project BigqueryJob#project}.
	Project *string `field:"optional" json:"project" yaml:"project"`
	// query block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#query BigqueryJob#query}
	Query *BigqueryJobQuery `field:"optional" json:"query" yaml:"query"`
	// timeouts block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#timeouts BigqueryJob#timeouts}
	Timeouts *BigqueryJobTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"`
}

type BigqueryJobCopy

type BigqueryJobCopy struct {
	// source_tables block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#source_tables BigqueryJob#source_tables}
	SourceTables interface{} `field:"required" json:"sourceTables" yaml:"sourceTables"`
	// Specifies whether the job is allowed to create new tables.
	//
	// The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED". Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#create_disposition BigqueryJob#create_disposition}
	CreateDisposition *string `field:"optional" json:"createDisposition" yaml:"createDisposition"`
	// destination_encryption_configuration block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_encryption_configuration BigqueryJob#destination_encryption_configuration}
	DestinationEncryptionConfiguration *BigqueryJobCopyDestinationEncryptionConfiguration `field:"optional" json:"destinationEncryptionConfiguration" yaml:"destinationEncryptionConfiguration"`
	// destination_table block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_table BigqueryJob#destination_table}
	DestinationTable *BigqueryJobCopyDestinationTable `field:"optional" json:"destinationTable" yaml:"destinationTable"`
	// Specifies the action that occurs if the destination table already exists.
	//
	// The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY". Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#write_disposition BigqueryJob#write_disposition}
	WriteDisposition *string `field:"optional" json:"writeDisposition" yaml:"writeDisposition"`
}
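
Building on the stack sketch above, a hedged fragment showing a copy block with a CMEK-protected destination; all project, dataset, table, and key names are illustrative:

// Fragment: imports and stack as in the earlier sketch.
bigqueryjob.NewBigqueryJob(stack, jsii.String("example_copy_job"), &bigqueryjob.BigqueryJobConfig{
	JobId: jsii.String("job_copy_example"),
	Copy: &bigqueryjob.BigqueryJobCopy{
		SourceTables: []interface{}{
			&bigqueryjob.BigqueryJobCopySourceTables{
				ProjectId: jsii.String("my-project"),
				DatasetId: jsii.String("source_dataset"),
				TableId:   jsii.String("source_table"),
			},
		},
		DestinationTable: &bigqueryjob.BigqueryJobCopyDestinationTable{
			ProjectId: jsii.String("my-project"),
			DatasetId: jsii.String("dest_dataset"),
			TableId:   jsii.String("dest_table"),
		},
		DestinationEncryptionConfiguration: &bigqueryjob.BigqueryJobCopyDestinationEncryptionConfiguration{
			KmsKeyName: jsii.String("projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key"),
		},
		WriteDisposition: jsii.String("WRITE_TRUNCATE"),
	},
})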

type BigqueryJobCopyDestinationEncryptionConfiguration

type BigqueryJobCopyDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.
	//
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#kms_key_name BigqueryJob#kms_key_name}
	KmsKeyName *string `field:"required" json:"kmsKeyName" yaml:"kmsKeyName"`
}

type BigqueryJobCopyDestinationEncryptionConfigurationOutputReference

type BigqueryJobCopyDestinationEncryptionConfigurationOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobCopyDestinationEncryptionConfiguration
	SetInternalValue(val *BigqueryJobCopyDestinationEncryptionConfiguration)
	KmsKeyName() *string
	SetKmsKeyName(val *string)
	KmsKeyNameInput() *string
	KmsKeyVersion() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobCopyDestinationEncryptionConfigurationOutputReference

func NewBigqueryJobCopyDestinationEncryptionConfigurationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobCopyDestinationEncryptionConfigurationOutputReference

type BigqueryJobCopyDestinationTable

type BigqueryJobCopyDestinationTable struct {
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
	TableId *string `field:"required" json:"tableId" yaml:"tableId"`
	// The ID of the dataset containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}

type BigqueryJobCopyDestinationTableOutputReference

type BigqueryJobCopyDestinationTableOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobCopyDestinationTable
	SetInternalValue(val *BigqueryJobCopyDestinationTable)
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	TableId() *string
	SetTableId(val *string)
	TableIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetDatasetId()
	ResetProjectId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobCopyDestinationTableOutputReference

func NewBigqueryJobCopyDestinationTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobCopyDestinationTableOutputReference

type BigqueryJobCopyOutputReference

type BigqueryJobCopyOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	CreateDisposition() *string
	SetCreateDisposition(val *string)
	CreateDispositionInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DestinationEncryptionConfiguration() BigqueryJobCopyDestinationEncryptionConfigurationOutputReference
	DestinationEncryptionConfigurationInput() *BigqueryJobCopyDestinationEncryptionConfiguration
	DestinationTable() BigqueryJobCopyDestinationTableOutputReference
	DestinationTableInput() *BigqueryJobCopyDestinationTable
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobCopy
	SetInternalValue(val *BigqueryJobCopy)
	SourceTables() BigqueryJobCopySourceTablesList
	SourceTablesInput() interface{}
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	WriteDisposition() *string
	SetWriteDisposition(val *string)
	WriteDispositionInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutDestinationEncryptionConfiguration(value *BigqueryJobCopyDestinationEncryptionConfiguration)
	PutDestinationTable(value *BigqueryJobCopyDestinationTable)
	PutSourceTables(value interface{})
	ResetCreateDisposition()
	ResetDestinationEncryptionConfiguration()
	ResetDestinationTable()
	ResetWriteDisposition()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobCopyOutputReference

func NewBigqueryJobCopyOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobCopyOutputReference

type BigqueryJobCopySourceTables

type BigqueryJobCopySourceTables struct {
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
	TableId *string `field:"required" json:"tableId" yaml:"tableId"`
	// The ID of the dataset containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}

type BigqueryJobCopySourceTablesList

type BigqueryJobCopySourceTablesList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) BigqueryJobCopySourceTablesOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobCopySourceTablesList

func NewBigqueryJobCopySourceTablesList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) BigqueryJobCopySourceTablesList
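
As a hedged sketch, and assuming the job from the earlier sketches is bound to a variable job, computed elements are read back through Get; the index is a *float64 per the jsii bindings:

// Fragment: reading the first source table of the copy block.
first := job.Copy().SourceTables().Get(jsii.Number(0))
tableId := first.TableId() // *string token, resolved at synthesis time
_ = tableId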

type BigqueryJobCopySourceTablesOutputReference

type BigqueryJobCopySourceTablesOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	TableId() *string
	SetTableId(val *string)
	TableIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetDatasetId()
	ResetProjectId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobCopySourceTablesOutputReference

func NewBigqueryJobCopySourceTablesOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) BigqueryJobCopySourceTablesOutputReference

type BigqueryJobExtract

type BigqueryJobExtract struct {
	// A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_uris BigqueryJob#destination_uris}
	DestinationUris *[]*string `field:"required" json:"destinationUris" yaml:"destinationUris"`
	// The compression type to use for exported files.
	//
	// Possible values include GZIP, DEFLATE, SNAPPY, and NONE.
	// The default value is NONE. DEFLATE and SNAPPY are only supported for Avro.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#compression BigqueryJob#compression}
	Compression *string `field:"optional" json:"compression" yaml:"compression"`
	// The exported file format.
	//
	// Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models.
	// The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV.
	// The default value for models is SAVED_MODEL.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_format BigqueryJob#destination_format}
	DestinationFormat *string `field:"optional" json:"destinationFormat" yaml:"destinationFormat"`
	// When extracting data in CSV format, this defines the delimiter to use between fields in the exported data.
	//
	// Default is ','.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#field_delimiter BigqueryJob#field_delimiter}
	FieldDelimiter *string `field:"optional" json:"fieldDelimiter" yaml:"fieldDelimiter"`
	// Whether to print out a header row in the results. Default is true.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#print_header BigqueryJob#print_header}
	PrintHeader interface{} `field:"optional" json:"printHeader" yaml:"printHeader"`
	// source_model block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#source_model BigqueryJob#source_model}
	SourceModel *BigqueryJobExtractSourceModel `field:"optional" json:"sourceModel" yaml:"sourceModel"`
	// source_table block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#source_table BigqueryJob#source_table}
	SourceTable *BigqueryJobExtractSourceTable `field:"optional" json:"sourceTable" yaml:"sourceTable"`
	// Whether to use logical types when extracting to AVRO format.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#use_avro_logical_types BigqueryJob#use_avro_logical_types}
	UseAvroLogicalTypes interface{} `field:"optional" json:"useAvroLogicalTypes" yaml:"useAvroLogicalTypes"`
}
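
Building on the stack sketch above, a hedged fragment of an extract job writing CSV to Cloud Storage; bucket, project, dataset, and table names are illustrative:

// Fragment: imports and stack as in the earlier sketch.
bigqueryjob.NewBigqueryJob(stack, jsii.String("example_extract_job"), &bigqueryjob.BigqueryJobConfig{
	JobId: jsii.String("job_extract_example"),
	Extract: &bigqueryjob.BigqueryJobExtract{
		DestinationUris:   jsii.Strings("gs://my-bucket/extract/part-*.csv"),
		DestinationFormat: jsii.String("CSV"),
		FieldDelimiter:    jsii.String(","),
		PrintHeader:       true, // interface{} field; a plain bool is accepted by jsii
		SourceTable: &bigqueryjob.BigqueryJobExtractSourceTable{
			ProjectId: jsii.String("my-project"),
			DatasetId: jsii.String("source_dataset"),
			TableId:   jsii.String("source_table"),
		},
	},
})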

type BigqueryJobExtractOutputReference

type BigqueryJobExtractOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	Compression() *string
	SetCompression(val *string)
	CompressionInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DestinationFormat() *string
	SetDestinationFormat(val *string)
	DestinationFormatInput() *string
	DestinationUris() *[]*string
	SetDestinationUris(val *[]*string)
	DestinationUrisInput() *[]*string
	FieldDelimiter() *string
	SetFieldDelimiter(val *string)
	FieldDelimiterInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobExtract
	SetInternalValue(val *BigqueryJobExtract)
	PrintHeader() interface{}
	SetPrintHeader(val interface{})
	PrintHeaderInput() interface{}
	SourceModel() BigqueryJobExtractSourceModelOutputReference
	SourceModelInput() *BigqueryJobExtractSourceModel
	SourceTable() BigqueryJobExtractSourceTableOutputReference
	SourceTableInput() *BigqueryJobExtractSourceTable
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	UseAvroLogicalTypes() interface{}
	SetUseAvroLogicalTypes(val interface{})
	UseAvroLogicalTypesInput() interface{}
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutSourceModel(value *BigqueryJobExtractSourceModel)
	PutSourceTable(value *BigqueryJobExtractSourceTable)
	ResetCompression()
	ResetDestinationFormat()
	ResetFieldDelimiter()
	ResetPrintHeader()
	ResetSourceModel()
	ResetSourceTable()
	ResetUseAvroLogicalTypes()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobExtractOutputReference

func NewBigqueryJobExtractOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobExtractOutputReference

type BigqueryJobExtractSourceModel

type BigqueryJobExtractSourceModel struct {
	// The ID of the dataset containing this model.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"required" json:"datasetId" yaml:"datasetId"`
	// The ID of the model.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#model_id BigqueryJob#model_id}
	ModelId *string `field:"required" json:"modelId" yaml:"modelId"`
	// The ID of the project containing this model.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"required" json:"projectId" yaml:"projectId"`
}

type BigqueryJobExtractSourceModelOutputReference

type BigqueryJobExtractSourceModelOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobExtractSourceModel
	SetInternalValue(val *BigqueryJobExtractSourceModel)
	ModelId() *string
	SetModelId(val *string)
	ModelIdInput() *string
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobExtractSourceModelOutputReference

func NewBigqueryJobExtractSourceModelOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobExtractSourceModelOutputReference

type BigqueryJobExtractSourceTable

type BigqueryJobExtractSourceTable struct {
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
	TableId *string `field:"required" json:"tableId" yaml:"tableId"`
	// The ID of the dataset containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}

type BigqueryJobExtractSourceTableOutputReference

type BigqueryJobExtractSourceTableOutputReference interface {
	cdktf.ComplexObject
	// the index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobExtractSourceTable
	SetInternalValue(val *BigqueryJobExtractSourceTable)
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	TableId() *string
	SetTableId(val *string)
	TableIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetDatasetId()
	ResetProjectId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobExtractSourceTableOutputReference

func NewBigqueryJobExtractSourceTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobExtractSourceTableOutputReference

type BigqueryJobLoad

type BigqueryJobLoad struct {
	// destination_table block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_table BigqueryJob#destination_table}
	DestinationTable *BigqueryJobLoadDestinationTable `field:"required" json:"destinationTable" yaml:"destinationTable"`
	// The fully-qualified URIs that point to your data in Google Cloud.
	//
	// For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character
	// and it must come after the 'bucket' name. Size limits related to load jobs apply
	// to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be
	// specified and it has to be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table.
	// For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '*' wildcard character is not allowed.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#source_uris BigqueryJob#source_uris}
	SourceUris *[]*string `field:"required" json:"sourceUris" yaml:"sourceUris"`
	// Accept rows that are missing trailing optional columns.
	//
	// The missing values are treated as nulls.
	// If false, records with missing trailing columns are treated as bad records, and if there are too many bad records,
	// an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#allow_jagged_rows BigqueryJob#allow_jagged_rows}
	AllowJaggedRows interface{} `field:"optional" json:"allowJaggedRows" yaml:"allowJaggedRows"`
	// Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file.
	//
	// The default value is false.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#allow_quoted_newlines BigqueryJob#allow_quoted_newlines}
	AllowQuotedNewlines interface{} `field:"optional" json:"allowQuotedNewlines" yaml:"allowQuotedNewlines"`
	// Indicates if we should automatically infer the options and schema for CSV and JSON sources.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#autodetect BigqueryJob#autodetect}
	Autodetect interface{} `field:"optional" json:"autodetect" yaml:"autodetect"`
	// Specifies whether the job is allowed to create new tables.
	//
	// The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED". Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#create_disposition BigqueryJob#create_disposition}
	CreateDisposition *string `field:"optional" json:"createDisposition" yaml:"createDisposition"`
	// destination_encryption_configuration block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_encryption_configuration BigqueryJob#destination_encryption_configuration}
	DestinationEncryptionConfiguration *BigqueryJobLoadDestinationEncryptionConfiguration `field:"optional" json:"destinationEncryptionConfiguration" yaml:"destinationEncryptionConfiguration"`
	// The character encoding of the data.
	//
	// The supported values are UTF-8 or ISO-8859-1.
	// The default value is UTF-8. BigQuery decodes the data after the raw, binary data
	// has been split using the values of the quote and fieldDelimiter properties.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#encoding BigqueryJob#encoding}
	Encoding *string `field:"optional" json:"encoding" yaml:"encoding"`
	// The separator for fields in a CSV file.
	//
	// The separator can be any ISO-8859-1 single-byte character.
	// To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts
	// the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the
	// data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator.
	// The default value is a comma (',').
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#field_delimiter BigqueryJob#field_delimiter}
	FieldDelimiter *string `field:"optional" json:"fieldDelimiter" yaml:"fieldDelimiter"`
	// Indicates if BigQuery should allow extra values that are not represented in the table schema.
	//
	// If true, the extra values are ignored. If false, records with extra columns are treated as bad records,
	// and if there are too many bad records, an invalid error is returned in the job result.
	// The default value is false. The sourceFormat property determines what BigQuery treats as an extra value:
	// CSV: Trailing columns
	// JSON: Named values that don't match any column names
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#ignore_unknown_values BigqueryJob#ignore_unknown_values}
	IgnoreUnknownValues interface{} `field:"optional" json:"ignoreUnknownValues" yaml:"ignoreUnknownValues"`
	// If sourceFormat is set to newline-delimited JSON, indicates whether it should be processed as a JSON variant such as GeoJSON.
	//
	// For a sourceFormat other than JSON, omit this field. If the sourceFormat is newline-delimited JSON, set this to GEOJSON for newline-delimited GeoJSON.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#json_extension BigqueryJob#json_extension}
	JsonExtension *string `field:"optional" json:"jsonExtension" yaml:"jsonExtension"`
	// The maximum number of bad records that BigQuery can ignore when running the job.
	//
	// If the number of bad records exceeds this value,
	// an invalid error is returned in the job result. The default value is 0, which requires that all records are valid.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#max_bad_records BigqueryJob#max_bad_records}
	MaxBadRecords *float64 `field:"optional" json:"maxBadRecords" yaml:"maxBadRecords"`
	// Specifies a string that represents a null value in a CSV file.
	//
	// For example, if you specify "\N", BigQuery interprets "\N" as a null value
	// when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an
	// empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as
	// an empty value.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#null_marker BigqueryJob#null_marker}
	NullMarker *string `field:"optional" json:"nullMarker" yaml:"nullMarker"`
	// parquet_options block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#parquet_options BigqueryJob#parquet_options}
	ParquetOptions *BigqueryJobLoadParquetOptions `field:"optional" json:"parquetOptions" yaml:"parquetOptions"`
	// If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup.
	//
	// Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties.
	// If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#projection_fields BigqueryJob#projection_fields}
	ProjectionFields *[]*string `field:"optional" json:"projectionFields" yaml:"projectionFields"`
	// The value that is used to quote data sections in a CSV file.
	//
	// BigQuery converts the string to ISO-8859-1 encoding,
	// and then uses the first byte of the encoded string to split the data in its raw, binary state.
	// The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string.
	// If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#quote BigqueryJob#quote}
	Quote *string `field:"optional" json:"quote" yaml:"quote"`
	// Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or supplied in the job configuration.
	//
	// Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators.
	// For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values may be specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#schema_update_options BigqueryJob#schema_update_options}
	SchemaUpdateOptions *[]*string `field:"optional" json:"schemaUpdateOptions" yaml:"schemaUpdateOptions"`
	// The number of rows at the top of a CSV file that BigQuery will skip when loading the data.
	//
	// The default value is 0. This property is useful if you have header rows in the file that should be skipped.
	// When autodetect is on, the behavior is the following:
	// skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected,
	// the row is read as data. Otherwise data is read starting from the second row.
	// skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row.
	// skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected,
	// row N is just skipped. Otherwise row N is used to extract column names for the detected schema.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#skip_leading_rows BigqueryJob#skip_leading_rows}
	SkipLeadingRows *float64 `field:"optional" json:"skipLeadingRows" yaml:"skipLeadingRows"`
	// The format of the data files.
	//
	// For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP".
	// For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For Parquet, specify "PARQUET".
	// For ORC, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE".
	// The default value is CSV.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#source_format BigqueryJob#source_format}
	SourceFormat *string `field:"optional" json:"sourceFormat" yaml:"sourceFormat"`
	// time_partitioning block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#time_partitioning BigqueryJob#time_partitioning}
	TimePartitioning *BigqueryJobLoadTimePartitioning `field:"optional" json:"timePartitioning" yaml:"timePartitioning"`
	// Specifies the action that occurs if the destination table already exists.
	//
	// The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#write_disposition BigqueryJob#write_disposition}
	WriteDisposition *string `field:"optional" json:"writeDisposition" yaml:"writeDisposition"`
}
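
The load block is plain configuration data: each optional field is a pointer (or interface{} for boolean-ish inputs), so unset fields are simply omitted. Below is a minimal, illustrative sketch of a CSV load in Go. The import path, and the SourceUris/DestinationTable inputs (the load block's required inputs, documented earlier in this reference), are assumptions here; the jsii helpers come from the standard jsii runtime.

package example

import (
	"github.com/aws/jsii-runtime-go"

	"github.com/cdktf/cdktf-provider-google-go/google/v8/bigqueryjob" // assumed import path
)

// newCsvLoad sketches a load block that appends CSV files from GCS,
// skipping one header row and tolerating up to ten bad records.
func newCsvLoad() *bigqueryjob.BigqueryJobLoad {
	return &bigqueryjob.BigqueryJobLoad{
		SourceUris: jsii.Strings("gs://my-bucket/exports/*.csv"), // placeholder bucket
		DestinationTable: &bigqueryjob.BigqueryJobLoadDestinationTable{
			TableId:   jsii.String("my_table"),
			DatasetId: jsii.String("my_dataset"),
			ProjectId: jsii.String("my-project"),
		},
		SourceFormat:      jsii.String("CSV"),
		FieldDelimiter:    jsii.String(","),
		SkipLeadingRows:   jsii.Number(1), // files always carry a header row
		MaxBadRecords:     jsii.Number(10),
		CreateDisposition: jsii.String("CREATE_IF_NEEDED"),
		WriteDisposition:  jsii.String("WRITE_APPEND"),
	}
}

Because skipLeadingRows interacts with autodetect as described above, it is safest to set it explicitly whenever the input files are known to carry header rows.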

type BigqueryJobLoadDestinationEncryptionConfiguration

type BigqueryJobLoadDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.
	//
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#kms_key_name BigqueryJob#kms_key_name}
	KmsKeyName *string `field:"required" json:"kmsKeyName" yaml:"kmsKeyName"`
}
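
A short sketch of a customer-managed key configuration, reusing the imports from the load sketch above; the key name is a placeholder in the standard Cloud KMS resource-name format.

// cmekConfig sketches a destination encryption block. As noted above, the
// BigQuery Service Account for the project must have access to this key.
func cmekConfig() *bigqueryjob.BigqueryJobLoadDestinationEncryptionConfiguration {
	return &bigqueryjob.BigqueryJobLoadDestinationEncryptionConfiguration{
		KmsKeyName: jsii.String("projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key"),
	}
}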

type BigqueryJobLoadDestinationEncryptionConfigurationOutputReference

type BigqueryJobLoadDestinationEncryptionConfigurationOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobLoadDestinationEncryptionConfiguration
	SetInternalValue(val *BigqueryJobLoadDestinationEncryptionConfiguration)
	KmsKeyName() *string
	SetKmsKeyName(val *string)
	KmsKeyNameInput() *string
	KmsKeyVersion() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobLoadDestinationEncryptionConfigurationOutputReference

func NewBigqueryJobLoadDestinationEncryptionConfigurationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobLoadDestinationEncryptionConfigurationOutputReference

type BigqueryJobLoadDestinationTable

type BigqueryJobLoadDestinationTable struct {
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
	TableId *string `field:"required" json:"tableId" yaml:"tableId"`
	// The ID of the dataset containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}
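
The two accepted forms of table_id, side by side as package-level values (placeholder names; imports as in the first sketch):

// Bare-ID form: 'project_id' and 'dataset_id' must also be set.
var shortForm = &bigqueryjob.BigqueryJobLoadDestinationTable{
	TableId:   jsii.String("my_table"),
	DatasetId: jsii.String("my_dataset"),
	ProjectId: jsii.String("my-project"),
}

// Fully-qualified form: project and dataset are encoded in 'table_id'.
var longForm = &bigqueryjob.BigqueryJobLoadDestinationTable{
	TableId: jsii.String("projects/my-project/datasets/my_dataset/tables/my_table"),
}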

type BigqueryJobLoadDestinationTableOutputReference

type BigqueryJobLoadDestinationTableOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobLoadDestinationTable
	SetInternalValue(val *BigqueryJobLoadDestinationTable)
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	TableId() *string
	SetTableId(val *string)
	TableIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetDatasetId()
	ResetProjectId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobLoadDestinationTableOutputReference

func NewBigqueryJobLoadDestinationTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobLoadDestinationTableOutputReference

type BigqueryJobLoadOutputReference

type BigqueryJobLoadOutputReference interface {
	cdktf.ComplexObject
	AllowJaggedRows() interface{}
	SetAllowJaggedRows(val interface{})
	AllowJaggedRowsInput() interface{}
	AllowQuotedNewlines() interface{}
	SetAllowQuotedNewlines(val interface{})
	AllowQuotedNewlinesInput() interface{}
	Autodetect() interface{}
	SetAutodetect(val interface{})
	AutodetectInput() interface{}
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	CreateDisposition() *string
	SetCreateDisposition(val *string)
	CreateDispositionInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DestinationEncryptionConfiguration() BigqueryJobLoadDestinationEncryptionConfigurationOutputReference
	DestinationEncryptionConfigurationInput() *BigqueryJobLoadDestinationEncryptionConfiguration
	DestinationTable() BigqueryJobLoadDestinationTableOutputReference
	DestinationTableInput() *BigqueryJobLoadDestinationTable
	Encoding() *string
	SetEncoding(val *string)
	EncodingInput() *string
	FieldDelimiter() *string
	SetFieldDelimiter(val *string)
	FieldDelimiterInput() *string
	// Experimental.
	Fqn() *string
	IgnoreUnknownValues() interface{}
	SetIgnoreUnknownValues(val interface{})
	IgnoreUnknownValuesInput() interface{}
	InternalValue() *BigqueryJobLoad
	SetInternalValue(val *BigqueryJobLoad)
	JsonExtension() *string
	SetJsonExtension(val *string)
	JsonExtensionInput() *string
	MaxBadRecords() *float64
	SetMaxBadRecords(val *float64)
	MaxBadRecordsInput() *float64
	NullMarker() *string
	SetNullMarker(val *string)
	NullMarkerInput() *string
	ParquetOptions() BigqueryJobLoadParquetOptionsOutputReference
	ParquetOptionsInput() *BigqueryJobLoadParquetOptions
	ProjectionFields() *[]*string
	SetProjectionFields(val *[]*string)
	ProjectionFieldsInput() *[]*string
	Quote() *string
	SetQuote(val *string)
	QuoteInput() *string
	SchemaUpdateOptions() *[]*string
	SetSchemaUpdateOptions(val *[]*string)
	SchemaUpdateOptionsInput() *[]*string
	SkipLeadingRows() *float64
	SetSkipLeadingRows(val *float64)
	SkipLeadingRowsInput() *float64
	SourceFormat() *string
	SetSourceFormat(val *string)
	SourceFormatInput() *string
	SourceUris() *[]*string
	SetSourceUris(val *[]*string)
	SourceUrisInput() *[]*string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	TimePartitioning() BigqueryJobLoadTimePartitioningOutputReference
	TimePartitioningInput() *BigqueryJobLoadTimePartitioning
	WriteDisposition() *string
	SetWriteDisposition(val *string)
	WriteDispositionInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutDestinationEncryptionConfiguration(value *BigqueryJobLoadDestinationEncryptionConfiguration)
	PutDestinationTable(value *BigqueryJobLoadDestinationTable)
	PutParquetOptions(value *BigqueryJobLoadParquetOptions)
	PutTimePartitioning(value *BigqueryJobLoadTimePartitioning)
	ResetAllowJaggedRows()
	ResetAllowQuotedNewlines()
	ResetAutodetect()
	ResetCreateDisposition()
	ResetDestinationEncryptionConfiguration()
	ResetEncoding()
	ResetFieldDelimiter()
	ResetIgnoreUnknownValues()
	ResetJsonExtension()
	ResetMaxBadRecords()
	ResetNullMarker()
	ResetParquetOptions()
	ResetProjectionFields()
	ResetQuote()
	ResetSchemaUpdateOptions()
	ResetSkipLeadingRows()
	ResetSourceFormat()
	ResetTimePartitioning()
	ResetWriteDisposition()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobLoadOutputReference

func NewBigqueryJobLoadOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobLoadOutputReference

type BigqueryJobLoadParquetOptions

type BigqueryJobLoadParquetOptions struct {
	// If sourceFormat is set to PARQUET, indicates whether to use schema inference specifically for Parquet LIST logical type.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#enable_list_inference BigqueryJob#enable_list_inference}
	EnableListInference interface{} `field:"optional" json:"enableListInference" yaml:"enableListInference"`
	// If sourceFormat is set to PARQUET, indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#enum_as_string BigqueryJob#enum_as_string}
	EnumAsString interface{} `field:"optional" json:"enumAsString" yaml:"enumAsString"`
}
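
Both fields are typed interface{} so that either a boolean pointer or a cdktf token can be supplied. An illustrative sketch using jsii.Bool pointers (an assumption consistent with the other sketches):

// parquetLoadOptions sketches a load that infers Parquet LIST logical
// types and reads ENUM values as STRING rather than BYTES.
func parquetLoadOptions() *bigqueryjob.BigqueryJobLoadParquetOptions {
	return &bigqueryjob.BigqueryJobLoadParquetOptions{
		EnableListInference: jsii.Bool(true),
		EnumAsString:        jsii.Bool(true),
	}
}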

type BigqueryJobLoadParquetOptionsOutputReference

type BigqueryJobLoadParquetOptionsOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	EnableListInference() interface{}
	SetEnableListInference(val interface{})
	EnableListInferenceInput() interface{}
	EnumAsString() interface{}
	SetEnumAsString(val interface{})
	EnumAsStringInput() interface{}
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobLoadParquetOptions
	SetInternalValue(val *BigqueryJobLoadParquetOptions)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetEnableListInference()
	ResetEnumAsString()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobLoadParquetOptionsOutputReference

func NewBigqueryJobLoadParquetOptionsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobLoadParquetOptionsOutputReference

type BigqueryJobLoadTimePartitioning

type BigqueryJobLoadTimePartitioning struct {
	// The only type supported is DAY, which will generate one partition per day.
	//
	// Providing an empty string used to cause an error,
	// but in OnePlatform the field will be treated as unset.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#type BigqueryJob#type}
	Type *string `field:"required" json:"type" yaml:"type"`
	// Number of milliseconds for which to keep the storage for a partition.
	//
	// A wrapper is used here because 0 is an invalid value.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#expiration_ms BigqueryJob#expiration_ms}
	ExpirationMs *string `field:"optional" json:"expirationMs" yaml:"expirationMs"`
	// If not set, the table is partitioned by pseudo column '_PARTITIONTIME';
	//
	// if set, the table is partitioned by this field.
	// The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED.
	// A wrapper is used here because an empty string is an invalid value.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#field BigqueryJob#field}
	Field *string `field:"optional" json:"field" yaml:"field"`
}
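
A sketch of daily partitioning on a timestamp column, illustrating both wrapper quirks called out above: expirationMs is a string of milliseconds (0 is invalid) and field is a string wrapper (the empty string is invalid). The column name is hypothetical.

// dailyPartitioning partitions by the 'event_ts' column and expires
// partitions after 30 days.
func dailyPartitioning() *bigqueryjob.BigqueryJobLoadTimePartitioning {
	return &bigqueryjob.BigqueryJobLoadTimePartitioning{
		Type:         jsii.String("DAY"),        // the only supported type
		Field:        jsii.String("event_ts"),   // top-level TIMESTAMP or DATE, NULLABLE or REQUIRED
		ExpirationMs: jsii.String("2592000000"), // 30 days in milliseconds
	}
}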

type BigqueryJobLoadTimePartitioningOutputReference

type BigqueryJobLoadTimePartitioningOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	ExpirationMs() *string
	SetExpirationMs(val *string)
	ExpirationMsInput() *string
	Field() *string
	SetField(val *string)
	FieldInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobLoadTimePartitioning
	SetInternalValue(val *BigqueryJobLoadTimePartitioning)
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	Type() *string
	SetType(val *string)
	TypeInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetExpirationMs()
	ResetField()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobLoadTimePartitioningOutputReference

func NewBigqueryJobLoadTimePartitioningOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobLoadTimePartitioningOutputReference

type BigqueryJobQuery

type BigqueryJobQuery struct {
	// SQL query text to execute.
	//
	// The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL.
	// NOTE: queries containing [DML language](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language)
	// ('DELETE', 'UPDATE', 'MERGE', 'INSERT') must specify 'create_disposition = ""' and 'write_disposition = ""'.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#query BigqueryJob#query}
	Query *string `field:"required" json:"query" yaml:"query"`
	// If true and the query uses the legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance.
	//
	// Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed.
	// However, you must still set destinationTable when result size exceeds the allowed maximum response size.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#allow_large_results BigqueryJob#allow_large_results}
	AllowLargeResults interface{} `field:"optional" json:"allowLargeResults" yaml:"allowLargeResults"`
	// Specifies whether the job is allowed to create new tables.
	//
	// The following values are supported:
	// CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
	// CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#create_disposition BigqueryJob#create_disposition}
	CreateDisposition *string `field:"optional" json:"createDisposition" yaml:"createDisposition"`
	// default_dataset block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#default_dataset BigqueryJob#default_dataset}
	DefaultDataset *BigqueryJobQueryDefaultDataset `field:"optional" json:"defaultDataset" yaml:"defaultDataset"`
	// destination_encryption_configuration block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_encryption_configuration BigqueryJob#destination_encryption_configuration}
	DestinationEncryptionConfiguration *BigqueryJobQueryDestinationEncryptionConfiguration `field:"optional" json:"destinationEncryptionConfiguration" yaml:"destinationEncryptionConfiguration"`
	// destination_table block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#destination_table BigqueryJob#destination_table}
	DestinationTable *BigqueryJobQueryDestinationTable `field:"optional" json:"destinationTable" yaml:"destinationTable"`
	// If true and the query uses the legacy SQL dialect, flattens all nested and repeated fields in the query results.
	//
	// allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#flatten_results BigqueryJob#flatten_results}
	FlattenResults interface{} `field:"optional" json:"flattenResults" yaml:"flattenResults"`
	// Limits the billing tier for this job.
	//
	// Queries that have resource usage beyond this tier will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#maximum_billing_tier BigqueryJob#maximum_billing_tier}
	MaximumBillingTier *float64 `field:"optional" json:"maximumBillingTier" yaml:"maximumBillingTier"`
	// Limits the bytes billed for this job.
	//
	// Queries that will have bytes billed beyond this limit will fail (without incurring a charge).
	// If unspecified, this will be set to your project default.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#maximum_bytes_billed BigqueryJob#maximum_bytes_billed}
	MaximumBytesBilled *string `field:"optional" json:"maximumBytesBilled" yaml:"maximumBytesBilled"`
	// Standard SQL only.
	//
	// Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#parameter_mode BigqueryJob#parameter_mode}
	ParameterMode *string `field:"optional" json:"parameterMode" yaml:"parameterMode"`
	// Specifies a priority for the query. Default value: "INTERACTIVE" Possible values: ["INTERACTIVE", "BATCH"].
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#priority BigqueryJob#priority}
	Priority *string `field:"optional" json:"priority" yaml:"priority"`
	// Allows the schema of the destination table to be updated as a side effect of the query job.
	//
	// Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
	// when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
	// specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema.
	// One or more of the following values may be specified:
	// ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
	// ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#schema_update_options BigqueryJob#schema_update_options}
	SchemaUpdateOptions *[]*string `field:"optional" json:"schemaUpdateOptions" yaml:"schemaUpdateOptions"`
	// script_options block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#script_options BigqueryJob#script_options}
	ScriptOptions *BigqueryJobQueryScriptOptions `field:"optional" json:"scriptOptions" yaml:"scriptOptions"`
	// Specifies whether to use BigQuery's legacy SQL dialect for this query.
	//
	// The default value is true.
	// If set to false, the query will use BigQuery's standard SQL.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#use_legacy_sql BigqueryJob#use_legacy_sql}
	UseLegacySql interface{} `field:"optional" json:"useLegacySql" yaml:"useLegacySql"`
	// Whether to look for the result in the query cache.
	//
	// The query cache is a best-effort cache that will be flushed whenever
	// tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified.
	// The default value is true.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#use_query_cache BigqueryJob#use_query_cache}
	UseQueryCache interface{} `field:"optional" json:"useQueryCache" yaml:"useQueryCache"`
	// user_defined_function_resources block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#user_defined_function_resources BigqueryJob#user_defined_function_resources}
	UserDefinedFunctionResources interface{} `field:"optional" json:"userDefinedFunctionResources" yaml:"userDefinedFunctionResources"`
	// Specifies the action that occurs if the destination table already exists.
	//
	// The following values are supported:
	// WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
	// WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
	// WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
	// Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
	// Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#write_disposition BigqueryJob#write_disposition}
	WriteDisposition *string `field:"optional" json:"writeDisposition" yaml:"writeDisposition"`
}
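
The DML caveat on the query field is easy to trip over: DELETE/UPDATE/MERGE/INSERT statements must set both dispositions to empty strings rather than leaving them unset. An illustrative sketch (imports as in the first example; the table name and predicate are placeholders):

// dmlQuery sketches a standard-SQL DML job. Per the note on 'query',
// DML statements must pass empty create/write dispositions explicitly.
func dmlQuery() *bigqueryjob.BigqueryJobQuery {
	return &bigqueryjob.BigqueryJobQuery{
		Query:             jsii.String("DELETE FROM `my-project.my_dataset.my_table` WHERE expired"),
		UseLegacySql:      jsii.Bool(false), // legacy SQL is the default; opt into standard SQL
		CreateDisposition: jsii.String(""),
		WriteDisposition:  jsii.String(""),
	}
}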

type BigqueryJobQueryDefaultDataset

type BigqueryJobQueryDefaultDataset struct {
	// The dataset. Can be specified as '{{dataset_id}}' if 'project_id' is also set, or in the form 'projects/{{project}}/datasets/{{dataset_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"required" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}
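
As with the destination table, the default dataset accepts either a bare ID (with project_id set alongside it) or a fully-qualified name. One sketch of the latter:

// Fully-qualified form; alternatively set DatasetId to a bare ID and
// supply ProjectId separately.
var defaultDataset = &bigqueryjob.BigqueryJobQueryDefaultDataset{
	DatasetId: jsii.String("projects/my-project/datasets/my_dataset"),
}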

type BigqueryJobQueryDefaultDatasetOutputReference

type BigqueryJobQueryDefaultDatasetOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobQueryDefaultDataset
	SetInternalValue(val *BigqueryJobQueryDefaultDataset)
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetProjectId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobQueryDefaultDatasetOutputReference

func NewBigqueryJobQueryDefaultDatasetOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobQueryDefaultDatasetOutputReference

type BigqueryJobQueryDestinationEncryptionConfiguration

type BigqueryJobQueryDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.
	//
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#kms_key_name BigqueryJob#kms_key_name}
	KmsKeyName *string `field:"required" json:"kmsKeyName" yaml:"kmsKeyName"`
}

type BigqueryJobQueryDestinationEncryptionConfigurationOutputReference

type BigqueryJobQueryDestinationEncryptionConfigurationOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobQueryDestinationEncryptionConfiguration
	SetInternalValue(val *BigqueryJobQueryDestinationEncryptionConfiguration)
	KmsKeyName() *string
	SetKmsKeyName(val *string)
	KmsKeyNameInput() *string
	KmsKeyVersion() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobQueryDestinationEncryptionConfigurationOutputReference

func NewBigqueryJobQueryDestinationEncryptionConfigurationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobQueryDestinationEncryptionConfigurationOutputReference

type BigqueryJobQueryDestinationTable

type BigqueryJobQueryDestinationTable struct {
	// The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
	TableId *string `field:"required" json:"tableId" yaml:"tableId"`
	// The ID of the dataset containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
	DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}

type BigqueryJobQueryDestinationTableOutputReference

type BigqueryJobQueryDestinationTableOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DatasetId() *string
	SetDatasetId(val *string)
	DatasetIdInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobQueryDestinationTable
	SetInternalValue(val *BigqueryJobQueryDestinationTable)
	ProjectId() *string
	SetProjectId(val *string)
	ProjectIdInput() *string
	TableId() *string
	SetTableId(val *string)
	TableIdInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetDatasetId()
	ResetProjectId()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobQueryDestinationTableOutputReference

func NewBigqueryJobQueryDestinationTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobQueryDestinationTableOutputReference

type BigqueryJobQueryOutputReference

type BigqueryJobQueryOutputReference interface {
	cdktf.ComplexObject
	AllowLargeResults() interface{}
	SetAllowLargeResults(val interface{})
	AllowLargeResultsInput() interface{}
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	CreateDisposition() *string
	SetCreateDisposition(val *string)
	CreateDispositionInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DefaultDataset() BigqueryJobQueryDefaultDatasetOutputReference
	DefaultDatasetInput() *BigqueryJobQueryDefaultDataset
	DestinationEncryptionConfiguration() BigqueryJobQueryDestinationEncryptionConfigurationOutputReference
	DestinationEncryptionConfigurationInput() *BigqueryJobQueryDestinationEncryptionConfiguration
	DestinationTable() BigqueryJobQueryDestinationTableOutputReference
	DestinationTableInput() *BigqueryJobQueryDestinationTable
	FlattenResults() interface{}
	SetFlattenResults(val interface{})
	FlattenResultsInput() interface{}
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobQuery
	SetInternalValue(val *BigqueryJobQuery)
	MaximumBillingTier() *float64
	SetMaximumBillingTier(val *float64)
	MaximumBillingTierInput() *float64
	MaximumBytesBilled() *string
	SetMaximumBytesBilled(val *string)
	MaximumBytesBilledInput() *string
	ParameterMode() *string
	SetParameterMode(val *string)
	ParameterModeInput() *string
	Priority() *string
	SetPriority(val *string)
	PriorityInput() *string
	Query() *string
	SetQuery(val *string)
	QueryInput() *string
	SchemaUpdateOptions() *[]*string
	SetSchemaUpdateOptions(val *[]*string)
	SchemaUpdateOptionsInput() *[]*string
	ScriptOptions() BigqueryJobQueryScriptOptionsOutputReference
	ScriptOptionsInput() *BigqueryJobQueryScriptOptions
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	UseLegacySql() interface{}
	SetUseLegacySql(val interface{})
	UseLegacySqlInput() interface{}
	UseQueryCache() interface{}
	SetUseQueryCache(val interface{})
	UseQueryCacheInput() interface{}
	UserDefinedFunctionResources() BigqueryJobQueryUserDefinedFunctionResourcesList
	UserDefinedFunctionResourcesInput() interface{}
	WriteDisposition() *string
	SetWriteDisposition(val *string)
	WriteDispositionInput() *string
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	PutDefaultDataset(value *BigqueryJobQueryDefaultDataset)
	PutDestinationEncryptionConfiguration(value *BigqueryJobQueryDestinationEncryptionConfiguration)
	PutDestinationTable(value *BigqueryJobQueryDestinationTable)
	PutScriptOptions(value *BigqueryJobQueryScriptOptions)
	PutUserDefinedFunctionResources(value interface{})
	ResetAllowLargeResults()
	ResetCreateDisposition()
	ResetDefaultDataset()
	ResetDestinationEncryptionConfiguration()
	ResetDestinationTable()
	ResetFlattenResults()
	ResetMaximumBillingTier()
	ResetMaximumBytesBilled()
	ResetParameterMode()
	ResetPriority()
	ResetSchemaUpdateOptions()
	ResetScriptOptions()
	ResetUseLegacySql()
	ResetUseQueryCache()
	ResetUserDefinedFunctionResources()
	ResetWriteDisposition()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobQueryOutputReference

func NewBigqueryJobQueryOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobQueryOutputReference

type BigqueryJobQueryScriptOptions

type BigqueryJobQueryScriptOptions struct {
	// Determines which statement in the script represents the "key result", used to populate the schema and query results of the script job.
	//
	// Possible values: ["LAST", "FIRST_SELECT"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#key_result_statement BigqueryJob#key_result_statement}
	KeyResultStatement *string `field:"optional" json:"keyResultStatement" yaml:"keyResultStatement"`
	// Limit on the number of bytes billed per statement. Exceeding this budget results in an error.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#statement_byte_budget BigqueryJob#statement_byte_budget}
	StatementByteBudget *string `field:"optional" json:"statementByteBudget" yaml:"statementByteBudget"`
	// Timeout period for each statement in a script.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#statement_timeout_ms BigqueryJob#statement_timeout_ms}
	StatementTimeoutMs *string `field:"optional" json:"statementTimeoutMs" yaml:"statementTimeoutMs"`
}
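
Script options bound multi-statement (scripting) jobs per statement; note that both budgets are passed as decimal strings, not numbers. An illustrative sketch:

// scriptBudget sketches per-statement limits for a script job: a 10 GB
// byte budget and a five-minute timeout, with the last statement as the
// key result. Values are placeholders.
func scriptBudget() *bigqueryjob.BigqueryJobQueryScriptOptions {
	return &bigqueryjob.BigqueryJobQueryScriptOptions{
		KeyResultStatement:  jsii.String("LAST"),
		StatementByteBudget: jsii.String("10000000000"), // 10 GB
		StatementTimeoutMs:  jsii.String("300000"),      // 5 minutes
	}
}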

type BigqueryJobQueryScriptOptionsOutputReference

type BigqueryJobQueryScriptOptionsOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobQueryScriptOptions
	SetInternalValue(val *BigqueryJobQueryScriptOptions)
	KeyResultStatement() *string
	SetKeyResultStatement(val *string)
	KeyResultStatementInput() *string
	StatementByteBudget() *string
	SetStatementByteBudget(val *string)
	StatementByteBudgetInput() *string
	StatementTimeoutMs() *string
	SetStatementTimeoutMs(val *string)
	StatementTimeoutMsInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetKeyResultStatement()
	ResetStatementByteBudget()
	ResetStatementTimeoutMs()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobQueryScriptOptionsOutputReference

func NewBigqueryJobQueryScriptOptionsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobQueryScriptOptionsOutputReference

type BigqueryJobQueryUserDefinedFunctionResources

type BigqueryJobQueryUserDefinedFunctionResources struct {
	// An inline resource that contains code for a user-defined function (UDF).
	//
	// Providing an inline code resource is equivalent to providing a URI for a file containing the same code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#inline_code BigqueryJob#inline_code}
	InlineCode *string `field:"optional" json:"inlineCode" yaml:"inlineCode"`
	// A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#resource_uri BigqueryJob#resource_uri}
	ResourceUri *string `field:"optional" json:"resourceUri" yaml:"resourceUri"`
}
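
Both fields are optional, and in practice each element supplies one of the two, so a job can mix inline snippets with Cloud Storage-hosted files. A hedged sketch, reusing the assumed import from the earlier example (the code and bucket path are placeholders):

	udfs := []*bigqueryjob.BigqueryJobQueryUserDefinedFunctionResources{
		{InlineCode: jsii.String("function addOne(x) { return x + 1; }")}, // illustrative snippet
		{ResourceUri: jsii.String("gs://my-bucket/udfs/helpers.js")},      // hypothetical bucket/object
	}
	_ = udfs // typically wired into the job's query block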

type BigqueryJobQueryUserDefinedFunctionResourcesList

type BigqueryJobQueryUserDefinedFunctionResourcesList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Whether the list wraps a set (tolist() will be added so an item can be accessed via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) BigqueryJobQueryUserDefinedFunctionResourcesOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}
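
Get takes a *float64 index (a jsii convention) and returns a typed output reference for that element. A sketch of reading the list back off an existing resource; the Query() accessor chain is an assumption about the surrounding generated code, and imports are as in the first sketch:

	func inspectUdfs(job bigqueryjob.BigqueryJob) {
		udfList := job.Query().UserDefinedFunctionResources() // assumed accessor chain
		first := udfList.Get(jsii.Number(0))                  // indices are *float64
		_ = first.InlineCodeInput()                           // raw configured value; nil when unset
	}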

func NewBigqueryJobQueryUserDefinedFunctionResourcesList

func NewBigqueryJobQueryUserDefinedFunctionResourcesList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) BigqueryJobQueryUserDefinedFunctionResourcesList

type BigqueryJobQueryUserDefinedFunctionResourcesOutputReference

type BigqueryJobQueryUserDefinedFunctionResourcesOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InlineCode() *string
	SetInlineCode(val *string)
	InlineCodeInput() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	ResourceUri() *string
	SetResourceUri(val *string)
	ResourceUriInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetInlineCode()
	ResetResourceUri()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobQueryUserDefinedFunctionResourcesOutputReference

func NewBigqueryJobQueryUserDefinedFunctionResourcesOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) BigqueryJobQueryUserDefinedFunctionResourcesOutputReference

type BigqueryJobStatus

type BigqueryJobStatus struct {
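	// Empty by design: every status attribute is computed and read through
	// BigqueryJobStatusOutputReference rather than set here.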
}

type BigqueryJobStatusErrorResult

type BigqueryJobStatusErrorResult struct {
}

type BigqueryJobStatusErrorResultList

type BigqueryJobStatusErrorResultList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Whether the list wraps a set (tolist() will be added so an item can be accessed via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) BigqueryJobStatusErrorResultOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobStatusErrorResultList

func NewBigqueryJobStatusErrorResultList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) BigqueryJobStatusErrorResultList

type BigqueryJobStatusErrorResultOutputReference

type BigqueryJobStatusErrorResultOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobStatusErrorResult
	SetInternalValue(val *BigqueryJobStatusErrorResult)
	Location() *string
	Message() *string
	Reason() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobStatusErrorResultOutputReference

func NewBigqueryJobStatusErrorResultOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) BigqueryJobStatusErrorResultOutputReference

type BigqueryJobStatusErrors

type BigqueryJobStatusErrors struct {
}

type BigqueryJobStatusErrorsList

type BigqueryJobStatusErrorsList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Whether the list wraps a set (tolist() will be added so an item can be accessed via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) BigqueryJobStatusErrorsOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobStatusErrorsList

func NewBigqueryJobStatusErrorsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) BigqueryJobStatusErrorsList

type BigqueryJobStatusErrorsOutputReference

type BigqueryJobStatusErrorsOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobStatusErrors
	SetInternalValue(val *BigqueryJobStatusErrors)
	Location() *string
	Message() *string
	Reason() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobStatusErrorsOutputReference

func NewBigqueryJobStatusErrorsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) BigqueryJobStatusErrorsOutputReference

type BigqueryJobStatusList

type BigqueryJobStatusList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Whether the list wraps a set (tolist() will be added so an item can be accessed via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) BigqueryJobStatusOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobStatusList

func NewBigqueryJobStatusList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) BigqueryJobStatusList

type BigqueryJobStatusOutputReference

type BigqueryJobStatusOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	ErrorResult() BigqueryJobStatusErrorResultList
	Errors() BigqueryJobStatusErrorsList
	// Experimental.
	Fqn() *string
	InternalValue() *BigqueryJobStatus
	SetInternalValue(val *BigqueryJobStatus)
	State() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobStatusOutputReference

func NewBigqueryJobStatusOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) BigqueryJobStatusOutputReference
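
On the underlying BigQuery API, errorResult holds the final error of a failed job while errors collects every error encountered along the way, fatal or not; both surface here as read-only location/message/reason triples. A sketch of reading them, assuming the resource exposes the usual generated Status() accessor:

	func inspectStatus(job bigqueryjob.BigqueryJob) {
		status := job.Status().Get(jsii.Number(0)) // computed single-element list; Status() is assumed
		_ = status.State()                         // resolves to a Terraform reference at synth time
		finalErr := status.ErrorResult().Get(jsii.Number(0))
		_ = finalErr.Message()
	}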

type BigqueryJobTimeouts

type BigqueryJobTimeouts struct {
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#create BigqueryJob#create}.
	Create *string `field:"optional" json:"create" yaml:"create"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.70.0/docs/resources/bigquery_job#delete BigqueryJob#delete}.
	Delete *string `field:"optional" json:"delete" yaml:"delete"`
}
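
Both fields take Terraform duration strings and fall back to provider defaults when omitted. A minimal sketch:

	timeouts := &bigqueryjob.BigqueryJobTimeouts{
		Create: jsii.String("10m"), // Terraform duration syntax
		Delete: jsii.String("10m"),
	}
	_ = timeouts // typically passed inside the resource's config struct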

type BigqueryJobTimeoutsOutputReference

type BigqueryJobTimeoutsOutputReference interface {
	cdktf.ComplexObject
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	Create() *string
	SetCreate(val *string)
	CreateInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	Delete() *string
	SetDelete(val *string)
	DeleteInput() *string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetCreate()
	ResetDelete()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewBigqueryJobTimeoutsOutputReference

func NewBigqueryJobTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) BigqueryJobTimeoutsOutputReference
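
The Reset methods clear a previously configured value so the default applies again. A sketch, assuming the resource exposes the usual generated Timeouts() accessor:

	func tuneTimeouts(job bigqueryjob.BigqueryJob) {
		to := job.Timeouts() // assumed accessor; returns BigqueryJobTimeoutsOutputReference
		to.SetCreate(jsii.String("15m"))
		to.ResetDelete() // fall back to the default delete timeout
	}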
