Documentation ¶
Index ¶
- func GoogleBigqueryJob_GenerateConfigForImport(scope constructs.Construct, importToId *string, importFromId *string, ...) cdktf.ImportableResource
- func GoogleBigqueryJob_IsConstruct(x interface{}) *bool
- func GoogleBigqueryJob_IsTerraformElement(x interface{}) *bool
- func GoogleBigqueryJob_IsTerraformResource(x interface{}) *bool
- func GoogleBigqueryJob_TfResourceType() *string
- func NewGoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference_Override(g GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference, ...)
- func NewGoogleBigqueryJobCopyDestinationTableOutputReference_Override(g GoogleBigqueryJobCopyDestinationTableOutputReference, ...)
- func NewGoogleBigqueryJobCopyOutputReference_Override(g GoogleBigqueryJobCopyOutputReference, ...)
- func NewGoogleBigqueryJobCopySourceTablesList_Override(g GoogleBigqueryJobCopySourceTablesList, ...)
- func NewGoogleBigqueryJobCopySourceTablesOutputReference_Override(g GoogleBigqueryJobCopySourceTablesOutputReference, ...)
- func NewGoogleBigqueryJobExtractOutputReference_Override(g GoogleBigqueryJobExtractOutputReference, ...)
- func NewGoogleBigqueryJobExtractSourceModelOutputReference_Override(g GoogleBigqueryJobExtractSourceModelOutputReference, ...)
- func NewGoogleBigqueryJobExtractSourceTableOutputReference_Override(g GoogleBigqueryJobExtractSourceTableOutputReference, ...)
- func NewGoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference_Override(g GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference, ...)
- func NewGoogleBigqueryJobLoadDestinationTableOutputReference_Override(g GoogleBigqueryJobLoadDestinationTableOutputReference, ...)
- func NewGoogleBigqueryJobLoadOutputReference_Override(g GoogleBigqueryJobLoadOutputReference, ...)
- func NewGoogleBigqueryJobLoadParquetOptionsOutputReference_Override(g GoogleBigqueryJobLoadParquetOptionsOutputReference, ...)
- func NewGoogleBigqueryJobLoadTimePartitioningOutputReference_Override(g GoogleBigqueryJobLoadTimePartitioningOutputReference, ...)
- func NewGoogleBigqueryJobQueryDefaultDatasetOutputReference_Override(g GoogleBigqueryJobQueryDefaultDatasetOutputReference, ...)
- func NewGoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference_Override(g GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference, ...)
- func NewGoogleBigqueryJobQueryDestinationTableOutputReference_Override(g GoogleBigqueryJobQueryDestinationTableOutputReference, ...)
- func NewGoogleBigqueryJobQueryOutputReference_Override(g GoogleBigqueryJobQueryOutputReference, ...)
- func NewGoogleBigqueryJobQueryScriptOptionsOutputReference_Override(g GoogleBigqueryJobQueryScriptOptionsOutputReference, ...)
- func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesList_Override(g GoogleBigqueryJobQueryUserDefinedFunctionResourcesList, ...)
- func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference_Override(g GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference, ...)
- func NewGoogleBigqueryJobStatusErrorResultList_Override(g GoogleBigqueryJobStatusErrorResultList, ...)
- func NewGoogleBigqueryJobStatusErrorResultOutputReference_Override(g GoogleBigqueryJobStatusErrorResultOutputReference, ...)
- func NewGoogleBigqueryJobStatusErrorsList_Override(g GoogleBigqueryJobStatusErrorsList, ...)
- func NewGoogleBigqueryJobStatusErrorsOutputReference_Override(g GoogleBigqueryJobStatusErrorsOutputReference, ...)
- func NewGoogleBigqueryJobStatusList_Override(g GoogleBigqueryJobStatusList, terraformResource cdktf.IInterpolatingParent, ...)
- func NewGoogleBigqueryJobStatusOutputReference_Override(g GoogleBigqueryJobStatusOutputReference, ...)
- func NewGoogleBigqueryJobTimeoutsOutputReference_Override(g GoogleBigqueryJobTimeoutsOutputReference, ...)
- func NewGoogleBigqueryJob_Override(g GoogleBigqueryJob, scope constructs.Construct, id *string, ...)
- type GoogleBigqueryJob
- type GoogleBigqueryJobConfig
- type GoogleBigqueryJobCopy
- type GoogleBigqueryJobCopyDestinationEncryptionConfiguration
- type GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference
- type GoogleBigqueryJobCopyDestinationTable
- type GoogleBigqueryJobCopyDestinationTableOutputReference
- type GoogleBigqueryJobCopyOutputReference
- type GoogleBigqueryJobCopySourceTables
- type GoogleBigqueryJobCopySourceTablesList
- type GoogleBigqueryJobCopySourceTablesOutputReference
- type GoogleBigqueryJobExtract
- type GoogleBigqueryJobExtractOutputReference
- type GoogleBigqueryJobExtractSourceModel
- type GoogleBigqueryJobExtractSourceModelOutputReference
- type GoogleBigqueryJobExtractSourceTable
- type GoogleBigqueryJobExtractSourceTableOutputReference
- type GoogleBigqueryJobLoad
- type GoogleBigqueryJobLoadDestinationEncryptionConfiguration
- type GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference
- type GoogleBigqueryJobLoadDestinationTable
- type GoogleBigqueryJobLoadDestinationTableOutputReference
- type GoogleBigqueryJobLoadOutputReference
- type GoogleBigqueryJobLoadParquetOptions
- type GoogleBigqueryJobLoadParquetOptionsOutputReference
- type GoogleBigqueryJobLoadTimePartitioning
- type GoogleBigqueryJobLoadTimePartitioningOutputReference
- type GoogleBigqueryJobQuery
- type GoogleBigqueryJobQueryDefaultDataset
- type GoogleBigqueryJobQueryDefaultDatasetOutputReference
- type GoogleBigqueryJobQueryDestinationEncryptionConfiguration
- type GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference
- type GoogleBigqueryJobQueryDestinationTable
- type GoogleBigqueryJobQueryDestinationTableOutputReference
- type GoogleBigqueryJobQueryOutputReference
- type GoogleBigqueryJobQueryScriptOptions
- type GoogleBigqueryJobQueryScriptOptionsOutputReference
- type GoogleBigqueryJobQueryUserDefinedFunctionResources
- type GoogleBigqueryJobQueryUserDefinedFunctionResourcesList
- type GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference
- type GoogleBigqueryJobStatus
- type GoogleBigqueryJobStatusErrorResult
- type GoogleBigqueryJobStatusErrorResultList
- type GoogleBigqueryJobStatusErrorResultOutputReference
- type GoogleBigqueryJobStatusErrors
- type GoogleBigqueryJobStatusErrorsList
- type GoogleBigqueryJobStatusErrorsOutputReference
- type GoogleBigqueryJobStatusList
- type GoogleBigqueryJobStatusOutputReference
- type GoogleBigqueryJobTimeouts
- type GoogleBigqueryJobTimeoutsOutputReference
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func GoogleBigqueryJob_GenerateConfigForImport ¶
func GoogleBigqueryJob_GenerateConfigForImport(scope constructs.Construct, importToId *string, importFromId *string, provider cdktf.TerraformProvider) cdktf.ImportableResource
Generates CDKTF code for importing a GoogleBigqueryJob resource upon running "cdktf plan <stack-name>".
func GoogleBigqueryJob_IsConstruct ¶
func GoogleBigqueryJob_IsConstruct(x interface{}) *bool
Checks if `x` is a construct.
Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.
Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof`, and using this type-testing method instead.
Returns: true if `x` is an object created from a class which extends `Construct`.
func GoogleBigqueryJob_IsTerraformElement ¶
func GoogleBigqueryJob_IsTerraformElement(x interface{}) *bool
Experimental.
func GoogleBigqueryJob_IsTerraformResource ¶
func GoogleBigqueryJob_IsTerraformResource(x interface{}) *bool
Experimental.
func GoogleBigqueryJob_TfResourceType ¶
func GoogleBigqueryJob_TfResourceType() *string
func NewGoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference_Override ¶
func NewGoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference_Override(g GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobCopyDestinationTableOutputReference_Override ¶
func NewGoogleBigqueryJobCopyDestinationTableOutputReference_Override(g GoogleBigqueryJobCopyDestinationTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobCopyOutputReference_Override ¶
func NewGoogleBigqueryJobCopyOutputReference_Override(g GoogleBigqueryJobCopyOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobCopySourceTablesList_Override ¶
func NewGoogleBigqueryJobCopySourceTablesList_Override(g GoogleBigqueryJobCopySourceTablesList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewGoogleBigqueryJobCopySourceTablesOutputReference_Override ¶
func NewGoogleBigqueryJobCopySourceTablesOutputReference_Override(g GoogleBigqueryJobCopySourceTablesOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewGoogleBigqueryJobExtractOutputReference_Override ¶
func NewGoogleBigqueryJobExtractOutputReference_Override(g GoogleBigqueryJobExtractOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobExtractSourceModelOutputReference_Override ¶
func NewGoogleBigqueryJobExtractSourceModelOutputReference_Override(g GoogleBigqueryJobExtractSourceModelOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobExtractSourceTableOutputReference_Override ¶
func NewGoogleBigqueryJobExtractSourceTableOutputReference_Override(g GoogleBigqueryJobExtractSourceTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference_Override ¶
func NewGoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference_Override(g GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobLoadDestinationTableOutputReference_Override ¶
func NewGoogleBigqueryJobLoadDestinationTableOutputReference_Override(g GoogleBigqueryJobLoadDestinationTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobLoadOutputReference_Override ¶
func NewGoogleBigqueryJobLoadOutputReference_Override(g GoogleBigqueryJobLoadOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobLoadParquetOptionsOutputReference_Override ¶
func NewGoogleBigqueryJobLoadParquetOptionsOutputReference_Override(g GoogleBigqueryJobLoadParquetOptionsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobLoadTimePartitioningOutputReference_Override ¶
func NewGoogleBigqueryJobLoadTimePartitioningOutputReference_Override(g GoogleBigqueryJobLoadTimePartitioningOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobQueryDefaultDatasetOutputReference_Override ¶
func NewGoogleBigqueryJobQueryDefaultDatasetOutputReference_Override(g GoogleBigqueryJobQueryDefaultDatasetOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference_Override ¶
func NewGoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference_Override(g GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobQueryDestinationTableOutputReference_Override ¶
func NewGoogleBigqueryJobQueryDestinationTableOutputReference_Override(g GoogleBigqueryJobQueryDestinationTableOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobQueryOutputReference_Override ¶
func NewGoogleBigqueryJobQueryOutputReference_Override(g GoogleBigqueryJobQueryOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobQueryScriptOptionsOutputReference_Override ¶
func NewGoogleBigqueryJobQueryScriptOptionsOutputReference_Override(g GoogleBigqueryJobQueryScriptOptionsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesList_Override ¶
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesList_Override(g GoogleBigqueryJobQueryUserDefinedFunctionResourcesList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference_Override ¶
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference_Override(g GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewGoogleBigqueryJobStatusErrorResultList_Override ¶
func NewGoogleBigqueryJobStatusErrorResultList_Override(g GoogleBigqueryJobStatusErrorResultList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewGoogleBigqueryJobStatusErrorResultOutputReference_Override ¶
func NewGoogleBigqueryJobStatusErrorResultOutputReference_Override(g GoogleBigqueryJobStatusErrorResultOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewGoogleBigqueryJobStatusErrorsList_Override ¶
func NewGoogleBigqueryJobStatusErrorsList_Override(g GoogleBigqueryJobStatusErrorsList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewGoogleBigqueryJobStatusErrorsOutputReference_Override ¶
func NewGoogleBigqueryJobStatusErrorsOutputReference_Override(g GoogleBigqueryJobStatusErrorsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewGoogleBigqueryJobStatusList_Override ¶
func NewGoogleBigqueryJobStatusList_Override(g GoogleBigqueryJobStatusList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)
func NewGoogleBigqueryJobStatusOutputReference_Override ¶
func NewGoogleBigqueryJobStatusOutputReference_Override(g GoogleBigqueryJobStatusOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)
func NewGoogleBigqueryJobTimeoutsOutputReference_Override ¶
func NewGoogleBigqueryJobTimeoutsOutputReference_Override(g GoogleBigqueryJobTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewGoogleBigqueryJob_Override ¶
func NewGoogleBigqueryJob_Override(g GoogleBigqueryJob, scope constructs.Construct, id *string, config *GoogleBigqueryJobConfig)
Create a new {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job google_bigquery_job} Resource.
Types ¶
type GoogleBigqueryJob ¶
type GoogleBigqueryJob interface { cdktf.TerraformResource // Experimental. CdktfStack() cdktf.TerraformStack // Experimental. Connection() interface{} // Experimental. SetConnection(val interface{}) // Experimental. ConstructNodeMetadata() *map[string]interface{} Copy() GoogleBigqueryJobCopyOutputReference CopyInput() *GoogleBigqueryJobCopy // Experimental. Count() interface{} // Experimental. SetCount(val interface{}) // Experimental. DependsOn() *[]*string // Experimental. SetDependsOn(val *[]*string) EffectiveLabels() cdktf.StringMap Extract() GoogleBigqueryJobExtractOutputReference ExtractInput() *GoogleBigqueryJobExtract // Experimental. ForEach() cdktf.ITerraformIterator // Experimental. SetForEach(val cdktf.ITerraformIterator) // Experimental. Fqn() *string // Experimental. FriendlyUniqueId() *string Id() *string SetId(val *string) IdInput() *string JobId() *string SetJobId(val *string) JobIdInput() *string JobTimeoutMs() *string SetJobTimeoutMs(val *string) JobTimeoutMsInput() *string JobType() *string Labels() *map[string]*string SetLabels(val *map[string]*string) LabelsInput() *map[string]*string // Experimental. Lifecycle() *cdktf.TerraformResourceLifecycle // Experimental. SetLifecycle(val *cdktf.TerraformResourceLifecycle) Load() GoogleBigqueryJobLoadOutputReference LoadInput() *GoogleBigqueryJobLoad Location() *string SetLocation(val *string) LocationInput() *string // The tree node. Node() constructs.Node Project() *string SetProject(val *string) ProjectInput() *string // Experimental. Provider() cdktf.TerraformProvider // Experimental. SetProvider(val cdktf.TerraformProvider) // Experimental. Provisioners() *[]interface{} // Experimental. SetProvisioners(val *[]interface{}) Query() GoogleBigqueryJobQueryOutputReference QueryInput() *GoogleBigqueryJobQuery // Experimental. RawOverrides() interface{} Status() GoogleBigqueryJobStatusList // Experimental. 
TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata TerraformLabels() cdktf.StringMap // Experimental. TerraformMetaArguments() *map[string]interface{} // Experimental. TerraformResourceType() *string Timeouts() GoogleBigqueryJobTimeoutsOutputReference TimeoutsInput() interface{} UserEmail() *string // Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. // Experimental. AddMoveTarget(moveTarget *string) // Experimental. AddOverride(path *string, value interface{}) // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. ImportFrom(id *string, provider cdktf.TerraformProvider) // Experimental. InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable // Moves this resource to the target resource given by moveTarget. // Experimental. MoveTo(moveTarget *string, index interface{}) // Overrides the auto-generated logical ID with a specific ID. // Experimental. 
OverrideLogicalId(newLogicalId *string) PutCopy(value *GoogleBigqueryJobCopy) PutExtract(value *GoogleBigqueryJobExtract) PutLoad(value *GoogleBigqueryJobLoad) PutQuery(value *GoogleBigqueryJobQuery) PutTimeouts(value *GoogleBigqueryJobTimeouts) ResetCopy() ResetExtract() ResetId() ResetJobTimeoutMs() ResetLabels() ResetLoad() ResetLocation() // Resets a previously passed logical Id to use the auto-generated logical id again. // Experimental. ResetOverrideLogicalId() ResetProject() ResetQuery() ResetTimeouts() SynthesizeAttributes() *map[string]interface{} // Experimental. ToMetadata() interface{} // Returns a string representation of this construct. ToString() *string // Adds this resource to the terraform JSON output. // Experimental. ToTerraform() interface{} }
Represents a {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job google_bigquery_job}.
func NewGoogleBigqueryJob ¶
func NewGoogleBigqueryJob(scope constructs.Construct, id *string, config *GoogleBigqueryJobConfig) GoogleBigqueryJob
Create a new {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job google_bigquery_job} Resource.
type GoogleBigqueryJobConfig ¶
type GoogleBigqueryJobConfig struct { // Experimental. Connection interface{} `field:"optional" json:"connection" yaml:"connection"` // Experimental. Count interface{} `field:"optional" json:"count" yaml:"count"` // Experimental. DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"` // Experimental. ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"` // Experimental. Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"` // Experimental. Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"` // Experimental. Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"` // The ID of the job. // // The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#job_id GoogleBigqueryJob#job_id} JobId *string `field:"required" json:"jobId" yaml:"jobId"` // copy block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#copy GoogleBigqueryJob#copy} Copy *GoogleBigqueryJobCopy `field:"optional" json:"copy" yaml:"copy"` // extract block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#extract GoogleBigqueryJob#extract} Extract *GoogleBigqueryJobExtract `field:"optional" json:"extract" yaml:"extract"` // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#id GoogleBigqueryJob#id}. // // Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. 
// If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable. Id *string `field:"optional" json:"id" yaml:"id"` // Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#job_timeout_ms GoogleBigqueryJob#job_timeout_ms} JobTimeoutMs *string `field:"optional" json:"jobTimeoutMs" yaml:"jobTimeoutMs"` // The labels associated with this job. You can use these to organize and group your jobs. // // **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. // Please refer to the field 'effective_labels' for all of the labels present on the resource. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#labels GoogleBigqueryJob#labels} Labels *map[string]*string `field:"optional" json:"labels" yaml:"labels"` // load block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#load GoogleBigqueryJob#load} Load *GoogleBigqueryJobLoad `field:"optional" json:"load" yaml:"load"` // The geographic location of the job. The default value is US. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#location GoogleBigqueryJob#location} Location *string `field:"optional" json:"location" yaml:"location"` // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project GoogleBigqueryJob#project}. Project *string `field:"optional" json:"project" yaml:"project"` // query block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#query GoogleBigqueryJob#query} Query *GoogleBigqueryJobQuery `field:"optional" json:"query" yaml:"query"` // timeouts block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#timeouts GoogleBigqueryJob#timeouts} Timeouts *GoogleBigqueryJobTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"` }
type GoogleBigqueryJobCopy ¶
type GoogleBigqueryJobCopy struct { // source_tables block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#source_tables GoogleBigqueryJob#source_tables} SourceTables interface{} `field:"required" json:"sourceTables" yaml:"sourceTables"` // Specifies whether the job is allowed to create new tables. // // The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"] // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#create_disposition GoogleBigqueryJob#create_disposition} CreateDisposition *string `field:"optional" json:"createDisposition" yaml:"createDisposition"` // destination_encryption_configuration block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_encryption_configuration GoogleBigqueryJob#destination_encryption_configuration} DestinationEncryptionConfiguration *GoogleBigqueryJobCopyDestinationEncryptionConfiguration `field:"optional" json:"destinationEncryptionConfiguration" yaml:"destinationEncryptionConfiguration"` // destination_table block. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_table GoogleBigqueryJob#destination_table} DestinationTable *GoogleBigqueryJobCopyDestinationTable `field:"optional" json:"destinationTable" yaml:"destinationTable"` // Specifies the action that occurs if the destination table already exists. // // The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"] // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#write_disposition GoogleBigqueryJob#write_disposition} WriteDisposition *string `field:"optional" json:"writeDisposition" yaml:"writeDisposition"` }
type GoogleBigqueryJobCopyDestinationEncryptionConfiguration ¶
type GoogleBigqueryJobCopyDestinationEncryptionConfiguration struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // // The BigQuery Service Account associated with your project requires access to this encryption key. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#kms_key_name GoogleBigqueryJob#kms_key_name} KmsKeyName *string `field:"required" json:"kmsKeyName" yaml:"kmsKeyName"` }
type GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference ¶
type GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobCopyDestinationEncryptionConfiguration SetInternalValue(val *GoogleBigqueryJobCopyDestinationEncryptionConfiguration) KmsKeyName() *string SetKmsKeyName(val *string) KmsKeyNameInput() *string KmsKeyVersion() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference ¶
func NewGoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference
type GoogleBigqueryJobCopyDestinationTable ¶
type GoogleBigqueryJobCopyDestinationTable struct { // The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#table_id GoogleBigqueryJob#table_id} TableId *string `field:"required" json:"tableId" yaml:"tableId"` // The ID of the dataset containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id} DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"` // The ID of the project containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id} ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"` }
type GoogleBigqueryJobCopyDestinationTableOutputReference ¶
type GoogleBigqueryJobCopyDestinationTableOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobCopyDestinationTable SetInternalValue(val *GoogleBigqueryJobCopyDestinationTable) ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string TableId() *string SetTableId(val *string) TableIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDatasetId() ResetProjectId() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobCopyDestinationTableOutputReference ¶
func NewGoogleBigqueryJobCopyDestinationTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobCopyDestinationTableOutputReference
type GoogleBigqueryJobCopyOutputReference ¶
type GoogleBigqueryJobCopyOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) CreateDisposition() *string SetCreateDisposition(val *string) CreateDispositionInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DestinationEncryptionConfiguration() GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference DestinationEncryptionConfigurationInput() *GoogleBigqueryJobCopyDestinationEncryptionConfiguration DestinationTable() GoogleBigqueryJobCopyDestinationTableOutputReference DestinationTableInput() *GoogleBigqueryJobCopyDestinationTable // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobCopy SetInternalValue(val *GoogleBigqueryJobCopy) SourceTables() GoogleBigqueryJobCopySourceTablesList SourceTablesInput() interface{} // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) WriteDisposition() *string SetWriteDisposition(val *string) WriteDispositionInput() *string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. 
GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutDestinationEncryptionConfiguration(value *GoogleBigqueryJobCopyDestinationEncryptionConfiguration) PutDestinationTable(value *GoogleBigqueryJobCopyDestinationTable) PutSourceTables(value interface{}) ResetCreateDisposition() ResetDestinationEncryptionConfiguration() ResetDestinationTable() ResetWriteDisposition() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobCopyOutputReference ¶
func NewGoogleBigqueryJobCopyOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobCopyOutputReference
type GoogleBigqueryJobCopySourceTables ¶
type GoogleBigqueryJobCopySourceTables struct { // The table. Can be specified '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#table_id GoogleBigqueryJob#table_id} TableId *string `field:"required" json:"tableId" yaml:"tableId"` // The ID of the dataset containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id} DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"` // The ID of the project containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id} ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"` }
type GoogleBigqueryJobCopySourceTablesList ¶
type GoogleBigqueryJobCopySourceTablesList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) GoogleBigqueryJobCopySourceTablesOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobCopySourceTablesList ¶
func NewGoogleBigqueryJobCopySourceTablesList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GoogleBigqueryJobCopySourceTablesList
type GoogleBigqueryJobCopySourceTablesOutputReference ¶
type GoogleBigqueryJobCopySourceTablesOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string TableId() *string SetTableId(val *string) TableIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDatasetId() ResetProjectId() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobCopySourceTablesOutputReference ¶
func NewGoogleBigqueryJobCopySourceTablesOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) GoogleBigqueryJobCopySourceTablesOutputReference
type GoogleBigqueryJobExtract ¶
type GoogleBigqueryJobExtract struct { // A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_uris GoogleBigqueryJob#destination_uris} DestinationUris *[]*string `field:"required" json:"destinationUris" yaml:"destinationUris"` // The compression type to use for exported files. // // Possible values include GZIP, DEFLATE, SNAPPY, and NONE. // The default value is NONE. DEFLATE and SNAPPY are only supported for Avro. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#compression GoogleBigqueryJob#compression} Compression *string `field:"optional" json:"compression" yaml:"compression"` // The exported file format. // // Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models. // The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV. // The default value for models is SAVED_MODEL. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_format GoogleBigqueryJob#destination_format} DestinationFormat *string `field:"optional" json:"destinationFormat" yaml:"destinationFormat"` // When extracting data in CSV format, this defines the delimiter to use between fields in the exported data. // // Default is ',' // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#field_delimiter GoogleBigqueryJob#field_delimiter} FieldDelimiter *string `field:"optional" json:"fieldDelimiter" yaml:"fieldDelimiter"` // Whether to print out a header row in the results. Default is true. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#print_header GoogleBigqueryJob#print_header} PrintHeader interface{} `field:"optional" json:"printHeader" yaml:"printHeader"` // source_model block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#source_model GoogleBigqueryJob#source_model} SourceModel *GoogleBigqueryJobExtractSourceModel `field:"optional" json:"sourceModel" yaml:"sourceModel"` // source_table block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#source_table GoogleBigqueryJob#source_table} SourceTable *GoogleBigqueryJobExtractSourceTable `field:"optional" json:"sourceTable" yaml:"sourceTable"` // Whether to use logical types when extracting to AVRO format. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#use_avro_logical_types GoogleBigqueryJob#use_avro_logical_types} UseAvroLogicalTypes interface{} `field:"optional" json:"useAvroLogicalTypes" yaml:"useAvroLogicalTypes"` }
type GoogleBigqueryJobExtractOutputReference ¶
type GoogleBigqueryJobExtractOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) Compression() *string SetCompression(val *string) CompressionInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DestinationFormat() *string SetDestinationFormat(val *string) DestinationFormatInput() *string DestinationUris() *[]*string SetDestinationUris(val *[]*string) DestinationUrisInput() *[]*string FieldDelimiter() *string SetFieldDelimiter(val *string) FieldDelimiterInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobExtract SetInternalValue(val *GoogleBigqueryJobExtract) PrintHeader() interface{} SetPrintHeader(val interface{}) PrintHeaderInput() interface{} SourceModel() GoogleBigqueryJobExtractSourceModelOutputReference SourceModelInput() *GoogleBigqueryJobExtractSourceModel SourceTable() GoogleBigqueryJobExtractSourceTableOutputReference SourceTableInput() *GoogleBigqueryJobExtractSourceTable // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) UseAvroLogicalTypes() interface{} SetUseAvroLogicalTypes(val interface{}) UseAvroLogicalTypesInput() interface{} // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. 
GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutSourceModel(value *GoogleBigqueryJobExtractSourceModel) PutSourceTable(value *GoogleBigqueryJobExtractSourceTable) ResetCompression() ResetDestinationFormat() ResetFieldDelimiter() ResetPrintHeader() ResetSourceModel() ResetSourceTable() ResetUseAvroLogicalTypes() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobExtractOutputReference ¶
func NewGoogleBigqueryJobExtractOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobExtractOutputReference
type GoogleBigqueryJobExtractSourceModel ¶
type GoogleBigqueryJobExtractSourceModel struct { // The ID of the dataset containing this model. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id} DatasetId *string `field:"required" json:"datasetId" yaml:"datasetId"` // The ID of the model. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#model_id GoogleBigqueryJob#model_id} ModelId *string `field:"required" json:"modelId" yaml:"modelId"` // The ID of the project containing this model. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id} ProjectId *string `field:"required" json:"projectId" yaml:"projectId"` }
type GoogleBigqueryJobExtractSourceModelOutputReference ¶
type GoogleBigqueryJobExtractSourceModelOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobExtractSourceModel SetInternalValue(val *GoogleBigqueryJobExtractSourceModel) ModelId() *string SetModelId(val *string) ModelIdInput() *string ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobExtractSourceModelOutputReference ¶
func NewGoogleBigqueryJobExtractSourceModelOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobExtractSourceModelOutputReference
type GoogleBigqueryJobExtractSourceTable ¶
type GoogleBigqueryJobExtractSourceTable struct { // The table. Can be specified '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#table_id GoogleBigqueryJob#table_id} TableId *string `field:"required" json:"tableId" yaml:"tableId"` // The ID of the dataset containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id} DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"` // The ID of the project containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id} ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"` }
type GoogleBigqueryJobExtractSourceTableOutputReference ¶
type GoogleBigqueryJobExtractSourceTableOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobExtractSourceTable SetInternalValue(val *GoogleBigqueryJobExtractSourceTable) ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string TableId() *string SetTableId(val *string) TableIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDatasetId() ResetProjectId() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobExtractSourceTableOutputReference ¶
func NewGoogleBigqueryJobExtractSourceTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobExtractSourceTableOutputReference
type GoogleBigqueryJobLoad ¶
type GoogleBigqueryJobLoad struct { // destination_table block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_table GoogleBigqueryJob#destination_table} DestinationTable *GoogleBigqueryJobLoadDestinationTable `field:"required" json:"destinationTable" yaml:"destinationTable"` // The fully-qualified URIs that point to your data in Google Cloud. // // For Google Cloud Storage URIs: Each URI can contain one '\*' wildcard character // and it must come after the 'bucket' name. Size limits related to load jobs apply // to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be // specified and it has to be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. // For Google Cloud Datastore backups: Exactly one URI can be specified. Also, the '\*' wildcard character is not allowed. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#source_uris GoogleBigqueryJob#source_uris} SourceUris *[]*string `field:"required" json:"sourceUris" yaml:"sourceUris"` // Accept rows that are missing trailing optional columns. // // The missing values are treated as nulls. // If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, // an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#allow_jagged_rows GoogleBigqueryJob#allow_jagged_rows} AllowJaggedRows interface{} `field:"optional" json:"allowJaggedRows" yaml:"allowJaggedRows"` // Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. // // The default value is false. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#allow_quoted_newlines GoogleBigqueryJob#allow_quoted_newlines} AllowQuotedNewlines interface{} `field:"optional" json:"allowQuotedNewlines" yaml:"allowQuotedNewlines"` // Indicates if we should automatically infer the options and schema for CSV and JSON sources. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#autodetect GoogleBigqueryJob#autodetect} Autodetect interface{} `field:"optional" json:"autodetect" yaml:"autodetect"` // Specifies whether the job is allowed to create new tables. // // The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. // Creation, truncation and append actions occur as one atomic update upon job completion Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"] // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#create_disposition GoogleBigqueryJob#create_disposition} CreateDisposition *string `field:"optional" json:"createDisposition" yaml:"createDisposition"` // destination_encryption_configuration block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_encryption_configuration GoogleBigqueryJob#destination_encryption_configuration} DestinationEncryptionConfiguration *GoogleBigqueryJobLoadDestinationEncryptionConfiguration `field:"optional" json:"destinationEncryptionConfiguration" yaml:"destinationEncryptionConfiguration"` // The character encoding of the data. 
// // The supported values are UTF-8 or ISO-8859-1. // The default value is UTF-8. BigQuery decodes the data after the raw, binary data // has been split using the values of the quote and fieldDelimiter properties. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#encoding GoogleBigqueryJob#encoding} Encoding *string `field:"optional" json:"encoding" yaml:"encoding"` // The separator for fields in a CSV file. // // The separator can be any ISO-8859-1 single-byte character. // To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts // the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the // data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. // The default value is a comma (','). // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#field_delimiter GoogleBigqueryJob#field_delimiter} FieldDelimiter *string `field:"optional" json:"fieldDelimiter" yaml:"fieldDelimiter"` // Indicates if BigQuery should allow extra values that are not represented in the table schema. // // If true, the extra values are ignored. If false, records with extra columns are treated as bad records, // and if there are too many bad records, an invalid error is returned in the job result. // The default value is false. 
// The sourceFormat property determines what BigQuery treats as an extra value: // CSV: Trailing columns // JSON: Named values that don't match any column names // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#ignore_unknown_values GoogleBigqueryJob#ignore_unknown_values} IgnoreUnknownValues interface{} `field:"optional" json:"ignoreUnknownValues" yaml:"ignoreUnknownValues"` // If sourceFormat is set to newline-delimited JSON, indicates whether it should be processed as a JSON variant such as GeoJSON. // // For a sourceFormat other than JSON, omit this field. If the sourceFormat is newline-delimited JSON: - for newline-delimited // GeoJSON: set to GEOJSON. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#json_extension GoogleBigqueryJob#json_extension} JsonExtension *string `field:"optional" json:"jsonExtension" yaml:"jsonExtension"` // The maximum number of bad records that BigQuery can ignore when running the job. // // If the number of bad records exceeds this value, // an invalid error is returned in the job result. The default value is 0, which requires that all records are valid. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#max_bad_records GoogleBigqueryJob#max_bad_records} MaxBadRecords *float64 `field:"optional" json:"maxBadRecords" yaml:"maxBadRecords"` // Specifies a string that represents a null value in a CSV file. // // For example, if you specify "\N", BigQuery interprets "\N" as a null value // when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an // empty string is present for all data types except for STRING and BYTE. 
// For STRING and BYTE columns, BigQuery interprets the empty string as // an empty value. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#null_marker GoogleBigqueryJob#null_marker} NullMarker *string `field:"optional" json:"nullMarker" yaml:"nullMarker"` // parquet_options block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#parquet_options GoogleBigqueryJob#parquet_options} ParquetOptions *GoogleBigqueryJobLoadParquetOptions `field:"optional" json:"parquetOptions" yaml:"parquetOptions"` // If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. // // Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. // If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#projection_fields GoogleBigqueryJob#projection_fields} ProjectionFields *[]*string `field:"optional" json:"projectionFields" yaml:"projectionFields"` // The value that is used to quote data sections in a CSV file. // // BigQuery converts the string to ISO-8859-1 encoding, // and then uses the first byte of the encoded string to split the data in its raw, binary state. // The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string. // If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#quote GoogleBigqueryJob#quote} Quote *string `field:"optional" json:"quote" yaml:"quote"` // Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or supplied in the job configuration. // // Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; // when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. // For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: // ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. // ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#schema_update_options GoogleBigqueryJob#schema_update_options} SchemaUpdateOptions *[]*string `field:"optional" json:"schemaUpdateOptions" yaml:"schemaUpdateOptions"` // The number of rows at the top of a CSV file that BigQuery will skip when loading the data. // // The default value is 0. This property is useful if you have header rows in the file that should be skipped. // When autodetect is on, the behavior is the following: // skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected, // the row is read as data. Otherwise data is read starting from the second row. // skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. // skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected, // row N is just skipped. 
Otherwise row N is used to extract column names for the detected schema. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#skip_leading_rows GoogleBigqueryJob#skip_leading_rows} SkipLeadingRows *float64 `field:"optional" json:"skipLeadingRows" yaml:"skipLeadingRows"` // The format of the data files. // // For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". // For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET". // For orc, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE". // The default value is CSV. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#source_format GoogleBigqueryJob#source_format} SourceFormat *string `field:"optional" json:"sourceFormat" yaml:"sourceFormat"` // time_partitioning block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#time_partitioning GoogleBigqueryJob#time_partitioning} TimePartitioning *GoogleBigqueryJobLoadTimePartitioning `field:"optional" json:"timePartitioning" yaml:"timePartitioning"` // Specifies the action that occurs if the destination table already exists. // // The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. 
// Default value: "WRITE_EMPTY". Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#write_disposition GoogleBigqueryJob#write_disposition} WriteDisposition *string `field:"optional" json:"writeDisposition" yaml:"writeDisposition"` }
type GoogleBigqueryJobLoadDestinationEncryptionConfiguration ¶
type GoogleBigqueryJobLoadDestinationEncryptionConfiguration struct { // Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. // // The BigQuery Service Account associated with your project requires access to this encryption key. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#kms_key_name GoogleBigqueryJob#kms_key_name} KmsKeyName *string `field:"required" json:"kmsKeyName" yaml:"kmsKeyName"` }
type GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference ¶
type GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobLoadDestinationEncryptionConfiguration SetInternalValue(val *GoogleBigqueryJobLoadDestinationEncryptionConfiguration) KmsKeyName() *string SetKmsKeyName(val *string) KmsKeyNameInput() *string KmsKeyVersion() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference ¶
func NewGoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference
type GoogleBigqueryJobLoadDestinationTable ¶
type GoogleBigqueryJobLoadDestinationTable struct { // The table. Can be specified '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#table_id GoogleBigqueryJob#table_id} TableId *string `field:"required" json:"tableId" yaml:"tableId"` // The ID of the dataset containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id} DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"` // The ID of the project containing this table. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id} ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"` }
type GoogleBigqueryJobLoadDestinationTableOutputReference ¶
type GoogleBigqueryJobLoadDestinationTableOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobLoadDestinationTable SetInternalValue(val *GoogleBigqueryJobLoadDestinationTable) ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string TableId() *string SetTableId(val *string) TableIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDatasetId() ResetProjectId() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobLoadDestinationTableOutputReference ¶
func NewGoogleBigqueryJobLoadDestinationTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobLoadDestinationTableOutputReference
type GoogleBigqueryJobLoadOutputReference ¶
type GoogleBigqueryJobLoadOutputReference interface { cdktf.ComplexObject AllowJaggedRows() interface{} SetAllowJaggedRows(val interface{}) AllowJaggedRowsInput() interface{} AllowQuotedNewlines() interface{} SetAllowQuotedNewlines(val interface{}) AllowQuotedNewlinesInput() interface{} Autodetect() interface{} SetAutodetect(val interface{}) AutodetectInput() interface{} // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) CreateDisposition() *string SetCreateDisposition(val *string) CreateDispositionInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DestinationEncryptionConfiguration() GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference DestinationEncryptionConfigurationInput() *GoogleBigqueryJobLoadDestinationEncryptionConfiguration DestinationTable() GoogleBigqueryJobLoadDestinationTableOutputReference DestinationTableInput() *GoogleBigqueryJobLoadDestinationTable Encoding() *string SetEncoding(val *string) EncodingInput() *string FieldDelimiter() *string SetFieldDelimiter(val *string) FieldDelimiterInput() *string // Experimental. 
Fqn() *string IgnoreUnknownValues() interface{} SetIgnoreUnknownValues(val interface{}) IgnoreUnknownValuesInput() interface{} InternalValue() *GoogleBigqueryJobLoad SetInternalValue(val *GoogleBigqueryJobLoad) JsonExtension() *string SetJsonExtension(val *string) JsonExtensionInput() *string MaxBadRecords() *float64 SetMaxBadRecords(val *float64) MaxBadRecordsInput() *float64 NullMarker() *string SetNullMarker(val *string) NullMarkerInput() *string ParquetOptions() GoogleBigqueryJobLoadParquetOptionsOutputReference ParquetOptionsInput() *GoogleBigqueryJobLoadParquetOptions ProjectionFields() *[]*string SetProjectionFields(val *[]*string) ProjectionFieldsInput() *[]*string Quote() *string SetQuote(val *string) QuoteInput() *string SchemaUpdateOptions() *[]*string SetSchemaUpdateOptions(val *[]*string) SchemaUpdateOptionsInput() *[]*string SkipLeadingRows() *float64 SetSkipLeadingRows(val *float64) SkipLeadingRowsInput() *float64 SourceFormat() *string SetSourceFormat(val *string) SourceFormatInput() *string SourceUris() *[]*string SetSourceUris(val *[]*string) SourceUrisInput() *[]*string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) TimePartitioning() GoogleBigqueryJobLoadTimePartitioningOutputReference TimePartitioningInput() *GoogleBigqueryJobLoadTimePartitioning WriteDisposition() *string SetWriteDisposition(val *string) WriteDispositionInput() *string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. 
GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutDestinationEncryptionConfiguration(value *GoogleBigqueryJobLoadDestinationEncryptionConfiguration) PutDestinationTable(value *GoogleBigqueryJobLoadDestinationTable) PutParquetOptions(value *GoogleBigqueryJobLoadParquetOptions) PutTimePartitioning(value *GoogleBigqueryJobLoadTimePartitioning) ResetAllowJaggedRows() ResetAllowQuotedNewlines() ResetAutodetect() ResetCreateDisposition() ResetDestinationEncryptionConfiguration() ResetEncoding() ResetFieldDelimiter() ResetIgnoreUnknownValues() ResetJsonExtension() ResetMaxBadRecords() ResetNullMarker() ResetParquetOptions() ResetProjectionFields() ResetQuote() ResetSchemaUpdateOptions() ResetSkipLeadingRows() ResetSourceFormat() ResetTimePartitioning() ResetWriteDisposition() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobLoadOutputReference ¶
func NewGoogleBigqueryJobLoadOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobLoadOutputReference
type GoogleBigqueryJobLoadParquetOptions ¶
type GoogleBigqueryJobLoadParquetOptions struct { // If sourceFormat is set to PARQUET, indicates whether to use schema inference specifically for Parquet LIST logical type. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#enable_list_inference GoogleBigqueryJob#enable_list_inference} EnableListInference interface{} `field:"optional" json:"enableListInference" yaml:"enableListInference"` // If sourceFormat is set to PARQUET, indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#enum_as_string GoogleBigqueryJob#enum_as_string} EnumAsString interface{} `field:"optional" json:"enumAsString" yaml:"enumAsString"` }
type GoogleBigqueryJobLoadParquetOptionsOutputReference ¶
type GoogleBigqueryJobLoadParquetOptionsOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string EnableListInference() interface{} SetEnableListInference(val interface{}) EnableListInferenceInput() interface{} EnumAsString() interface{} SetEnumAsString(val interface{}) EnumAsStringInput() interface{} // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobLoadParquetOptions SetInternalValue(val *GoogleBigqueryJobLoadParquetOptions) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetEnableListInference() ResetEnumAsString() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobLoadParquetOptionsOutputReference ¶
func NewGoogleBigqueryJobLoadParquetOptionsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobLoadParquetOptionsOutputReference
type GoogleBigqueryJobLoadTimePartitioning ¶
type GoogleBigqueryJobLoadTimePartitioning struct { // The only type supported is DAY, which will generate one partition per day. // // Providing an empty string used to cause an error, // but in OnePlatform the field will be treated as unset. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#type GoogleBigqueryJob#type} Type *string `field:"required" json:"type" yaml:"type"` // Number of milliseconds for which to keep the storage for a partition. // // A wrapper is used here because 0 is an invalid value. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#expiration_ms GoogleBigqueryJob#expiration_ms} ExpirationMs *string `field:"optional" json:"expirationMs" yaml:"expirationMs"` // If not set, the table is partitioned by pseudo column '_PARTITIONTIME'; // // if set, the table is partitioned by this field. // The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED. // A wrapper is used here because an empty string is an invalid value. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#field GoogleBigqueryJob#field} Field *string `field:"optional" json:"field" yaml:"field"` }
type GoogleBigqueryJobLoadTimePartitioningOutputReference ¶
type GoogleBigqueryJobLoadTimePartitioningOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string ExpirationMs() *string SetExpirationMs(val *string) ExpirationMsInput() *string Field() *string SetField(val *string) FieldInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobLoadTimePartitioning SetInternalValue(val *GoogleBigqueryJobLoadTimePartitioning) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) Type() *string SetType(val *string) TypeInput() *string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetExpirationMs() ResetField() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobLoadTimePartitioningOutputReference ¶
func NewGoogleBigqueryJobLoadTimePartitioningOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobLoadTimePartitioningOutputReference
type GoogleBigqueryJobQuery ¶
type GoogleBigqueryJobQuery struct { // SQL query text to execute. // // The useLegacySql field can be used to indicate whether the query uses legacy SQL or standard SQL. // *NOTE*: queries containing [DML language](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) // ('DELETE', 'UPDATE', 'MERGE', 'INSERT') must specify 'create_disposition = ""' and 'write_disposition = ""'. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#query GoogleBigqueryJob#query} Query *string `field:"required" json:"query" yaml:"query"` // If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large result tables at a slight cost in performance. // // Requires destinationTable to be set. For standard SQL queries, this flag is ignored and large results are always allowed. // However, you must still set destinationTable when result size exceeds the allowed maximum response size. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#allow_large_results GoogleBigqueryJob#allow_large_results} AllowLargeResults interface{} `field:"optional" json:"allowLargeResults" yaml:"allowLargeResults"` // Specifies whether the job is allowed to create new tables. // // The following values are supported: // CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. // CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. 
// Creation, truncation and append actions occur as one atomic update upon job completion Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"] // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#create_disposition GoogleBigqueryJob#create_disposition} CreateDisposition *string `field:"optional" json:"createDisposition" yaml:"createDisposition"` // default_dataset block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#default_dataset GoogleBigqueryJob#default_dataset} DefaultDataset *GoogleBigqueryJobQueryDefaultDataset `field:"optional" json:"defaultDataset" yaml:"defaultDataset"` // destination_encryption_configuration block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_encryption_configuration GoogleBigqueryJob#destination_encryption_configuration} DestinationEncryptionConfiguration *GoogleBigqueryJobQueryDestinationEncryptionConfiguration `field:"optional" json:"destinationEncryptionConfiguration" yaml:"destinationEncryptionConfiguration"` // destination_table block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#destination_table GoogleBigqueryJob#destination_table} DestinationTable *GoogleBigqueryJobQueryDestinationTable `field:"optional" json:"destinationTable" yaml:"destinationTable"` // If true and query uses legacy SQL dialect, flattens all nested and repeated fields in the query results. // // allowLargeResults must be true if this is set to false. For standard SQL queries, this flag is ignored and results are never flattened. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#flatten_results GoogleBigqueryJob#flatten_results} FlattenResults interface{} `field:"optional" json:"flattenResults" yaml:"flattenResults"` // Limits the billing tier for this job. // // Queries that have resource usage beyond this tier will fail (without incurring a charge). // If unspecified, this will be set to your project default. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#maximum_billing_tier GoogleBigqueryJob#maximum_billing_tier} MaximumBillingTier *float64 `field:"optional" json:"maximumBillingTier" yaml:"maximumBillingTier"` // Limits the bytes billed for this job. // // Queries that will have bytes billed beyond this limit will fail (without incurring a charge). // If unspecified, this will be set to your project default. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#maximum_bytes_billed GoogleBigqueryJob#maximum_bytes_billed} MaximumBytesBilled *string `field:"optional" json:"maximumBytesBilled" yaml:"maximumBytesBilled"` // Standard SQL only. // // Set to POSITIONAL to use positional (?) query parameters or to NAMED to use named (@myparam) query parameters in this query. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#parameter_mode GoogleBigqueryJob#parameter_mode} ParameterMode *string `field:"optional" json:"parameterMode" yaml:"parameterMode"` // Specifies a priority for the query. Default value: "INTERACTIVE" Possible values: ["INTERACTIVE", "BATCH"]. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#priority GoogleBigqueryJob#priority} Priority *string `field:"optional" json:"priority" yaml:"priority"` // Allows the schema of the destination table to be updated as a side effect of the query job. // // Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; // when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, // specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema. // One or more of the following values are specified: // ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. // ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#schema_update_options GoogleBigqueryJob#schema_update_options} SchemaUpdateOptions *[]*string `field:"optional" json:"schemaUpdateOptions" yaml:"schemaUpdateOptions"` // script_options block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#script_options GoogleBigqueryJob#script_options} ScriptOptions *GoogleBigqueryJobQueryScriptOptions `field:"optional" json:"scriptOptions" yaml:"scriptOptions"` // Specifies whether to use BigQuery's legacy SQL dialect for this query. // // The default value is true. // If set to false, the query will use BigQuery's standard SQL. 
// // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#use_legacy_sql GoogleBigqueryJob#use_legacy_sql} UseLegacySql interface{} `field:"optional" json:"useLegacySql" yaml:"useLegacySql"` // Whether to look for the result in the query cache. // // The query cache is a best-effort cache that will be flushed whenever // tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified. // The default value is true. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#use_query_cache GoogleBigqueryJob#use_query_cache} UseQueryCache interface{} `field:"optional" json:"useQueryCache" yaml:"useQueryCache"` // user_defined_function_resources block. // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#user_defined_function_resources GoogleBigqueryJob#user_defined_function_resources} UserDefinedFunctionResources interface{} `field:"optional" json:"userDefinedFunctionResources" yaml:"userDefinedFunctionResources"` // Specifies the action that occurs if the destination table already exists. // // The following values are supported: // WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result. // WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. // WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. // Each action is atomic and only occurs if BigQuery is able to complete the job successfully. // Creation, truncation and append actions occur as one atomic update upon job completion. 
Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"] // // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#write_disposition GoogleBigqueryJob#write_disposition} WriteDisposition *string `field:"optional" json:"writeDisposition" yaml:"writeDisposition"` }
type GoogleBigqueryJobQueryDefaultDataset ¶
// GoogleBigqueryJobQueryDefaultDataset configures the default dataset used for
// unqualified table names in the query of a google_bigquery_job resource.
type GoogleBigqueryJobQueryDefaultDataset struct {
	// The dataset. Can be specified '{{dataset_id}}' if 'project_id' is also set, or of the form 'projects/{{project}}/datasets/{{dataset_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id}
	DatasetId *string `field:"required" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}
type GoogleBigqueryJobQueryDefaultDatasetOutputReference ¶
type GoogleBigqueryJobQueryDefaultDatasetOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobQueryDefaultDataset SetInternalValue(val *GoogleBigqueryJobQueryDefaultDataset) ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetProjectId() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryDefaultDatasetOutputReference ¶
// NewGoogleBigqueryJobQueryDefaultDatasetOutputReference constructs an output
// reference scoped to the given attribute of the given parent resource.
func NewGoogleBigqueryJobQueryDefaultDatasetOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobQueryDefaultDatasetOutputReference
type GoogleBigqueryJobQueryDestinationEncryptionConfiguration ¶
// GoogleBigqueryJobQueryDestinationEncryptionConfiguration selects the Cloud KMS
// key used to encrypt the query job's destination table.
type GoogleBigqueryJobQueryDestinationEncryptionConfiguration struct {
	// Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table.
	//
	// The BigQuery Service Account associated with your project requires access to this encryption key.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#kms_key_name GoogleBigqueryJob#kms_key_name}
	KmsKeyName *string `field:"required" json:"kmsKeyName" yaml:"kmsKeyName"`
}
type GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference ¶
type GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobQueryDestinationEncryptionConfiguration SetInternalValue(val *GoogleBigqueryJobQueryDestinationEncryptionConfiguration) KmsKeyName() *string SetKmsKeyName(val *string) KmsKeyNameInput() *string KmsKeyVersion() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference ¶
// NewGoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference
// constructs an output reference scoped to the given attribute of the given
// parent resource.
func NewGoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference
type GoogleBigqueryJobQueryDestinationTable ¶
// GoogleBigqueryJobQueryDestinationTable identifies the table the query job
// writes its results to.
type GoogleBigqueryJobQueryDestinationTable struct {
	// The table. Can be specified '{{table_id}}' if 'project_id' and 'dataset_id' are also set, or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#table_id GoogleBigqueryJob#table_id}
	TableId *string `field:"required" json:"tableId" yaml:"tableId"`
	// The ID of the dataset containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#dataset_id GoogleBigqueryJob#dataset_id}
	DatasetId *string `field:"optional" json:"datasetId" yaml:"datasetId"`
	// The ID of the project containing this table.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#project_id GoogleBigqueryJob#project_id}
	ProjectId *string `field:"optional" json:"projectId" yaml:"projectId"`
}
type GoogleBigqueryJobQueryDestinationTableOutputReference ¶
type GoogleBigqueryJobQueryDestinationTableOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DatasetId() *string SetDatasetId(val *string) DatasetIdInput() *string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobQueryDestinationTable SetInternalValue(val *GoogleBigqueryJobQueryDestinationTable) ProjectId() *string SetProjectId(val *string) ProjectIdInput() *string TableId() *string SetTableId(val *string) TableIdInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. 
GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetDatasetId() ResetProjectId() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryDestinationTableOutputReference ¶
// NewGoogleBigqueryJobQueryDestinationTableOutputReference constructs an output
// reference scoped to the given attribute of the given parent resource.
func NewGoogleBigqueryJobQueryDestinationTableOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobQueryDestinationTableOutputReference
type GoogleBigqueryJobQueryOutputReference ¶
type GoogleBigqueryJobQueryOutputReference interface { cdktf.ComplexObject AllowLargeResults() interface{} SetAllowLargeResults(val interface{}) AllowLargeResultsInput() interface{} // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) CreateDisposition() *string SetCreateDisposition(val *string) CreateDispositionInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string DefaultDataset() GoogleBigqueryJobQueryDefaultDatasetOutputReference DefaultDatasetInput() *GoogleBigqueryJobQueryDefaultDataset DestinationEncryptionConfiguration() GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference DestinationEncryptionConfigurationInput() *GoogleBigqueryJobQueryDestinationEncryptionConfiguration DestinationTable() GoogleBigqueryJobQueryDestinationTableOutputReference DestinationTableInput() *GoogleBigqueryJobQueryDestinationTable FlattenResults() interface{} SetFlattenResults(val interface{}) FlattenResultsInput() interface{} // Experimental. 
Fqn() *string InternalValue() *GoogleBigqueryJobQuery SetInternalValue(val *GoogleBigqueryJobQuery) MaximumBillingTier() *float64 SetMaximumBillingTier(val *float64) MaximumBillingTierInput() *float64 MaximumBytesBilled() *string SetMaximumBytesBilled(val *string) MaximumBytesBilledInput() *string ParameterMode() *string SetParameterMode(val *string) ParameterModeInput() *string Priority() *string SetPriority(val *string) PriorityInput() *string Query() *string SetQuery(val *string) QueryInput() *string SchemaUpdateOptions() *[]*string SetSchemaUpdateOptions(val *[]*string) SchemaUpdateOptionsInput() *[]*string ScriptOptions() GoogleBigqueryJobQueryScriptOptionsOutputReference ScriptOptionsInput() *GoogleBigqueryJobQueryScriptOptions // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) UseLegacySql() interface{} SetUseLegacySql(val interface{}) UseLegacySqlInput() interface{} UseQueryCache() interface{} SetUseQueryCache(val interface{}) UseQueryCacheInput() interface{} UserDefinedFunctionResources() GoogleBigqueryJobQueryUserDefinedFunctionResourcesList UserDefinedFunctionResourcesInput() interface{} WriteDisposition() *string SetWriteDisposition(val *string) WriteDispositionInput() *string // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. 
GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable PutDefaultDataset(value *GoogleBigqueryJobQueryDefaultDataset) PutDestinationEncryptionConfiguration(value *GoogleBigqueryJobQueryDestinationEncryptionConfiguration) PutDestinationTable(value *GoogleBigqueryJobQueryDestinationTable) PutScriptOptions(value *GoogleBigqueryJobQueryScriptOptions) PutUserDefinedFunctionResources(value interface{}) ResetAllowLargeResults() ResetCreateDisposition() ResetDefaultDataset() ResetDestinationEncryptionConfiguration() ResetDestinationTable() ResetFlattenResults() ResetMaximumBillingTier() ResetMaximumBytesBilled() ResetParameterMode() ResetPriority() ResetSchemaUpdateOptions() ResetScriptOptions() ResetUseLegacySql() ResetUseQueryCache() ResetUserDefinedFunctionResources() ResetWriteDisposition() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryOutputReference ¶
// NewGoogleBigqueryJobQueryOutputReference constructs an output reference
// scoped to the given attribute of the given parent resource.
func NewGoogleBigqueryJobQueryOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobQueryOutputReference
type GoogleBigqueryJobQueryScriptOptions ¶
// GoogleBigqueryJobQueryScriptOptions carries per-statement options for
// multi-statement (script) query jobs.
type GoogleBigqueryJobQueryScriptOptions struct {
	// Determines which statement in the script represents the "key result", used to populate the schema and query results of the script job.
	//
	// Possible values: ["LAST", "FIRST_SELECT"]
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#key_result_statement GoogleBigqueryJob#key_result_statement}
	KeyResultStatement *string `field:"optional" json:"keyResultStatement" yaml:"keyResultStatement"`
	// Limit on the number of bytes billed per statement. Exceeding this budget results in an error.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#statement_byte_budget GoogleBigqueryJob#statement_byte_budget}
	StatementByteBudget *string `field:"optional" json:"statementByteBudget" yaml:"statementByteBudget"`
	// Timeout period for each statement in a script.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#statement_timeout_ms GoogleBigqueryJob#statement_timeout_ms}
	StatementTimeoutMs *string `field:"optional" json:"statementTimeoutMs" yaml:"statementTimeoutMs"`
}
type GoogleBigqueryJobQueryScriptOptionsOutputReference ¶
type GoogleBigqueryJobQueryScriptOptionsOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobQueryScriptOptions SetInternalValue(val *GoogleBigqueryJobQueryScriptOptions) KeyResultStatement() *string SetKeyResultStatement(val *string) KeyResultStatementInput() *string StatementByteBudget() *string SetStatementByteBudget(val *string) StatementByteBudgetInput() *string StatementTimeoutMs() *string SetStatementTimeoutMs(val *string) StatementTimeoutMsInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. 
GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetKeyResultStatement() ResetStatementByteBudget() ResetStatementTimeoutMs() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryScriptOptionsOutputReference ¶
// NewGoogleBigqueryJobQueryScriptOptionsOutputReference constructs an output
// reference scoped to the given attribute of the given parent resource.
func NewGoogleBigqueryJobQueryScriptOptionsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobQueryScriptOptionsOutputReference
type GoogleBigqueryJobQueryUserDefinedFunctionResources ¶
// GoogleBigqueryJobQueryUserDefinedFunctionResources describes one UDF resource
// for a query job, given either inline or as a Cloud Storage URI.
type GoogleBigqueryJobQueryUserDefinedFunctionResources struct {
	// An inline resource that contains code for a user-defined function (UDF).
	//
	// Providing a inline code resource is equivalent to providing a URI for a file containing the same code.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#inline_code GoogleBigqueryJob#inline_code}
	InlineCode *string `field:"optional" json:"inlineCode" yaml:"inlineCode"`
	// A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#resource_uri GoogleBigqueryJob#resource_uri}
	ResourceUri *string `field:"optional" json:"resourceUri" yaml:"resourceUri"`
}
type GoogleBigqueryJobQueryUserDefinedFunctionResourcesList ¶
type GoogleBigqueryJobQueryUserDefinedFunctionResourcesList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesList ¶
// NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesList constructs a list
// reference scoped to the given attribute of the given parent resource;
// wrapsSet indicates the underlying attribute is a set rather than a list.
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GoogleBigqueryJobQueryUserDefinedFunctionResourcesList
type GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference ¶
type GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InlineCode() *string SetInlineCode(val *string) InlineCodeInput() *string InternalValue() interface{} SetInternalValue(val interface{}) ResourceUri() *string SetResourceUri(val *string) ResourceUriInput() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. 
InterpolationAsList() cdktf.IResolvable // Experimental. InterpolationForAttribute(property *string) cdktf.IResolvable ResetInlineCode() ResetResourceUri() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference ¶
// NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference
// constructs an output reference for the list element at complexObjectIndex of
// the given attribute on the given parent resource.
func NewGoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference
type GoogleBigqueryJobStatus ¶
// GoogleBigqueryJobStatus is a placeholder for the computed (read-only) status
// attribute of a google_bigquery_job; it carries no configurable fields.
type GoogleBigqueryJobStatus struct{}
type GoogleBigqueryJobStatusErrorResult ¶
// GoogleBigqueryJobStatusErrorResult is a placeholder for the computed
// status.error_result attribute; it carries no configurable fields.
type GoogleBigqueryJobStatusErrorResult struct{}
type GoogleBigqueryJobStatusErrorResultList ¶
type GoogleBigqueryJobStatusErrorResultList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) GoogleBigqueryJobStatusErrorResultOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobStatusErrorResultList ¶
// NewGoogleBigqueryJobStatusErrorResultList constructs a list reference scoped
// to the given attribute of the given parent resource; wrapsSet indicates the
// underlying attribute is a set rather than a list.
func NewGoogleBigqueryJobStatusErrorResultList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GoogleBigqueryJobStatusErrorResultList
type GoogleBigqueryJobStatusErrorResultOutputReference ¶
type GoogleBigqueryJobStatusErrorResultOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // set to true if this item is from inside a set and needs tolist() for accessing it set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobStatusErrorResult SetInternalValue(val *GoogleBigqueryJobStatusErrorResult) Location() *string Message() *string Reason() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobStatusErrorResultOutputReference ¶
// NewGoogleBigqueryJobStatusErrorResultOutputReference constructs an output
// reference for the list element at complexObjectIndex of the given attribute
// on the given parent resource.
func NewGoogleBigqueryJobStatusErrorResultOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) GoogleBigqueryJobStatusErrorResultOutputReference
type GoogleBigqueryJobStatusErrors ¶
// GoogleBigqueryJobStatusErrors is a placeholder for the computed
// status.errors attribute; it carries no configurable fields.
type GoogleBigqueryJobStatusErrors struct{}
type GoogleBigqueryJobStatusErrorsList ¶
type GoogleBigqueryJobStatusErrorsList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) GoogleBigqueryJobStatusErrorsOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobStatusErrorsList ¶
func NewGoogleBigqueryJobStatusErrorsList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GoogleBigqueryJobStatusErrorsList
type GoogleBigqueryJobStatusErrorsOutputReference ¶
type GoogleBigqueryJobStatusErrorsOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobStatusErrors SetInternalValue(val *GoogleBigqueryJobStatusErrors) Location() *string Message() *string Reason() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobStatusErrorsOutputReference ¶
func NewGoogleBigqueryJobStatusErrorsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) GoogleBigqueryJobStatusErrorsOutputReference
type GoogleBigqueryJobStatusList ¶
type GoogleBigqueryJobStatusList interface { cdktf.ComplexList // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string // Experimental. Fqn() *string // The attribute on the parent resource this class is referencing. TerraformAttribute() *string SetTerraformAttribute(val *string) // The parent resource. TerraformResource() cdktf.IInterpolatingParent SetTerraformResource(val cdktf.IInterpolatingParent) // whether the list is wrapping a set (will add tolist() to be able to access an item via an index). WrapsSet() *bool SetWrapsSet(val *bool) // Experimental. ComputeFqn() *string Get(index *float64) GoogleBigqueryJobStatusOutputReference // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobStatusList ¶
func NewGoogleBigqueryJobStatusList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GoogleBigqueryJobStatusList
type GoogleBigqueryJobStatusOutputReference ¶
type GoogleBigqueryJobStatusOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string ErrorResult() GoogleBigqueryJobStatusErrorResultList Errors() GoogleBigqueryJobStatusErrorsList // Experimental. Fqn() *string InternalValue() *GoogleBigqueryJobStatus SetInternalValue(val *GoogleBigqueryJobStatus) State() *string // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobStatusOutputReference ¶
func NewGoogleBigqueryJobStatusOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) GoogleBigqueryJobStatusOutputReference
type GoogleBigqueryJobTimeouts ¶
type GoogleBigqueryJobTimeouts struct { // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#create GoogleBigqueryJob#create}. Create *string `field:"optional" json:"create" yaml:"create"` // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/5.2.0/docs/resources/google_bigquery_job#delete GoogleBigqueryJob#delete}. Delete *string `field:"optional" json:"delete" yaml:"delete"` }
type GoogleBigqueryJobTimeoutsOutputReference ¶
type GoogleBigqueryJobTimeoutsOutputReference interface { cdktf.ComplexObject // the index of the complex object in a list. // Experimental. ComplexObjectIndex() interface{} // Experimental. SetComplexObjectIndex(val interface{}) // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items. // Experimental. ComplexObjectIsFromSet() *bool // Experimental. SetComplexObjectIsFromSet(val *bool) Create() *string SetCreate(val *string) CreateInput() *string // The creation stack of this resolvable which will be appended to errors thrown during resolution. // // If this returns an empty array the stack will not be attached. // Experimental. CreationStack() *[]*string Delete() *string SetDelete(val *string) DeleteInput() *string // Experimental. Fqn() *string InternalValue() interface{} SetInternalValue(val interface{}) // Experimental. TerraformAttribute() *string // Experimental. SetTerraformAttribute(val *string) // Experimental. TerraformResource() cdktf.IInterpolatingParent // Experimental. SetTerraformResource(val cdktf.IInterpolatingParent) // Experimental. ComputeFqn() *string // Experimental. GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{} // Experimental. GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable // Experimental. GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool // Experimental. GetListAttribute(terraformAttribute *string) *[]*string // Experimental. GetNumberAttribute(terraformAttribute *string) *float64 // Experimental. GetNumberListAttribute(terraformAttribute *string) *[]*float64 // Experimental. GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64 // Experimental. GetStringAttribute(terraformAttribute *string) *string // Experimental. GetStringMapAttribute(terraformAttribute *string) *map[string]*string // Experimental. InterpolationAsList() cdktf.IResolvable // Experimental. 
InterpolationForAttribute(property *string) cdktf.IResolvable ResetCreate() ResetDelete() // Produce the Token's value at resolution time. // Experimental. Resolve(_context cdktf.IResolveContext) interface{} // Return a string representation of this resolvable object. // // Returns a reversible string representation. // Experimental. ToString() *string }
func NewGoogleBigqueryJobTimeoutsOutputReference ¶
func NewGoogleBigqueryJobTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) GoogleBigqueryJobTimeoutsOutputReference
Source Files ¶
- GoogleBigqueryJob.go
- GoogleBigqueryJobConfig.go
- GoogleBigqueryJobCopy.go
- GoogleBigqueryJobCopyDestinationEncryptionConfiguration.go
- GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference.go
- GoogleBigqueryJobCopyDestinationEncryptionConfigurationOutputReference__checks.go
- GoogleBigqueryJobCopyDestinationTable.go
- GoogleBigqueryJobCopyDestinationTableOutputReference.go
- GoogleBigqueryJobCopyDestinationTableOutputReference__checks.go
- GoogleBigqueryJobCopyOutputReference.go
- GoogleBigqueryJobCopyOutputReference__checks.go
- GoogleBigqueryJobCopySourceTables.go
- GoogleBigqueryJobCopySourceTablesList.go
- GoogleBigqueryJobCopySourceTablesList__checks.go
- GoogleBigqueryJobCopySourceTablesOutputReference.go
- GoogleBigqueryJobCopySourceTablesOutputReference__checks.go
- GoogleBigqueryJobExtract.go
- GoogleBigqueryJobExtractOutputReference.go
- GoogleBigqueryJobExtractOutputReference__checks.go
- GoogleBigqueryJobExtractSourceModel.go
- GoogleBigqueryJobExtractSourceModelOutputReference.go
- GoogleBigqueryJobExtractSourceModelOutputReference__checks.go
- GoogleBigqueryJobExtractSourceTable.go
- GoogleBigqueryJobExtractSourceTableOutputReference.go
- GoogleBigqueryJobExtractSourceTableOutputReference__checks.go
- GoogleBigqueryJobLoad.go
- GoogleBigqueryJobLoadDestinationEncryptionConfiguration.go
- GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference.go
- GoogleBigqueryJobLoadDestinationEncryptionConfigurationOutputReference__checks.go
- GoogleBigqueryJobLoadDestinationTable.go
- GoogleBigqueryJobLoadDestinationTableOutputReference.go
- GoogleBigqueryJobLoadDestinationTableOutputReference__checks.go
- GoogleBigqueryJobLoadOutputReference.go
- GoogleBigqueryJobLoadOutputReference__checks.go
- GoogleBigqueryJobLoadParquetOptions.go
- GoogleBigqueryJobLoadParquetOptionsOutputReference.go
- GoogleBigqueryJobLoadParquetOptionsOutputReference__checks.go
- GoogleBigqueryJobLoadTimePartitioning.go
- GoogleBigqueryJobLoadTimePartitioningOutputReference.go
- GoogleBigqueryJobLoadTimePartitioningOutputReference__checks.go
- GoogleBigqueryJobQuery.go
- GoogleBigqueryJobQueryDefaultDataset.go
- GoogleBigqueryJobQueryDefaultDatasetOutputReference.go
- GoogleBigqueryJobQueryDefaultDatasetOutputReference__checks.go
- GoogleBigqueryJobQueryDestinationEncryptionConfiguration.go
- GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference.go
- GoogleBigqueryJobQueryDestinationEncryptionConfigurationOutputReference__checks.go
- GoogleBigqueryJobQueryDestinationTable.go
- GoogleBigqueryJobQueryDestinationTableOutputReference.go
- GoogleBigqueryJobQueryDestinationTableOutputReference__checks.go
- GoogleBigqueryJobQueryOutputReference.go
- GoogleBigqueryJobQueryOutputReference__checks.go
- GoogleBigqueryJobQueryScriptOptions.go
- GoogleBigqueryJobQueryScriptOptionsOutputReference.go
- GoogleBigqueryJobQueryScriptOptionsOutputReference__checks.go
- GoogleBigqueryJobQueryUserDefinedFunctionResources.go
- GoogleBigqueryJobQueryUserDefinedFunctionResourcesList.go
- GoogleBigqueryJobQueryUserDefinedFunctionResourcesList__checks.go
- GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference.go
- GoogleBigqueryJobQueryUserDefinedFunctionResourcesOutputReference__checks.go
- GoogleBigqueryJobStatus.go
- GoogleBigqueryJobStatusErrorResult.go
- GoogleBigqueryJobStatusErrorResultList.go
- GoogleBigqueryJobStatusErrorResultList__checks.go
- GoogleBigqueryJobStatusErrorResultOutputReference.go
- GoogleBigqueryJobStatusErrorResultOutputReference__checks.go
- GoogleBigqueryJobStatusErrors.go
- GoogleBigqueryJobStatusErrorsList.go
- GoogleBigqueryJobStatusErrorsList__checks.go
- GoogleBigqueryJobStatusErrorsOutputReference.go
- GoogleBigqueryJobStatusErrorsOutputReference__checks.go
- GoogleBigqueryJobStatusList.go
- GoogleBigqueryJobStatusList__checks.go
- GoogleBigqueryJobStatusOutputReference.go
- GoogleBigqueryJobStatusOutputReference__checks.go
- GoogleBigqueryJobTimeouts.go
- GoogleBigqueryJobTimeoutsOutputReference.go
- GoogleBigqueryJobTimeoutsOutputReference__checks.go
- GoogleBigqueryJob__checks.go
- main.go