Documentation ¶
Overview ¶
+kcc:proto=google.cloud.bigquery.v2
+kubebuilder:object:generate=true +groupName=bigquery.cnrm.cloud.google.com
Index ¶
- Variables
- type Access
- type BigQueryDataset
- type BigQueryDatasetList
- type BigQueryDatasetObservedState
- type BigQueryDatasetSpec
- type BigQueryDatasetStatus
- type BoolValue
- type DatasetAccessEntry
- type DatasetIdentity
- type DatasetParent
- type DatasetRef
- type DatasetReference
- type EncryptionConfiguration
- type ExternalCatalogDatasetOptions
- type ExternalDatasetReference
- type GcpTag
- type LinkedDatasetMetadata
- type LinkedDatasetSource
- type RestrictionConfig
- type RoutineReference
- type TableReference
Constants ¶
This section is empty.
Variables ¶
var ( // GroupVersion is group version used to register these objects GroupVersion = schema.GroupVersion{Group: "bigquery.cnrm.cloud.google.com", Version: "v1beta1"} // SchemeBuilder is used to add go types to the GroupVersionKind scheme SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} // AddToScheme adds the types in this group-version to the given scheme. AddToScheme = SchemeBuilder.AddToScheme )
var BigQueryDatasetGVK = GroupVersion.WithKind("BigQueryDataset")
Functions ¶
This section is empty.
Types ¶
type Access ¶
type Access struct { // An IAM role ID that should be granted to the user, group, // or domain specified in this access entry. // The following legacy mappings will be applied: // // * `OWNER`: `roles/bigquery.dataOwner` // * `WRITER`: `roles/bigquery.dataEditor` // * `READER`: `roles/bigquery.dataViewer` // // This field will accept any of the above formats, but will return only // the legacy format. For example, if you set this field to // "roles/bigquery.dataOwner", it will be returned back as "OWNER". Role *string `json:"role,omitempty"` // [Pick one] An email address of a user to grant access to. For example: // fred@example.com. Maps to IAM policy member "user:EMAIL" or // "serviceAccount:EMAIL". UserByEmail *string `json:"userByEmail,omitempty"` // [Pick one] An email address of a Google Group to grant access to. // Maps to IAM policy member "group:GROUP". GroupByEmail *string `json:"groupByEmail,omitempty"` // [Pick one] A domain to grant access to. Any users signed in with the domain // specified will be granted the specified access. Example: "example.com". // Maps to IAM policy member "domain:DOMAIN". Domain *string `json:"domain,omitempty"` // [Pick one] A special group to grant access to. Possible values include: // // * projectOwners: Owners of the enclosing project. // * projectReaders: Readers of the enclosing project. // * projectWriters: Writers of the enclosing project. // * allAuthenticatedUsers: All authenticated BigQuery users. // // Maps to similarly-named IAM members. SpecialGroup *string `json:"specialGroup,omitempty"` // [Pick one] Some other type of member that appears in the IAM Policy but // isn't a user, group, domain, or special group. IamMember *string `json:"iamMember,omitempty"` // [Pick one] A view from a different dataset to grant access to. Queries // executed against that view will have read access to views/tables/routines // in this dataset. // The role field is not required when this field is set. 
If that view is // updated by any user, access to the view needs to be granted again via an // update operation. View *TableReference `json:"view,omitempty"` // [Pick one] A routine from a different dataset to grant access to. Queries // executed against that routine will have read access to // views/tables/routines in this dataset. Only UDF is supported for now. // The role field is not required when this field is set. If that routine is // updated by any user, access to the routine needs to be granted again via // an update operation. Routine *RoutineReference `json:"routine,omitempty"` // [Pick one] A grant authorizing all resources of a particular type in a // particular dataset access to this dataset. Only views are supported for // now. The role field is not required when this field is set. If that dataset // is deleted and re-created, its access needs to be granted again via an // update operation. Dataset *DatasetAccessEntry `json:"dataset,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.Access
func (*Access) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Access.
func (*Access) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigQueryDataset ¶
type BigQueryDataset struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec BigQueryDatasetSpec `json:"spec,omitempty"` Status BigQueryDatasetStatus `json:"status,omitempty"` }
BigQueryDataset is the Schema for the BigQueryDataset API +k8s:openapi-gen=true
func (*BigQueryDataset) DeepCopy ¶
func (in *BigQueryDataset) DeepCopy() *BigQueryDataset
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDataset.
func (*BigQueryDataset) DeepCopyInto ¶
func (in *BigQueryDataset) DeepCopyInto(out *BigQueryDataset)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*BigQueryDataset) DeepCopyObject ¶
func (in *BigQueryDataset) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type BigQueryDatasetList ¶
type BigQueryDatasetList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []BigQueryDataset `json:"items"` }
+k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object BigQueryDatasetList contains a list of BigQueryDataset
func (*BigQueryDatasetList) DeepCopy ¶
func (in *BigQueryDatasetList) DeepCopy() *BigQueryDatasetList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetList.
func (*BigQueryDatasetList) DeepCopyInto ¶
func (in *BigQueryDatasetList) DeepCopyInto(out *BigQueryDatasetList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*BigQueryDatasetList) DeepCopyObject ¶
func (in *BigQueryDatasetList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type BigQueryDatasetObservedState ¶
type BigQueryDatasetObservedState struct { // Optional. If the location is not specified in the spec, the GCP server defaults to a location and will be captured here. Location *string `json:"location,omitempty"` }
BigQueryDatasetObservedState defines the observed state of BigQueryDataset +kcc:proto=google.cloud.bigquery.v2.Dataset
func (*BigQueryDatasetObservedState) DeepCopy ¶
func (in *BigQueryDatasetObservedState) DeepCopy() *BigQueryDatasetObservedState
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetObservedState.
func (*BigQueryDatasetObservedState) DeepCopyInto ¶
func (in *BigQueryDatasetObservedState) DeepCopyInto(out *BigQueryDatasetObservedState)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigQueryDatasetSpec ¶
type BigQueryDatasetSpec struct { // The BigQueryDataset name. If not given, the metadata.name will be used. ResourceID *string `json:"resourceID,omitempty"` // An array of objects that define dataset access for one or more entities. // +optional Access []Access `json:"access,omitempty"` // Optional. Defines the default collation specification of future tables // created in the dataset. If a table is created in this dataset without // table-level default collation, then the table inherits the dataset default // collation, which is applied to the string fields that do not have explicit // collation specified. A change to this field affects only tables created // afterwards, and does not alter the existing tables. // The following values are supported: // // * 'und:ci': undetermined locale, case-insensitive. // * '': empty string. Defaults to case-sensitive behavior. DefaultCollation *string `json:"defaultCollation,omitempty"` // The default encryption key for all tables in the dataset. // After this property is set, the encryption key of all newly-created tables // in the dataset is set to this value unless the table creation request or // query explicitly overrides the key. DefaultEncryptionConfiguration *EncryptionConfiguration `json:"defaultEncryptionConfiguration,omitempty"` // The default partition expiration, expressed in milliseconds. // // When new time-partitioned tables are created in a dataset where this // property is set, the table will inherit this value, propagated as the // `TimePartitioning.expirationMs` property on the new table. If you set // `TimePartitioning.expirationMs` explicitly when creating a table, // the `defaultPartitionExpirationMs` of the containing dataset is ignored. // // When creating a partitioned table, if `defaultPartitionExpirationMs` // is set, the `defaultTableExpirationMs` value is ignored and the table // will not inherit a table expiration deadline. 
DefaultPartitionExpirationMs *int64 `json:"defaultPartitionExpirationMs,omitempty"` // Optional. The default lifetime of all tables in the dataset, in // milliseconds. The minimum lifetime value is 3600000 milliseconds (one // hour). To clear an existing default expiration with a PATCH request, set to // 0. Once this property is set, all newly-created tables in the dataset will // have an expirationTime property set to the creation time plus the value in // this property, and changing the value will only affect new tables, not // existing ones. When the expirationTime for a given table is reached, that // table will be deleted automatically. // If a table's expirationTime is modified or removed before the table // expires, or if you provide an explicit expirationTime when creating a // table, that value takes precedence over the default expiration time // indicated by this property. DefaultTableExpirationMs *int64 `json:"defaultTableExpirationMs,omitempty"` // Optional. A user-friendly description of the dataset. Description *string `json:"description,omitempty"` // Optional. A descriptive name for the dataset. FriendlyName *string `json:"friendlyName,omitempty"` // Optional. TRUE if the dataset and its table names are case-insensitive, // otherwise FALSE. By default, this is FALSE, which means the dataset and its // table names are case-sensitive. This field does not affect routine // references. IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty"` // Optional. The geographic location where the dataset should reside. See // https://cloud.google.com/bigquery/docs/locations for supported // locations. Location *string `json:"location,omitempty"` // Optional. Defines the time travel window in hours. The value can be from 48 // to 168 hours (2 to 7 days). The default value is 168 hours if this is not // set. MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty"` // Optional. The project that this resource belongs to. 
ProjectRef *refs.ProjectRef `json:"projectRef,omitempty"` // Optional. Updates storage_billing_model for the dataset. StorageBillingModel *string `json:"storageBillingModel,omitempty"` }
BigQueryDatasetSpec defines the desired state of BigQueryDataset +kcc:proto=google.cloud.bigquery.v2.Dataset
func (*BigQueryDatasetSpec) DeepCopy ¶
func (in *BigQueryDatasetSpec) DeepCopy() *BigQueryDatasetSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetSpec.
func (*BigQueryDatasetSpec) DeepCopyInto ¶
func (in *BigQueryDatasetSpec) DeepCopyInto(out *BigQueryDatasetSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BigQueryDatasetStatus ¶
type BigQueryDatasetStatus struct { /* Conditions represent the latest available observations of the object's current state. */ Conditions []v1alpha1.Condition `json:"conditions,omitempty"` // Output only. The time when this dataset was created, in milliseconds since // the epoch. CreationTime *int64 `json:"creationTime,omitempty"` // Output only. A hash of the resource. Etag *string `json:"etag,omitempty"` // A unique specifier for the BigQueryDataset resource in GCP. ExternalRef *string `json:"externalRef,omitempty"` // Output only. The date when this dataset was last modified, in milliseconds // since the epoch. LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` // ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource. // +optional ObservedGeneration *int64 `json:"observedGeneration,omitempty"` // Output only. A URL that can be used to access the resource again. You can // use this URL in Get or Update requests to the resource. SelfLink *string `json:"selfLink,omitempty"` // ObservedState is the state of the resource as most recently observed in GCP. ObservedState *BigQueryDatasetObservedState `json:"observedState,omitempty"` }
BigQueryDatasetStatus defines the config connector machine state of BigQueryDataset
func (*BigQueryDatasetStatus) DeepCopy ¶
func (in *BigQueryDatasetStatus) DeepCopy() *BigQueryDatasetStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetStatus.
func (*BigQueryDatasetStatus) DeepCopyInto ¶
func (in *BigQueryDatasetStatus) DeepCopyInto(out *BigQueryDatasetStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type BoolValue ¶ added in v1.125.0
type BoolValue struct { // The bool value. Value *bool `json:"value,omitempty"` }
+kcc:proto=google.protobuf.BoolValue
func (*BoolValue) DeepCopy ¶ added in v1.125.0
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BoolValue.
func (*BoolValue) DeepCopyInto ¶ added in v1.125.0
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetAccessEntry ¶
type DatasetAccessEntry struct { // The dataset this entry applies to. // +required Dataset *DatasetReference `json:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only // views are supported, but additional target types may be added in the // future. // +required TargetTypes []string `json:"targetTypes,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.DatasetAccessEntry
func (*DatasetAccessEntry) DeepCopy ¶
func (in *DatasetAccessEntry) DeepCopy() *DatasetAccessEntry
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessEntry.
func (*DatasetAccessEntry) DeepCopyInto ¶
func (in *DatasetAccessEntry) DeepCopyInto(out *DatasetAccessEntry)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type DatasetIdentity ¶ added in v1.127.0
type DatasetIdentity struct {
// contains filtered or unexported fields
}
DatasetIdentity defines the resource reference to BigQueryDataset, whose "External" field holds the GCP identifier for the KRM object.
func NewDatasetIdentity ¶ added in v1.127.0
func NewDatasetIdentity(ctx context.Context, reader client.Reader, obj *BigQueryDataset) (*DatasetIdentity, error)
NewDatasetIdentity builds a DatasetIdentity from the Config Connector BigQueryDataset object.
func (*DatasetIdentity) DeepCopy ¶ added in v1.127.0
func (in *DatasetIdentity) DeepCopy() *DatasetIdentity
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIdentity.
func (*DatasetIdentity) DeepCopyInto ¶ added in v1.127.0
func (in *DatasetIdentity) DeepCopyInto(out *DatasetIdentity)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetIdentity) ID ¶ added in v1.127.0
func (i *DatasetIdentity) ID() string
func (*DatasetIdentity) Parent ¶ added in v1.127.0
func (i *DatasetIdentity) Parent() *DatasetParent
func (*DatasetIdentity) String ¶ added in v1.127.0
func (i *DatasetIdentity) String() string
type DatasetParent ¶ added in v1.127.0
type DatasetParent struct {
ProjectID string
}
func ParseDatasetExternal ¶ added in v1.127.0
func ParseDatasetExternal(external string) (parent *DatasetParent, resourceID string, err error)
func (*DatasetParent) DeepCopy ¶ added in v1.127.0
func (in *DatasetParent) DeepCopy() *DatasetParent
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetParent.
func (*DatasetParent) DeepCopyInto ¶ added in v1.127.0
func (in *DatasetParent) DeepCopyInto(out *DatasetParent)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetParent) String ¶ added in v1.127.0
func (p *DatasetParent) String() string
type DatasetRef ¶ added in v1.127.0
type DatasetRef struct { // A reference to an externally managed BigQueryDataset resource. // Should be in the format "projects/<projectID>/datasets/<datasetID>". External string `json:"external,omitempty"` // The name of a BigQueryDataset resource. Name string `json:"name,omitempty"` // The namespace of a BigQueryDataset resource. Namespace string `json:"namespace,omitempty"` }
DatasetRef defines the resource reference to BigQueryDataset, whose "External" field holds the GCP identifier for the KRM object.
func (*DatasetRef) DeepCopy ¶ added in v1.127.0
func (in *DatasetRef) DeepCopy() *DatasetRef
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetRef.
func (*DatasetRef) DeepCopyInto ¶ added in v1.127.0
func (in *DatasetRef) DeepCopyInto(out *DatasetRef)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*DatasetRef) NormalizedExternal ¶ added in v1.127.0
func (r *DatasetRef) NormalizedExternal(ctx context.Context, reader client.Reader, otherNamespace string) (string, error)
NormalizedExternal provisions the "External" value for other resources that depend on BigQueryDataset. If the "External" is given in the other resource's spec.BigQueryDatasetRef, the given value will be used. Otherwise, the "Name" and "Namespace" will be used to query the actual BigQueryDataset object from the cluster.
type DatasetReference ¶
type DatasetReference struct { // A unique Id for this dataset, without the project name. The Id // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). // The maximum length is 1,024 characters. // +required DatasetId *string `json:"datasetId,omitempty"` // The ID of the project containing this dataset. // +required ProjectId *string `json:"projectId,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.DatasetReference
func (*DatasetReference) DeepCopy ¶
func (in *DatasetReference) DeepCopy() *DatasetReference
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetReference.
func (*DatasetReference) DeepCopyInto ¶
func (in *DatasetReference) DeepCopyInto(out *DatasetReference)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type EncryptionConfiguration ¶
type EncryptionConfiguration struct { // Optional. Describes the Cloud KMS encryption key that will be used to // protect destination BigQuery table. The BigQuery Service Account associated // with your project requires access to this encryption key. KmsKeyRef *refs.KMSCryptoKeyRef `json:"kmsKeyRef,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.EncryptionConfiguration
func (*EncryptionConfiguration) DeepCopy ¶
func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration.
func (*EncryptionConfiguration) DeepCopyInto ¶
func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalCatalogDatasetOptions ¶
type ExternalCatalogDatasetOptions struct { // Optional. A map of key value pairs defining the parameters and properties // of the open source schema. Maximum size of 2Mib. Parameters map[string]string `json:"parameters,omitempty"` // Optional. The storage location URI for all tables in the dataset. // Equivalent to hive metastore's database locationUri. Maximum length of 1024 // characters. DefaultStorageLocationUri *string `json:"defaultStorageLocationUri,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.ExternalCatalogDatasetOptions
func (*ExternalCatalogDatasetOptions) DeepCopy ¶
func (in *ExternalCatalogDatasetOptions) DeepCopy() *ExternalCatalogDatasetOptions
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogDatasetOptions.
func (*ExternalCatalogDatasetOptions) DeepCopyInto ¶
func (in *ExternalCatalogDatasetOptions) DeepCopyInto(out *ExternalCatalogDatasetOptions)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExternalDatasetReference ¶
type ExternalDatasetReference struct { // +required. External source that backs this dataset. ExternalSource *string `json:"externalSource,omitempty"` // +required. The connection id that is used to access the external_source. // // Format: // projects/{project_id}/locations/{location_id}/connections/{connection_id} Connection *string `json:"connection,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.ExternalDatasetReference
func (*ExternalDatasetReference) DeepCopy ¶
func (in *ExternalDatasetReference) DeepCopy() *ExternalDatasetReference
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReference.
func (*ExternalDatasetReference) DeepCopyInto ¶
func (in *ExternalDatasetReference) DeepCopyInto(out *ExternalDatasetReference)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type GcpTag ¶
type GcpTag struct { // Required. The namespaced friendly name of the tag key, e.g. // "12345/environment" where 12345 is org id. TagKey *string `json:"tagKey,omitempty"` // Required. The friendly short name of the tag value, e.g. "production". TagValue *string `json:"tagValue,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.GcpTag
func (*GcpTag) DeepCopy ¶
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GcpTag.
func (*GcpTag) DeepCopyInto ¶
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LinkedDatasetMetadata ¶
type LinkedDatasetMetadata struct { }
func (*LinkedDatasetMetadata) DeepCopy ¶
func (in *LinkedDatasetMetadata) DeepCopy() *LinkedDatasetMetadata
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetMetadata.
func (*LinkedDatasetMetadata) DeepCopyInto ¶
func (in *LinkedDatasetMetadata) DeepCopyInto(out *LinkedDatasetMetadata)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type LinkedDatasetSource ¶
type LinkedDatasetSource struct { // The source dataset reference contains project numbers and not project ids. SourceDataset *DatasetReference `json:"sourceDataset,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.LinkedDatasetSource
func (*LinkedDatasetSource) DeepCopy ¶
func (in *LinkedDatasetSource) DeepCopy() *LinkedDatasetSource
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetSource.
func (*LinkedDatasetSource) DeepCopyInto ¶
func (in *LinkedDatasetSource) DeepCopyInto(out *LinkedDatasetSource)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RestrictionConfig ¶
type RestrictionConfig struct { // Output only. Specifies the type of dataset/table restriction. Type *string `json:"type,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.RestrictionConfig
func (*RestrictionConfig) DeepCopy ¶
func (in *RestrictionConfig) DeepCopy() *RestrictionConfig
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictionConfig.
func (*RestrictionConfig) DeepCopyInto ¶
func (in *RestrictionConfig) DeepCopyInto(out *RestrictionConfig)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RoutineReference ¶
type RoutineReference struct { // The ID of the project containing this routine. // +required ProjectId *string `json:"projectId,omitempty"` // The ID of the dataset containing this routine. // +required DatasetId *string `json:"datasetId,omitempty"` // The Id of the routine. The Id must contain only // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum // length is 256 characters. // +required RoutineId *string `json:"routineId,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.RoutineReference
func (*RoutineReference) DeepCopy ¶
func (in *RoutineReference) DeepCopy() *RoutineReference
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineReference.
func (*RoutineReference) DeepCopyInto ¶
func (in *RoutineReference) DeepCopyInto(out *RoutineReference)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type TableReference ¶
type TableReference struct { // The ID of the project containing this table. // +required ProjectId *string `json:"projectId,omitempty"` // The ID of the dataset containing this table. // +required DatasetId *string `json:"datasetId,omitempty"` // The Id of the table. The Id can contain Unicode characters in // category L (letter), M (mark), N (number), Pc (connector, including // underscore), Pd (dash), and Zs (space). For more information, see [General // Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). // The maximum length is 1,024 characters. Certain operations allow suffixing // of the table Id with a partition decorator, such as // `sample_table$20190123`. // +required TableId *string `json:"tableId,omitempty"` }
+kcc:proto=google.cloud.bigquery.v2.TableReference
func (*TableReference) DeepCopy ¶
func (in *TableReference) DeepCopy() *TableReference
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableReference.
func (*TableReference) DeepCopyInto ¶
func (in *TableReference) DeepCopyInto(out *TableReference)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.