Documentation ¶
Index ¶
- func KafkaClusterTopic_GenerateConfigForImport(scope constructs.Construct, importToId *string, importFromId *string, ...) cdktf.ImportableResource
- func KafkaClusterTopic_IsConstruct(x interface{}) *bool
- func KafkaClusterTopic_IsTerraformElement(x interface{}) *bool
- func KafkaClusterTopic_IsTerraformResource(x interface{}) *bool
- func KafkaClusterTopic_TfResourceType() *string
- func NewKafkaClusterTopicTimeoutsOutputReference_Override(k KafkaClusterTopicTimeoutsOutputReference, ...)
- func NewKafkaClusterTopic_Override(k KafkaClusterTopic, scope constructs.Construct, id *string, ...)
- type KafkaClusterTopic
- type KafkaClusterTopicConfig
- type KafkaClusterTopicTimeouts
- type KafkaClusterTopicTimeoutsOutputReference
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func KafkaClusterTopic_GenerateConfigForImport ¶
func KafkaClusterTopic_GenerateConfigForImport(scope constructs.Construct, importToId *string, importFromId *string, provider cdktf.TerraformProvider) cdktf.ImportableResource
Generates CDKTF code for importing a KafkaClusterTopic resource upon running "cdktf plan <stack-name>".
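A minimal sketch of calling this from inside a stack (the package alias kafkaclustertopic, the stack variable, the logical ID "imported_topic", and the import ID are illustrative assumptions; jsii helpers come from github.com/aws/jsii-runtime-go):

kafkaclustertopic.KafkaClusterTopic_GenerateConfigForImport(
    stack,                            // scope: the stack the generated configuration belongs to
    jsii.String("imported_topic"),    // importToId: logical ID used in the generated config
    jsii.String("existing-topic-id"), // importFromId: ID of the existing topic; check the provider docs for the expected format
    nil,                              // provider: nil falls back to the default provider configuration
)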
func KafkaClusterTopic_IsConstruct ¶
func KafkaClusterTopic_IsConstruct(x interface{}) *bool
Checks if `x` is a construct.
Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.
Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid using `instanceof` and to use this type-testing method instead.
Returns: true if `x` is an object created from a class which extends `Construct`.
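A short sketch of the intended usage (the candidate value is illustrative, and the package alias kafkaclustertopic is an assumption):

if *kafkaclustertopic.KafkaClusterTopic_IsConstruct(candidate) {
    // candidate was created from a class extending Construct, even if it
    // came from a different on-disk copy of the constructs library.
}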
func KafkaClusterTopic_IsTerraformElement ¶
func KafkaClusterTopic_IsTerraformElement(x interface{}) *bool
Experimental.
func KafkaClusterTopic_IsTerraformResource ¶
func KafkaClusterTopic_IsTerraformResource(x interface{}) *bool
Experimental.
func KafkaClusterTopic_TfResourceType ¶
func KafkaClusterTopic_TfResourceType() *string
func NewKafkaClusterTopicTimeoutsOutputReference_Override ¶
func NewKafkaClusterTopicTimeoutsOutputReference_Override(k KafkaClusterTopicTimeoutsOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string)
func NewKafkaClusterTopic_Override ¶
func NewKafkaClusterTopic_Override(k KafkaClusterTopic, scope constructs.Construct, id *string, config *KafkaClusterTopicConfig)
Create a new {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic ionoscloud_kafka_cluster_topic} Resource.
Types ¶
type KafkaClusterTopic ¶
type KafkaClusterTopic interface {
    cdktf.TerraformResource
    // Experimental.
    CdktfStack() cdktf.TerraformStack
    ClusterId() *string
    SetClusterId(val *string)
    ClusterIdInput() *string
    // Experimental.
    Connection() interface{}
    // Experimental.
    SetConnection(val interface{})
    // Experimental.
    ConstructNodeMetadata() *map[string]interface{}
    // Experimental.
    Count() interface{}
    // Experimental.
    SetCount(val interface{})
    // Experimental.
    DependsOn() *[]*string
    // Experimental.
    SetDependsOn(val *[]*string)
    // Experimental.
    ForEach() cdktf.ITerraformIterator
    // Experimental.
    SetForEach(val cdktf.ITerraformIterator)
    // Experimental.
    Fqn() *string
    // Experimental.
    FriendlyUniqueId() *string
    Id() *string
    // Experimental.
    Lifecycle() *cdktf.TerraformResourceLifecycle
    // Experimental.
    SetLifecycle(val *cdktf.TerraformResourceLifecycle)
    Location() *string
    SetLocation(val *string)
    LocationInput() *string
    Name() *string
    SetName(val *string)
    NameInput() *string
    // The tree node.
    Node() constructs.Node
    NumberOfPartitions() *float64
    SetNumberOfPartitions(val *float64)
    NumberOfPartitionsInput() *float64
    // Experimental.
    Provider() cdktf.TerraformProvider
    // Experimental.
    SetProvider(val cdktf.TerraformProvider)
    // Experimental.
    Provisioners() *[]interface{}
    // Experimental.
    SetProvisioners(val *[]interface{})
    // Experimental.
    RawOverrides() interface{}
    ReplicationFactor() *float64
    SetReplicationFactor(val *float64)
    ReplicationFactorInput() *float64
    RetentionTime() *float64
    SetRetentionTime(val *float64)
    RetentionTimeInput() *float64
    SegmentBytes() *float64
    SetSegmentBytes(val *float64)
    SegmentBytesInput() *float64
    // Experimental.
    TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata
    // Experimental.
    TerraformMetaArguments() *map[string]interface{}
    // Experimental.
    TerraformResourceType() *string
    Timeouts() KafkaClusterTopicTimeoutsOutputReference
    TimeoutsInput() interface{}
    // Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move.
    // Experimental.
    AddMoveTarget(moveTarget *string)
    // Experimental.
    AddOverride(path *string, value interface{})
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    HasResourceMove() interface{}
    // Experimental.
    ImportFrom(id *string, provider cdktf.TerraformProvider)
    // Experimental.
    InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable
    // Move the resource corresponding to "id" to this resource.
    //
    // Note that the resource being moved from must be marked as moved using its instance function.
    // Experimental.
    MoveFromId(id *string)
    // Moves this resource to the target resource given by moveTarget.
    // Experimental.
    MoveTo(moveTarget *string, index interface{})
    // Moves this resource to the resource corresponding to "id".
    // Experimental.
    MoveToId(id *string)
    // Overrides the auto-generated logical ID with a specific ID.
    // Experimental.
    OverrideLogicalId(newLogicalId *string)
    PutTimeouts(value *KafkaClusterTopicTimeouts)
    ResetLocation()
    ResetNumberOfPartitions()
    // Resets a previously passed logical Id to use the auto-generated logical id again.
    // Experimental.
    ResetOverrideLogicalId()
    ResetReplicationFactor()
    ResetRetentionTime()
    ResetSegmentBytes()
    ResetTimeouts()
    SynthesizeAttributes() *map[string]interface{}
    SynthesizeHclAttributes() *map[string]interface{}
    // Experimental.
    ToHclTerraform() interface{}
    // Experimental.
    ToMetadata() interface{}
    // Returns a string representation of this construct.
    ToString() *string
    // Adds this resource to the terraform JSON output.
    // Experimental.
    ToTerraform() interface{}
}
Represents a {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic ionoscloud_kafka_cluster_topic}.
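A brief sketch of working with a resource handle through this interface; the topic variable is assumed to come from NewKafkaClusterTopic (see below), the values are illustrative, and the millisecond unit for the retention time is an assumption to verify against the provider docs:

topic.SetRetentionTime(jsii.Number(604800000)) // e.g. 7 days, assuming milliseconds
topic.ResetLocation()                          // drop the optional location and use the default
fmt.Println(*topic.Name())                     // read an attribute back (fmt from the standard library)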
func NewKafkaClusterTopic ¶
func NewKafkaClusterTopic(scope constructs.Construct, id *string, config *KafkaClusterTopicConfig) KafkaClusterTopic
Create a new {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic ionoscloud_kafka_cluster_topic} Resource.
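An end-to-end sketch of creating the resource inside a CDKTF stack. The import path for this generated package is a placeholder, and the cluster UUID, names, and stack ID are illustrative:

package main

import (
    "github.com/aws/constructs-go/constructs/v10"
    "github.com/aws/jsii-runtime-go"
    "github.com/hashicorp/terraform-cdk-go/cdktf"

    kafkaclustertopic "example.com/generated/ionoscloud/kafkaclustertopic" // placeholder import path
)

func NewTopicStack(scope constructs.Construct, id string) cdktf.TerraformStack {
    stack := cdktf.NewTerraformStack(scope, &id)

    // Only cluster_id and name are required; all other fields are optional.
    kafkaclustertopic.NewKafkaClusterTopic(stack, jsii.String("orders_topic"), &kafkaclustertopic.KafkaClusterTopicConfig{
        ClusterId: jsii.String("00000000-0000-0000-0000-000000000000"), // illustrative cluster UUID
        Name:      jsii.String("orders"),
    })

    return stack
}

func main() {
    app := cdktf.NewApp(nil)
    NewTopicStack(app, "kafka-topic-stack")
    app.Synth()
}

A real stack would also configure the ionoscloud provider; that part is omitted from this sketch.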
type KafkaClusterTopicConfig ¶
type KafkaClusterTopicConfig struct {
    // Experimental.
    Connection interface{} `field:"optional" json:"connection" yaml:"connection"`
    // Experimental.
    Count interface{} `field:"optional" json:"count" yaml:"count"`
    // Experimental.
    DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"`
    // Experimental.
    ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"`
    // Experimental.
    Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"`
    // Experimental.
    Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"`
    // Experimental.
    Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"`
    // The ID of the Kafka Cluster to which the topic belongs.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#cluster_id KafkaClusterTopic#cluster_id}
    ClusterId *string `field:"required" json:"clusterId" yaml:"clusterId"`
    // The name of your Kafka Cluster Topic.
    //
    // Must be 63 characters or less and must begin and end with an alphanumeric character (`[a-z0-9A-Z]`) with dashes (`-`), underscores (`_`), dots (`.`), and alphanumerics between.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#name KafkaClusterTopic#name}
    Name *string `field:"required" json:"name" yaml:"name"`
    // The location of your Kafka Cluster Topic. Supported locations: de/fra, de/txl.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#location KafkaClusterTopic#location}
    Location *string `field:"optional" json:"location" yaml:"location"`
    // The number of partitions of the topic.
    //
    // Partitions allow for parallel processing of messages. The partition count must be greater than or equal to the replication factor.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#number_of_partitions KafkaClusterTopic#number_of_partitions}
    NumberOfPartitions *float64 `field:"optional" json:"numberOfPartitions" yaml:"numberOfPartitions"`
    // The number of replicas of the topic.
    //
    // The replication factor determines how many copies of the topic are stored on different brokers. The replication factor must be less than or equal to the number of brokers in the Kafka Cluster.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#replication_factor KafkaClusterTopic#replication_factor}
    ReplicationFactor *float64 `field:"optional" json:"replicationFactor" yaml:"replicationFactor"`
    // This configuration controls the maximum time we will retain a log before we will discard old log segments to free up space.
    //
    // This represents an SLA on how soon consumers must read their data. If set to -1, no time limit is applied.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#retention_time KafkaClusterTopic#retention_time}
    RetentionTime *float64 `field:"optional" json:"retentionTime" yaml:"retentionTime"`
    // This configuration controls the segment file size for the log.
    //
    // Retention and cleaning are always done a file at a time, so a larger segment size means fewer files but less granular control over retention.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#segment_bytes KafkaClusterTopic#segment_bytes}
    SegmentBytes *float64 `field:"optional" json:"segmentBytes" yaml:"segmentBytes"`
    // timeouts block.
    //
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#timeouts KafkaClusterTopic#timeouts}
    Timeouts *KafkaClusterTopicTimeouts `field:"optional" json:"timeouts" yaml:"timeouts"`
}
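Building on the sketch above, a config that also sets the optional tuning fields might look like the following; every value is illustrative, and the millisecond interpretation of the retention time follows the usual Kafka convention rather than anything stated on this page:

&kafkaclustertopic.KafkaClusterTopicConfig{
    ClusterId:          jsii.String("00000000-0000-0000-0000-000000000000"),
    Name:               jsii.String("orders"),
    Location:           jsii.String("de/fra"),   // or "de/txl"
    NumberOfPartitions: jsii.Number(6),          // must be >= ReplicationFactor
    ReplicationFactor:  jsii.Number(3),          // must be <= the number of brokers in the cluster
    RetentionTime:      jsii.Number(604800000),  // 7 days; -1 disables the time limit
    SegmentBytes:       jsii.Number(1073741824), // 1 GiB log segments
}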
type KafkaClusterTopicTimeouts ¶
type KafkaClusterTopicTimeouts struct {
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#create KafkaClusterTopic#create}.
    Create *string `field:"optional" json:"create" yaml:"create"`
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#default KafkaClusterTopic#default}.
    Default *string `field:"optional" json:"default" yaml:"default"`
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#delete KafkaClusterTopic#delete}.
    Delete *string `field:"optional" json:"delete" yaml:"delete"`
    // Docs at Terraform Registry: {@link https://registry.terraform.io/providers/ionos-cloud/ionoscloud/6.7.1/docs/resources/kafka_cluster_topic#update KafkaClusterTopic#update}.
    Update *string `field:"optional" json:"update" yaml:"update"`
}
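Timeouts can be supplied in the Timeouts field of KafkaClusterTopicConfig or attached after creation via PutTimeouts; a short sketch with illustrative durations, reusing the topic variable from the earlier example:

topic.PutTimeouts(&kafkaclustertopic.KafkaClusterTopicTimeouts{
    Create: jsii.String("10m"),
    Delete: jsii.String("5m"),
})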
type KafkaClusterTopicTimeoutsOutputReference ¶
type KafkaClusterTopicTimeoutsOutputReference interface {
    cdktf.ComplexObject
    // The index of the complex object in a list.
    // Experimental.
    ComplexObjectIndex() interface{}
    // Experimental.
    SetComplexObjectIndex(val interface{})
    // Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
    // Experimental.
    ComplexObjectIsFromSet() *bool
    // Experimental.
    SetComplexObjectIsFromSet(val *bool)
    Create() *string
    SetCreate(val *string)
    CreateInput() *string
    // The creation stack of this resolvable, which will be appended to errors thrown during resolution.
    //
    // If this returns an empty array the stack will not be attached.
    // Experimental.
    CreationStack() *[]*string
    Default() *string
    SetDefault(val *string)
    DefaultInput() *string
    Delete() *string
    SetDelete(val *string)
    DeleteInput() *string
    // Experimental.
    Fqn() *string
    InternalValue() interface{}
    SetInternalValue(val interface{})
    // Experimental.
    TerraformAttribute() *string
    // Experimental.
    SetTerraformAttribute(val *string)
    // Experimental.
    TerraformResource() cdktf.IInterpolatingParent
    // Experimental.
    SetTerraformResource(val cdktf.IInterpolatingParent)
    Update() *string
    SetUpdate(val *string)
    UpdateInput() *string
    // Experimental.
    ComputeFqn() *string
    // Experimental.
    GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
    // Experimental.
    GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
    // Experimental.
    GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
    // Experimental.
    GetListAttribute(terraformAttribute *string) *[]*string
    // Experimental.
    GetNumberAttribute(terraformAttribute *string) *float64
    // Experimental.
    GetNumberListAttribute(terraformAttribute *string) *[]*float64
    // Experimental.
    GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
    // Experimental.
    GetStringAttribute(terraformAttribute *string) *string
    // Experimental.
    GetStringMapAttribute(terraformAttribute *string) *map[string]*string
    // Experimental.
    InterpolationAsList() cdktf.IResolvable
    // Experimental.
    InterpolationForAttribute(property *string) cdktf.IResolvable
    ResetCreate()
    ResetDefault()
    ResetDelete()
    ResetUpdate()
    // Produce the Token's value at resolution time.
    // Experimental.
    Resolve(_context cdktf.IResolveContext) interface{}
    // Return a string representation of this resolvable object.
    //
    // Returns a reversible string representation.
    // Experimental.
    ToString() *string
}
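In practice this output reference is obtained from the resource via Timeouts() rather than constructed directly; a sketch, continuing the topic example above:

timeouts := topic.Timeouts()
timeouts.SetUpdate(jsii.String("15m")) // adjust a single timeout in place
timeouts.ResetDelete()                 // remove the delete timeout again
if timeouts.CreateInput() != nil {
    fmt.Println(*timeouts.CreateInput()) // the raw configured value, e.g. "10m"
}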
func NewKafkaClusterTopicTimeoutsOutputReference ¶
func NewKafkaClusterTopicTimeoutsOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string) KafkaClusterTopicTimeoutsOutputReference