databasekafkatopic

package
v11.3.0

This package is not in the latest version of its module.
Published: Apr 24, 2024 License: MPL-2.0 Imports: 7 Imported by: 0

README

digitalocean_database_kafka_topic

Refer to the Terraform Registry for docs: digitalocean_database_kafka_topic.

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func DatabaseKafkaTopic_GenerateConfigForImport

func DatabaseKafkaTopic_GenerateConfigForImport(scope constructs.Construct, importToId *string, importFromId *string, provider cdktf.TerraformProvider) cdktf.ImportableResource

Generates CDKTF code for importing a DatabaseKafkaTopic resource upon running "cdktf plan <stack-name>".
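
As an illustration only, here is a minimal sketch of calling this from a CDKTF stack. The Go module import path, the jsii helpers, and the import ID value are assumptions made for the example, not part of this listing; passing nil as the provider falls back to the provider configured on the stack.

package main

import (
	"github.com/aws/constructs-go/constructs/v10"
	"github.com/aws/jsii-runtime-go"
	"github.com/cdktf/cdktf-provider-digitalocean-go/digitalocean/v11/databasekafkatopic"
	"github.com/hashicorp/terraform-cdk-go/cdktf"
)

func newImportStack(scope constructs.Construct, id string) cdktf.TerraformStack {
	stack := cdktf.NewTerraformStack(scope, &id)

	// Ask CDKTF to generate import configuration for an existing topic.
	// The logical ID and the import ID below are placeholders.
	databasekafkatopic.DatabaseKafkaTopic_GenerateConfigForImport(
		stack,
		jsii.String("imported_topic"),
		jsii.String("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee,example-topic"),
		nil, // nil: use the provider configured on the stack
	)
	return stack
}

func main() {
	app := cdktf.NewApp(nil)
	newImportStack(app, "import-example")
	app.Synth()
}

Running "cdktf plan <stack-name>" afterwards emits the generated configuration for the imported resource.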

func DatabaseKafkaTopic_IsConstruct

func DatabaseKafkaTopic_IsConstruct(x interface{}) *bool

Checks if `x` is a construct.

Use this method instead of `instanceof` to properly detect `Construct` instances, even when the construct library is symlinked.

Explanation: in JavaScript, multiple copies of the `constructs` library on disk are seen as independent, completely different libraries. As a consequence, the class `Construct` in each copy of the `constructs` library is seen as a different class, and an instance of one class will not test as `instanceof` the other class. `npm install` will not create installations like this, but users may manually symlink construct libraries together or use a monorepo tool: in those cases, multiple copies of the `constructs` library can be accidentally installed, and `instanceof` will behave unpredictably. It is safest to avoid `instanceof` and to use this type-testing method instead.

Returns: true if `x` is an object created from a class which extends `Construct`.
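
For instance, a hedged sketch assuming the imports from the earlier example; isTopicConstruct is a hypothetical helper, not part of this package:

func isTopicConstruct(x interface{}) bool {
	// Safe even when the constructs library is symlinked or duplicated on disk,
	// where an instanceof-style check could fail.
	return *databasekafkatopic.DatabaseKafkaTopic_IsConstruct(x)
}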

func DatabaseKafkaTopic_IsTerraformElement

func DatabaseKafkaTopic_IsTerraformElement(x interface{}) *bool

Experimental.

func DatabaseKafkaTopic_IsTerraformResource

func DatabaseKafkaTopic_IsTerraformResource(x interface{}) *bool

Experimental.

func DatabaseKafkaTopic_TfResourceType

func DatabaseKafkaTopic_TfResourceType() *string

func NewDatabaseKafkaTopicConfigAList_Override

func NewDatabaseKafkaTopicConfigAList_Override(d DatabaseKafkaTopicConfigAList, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool)

func NewDatabaseKafkaTopicConfigAOutputReference_Override

func NewDatabaseKafkaTopicConfigAOutputReference_Override(d DatabaseKafkaTopicConfigAOutputReference, terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool)

func NewDatabaseKafkaTopic_Override

func NewDatabaseKafkaTopic_Override(d DatabaseKafkaTopic, scope constructs.Construct, id *string, config *DatabaseKafkaTopicConfig)

Create a new {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic digitalocean_database_kafka_topic} Resource.

Types

type DatabaseKafkaTopic

type DatabaseKafkaTopic interface {
	cdktf.TerraformResource
	// Experimental.
	CdktfStack() cdktf.TerraformStack
	ClusterId() *string
	SetClusterId(val *string)
	ClusterIdInput() *string
	Config() DatabaseKafkaTopicConfigAList
	ConfigInput() interface{}
	// Experimental.
	Connection() interface{}
	// Experimental.
	SetConnection(val interface{})
	// Experimental.
	ConstructNodeMetadata() *map[string]interface{}
	// Experimental.
	Count() interface{}
	// Experimental.
	SetCount(val interface{})
	// Experimental.
	DependsOn() *[]*string
	// Experimental.
	SetDependsOn(val *[]*string)
	// Experimental.
	ForEach() cdktf.ITerraformIterator
	// Experimental.
	SetForEach(val cdktf.ITerraformIterator)
	// Experimental.
	Fqn() *string
	// Experimental.
	FriendlyUniqueId() *string
	Id() *string
	SetId(val *string)
	IdInput() *string
	// Experimental.
	Lifecycle() *cdktf.TerraformResourceLifecycle
	// Experimental.
	SetLifecycle(val *cdktf.TerraformResourceLifecycle)
	Name() *string
	SetName(val *string)
	NameInput() *string
	// The tree node.
	Node() constructs.Node
	PartitionCount() *float64
	SetPartitionCount(val *float64)
	PartitionCountInput() *float64
	// Experimental.
	Provider() cdktf.TerraformProvider
	// Experimental.
	SetProvider(val cdktf.TerraformProvider)
	// Experimental.
	Provisioners() *[]interface{}
	// Experimental.
	SetProvisioners(val *[]interface{})
	// Experimental.
	RawOverrides() interface{}
	ReplicationFactor() *float64
	SetReplicationFactor(val *float64)
	ReplicationFactorInput() *float64
	State() *string
	// Experimental.
	TerraformGeneratorMetadata() *cdktf.TerraformProviderGeneratorMetadata
	// Experimental.
	TerraformMetaArguments() *map[string]interface{}
	// Experimental.
	TerraformResourceType() *string
	// Adds a user-defined moveTarget string to this resource, to be used later in .moveTo(moveTarget) to resolve the location of the move.
	// Experimental.
	AddMoveTarget(moveTarget *string)
	// Experimental.
	AddOverride(path *string, value interface{})
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	HasResourceMove() interface{}
	// Experimental.
	ImportFrom(id *string, provider cdktf.TerraformProvider)
	// Experimental.
	InterpolationForAttribute(terraformAttribute *string) cdktf.IResolvable
	// Move the resource corresponding to "id" to this resource.
	//
	// Note that the resource being moved from must be marked as moved using its instance function.
	// Experimental.
	MoveFromId(id *string)
	// Moves this resource to the target resource given by moveTarget.
	// Experimental.
	MoveTo(moveTarget *string, index interface{})
	// Moves this resource to the resource corresponding to "id".
	// Experimental.
	MoveToId(id *string)
	// Overrides the auto-generated logical ID with a specific ID.
	// Experimental.
	OverrideLogicalId(newLogicalId *string)
	PutConfig(value interface{})
	ResetConfig()
	ResetId()
	// Resets a previously passed logical Id to use the auto-generated logical id again.
	// Experimental.
	ResetOverrideLogicalId()
	ResetPartitionCount()
	ResetReplicationFactor()
	SynthesizeAttributes() *map[string]interface{}
	SynthesizeHclAttributes() *map[string]interface{}
	// Experimental.
	ToHclTerraform() interface{}
	// Experimental.
	ToMetadata() interface{}
	// Returns a string representation of this construct.
	ToString() *string
	// Adds this resource to the terraform JSON output.
	// Experimental.
	ToTerraform() interface{}
}

Represents a {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic digitalocean_database_kafka_topic}.

func NewDatabaseKafkaTopic

func NewDatabaseKafkaTopic(scope constructs.Construct, id *string, config *DatabaseKafkaTopicConfig) DatabaseKafkaTopic

Create a new {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic digitalocean_database_kafka_topic} Resource.
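
A minimal sketch of creating the resource inside a stack, assuming the imports and the stack variable from the earlier import example; the cluster UUID, names, and counts are placeholders:

topic := databasekafkatopic.NewDatabaseKafkaTopic(stack, jsii.String("events_topic"),
	&databasekafkatopic.DatabaseKafkaTopicConfig{
		ClusterId:         jsii.String("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"), // UUID of an existing Kafka cluster
		Name:              jsii.String("events"),
		PartitionCount:    jsii.Number(3),
		ReplicationFactor: jsii.Number(2),
	})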

type DatabaseKafkaTopicConfig

type DatabaseKafkaTopicConfig struct {
	// Experimental.
	Connection interface{} `field:"optional" json:"connection" yaml:"connection"`
	// Experimental.
	Count interface{} `field:"optional" json:"count" yaml:"count"`
	// Experimental.
	DependsOn *[]cdktf.ITerraformDependable `field:"optional" json:"dependsOn" yaml:"dependsOn"`
	// Experimental.
	ForEach cdktf.ITerraformIterator `field:"optional" json:"forEach" yaml:"forEach"`
	// Experimental.
	Lifecycle *cdktf.TerraformResourceLifecycle `field:"optional" json:"lifecycle" yaml:"lifecycle"`
	// Experimental.
	Provider cdktf.TerraformProvider `field:"optional" json:"provider" yaml:"provider"`
	// Experimental.
	Provisioners *[]interface{} `field:"optional" json:"provisioners" yaml:"provisioners"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#cluster_id DatabaseKafkaTopic#cluster_id}.
	ClusterId *string `field:"required" json:"clusterId" yaml:"clusterId"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#name DatabaseKafkaTopic#name}.
	Name *string `field:"required" json:"name" yaml:"name"`
	// config block.
	//
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#config DatabaseKafkaTopic#config}
	Config interface{} `field:"optional" json:"config" yaml:"config"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#id DatabaseKafkaTopic#id}.
	//
	// Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
	// If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
	Id *string `field:"optional" json:"id" yaml:"id"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#partition_count DatabaseKafkaTopic#partition_count}.
	PartitionCount *float64 `field:"optional" json:"partitionCount" yaml:"partitionCount"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#replication_factor DatabaseKafkaTopic#replication_factor}.
	ReplicationFactor *float64 `field:"optional" json:"replicationFactor" yaml:"replicationFactor"`
}

type DatabaseKafkaTopicConfigA

type DatabaseKafkaTopicConfigA struct {
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#cleanup_policy DatabaseKafkaTopic#cleanup_policy}.
	CleanupPolicy *string `field:"optional" json:"cleanupPolicy" yaml:"cleanupPolicy"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#compression_type DatabaseKafkaTopic#compression_type}.
	CompressionType *string `field:"optional" json:"compressionType" yaml:"compressionType"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#delete_retention_ms DatabaseKafkaTopic#delete_retention_ms}.
	DeleteRetentionMs *string `field:"optional" json:"deleteRetentionMs" yaml:"deleteRetentionMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#file_delete_delay_ms DatabaseKafkaTopic#file_delete_delay_ms}.
	FileDeleteDelayMs *string `field:"optional" json:"fileDeleteDelayMs" yaml:"fileDeleteDelayMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#flush_messages DatabaseKafkaTopic#flush_messages}.
	FlushMessages *string `field:"optional" json:"flushMessages" yaml:"flushMessages"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#flush_ms DatabaseKafkaTopic#flush_ms}.
	FlushMs *string `field:"optional" json:"flushMs" yaml:"flushMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#index_interval_bytes DatabaseKafkaTopic#index_interval_bytes}.
	IndexIntervalBytes *string `field:"optional" json:"indexIntervalBytes" yaml:"indexIntervalBytes"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#max_compaction_lag_ms DatabaseKafkaTopic#max_compaction_lag_ms}.
	MaxCompactionLagMs *string `field:"optional" json:"maxCompactionLagMs" yaml:"maxCompactionLagMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#max_message_bytes DatabaseKafkaTopic#max_message_bytes}.
	MaxMessageBytes *string `field:"optional" json:"maxMessageBytes" yaml:"maxMessageBytes"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#message_down_conversion_enable DatabaseKafkaTopic#message_down_conversion_enable}.
	MessageDownConversionEnable interface{} `field:"optional" json:"messageDownConversionEnable" yaml:"messageDownConversionEnable"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#message_format_version DatabaseKafkaTopic#message_format_version}.
	MessageFormatVersion *string `field:"optional" json:"messageFormatVersion" yaml:"messageFormatVersion"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#message_timestamp_difference_max_ms DatabaseKafkaTopic#message_timestamp_difference_max_ms}.
	MessageTimestampDifferenceMaxMs *string `field:"optional" json:"messageTimestampDifferenceMaxMs" yaml:"messageTimestampDifferenceMaxMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#message_timestamp_type DatabaseKafkaTopic#message_timestamp_type}.
	MessageTimestampType *string `field:"optional" json:"messageTimestampType" yaml:"messageTimestampType"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#min_cleanable_dirty_ratio DatabaseKafkaTopic#min_cleanable_dirty_ratio}.
	MinCleanableDirtyRatio *float64 `field:"optional" json:"minCleanableDirtyRatio" yaml:"minCleanableDirtyRatio"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#min_compaction_lag_ms DatabaseKafkaTopic#min_compaction_lag_ms}.
	MinCompactionLagMs *string `field:"optional" json:"minCompactionLagMs" yaml:"minCompactionLagMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#min_insync_replicas DatabaseKafkaTopic#min_insync_replicas}.
	MinInsyncReplicas *float64 `field:"optional" json:"minInsyncReplicas" yaml:"minInsyncReplicas"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#preallocate DatabaseKafkaTopic#preallocate}.
	Preallocate interface{} `field:"optional" json:"preallocate" yaml:"preallocate"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#retention_bytes DatabaseKafkaTopic#retention_bytes}.
	RetentionBytes *string `field:"optional" json:"retentionBytes" yaml:"retentionBytes"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#retention_ms DatabaseKafkaTopic#retention_ms}.
	RetentionMs *string `field:"optional" json:"retentionMs" yaml:"retentionMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#segment_bytes DatabaseKafkaTopic#segment_bytes}.
	SegmentBytes *string `field:"optional" json:"segmentBytes" yaml:"segmentBytes"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#segment_index_bytes DatabaseKafkaTopic#segment_index_bytes}.
	SegmentIndexBytes *string `field:"optional" json:"segmentIndexBytes" yaml:"segmentIndexBytes"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#segment_jitter_ms DatabaseKafkaTopic#segment_jitter_ms}.
	SegmentJitterMs *string `field:"optional" json:"segmentJitterMs" yaml:"segmentJitterMs"`
	// Docs at Terraform Registry: {@link https://registry.terraform.io/providers/digitalocean/digitalocean/2.37.0/docs/resources/database_kafka_topic#segment_ms DatabaseKafkaTopic#segment_ms}.
	SegmentMs *string `field:"optional" json:"segmentMs" yaml:"segmentMs"`
}
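
As a hedged illustration of the nested config block, the sketch below attaches a single DatabaseKafkaTopicConfigA value to the topic created earlier via PutConfig. The attribute values are illustrative only, and the one-element slice reflects the block-list shape assumed here rather than anything stated in this listing:

topic.PutConfig([]*databasekafkatopic.DatabaseKafkaTopicConfigA{
	{
		CleanupPolicy: jsii.String("compact"),
		RetentionMs:   jsii.String("604800000"), // 7 days, in milliseconds, expressed as a string
		SegmentBytes:  jsii.String("209715200"),
	},
})

The same struct can also be supplied up front through the Config field of DatabaseKafkaTopicConfig.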

type DatabaseKafkaTopicConfigAList

type DatabaseKafkaTopicConfigAList interface {
	cdktf.ComplexList
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	// Experimental.
	Fqn() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	// The attribute on the parent resource this class is referencing.
	TerraformAttribute() *string
	SetTerraformAttribute(val *string)
	// The parent resource.
	TerraformResource() cdktf.IInterpolatingParent
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Whether the list is wrapping a set (tolist() will be added so an item can be accessed via an index).
	WrapsSet() *bool
	SetWrapsSet(val *bool)
	// Creates an iterator for this complex list.
	//
	// The list will be converted into a map with the mapKeyAttributeName as the key.
	// Experimental.
	AllWithMapKey(mapKeyAttributeName *string) cdktf.DynamicListTerraformIterator
	// Experimental.
	ComputeFqn() *string
	Get(index *float64) DatabaseKafkaTopicConfigAOutputReference
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDatabaseKafkaTopicConfigAList

func NewDatabaseKafkaTopicConfigAList(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DatabaseKafkaTopicConfigAList

type DatabaseKafkaTopicConfigAOutputReference

type DatabaseKafkaTopicConfigAOutputReference interface {
	cdktf.ComplexObject
	CleanupPolicy() *string
	SetCleanupPolicy(val *string)
	CleanupPolicyInput() *string
	// The index of the complex object in a list.
	// Experimental.
	ComplexObjectIndex() interface{}
	// Experimental.
	SetComplexObjectIndex(val interface{})
	// Set to true if this item is from inside a set and needs tolist() for accessing it; set to "0" for single list items.
	// Experimental.
	ComplexObjectIsFromSet() *bool
	// Experimental.
	SetComplexObjectIsFromSet(val *bool)
	CompressionType() *string
	SetCompressionType(val *string)
	CompressionTypeInput() *string
	// The creation stack of this resolvable which will be appended to errors thrown during resolution.
	//
	// If this returns an empty array the stack will not be attached.
	// Experimental.
	CreationStack() *[]*string
	DeleteRetentionMs() *string
	SetDeleteRetentionMs(val *string)
	DeleteRetentionMsInput() *string
	FileDeleteDelayMs() *string
	SetFileDeleteDelayMs(val *string)
	FileDeleteDelayMsInput() *string
	FlushMessages() *string
	SetFlushMessages(val *string)
	FlushMessagesInput() *string
	FlushMs() *string
	SetFlushMs(val *string)
	FlushMsInput() *string
	// Experimental.
	Fqn() *string
	IndexIntervalBytes() *string
	SetIndexIntervalBytes(val *string)
	IndexIntervalBytesInput() *string
	InternalValue() interface{}
	SetInternalValue(val interface{})
	MaxCompactionLagMs() *string
	SetMaxCompactionLagMs(val *string)
	MaxCompactionLagMsInput() *string
	MaxMessageBytes() *string
	SetMaxMessageBytes(val *string)
	MaxMessageBytesInput() *string
	MessageDownConversionEnable() interface{}
	SetMessageDownConversionEnable(val interface{})
	MessageDownConversionEnableInput() interface{}
	MessageFormatVersion() *string
	SetMessageFormatVersion(val *string)
	MessageFormatVersionInput() *string
	MessageTimestampDifferenceMaxMs() *string
	SetMessageTimestampDifferenceMaxMs(val *string)
	MessageTimestampDifferenceMaxMsInput() *string
	MessageTimestampType() *string
	SetMessageTimestampType(val *string)
	MessageTimestampTypeInput() *string
	MinCleanableDirtyRatio() *float64
	SetMinCleanableDirtyRatio(val *float64)
	MinCleanableDirtyRatioInput() *float64
	MinCompactionLagMs() *string
	SetMinCompactionLagMs(val *string)
	MinCompactionLagMsInput() *string
	MinInsyncReplicas() *float64
	SetMinInsyncReplicas(val *float64)
	MinInsyncReplicasInput() *float64
	Preallocate() interface{}
	SetPreallocate(val interface{})
	PreallocateInput() interface{}
	RetentionBytes() *string
	SetRetentionBytes(val *string)
	RetentionBytesInput() *string
	RetentionMs() *string
	SetRetentionMs(val *string)
	RetentionMsInput() *string
	SegmentBytes() *string
	SetSegmentBytes(val *string)
	SegmentBytesInput() *string
	SegmentIndexBytes() *string
	SetSegmentIndexBytes(val *string)
	SegmentIndexBytesInput() *string
	SegmentJitterMs() *string
	SetSegmentJitterMs(val *string)
	SegmentJitterMsInput() *string
	SegmentMs() *string
	SetSegmentMs(val *string)
	SegmentMsInput() *string
	// Experimental.
	TerraformAttribute() *string
	// Experimental.
	SetTerraformAttribute(val *string)
	// Experimental.
	TerraformResource() cdktf.IInterpolatingParent
	// Experimental.
	SetTerraformResource(val cdktf.IInterpolatingParent)
	// Experimental.
	ComputeFqn() *string
	// Experimental.
	GetAnyMapAttribute(terraformAttribute *string) *map[string]interface{}
	// Experimental.
	GetBooleanAttribute(terraformAttribute *string) cdktf.IResolvable
	// Experimental.
	GetBooleanMapAttribute(terraformAttribute *string) *map[string]*bool
	// Experimental.
	GetListAttribute(terraformAttribute *string) *[]*string
	// Experimental.
	GetNumberAttribute(terraformAttribute *string) *float64
	// Experimental.
	GetNumberListAttribute(terraformAttribute *string) *[]*float64
	// Experimental.
	GetNumberMapAttribute(terraformAttribute *string) *map[string]*float64
	// Experimental.
	GetStringAttribute(terraformAttribute *string) *string
	// Experimental.
	GetStringMapAttribute(terraformAttribute *string) *map[string]*string
	// Experimental.
	InterpolationAsList() cdktf.IResolvable
	// Experimental.
	InterpolationForAttribute(property *string) cdktf.IResolvable
	ResetCleanupPolicy()
	ResetCompressionType()
	ResetDeleteRetentionMs()
	ResetFileDeleteDelayMs()
	ResetFlushMessages()
	ResetFlushMs()
	ResetIndexIntervalBytes()
	ResetMaxCompactionLagMs()
	ResetMaxMessageBytes()
	ResetMessageDownConversionEnable()
	ResetMessageFormatVersion()
	ResetMessageTimestampDifferenceMaxMs()
	ResetMessageTimestampType()
	ResetMinCleanableDirtyRatio()
	ResetMinCompactionLagMs()
	ResetMinInsyncReplicas()
	ResetPreallocate()
	ResetRetentionBytes()
	ResetRetentionMs()
	ResetSegmentBytes()
	ResetSegmentIndexBytes()
	ResetSegmentJitterMs()
	ResetSegmentMs()
	// Produce the Token's value at resolution time.
	// Experimental.
	Resolve(_context cdktf.IResolveContext) interface{}
	// Return a string representation of this resolvable object.
	//
	// Returns a reversible string representation.
	// Experimental.
	ToString() *string
}

func NewDatabaseKafkaTopicConfigAOutputReference

func NewDatabaseKafkaTopicConfigAOutputReference(terraformResource cdktf.IInterpolatingParent, terraformAttribute *string, complexObjectIndex *float64, complexObjectIsFromSet *bool) DatabaseKafkaTopicConfigAOutputReference
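
Putting the list and output-reference types together, a sketch of reading a nested config attribute back out; topic and stack are assumed to come from the earlier sketches, and the output name is a placeholder:

// Get(0) returns a DatabaseKafkaTopicConfigAOutputReference for the first config block.
first := topic.Config().Get(jsii.Number(0))

// The returned value is a token that resolves at synth/apply time.
cdktf.NewTerraformOutput(stack, jsii.String("topic_retention_ms"), &cdktf.TerraformOutputConfig{
	Value: first.RetentionMs(),
})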

Directories

This section is empty.
