destinations

package
v1.15.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 8, 2024 License: MIT Imports: 17 Imported by: 0

Documentation

Index

Constants

View Source
const AZURE_BLOB_STORAGE_DESTINATION_NODE_NAME = "azure-blob-storage"
View Source
const AZURE_BLOB_STORAGE_DESTINATION_TYPE_NAME = "azure_blob_storage"
View Source
const BLACKHOLE_DESTINATION_NODE_NAME = "blackhole"
View Source
const BLACKHOLE_DESTINATION_TYPE_NAME = BLACKHOLE_DESTINATION_NODE_NAME
View Source
const DATADOG_LOGS_DESTINATION_NODE_NAME = "datadog-logs"
View Source
const DATADOG_LOGS_DESTINATION_TYPE_NAME = "datadog_logs"
View Source
const DATADOG_METRICS_DESTINATION_NODE_NAME = "datadog-metrics"
View Source
const DATADOG_METRICS_DESTINATION_TYPE_NAME = "datadog_metrics"
View Source
const ELASTICSEARCH_DESTINATION_NODE_NAME = ELASTICSEARCH_DESTINATION_TYPE_NAME
View Source
const ELASTICSEARCH_DESTINATION_TYPE_NAME = "elasticsearch"
View Source
const GCP_CLOUD_STORAGE_DESTINATION_NODE_NAME = "gcp-cloud-storage"
View Source
const GCP_CLOUD_STORAGE_DESTINATION_TYPE_NAME = "gcp_cloud_storage"
View Source
const HONEYCOMB_LOGS_DESTINATION_NODE_NAME = "honeycomb-logs"
View Source
const HONEYCOMB_LOGS_DESTINATION_TYPE_NAME = "honeycomb_logs"
View Source
const HTTP_DESTINATION_NODE_NAME = HTTP_DESTINATION_TYPE_NAME
View Source
const HTTP_DESTINATION_TYPE_NAME = "http"
View Source
const KAFKA_DESTINATION_NODE_NAME = KAFKA_DESTINATION_TYPE_NAME
View Source
const KAFKA_DESTINATION_TYPE_NAME = "kafka"
View Source
const LOKI_DESTINATION_NODE_NAME = "loki"
View Source
const LOKI_DESTINATION_TYPE_NAME = "loki"
View Source
const MEZMO_DESTINATION_NODE_NAME = "mezmo"
View Source
const MEZMO_DESTINATION_TYPE_NAME = "logs"
View Source
const NEWRELIC_DESTINATION_NODE_NAME = "new-relic"
View Source
const NEWRELIC_DESTINATION_TYPE_NAME = "new_relic"
View Source
const PROMETHEUS_REMOTE_WRITE_DESTINATION_NODE_NAME = "prometheus-remote-write"
View Source
const PROMETHEUS_REMOTE_WRITE_DESTINATION_TYPE_NAME = "prometheus_remote_write"
View Source
const S3_DESTINATION_NODE_NAME = S3_DESTINATION_TYPE_NAME
View Source
const S3_DESTINATION_TYPE_NAME = "s3"
View Source
const SPLUNK_HEC_LOGS_DESTINATION_NODE_NAME = "splunk-hec-logs"
View Source
const SPLUNK_HEC_LOGS_DESTINATION_TYPE_NAME = "splunk_hec_logs"

Variables

View Source
var AzureBlobStorageResourceSchema = schema.Schema{
	Description: "Publishes events to Azure Blob Storage",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"encoding": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The encoding to apply to the data",
			Default:     stringdefault.StaticString("text"),
			Validators:  []validator.String{stringvalidator.OneOf("json", "text")},
		},
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The compression strategy used on the encoded data prior to sending",
			Default:     stringdefault.StaticString("none"),
			Validators:  []validator.String{stringvalidator.OneOf("gzip", "none")},
		},
		"container_name": schema.StringAttribute{
			Required:    true,
			Description: "The name of the container for blob storage",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"connection_string": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "A connection string for the account that contains an access key",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"prefix": schema.StringAttribute{
			Optional:    true,
			Description: "A prefix to be applied to all object keys",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
	}, []string{"batch_timeout_secs"}),
}
View Source
var BlackholeDestinationResourceSchema = schema.Schema{
	Description: "Represents a blackhole destination.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, nil),
}
View Source
// DatadogLogsDestinationResourceSchema defines the Terraform resource schema
// for the Datadog logs destination: API key, target Datadog site (region),
// and payload compression.
var DatadogLogsDestinationResourceSchema = schema.Schema{
	Description: "Publishes log events to Datadog",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"api_key": schema.StringAttribute{
			Sensitive:   true,
			Required:    true,
			Description: "Datadog logs application API key.",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"site": schema.StringAttribute{
			Required:    true,
			Description: "The Datadog site (region) to send logs to.",
			Validators:  []validator.String{stringvalidator.OneOf("us1", "us3", "us5", "eu1")},
		},
		"compression": schema.StringAttribute{
			Required: true,
			// Fixed doubled period in the user-facing description ("sending..").
			Description: "The compression strategy used on the encoded data prior to sending.",
			Validators:  []validator.String{stringvalidator.OneOf("none", "gzip")},
		},
	}, nil),
}
View Source
var DatadogMetricsDestinationResourceSchema = schema.Schema{
	Description: "Publishes metric events to Datadog",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"api_key": schema.StringAttribute{
			Sensitive:   true,
			Required:    true,
			Description: "Datadog metrics application API key.",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"site": schema.StringAttribute{
			Required:    true,
			Description: "The Datadog site (region) to send metrics to.",
			Validators:  []validator.String{stringvalidator.OneOf("us1", "us3", "us5", "eu1")},
		},
	}, nil),
}
View Source
var ElasticSearchDestinationResourceSchema = schema.Schema{
	Description: "Represents an ElasticSearch destination.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The compression strategy used on the encoded data prior to sending",
			Default:     stringdefault.StaticString("none"),
			Validators:  []validator.String{stringvalidator.OneOf("gzip", "none")},
		},
		"auth": schema.SingleNestedAttribute{
			Required:    true,
			Description: "Configures ES authentication",
			Attributes: map[string]schema.Attribute{
				"strategy": schema.StringAttribute{
					Required:    true,
					Description: "The ES authentication strategy to use",
					Validators:  []validator.String{stringvalidator.OneOf("basic", "aws")},
				},
				"user": schema.StringAttribute{
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The username for basic authentication",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"password": schema.StringAttribute{
					Sensitive:   true,
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The password to use for basic authentication",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"access_key_id": schema.StringAttribute{
					Sensitive:   true,
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The AWS access key id",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"secret_access_key": schema.StringAttribute{
					Sensitive:   true,
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The AWS secret access key",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"region": schema.StringAttribute{
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The AWS Region",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
		"endpoints": schema.ListAttribute{
			ElementType: StringType,
			Required:    true,
			Description: "An array of ElasticSearch endpoints",
			Validators: []validator.List{
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
			},
		},
		"pipeline": schema.StringAttribute{
			Optional:    true,
			Description: "Name of an ES ingest pipeline to include",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"index": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "Index to use when writing ES events (default = mezmo-%Y.%m.%d)",
			Default:     stringdefault.StaticString("mezmo-%Y.%m.%d"),
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
	}, nil),
}
View Source
var GcpCloudStorageResourceSchema = schema.Schema{
	Description: "Publish log events to GCP Cloud Storage",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"encoding": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "Dictates how the data will be serialized before storing.",
			Default:     stringdefault.StaticString("text"),
			Validators: []validator.String{
				stringvalidator.OneOf("json", "text"),
			},
		},
		"bucket": schema.StringAttribute{
			Required:    true,
			Description: "The name of the bucket in GCP where the data will be stored.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The compression strategy used on the encoded data prior to sending.",
			Default:     stringdefault.StaticString("none"),
			Validators: []validator.String{
				stringvalidator.OneOf("gzip", "none"),
			},
		},
		"bucket_prefix": schema.StringAttribute{
			Optional:    true,
			Computed:    false,
			Description: "The prefix applied to the bucket name, giving the appearance of having directories.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"auth": schema.SingleNestedAttribute{
			Required:    true,
			Description: "Configure GCP Cloud Storage authentication",
			Attributes: map[string]schema.Attribute{
				"type": schema.StringAttribute{
					Required:    true,
					Description: "The type of authentication to use.",
					Validators: []validator.String{
						stringvalidator.OneOf("api_key", "credentials_json"),
					},
				},
				"value": schema.StringAttribute{
					Required:    true,
					Sensitive:   true,
					Description: "Authentication secret value.",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
					},
				},
			},
		},
	}, []string{"batch_timeout_secs"}),
}
View Source
var HoneycombLogsResourceSchema = schema.Schema{
	Description: "Send log data to Honeycomb",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"api_key": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "Honeycomb API key",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"dataset": schema.StringAttribute{
			Required:    true,
			Description: "The name of the targeted dataset",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
	}, nil),
}
View Source
// HttpDestinationResourceSchema defines the Terraform resource schema for a
// generic HTTP destination: target URI, payload encoding/compression, an
// optional auth block ("basic" user/password or "bearer" token), custom
// headers, batch size and write-timeout limits, HTTP method, optional
// payload prefix/suffix framing, ALPN protocol list, proxy settings, and
// rate limiting.
var HttpDestinationResourceSchema = schema.Schema{
	Description: "Represents an HTTP destination.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"uri": schema.StringAttribute{
			Required: true,
			Description: "The full URI to make HTTP requests to. This should include the " +
				"protocol and host, but can also include the port, path, and any other valid " +
				"part of a URI.",
			Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"encoding": schema.StringAttribute{
			Optional:    true,
			Description: "The encoding to apply to the data",
			Computed:    true,
			Default:     stringdefault.StaticString("text"),
			Validators:  []validator.String{stringvalidator.OneOf("json", "ndjson", "text")},
		},
		"compression": schema.StringAttribute{
			Optional:    true,
			Description: "The compression strategy used on the encoded data prior to sending",
			Computed:    true,
			Default:     stringdefault.StaticString("none"),
			Validators:  []validator.String{stringvalidator.OneOf("gzip", "none")},
		},
		// Credential fields default to "" so only the fields relevant to the
		// chosen strategy need to be configured.
		"auth": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "Configures HTTP authentication",
			Attributes: map[string]schema.Attribute{
				"strategy": schema.StringAttribute{
					Required:   true,
					Validators: []validator.String{stringvalidator.OneOf("basic", "bearer")},
				},
				"user": schema.StringAttribute{
					Optional:   true,
					Computed:   true,
					Default:    stringdefault.StaticString(""),
					Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"password": schema.StringAttribute{
					Sensitive:  true,
					Optional:   true,
					Computed:   true,
					Default:    stringdefault.StaticString(""),
					Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"token": schema.StringAttribute{
					Sensitive:  true,
					Optional:   true,
					Computed:   true,
					Default:    stringdefault.StaticString(""),
					Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
		"headers": schema.MapAttribute{
			Optional:    true,
			Description: "A key/value object describing a header name and its value",
			ElementType: StringType{},
			Validators: []validator.Map{
				mapvalidator.All(
					mapvalidator.KeysAre(stringvalidator.LengthAtLeast(1)),
					mapvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
				),
			},
		},
		// Capped at 2 MiB uncompressed.
		"max_bytes": schema.Int64Attribute{
			Optional:    true,
			Description: "The maximum number of uncompressed bytes when batching data to the destination",
			Validators: []validator.Int64{
				int64validator.AtLeast(1),
				int64validator.AtMost(1024 * 1024 * 2),
			},
		},
		"timeout_secs": schema.Int64Attribute{
			Optional:    true,
			Description: "The number of seconds before a destination write timeout.",
			Validators: []validator.Int64{
				int64validator.AtLeast(5),
			},
		},
		"method": schema.StringAttribute{
			Optional:    true,
			Description: "The HTTP method to use for the destination.",
			Validators:  []validator.String{stringvalidator.OneOf("post", "put", "patch", "delete", "get", "head", "options", "trace")},
		},
		"payload_prefix": schema.StringAttribute{
			Optional:    true,
			Description: "Add a prefix to the payload. Only used for serialized JSON chunks. This option also requires 'Payload Suffix' to form a valid JSON string.",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"payload_suffix": schema.StringAttribute{
			Optional:    true,
			Description: "Used in combination with 'Payload Prefix' to form valid JSON from the payload.",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"tls_protocols": schema.ListAttribute{
			ElementType: types.StringType,
			Optional:    true,
			Description: "A list of ALPN protocols to use during TLS negotiation. They are attempted in the order they appear.",
			Validators: []validator.List{
				listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
			},
		},
		"proxy": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "Proxy Settings",
			Attributes: map[string]schema.Attribute{
				"enabled": schema.BoolAttribute{
					Optional:    true,
					Computed:    true,
					Default:     booldefault.StaticBool(false),
					Description: "Turns Proxying on/off.",
				},
				"endpoint": schema.StringAttribute{
					Optional:    true,
					Description: "HTTP or HTTPS Endpoint to use for traffic.",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"hosts_bypass_proxy": schema.ListAttribute{
					ElementType: types.StringType,
					Required:    true,
					Description: "A list of hosts to bypass proxying. Can be specified as a " +
						"domain name, IP address, or CIDR block. Wildcards are supported as " +
						"a dot (.) in domain names, or as a star (*) to match all hosts.",
					Validators: []validator.List{
						listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
					},
				},
			},
		},
		"rate_limiting": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "Settings for controlling rate limiting to the destination.",
			Attributes: map[string]schema.Attribute{
				"request_limit": schema.Int64Attribute{
					Optional: true,
					// Fixed truncated user-facing description (previously
					// ended "within the specified.").
					Description: "The max number of requests allowed within the specified duration.",
					Validators: []validator.Int64{
						int64validator.AtLeast(1),
					},
				},
				"duration_secs": schema.Int64Attribute{
					Optional: true,
					// Fixed unbalanced quote in the user-facing description
					// (previously "'Request Limit.").
					Description: "The window of time used to apply 'Request Limit'.",
					Validators: []validator.Int64{
						int64validator.AtLeast(1),
					},
				},
			},
		},
	}, nil),
}
View Source
var KafkaDestinationResourceSchema = schema.Schema{
	Description: "Represents a Kafka destination.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"encoding": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("text"),
			Description: "The encoding to apply to the data.",
			Validators: []validator.String{
				stringvalidator.OneOf("json", "text"),
			},
		},
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("none"),
			Description: "The compression strategy used on the encoded data prior to sending.",
			Validators: []validator.String{
				stringvalidator.OneOf("gzip", "lz4", "snappy", "zstd", "none"),
			},
		},
		"event_key_field": schema.StringAttribute{
			Optional:    true,
			Description: "The field in the log whose value is used as Kafka's event key.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"brokers": schema.ListNestedAttribute{
			Required:    true,
			Description: "The Kafka brokers to connect to.",
			NestedObject: schema.NestedAttributeObject{
				Attributes: map[string]schema.Attribute{
					"host": schema.StringAttribute{
						Required:    true,
						Description: "The host of the Kafka broker.",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(255),
						},
					},
					"port": schema.Int64Attribute{
						Required:    true,
						Description: "The port of the Kafka broker.",
						Validators: []validator.Int64{
							int64validator.Between(1, 65535),
						},
					},
				},
			},
			Validators: []validator.List{
				listvalidator.UniqueValues(),
				listvalidator.SizeAtLeast(1),
			},
		},
		"topic": schema.StringAttribute{
			Required:    true,
			Description: "The name of the topic to publish to.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"tls_enabled": schema.BoolAttribute{
			Optional:    true,
			Computed:    true,
			Default:     booldefault.StaticBool(true),
			Description: "Whether to use TLS when connecting to Kafka.",
		},
		"sasl": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "The SASL configuration to use when connecting to Kafka.",
			Attributes: map[string]schema.Attribute{
				"mechanism": schema.StringAttribute{
					Optional:    true,
					Computed:    true,
					Description: "The SASL mechanism to use when connecting to Kafka.",
					Validators: []validator.String{
						stringvalidator.OneOf("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"),
					},
				},
				"username": schema.StringAttribute{
					Required:    true,
					Description: "The SASL username to use when connecting to Kafka.",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
					},
				},
				"password": schema.StringAttribute{
					Required:    true,
					Sensitive:   true,
					Description: "The SASL password to use when connecting to Kafka.",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
					},
				},
			},
		},
	}, nil),
}
View Source
var LokiDestinationResourceSchema = schema.Schema{
	Description: "Publish log events to Loki",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"auth": schema.SingleNestedAttribute{
			Required:    true,
			Description: "The authentication strategy to use (only basic supported)",
			Attributes: map[string]schema.Attribute{
				"strategy": schema.StringAttribute{
					Required:    true,
					Description: "The authentication strategy to use (only basic supported)",
					Validators:  []validator.String{stringvalidator.OneOf("basic")},
				},
				"user": schema.StringAttribute{
					Required:    true,
					Description: "The basic authentication user",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"password": schema.StringAttribute{
					Sensitive:   true,
					Required:    true,
					Description: "The basic authentication password",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
		"endpoint": schema.StringAttribute{
			Required:    true,
			Description: "The Loki base URL",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"encoding": schema.StringAttribute{
			Required:    true,
			Description: "Configures how event are encoded",
			Validators:  []validator.String{stringvalidator.OneOf("json", "text")},
		},
		"path": schema.StringAttribute{
			Optional:    true,
			Description: "The path appended to the Loki base URL, (defaults to /loki/api/v1/push)",
			Computed:    true,
			Default:     stringdefault.StaticString("/loki/api/v1/push"),
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"labels": schema.MapAttribute{
			Required:    true,
			ElementType: StringType,
			Description: "Key/Value pair used to describe Loki data",
			Validators: []validator.Map{
				mapvalidator.All(
					mapvalidator.KeysAre(stringvalidator.LengthAtLeast(1)),
					mapvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
				),
			},
		},
	}, nil),
}
View Source
var MezmoDestinationResourceSchema = schema.Schema{
	Description: "Represents a Mezmo destination.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"host": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("logs.logdna.com"),
			Description: "The host for your Log Analysis environment",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"ingestion_key": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "Ingestion key",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"query": schema.SingleNestedAttribute{
			Optional:    true,
			Computed:    true,
			Description: "Query Parameters",
			Attributes: map[string]schema.Attribute{
				"hostname": schema.StringAttribute{
					Optional:    true,
					Computed:    true,
					Description: "Hostname string or template to attach to logs",
					Default:     stringdefault.StaticString("mezmo"),
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"tags": schema.ListAttribute{
					ElementType: StringType,
					Optional:    true,
					Description: "List of tag strings or templates to attach to logs",
					Validators: []validator.List{
						listvalidator.ValueStringsAre(stringvalidator.LengthAtLeast(1)),
					},
				},
				"ip": schema.StringAttribute{
					Optional:    true,
					Description: "IP address template to attach to logs",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
				"mac": schema.StringAttribute{
					Optional:    true,
					Description: "MAC address template to attach to logs",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
			},
		},
		"log_construction_scheme": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("explicit"),
			Description: "How to construct the log message",
			Validators: []validator.String{
				stringvalidator.OneOf(MapKeys(log_construction_schemes)...),
			},
		},
		"explicit_scheme_options": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "Log construction options for the explicit scheme",
			Attributes: map[string]schema.Attribute{
				"line": schema.StringAttribute{
					Optional: true,
					Description: "Template or field reference to use as the log line. " +
						"Field reference can point to an object.",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(1024),
					},
				},
				"meta_field": schema.StringAttribute{
					Optional:    true,
					Description: "Field containing the meta object for the log",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
				"app": schema.StringAttribute{
					Optional:    true,
					Description: "App name template to attach to logs",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
				"file": schema.StringAttribute{
					Optional:    true,
					Description: "File name template to attach to logs",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
				"timestamp_field": schema.StringAttribute{
					Optional:    true,
					Description: "Field containing the timestamp for the log, for example ._ts",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
				"env": schema.StringAttribute{
					Optional:    true,
					Description: "Environment name template to attach to logs",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
						stringvalidator.LengthAtMost(512),
					},
				},
			},
		},
	}, nil),
}
View Source
var NewRelicDestinationResourceSchema = schema.Schema{
	Description: "Represents a NewRelic destination.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"api": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("logs"),
			Description: "New Relic API endpoint type",
			Validators:  []validator.String{stringvalidator.OneOf("logs", "metrics")},
		},
		"account_id": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "New Relic Account ID",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"license_key": schema.StringAttribute{
			Required:    true,
			Sensitive:   true,
			Description: "New Relic License Key",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
	}, nil),
}
View Source
var PrometheusRemoteWriteDestinationResourceSchema = schema.Schema{
	Description: "Represents Prometheus remote-write destination that publishes metrics to a " +
		"Prometheus endpoint",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"endpoint": schema.StringAttribute{
			Required: true,
			Description: "The full URI to make HTTP requests to. This should include the " +
				"protocol and host, but can also include the port, path, and any other valid " +
				"part of a URI. Example: http://example.org:8080/api/v1/push",
			Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"auth": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "Configures authentication",
			Attributes: map[string]schema.Attribute{
				"strategy": schema.StringAttribute{
					Required:    true,
					Description: "The authentication strategy to use",
					Validators:  []validator.String{stringvalidator.OneOf("basic", "bearer")},
				},
				"user": schema.StringAttribute{
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The username for basic authentication",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"password": schema.StringAttribute{
					Sensitive:   true,
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The password to use for basic authentication",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"token": schema.StringAttribute{
					Sensitive:   true,
					Optional:    true,
					Computed:    true,
					Default:     stringdefault.StaticString(""),
					Description: "The token to use for bearer auth strategy",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
	}, nil),
}
View Source
// S3DestinationResourceSchema defines the Terraform resource schema for the
// AWS S3 destination: AWS credentials, region, bucket, an object key prefix
// (default "/"), and payload encoding/compression. Base attributes are
// extended with "batch_timeout_secs".
var S3DestinationResourceSchema = schema.Schema{
	Description: "Publishes events as objects in AWS S3",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"auth": schema.SingleNestedAttribute{
			Required:    true,
			Description: "Configures AWS authentication",
			Attributes: map[string]schema.Attribute{
				"access_key_id": schema.StringAttribute{
					Required:    true,
					Description: "The AWS access key id",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"secret_access_key": schema.StringAttribute{
					Required:    true,
					Sensitive:   true,
					Description: "The AWS secret access key",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
		"region": schema.StringAttribute{
			Required:    true,
			Description: "The name of the AWS region",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"bucket": schema.StringAttribute{
			Required:    true,
			Description: "The S3 bucket name. Do not include a leading s3:// or a trailing /",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"prefix": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "A prefix to apply to all object key names.",
			Default:     stringdefault.StaticString("/"),
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"encoding": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The encoding to apply to the data",
			Default:     stringdefault.StaticString("text"),
			Validators:  []validator.String{stringvalidator.OneOf("json", "ndjson", "text")},
		},
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("none"),
			Description: "The compression format of the S3 objects",
			// Pass values variadically, matching every other schema in this
			// file (previously built a throwaway []string and spread it).
			Validators: []validator.String{stringvalidator.OneOf("gzip", "none")},
		},
	}, []string{"batch_timeout_secs"}),
}
View Source
// SplunkHecLogsDestinationResourceSchema defines the Terraform resource schema
// for the Splunk HTTP Event Collector (HEC) logs destination. Common
// attributes come from ExtendBaseAttributes (no addon attributes here).
// source, source_type and index share the splunkValueTypeAttributes shape.
var SplunkHecLogsDestinationResourceSchema = schema.Schema{
	Description: "Publishes log events to a Splunk HTTP Event Collector",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The compression strategy used on the encoded data prior to sending",
			Default:     stringdefault.StaticString("none"),
			Validators:  []validator.String{stringvalidator.OneOf("gzip", "none")},
		},
		"endpoint": schema.StringAttribute{
			Required: true,
			Description: "The base URL for the Splunk instance. The collector path, such as " +
				"`/services/collector/events`, will be automatically inferred from the " +
				"destination's configuration.",
			Validators: []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"token": schema.StringAttribute{
			Required:    true,
			Sensitive:   true, // redacted from Terraform output
			Description: "The default token to authenticate to Splunk HEC",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"tls_verify_certificate": schema.BoolAttribute{
			Optional:    true,
			Computed:    true,
			Description: "Verify TLS Certificate",
			Default:     booldefault.StaticBool(true),
		},
		"host_field": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The field that contains the hostname to include in the event",
			Default:     stringdefault.StaticString("metadata.host"),
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"timestamp_field": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Description: "The field that contains the timestamp to include in the event",
			Default:     stringdefault.StaticString("metadata.time"),
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"source": schema.SingleNestedAttribute{
			Optional: true,
			Description: "The source of events sent to this destination. This is typically the filename " +
				"the logs originated from. Use the field path \"metadata.source\" to use the" +
				" upstream source value from a HEC log source",
			Attributes: splunkValueTypeAttributes,
		},
		"source_type": schema.SingleNestedAttribute{
			Optional: true,
			Description: "The sourcetype of events sent to this destination. Use the field path" +
				" \"metadata.sourcetype\" to use the upstream sourcetype value from a HEC" +
				" log source",
			Attributes: splunkValueTypeAttributes,
		},
		"index": schema.SingleNestedAttribute{
			Optional: true,
			// The concatenation previously produced a double space before
			// "metadata.index"; fixed to single-space the description.
			Description: "The name of the index to send events to. Use the field path " +
				"\"metadata.index\" to use the upstream index value from a HEC log source",
			Attributes: splunkValueTypeAttributes,
		},
	}, nil),
}

Functions

func AzureBlobStorageFromModel

func AzureBlobStorageFromModel(plan *AzureBlobStorageDestinationModel, previousState *AzureBlobStorageDestinationModel) (*Destination, diag.Diagnostics)

func AzureBlobStorageToModel

func AzureBlobStorageToModel(plan *AzureBlobStorageDestinationModel, component *Destination)

func BlackholeDestinationFromModel

func BlackholeDestinationFromModel(plan *BlackholeDestinationModel, previousState *BlackholeDestinationModel) (*Destination, diag.Diagnostics)

func BlackholeDestinationToModel

func BlackholeDestinationToModel(plan *BlackholeDestinationModel, component *Destination)

func DatadogLogsDestinationToModel

func DatadogLogsDestinationToModel(plan *DatadogLogsDestinationModel, component *Destination)

func DatadogLogsFromModel

func DatadogLogsFromModel(plan *DatadogLogsDestinationModel, previousState *DatadogLogsDestinationModel) (*Destination, diag.Diagnostics)

func DatadogMetricsDestinationToModel

func DatadogMetricsDestinationToModel(plan *DatadogMetricsDestinationModel, component *Destination)

func DatadogMetricsFromModel

func DatadogMetricsFromModel(plan *DatadogMetricsDestinationModel, previousState *DatadogMetricsDestinationModel) (*Destination, diag.Diagnostics)

func ElasticSearchDestinationFromModel

func ElasticSearchDestinationFromModel(plan *ElasticSearchDestinationModel, previousState *ElasticSearchDestinationModel) (*Destination, diag.Diagnostics)

func ElasticSearchDestinationToModel

func ElasticSearchDestinationToModel(plan *ElasticSearchDestinationModel, component *Destination)

func GcpCloudStorageDestinationFromModel

func GcpCloudStorageDestinationFromModel(plan *GcpCloudStorageDestinationModel, previousState *GcpCloudStorageDestinationModel) (*Destination, diag.Diagnostics)

func GcpCloudStorageDestinationToModel

func GcpCloudStorageDestinationToModel(plan *GcpCloudStorageDestinationModel, component *Destination)

func HoneycombLogsFromModel

func HoneycombLogsFromModel(plan *HoneycombLogsDestinationModel, previousState *HoneycombLogsDestinationModel) (*Destination, diag.Diagnostics)

func HoneycombLogsToModel

func HoneycombLogsToModel(plan *HoneycombLogsDestinationModel, component *Destination)

func HttpDestinationFromModel

func HttpDestinationFromModel(plan *HttpDestinationModel, previousState *HttpDestinationModel) (*Destination, diag.Diagnostics)

func HttpDestinationToModel

func HttpDestinationToModel(plan *HttpDestinationModel, component *Destination)

func KafkaDestinationFromModel

func KafkaDestinationFromModel(plan *KafkaDestinationModel, previousState *KafkaDestinationModel) (*Destination, diag.Diagnostics)

func KafkaDestinationToModel

func KafkaDestinationToModel(plan *KafkaDestinationModel, component *Destination)

func LokiDestinationToModel

func LokiDestinationToModel(plan *LokiDestinationModel, component *Destination)

func LokiFromModel

func LokiFromModel(plan *LokiDestinationModel, previousState *LokiDestinationModel) (*Destination, diag.Diagnostics)

func MezmoDestinationFromModel

func MezmoDestinationFromModel(plan *MezmoDestinationModel, previousState *MezmoDestinationModel) (*Destination, diag.Diagnostics)

func MezmoDestinationToModel

func MezmoDestinationToModel(plan *MezmoDestinationModel, component *Destination)

func NewRelicDestinationFromModel

func NewRelicDestinationFromModel(plan *NewRelicDestinationModel, previousState *NewRelicDestinationModel) (*Destination, diag.Diagnostics)

func NewRelicDestinationToModel

func NewRelicDestinationToModel(plan *NewRelicDestinationModel, component *Destination)

func PrometheusRemoteWriteDestinationFromModel

func PrometheusRemoteWriteDestinationFromModel(plan *PrometheusRemoteWriteDestinationModel, previousState *PrometheusRemoteWriteDestinationModel) (*Destination, diag.Diagnostics)

func PrometheusRemoteWriteDestinationToModel

func PrometheusRemoteWriteDestinationToModel(plan *PrometheusRemoteWriteDestinationModel, component *Destination)

func S3DestinationFromModel

func S3DestinationFromModel(plan *S3DestinationModel, previousState *S3DestinationModel) (*Destination, diag.Diagnostics)

func S3DestinationToModel

func S3DestinationToModel(plan *S3DestinationModel, component *Destination)

func SplunkHecLogsDestinationFromModel

func SplunkHecLogsDestinationFromModel(plan *SplunkHecLogsDestinationModel, previousState *SplunkHecLogsDestinationModel) (*Destination, diag.Diagnostics)

func SplunkHecLogsDestinationToModel

func SplunkHecLogsDestinationToModel(plan *SplunkHecLogsDestinationModel, component *Destination)

Types

type AzureBlobStorageDestinationModel

// AzureBlobStorageDestinationModel is the Terraform plan/state model for the
// azure_blob_storage destination. tfsdk tags name the schema attributes;
// user_config:"true" presumably marks fields serialized into the component's
// user_config payload — confirm against FromModel/ToModel.
type AzureBlobStorageDestinationModel struct {
	Id                  String `tfsdk:"id"`
	PipelineId          String `tfsdk:"pipeline_id"`
	Title               String `tfsdk:"title"`
	Description         String `tfsdk:"description"`
	Inputs              List   `tfsdk:"inputs"`
	GenerationId        Int64  `tfsdk:"generation_id"`
	AckEnabled          Bool   `tfsdk:"ack_enabled" user_config:"true"`
	BatchTimeoutSeconds Int64  `tfsdk:"batch_timeout_secs" user_config:"true"`
	Encoding            String `tfsdk:"encoding" user_config:"true"`
	Compression         String `tfsdk:"compression" user_config:"true"`
	ContainerName       String `tfsdk:"container_name" user_config:"true"`
	ConnectionString    String `tfsdk:"connection_string" user_config:"true"`
	Prefix              String `tfsdk:"prefix" user_config:"true"`
}

type BlackholeDestinationModel

// BlackholeDestinationModel is the Terraform plan/state model for the
// blackhole destination; it has only the common component fields plus
// ack_enabled, since a blackhole discards events and needs no configuration.
type BlackholeDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
}

type DatadogLogsDestinationModel

// DatadogLogsDestinationModel is the Terraform plan/state model for the
// datadog_logs destination. tfsdk tags name the schema attributes;
// user_config:"true" marks the fields carried in the destination's user config.
type DatadogLogsDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	ApiKey       String `tfsdk:"api_key" user_config:"true"`
	Site         String `tfsdk:"site" user_config:"true"`
	Compression  String `tfsdk:"compression" user_config:"true"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
}

type DatadogMetricsDestinationModel

// DatadogMetricsDestinationModel is the Terraform plan/state model for the
// datadog_metrics destination — identical to DatadogLogsDestinationModel
// minus the compression field.
type DatadogMetricsDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	ApiKey       String `tfsdk:"api_key" user_config:"true"`
	Site         String `tfsdk:"site" user_config:"true"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
}

type ElasticSearchDestinationModel

// ElasticSearchDestinationModel is the Terraform plan/state model for the
// elasticsearch destination. Auth is a nested object attribute; Endpoints is
// a list of cluster endpoints.
type ElasticSearchDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
	Compression  String `tfsdk:"compression" user_config:"true"`
	Auth         Object `tfsdk:"auth" user_config:"true"`
	Endpoints    List   `tfsdk:"endpoints" user_config:"true"`
	Pipeline     String `tfsdk:"pipeline" user_config:"true"`
	Index        String `tfsdk:"index" user_config:"true"`
}

type GcpCloudStorageDestinationModel

// GcpCloudStorageDestinationModel is the Terraform plan/state model for the
// gcp_cloud_storage destination. Note the prefix attribute is named
// bucket_prefix here, unlike the S3/Azure models which use prefix.
type GcpCloudStorageDestinationModel struct {
	Id                  String `tfsdk:"id"`
	PipelineId          String `tfsdk:"pipeline_id"`
	Title               String `tfsdk:"title"`
	Description         String `tfsdk:"description"`
	Inputs              List   `tfsdk:"inputs"`
	GenerationId        Int64  `tfsdk:"generation_id"`
	Encoding            String `tfsdk:"encoding" user_config:"true"`
	Bucket              String `tfsdk:"bucket" user_config:"true"`
	Compression         String `tfsdk:"compression" user_config:"true"`
	BucketPrefix        String `tfsdk:"bucket_prefix" user_config:"true"`
	Auth                Object `tfsdk:"auth" user_config:"true"`
	AckEnabled          Bool   `tfsdk:"ack_enabled" user_config:"true"`
	BatchTimeoutSeconds Int64  `tfsdk:"batch_timeout_secs" user_config:"true"`
}

type HoneycombLogsDestinationModel

// HoneycombLogsDestinationModel is the Terraform plan/state model for the
// honeycomb_logs destination.
type HoneycombLogsDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
	// Go field is DataSet but the attribute name is a single word, "dataset".
	DataSet String `tfsdk:"dataset" user_config:"true"`
	ApiKey  String `tfsdk:"api_key" user_config:"true"`
}

type HttpDestinationModel

// HttpDestinationModel is the Terraform plan/state model for the http
// destination. NOTE(review): this struct uses the explicit StringValue /
// ListValue / ObjectValue type names while sibling models use String / List /
// Object — presumably aliases of the same framework types; confirm and unify.
type HttpDestinationModel struct {
	Id            StringValue `tfsdk:"id"`
	PipelineId    StringValue `tfsdk:"pipeline_id"`
	Title         StringValue `tfsdk:"title"`
	Description   StringValue `tfsdk:"description"`
	Inputs        ListValue   `tfsdk:"inputs"`
	GenerationId  Int64Value  `tfsdk:"generation_id"`
	Uri           StringValue `tfsdk:"uri" user_config:"true"`
	Encoding      StringValue `tfsdk:"encoding" user_config:"true"`
	Compression   StringValue `tfsdk:"compression" user_config:"true"`
	Auth          ObjectValue `tfsdk:"auth" user_config:"true"`
	Headers       MapValue    `tfsdk:"headers" user_config:"true"`
	AckEnabled    BoolValue   `tfsdk:"ack_enabled" user_config:"true"`
	MaxBytes      Int64Value  `tfsdk:"max_bytes" user_config:"true"`
	TimeoutSecs   Int64Value  `tfsdk:"timeout_secs" user_config:"true"`
	Method        StringValue `tfsdk:"method" user_config:"true"`
	PayloadPrefix StringValue `tfsdk:"payload_prefix" user_config:"true"`
	PayloadSuffix StringValue `tfsdk:"payload_suffix" user_config:"true"`
	TLSProtocols  ListValue   `tfsdk:"tls_protocols" user_config:"true"`
	Proxy         ObjectValue `tfsdk:"proxy" user_config:"true"`
	RateLimiting  ObjectValue `tfsdk:"rate_limiting" user_config:"true"`
}

type KafkaDestinationModel

// KafkaDestinationModel is the Terraform plan/state model for the kafka
// destination. Brokers is a list of broker addresses; SASL is a nested
// object attribute for SASL auth settings.
type KafkaDestinationModel struct {
	Id            String `tfsdk:"id"`
	PipelineId    String `tfsdk:"pipeline_id"`
	Title         String `tfsdk:"title"`
	Description   String `tfsdk:"description"`
	Inputs        List   `tfsdk:"inputs"`
	GenerationId  Int64  `tfsdk:"generation_id"`
	Encoding      String `tfsdk:"encoding" user_config:"true"`
	Compression   String `tfsdk:"compression" user_config:"true"`
	EventKeyField String `tfsdk:"event_key_field" user_config:"true"`
	Brokers       List   `tfsdk:"brokers" user_config:"true"`
	Topic         String `tfsdk:"topic" user_config:"true"`
	TLSEnabled    Bool   `tfsdk:"tls_enabled" user_config:"true"`
	SASL          Object `tfsdk:"sasl" user_config:"true"`
	AckEnabled    Bool   `tfsdk:"ack_enabled" user_config:"true"`
}

type LokiDestinationModel

// LokiDestinationModel is the Terraform plan/state model for the loki
// destination.
// NOTE(review): unlike every sibling model, Inputs here carries
// user_config:"true" and sits after the destination-specific fields — looks
// unintentional; confirm against LokiFromModel before changing the tag.
type LokiDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Endpoint     String `tfsdk:"endpoint" user_config:"true"`
	Path         String `tfsdk:"path" user_config:"true"`
	Encoding     String `tfsdk:"encoding" user_config:"true"`
	Auth         Object `tfsdk:"auth" user_config:"true"`
	Labels       Map    `tfsdk:"labels" user_config:"true"`
	Inputs       List   `tfsdk:"inputs" user_config:"true"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
}

type MezmoDestinationModel

// MezmoDestinationModel is the Terraform plan/state model for the mezmo
// destination (wire type name "logs" per MEZMO_DESTINATION_TYPE_NAME).
type MezmoDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
	Host         String `tfsdk:"host" user_config:"true"`
	IngestionKey String `tfsdk:"ingestion_key" user_config:"true"`
	Query        Object `tfsdk:"query" user_config:"true"`
	// How log lines are constructed; explicit_scheme_options presumably only
	// applies for the explicit scheme — TODO confirm in MezmoDestinationFromModel.
	LogConstructionScheme String `tfsdk:"log_construction_scheme" user_config:"true"`
	ExplicitSchemeOptions Object `tfsdk:"explicit_scheme_options" user_config:"true"`
}

type NewRelicDestinationModel

// NewRelicDestinationModel is the Terraform plan/state model for the
// new_relic destination.
type NewRelicDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
	// Which New Relic API to target — presumably an enum (e.g. logs/metrics);
	// confirm against the resource schema's validators.
	Api        String `tfsdk:"api" user_config:"true"`
	AccountId  String `tfsdk:"account_id" user_config:"true"`
	LicenseKey String `tfsdk:"license_key" user_config:"true"`
}

type PrometheusRemoteWriteDestinationModel

// PrometheusRemoteWriteDestinationModel is the Terraform plan/state model for
// the prometheus_remote_write destination.
type PrometheusRemoteWriteDestinationModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Inputs       List   `tfsdk:"inputs"`
	GenerationId Int64  `tfsdk:"generation_id"`
	AckEnabled   Bool   `tfsdk:"ack_enabled" user_config:"true"`
	Endpoint     String `tfsdk:"endpoint" user_config:"true"`
	Auth         Object `tfsdk:"auth" user_config:"true"`
}

type S3DestinationModel

// S3DestinationModel is the Terraform plan/state model for the AWS S3
// destination; fields correspond one-to-one with S3DestinationResourceSchema
// attributes (Auth nests access_key_id / secret_access_key).
type S3DestinationModel struct {
	Id                  String `tfsdk:"id"`
	PipelineId          String `tfsdk:"pipeline_id"`
	Title               String `tfsdk:"title"`
	Description         String `tfsdk:"description"`
	Inputs              List   `tfsdk:"inputs"`
	GenerationId        Int64  `tfsdk:"generation_id"`
	AckEnabled          Bool   `tfsdk:"ack_enabled" user_config:"true"`
	BatchTimeoutSeconds Int64  `tfsdk:"batch_timeout_secs" user_config:"true"`
	Auth                Object `tfsdk:"auth" user_config:"true"`
	Region              String `tfsdk:"region" user_config:"true"`
	Bucket              String `tfsdk:"bucket" user_config:"true"`
	Prefix              String `tfsdk:"prefix" user_config:"true"`
	Encoding            String `tfsdk:"encoding" user_config:"true"`
	Compression         String `tfsdk:"compression" user_config:"true"`
}

type SchemaAttributes

type SchemaAttributes map[string]schema.Attribute

func ExtendBaseAttributes

func ExtendBaseAttributes(target SchemaAttributes, addons []string) SchemaAttributes

type SplunkHecLogsDestinationModel

// SplunkHecLogsDestinationModel is the Terraform plan/state model for the
// Splunk HEC logs destination. Source, SourceType and Index are nested
// objects sharing the splunkValueTypeAttributes shape from the schema.
type SplunkHecLogsDestinationModel struct {
	Id                   String `tfsdk:"id"`
	PipelineId           String `tfsdk:"pipeline_id"`
	Title                String `tfsdk:"title"`
	Description          String `tfsdk:"description"`
	Inputs               List   `tfsdk:"inputs"`
	GenerationId         Int64  `tfsdk:"generation_id"`
	AckEnabled           Bool   `tfsdk:"ack_enabled" user_config:"true"`
	Compression          String `tfsdk:"compression" user_config:"true"`
	Endpoint             String `tfsdk:"endpoint" user_config:"true"`
	Token                String `tfsdk:"token" user_config:"true"`
	HostField            String `tfsdk:"host_field" user_config:"true"`
	TimestampField       String `tfsdk:"timestamp_field" user_config:"true"`
	TlsVerifyCertificate Bool   `tfsdk:"tls_verify_certificate" user_config:"true"`
	Source               Object `tfsdk:"source" user_config:"true"`
	SourceType           Object `tfsdk:"source_type" user_config:"true"`
	Index                Object `tfsdk:"index" user_config:"true"`
}

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL