sources

package
v4.2.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Nov 13, 2024 License: MIT Imports: 14 Imported by: 0

Documentation

Index

Constants

View Source
const AGENT_SOURCE_NODE_NAME = "mezmo-agent"
View Source
const AGENT_SOURCE_TYPE_NAME = "agent"
View Source
const AZURE_EVENT_HUB_SOURCE_NODE_NAME = "azure-event-hub"
View Source
const AZURE_EVENT_HUB_SOURCE_TYPE_NAME = "azure_event_hub"
View Source
const DATADOG_SOURCE_NODE_NAME = "mezmo-datadog-source"
View Source
const DATADOG_SOURCE_TYPE_NAME = "datadog"
View Source
const DEMO_SOURCE_NODE_NAME = "demo-logs"
View Source
const DEMO_SOURCE_TYPE_NAME = "demo"
View Source
const FLUENT_SOURCE_NODE_NAME = FLUENT_SOURCE_TYPE_NAME
View Source
const FLUENT_SOURCE_TYPE_NAME = "fluent"
View Source
const HTTP_SOURCE_NODE_NAME = HTTP_SOURCE_TYPE_NAME
View Source
const HTTP_SOURCE_TYPE_NAME = "http"
View Source
const KAFKA_SOURCE_NODE_NAME = KAFKA_SOURCE_TYPE_NAME
View Source
const KAFKA_SOURCE_TYPE_NAME = "kafka"
View Source
const KINESIS_FIREHOSE_SOURCE_NODE_NAME = "kinesis-firehose"
View Source
const KINESIS_FIREHOSE_SOURCE_TYPE_NAME = "kinesis_firehose"
View Source
const LOGSTASH_SOURCE_NODE_NAME = LOGSTASH_SOURCE_TYPE_NAME
View Source
const LOGSTASH_SOURCE_TYPE_NAME = "logstash"
View Source
const LOG_ANALYSIS_SOURCE_NODE_NAME = "log-analysis"
View Source
const LOG_ANALYSIS_SOURCE_TYPE_NAME = "log_analysis"
View Source
const OPEN_TELEMETRY_LOGS_SOURCE_NODE_NAME = "open-telemetry-logs"
View Source
const OPEN_TELEMETRY_LOGS_SOURCE_TYPE_NAME = "open_telemetry_logs"
View Source
const OPEN_TELEMETRY_METRICS_SOURCE_NODE_NAME = "open-telemetry-metrics"
View Source
const OPEN_TELEMETRY_METRICS_SOURCE_TYPE_NAME = "open_telemetry_metrics"
View Source
const OPEN_TELEMETRY_TRACES_SOURCE_NODE_NAME = "open-telemetry-traces"
View Source
const OPEN_TELEMETRY_TRACES_SOURCE_TYPE_NAME = "open_telemetry_traces"
View Source
const PROMETHEUS_REMOTE_WRITE_SOURCE_NODE_NAME = "prometheus-remote-write"
View Source
const PROMETHEUS_REMOTE_WRITE_SOURCE_TYPE_NAME = "prometheus_remote_write"
View Source
const S3_SOURCE_NODE_NAME = S3_SOURCE_TYPE_NAME
View Source
const S3_SOURCE_TYPE_NAME = "s3"
View Source
const SPLUNK_HEC_SOURCE_NODE_NAME = "splunk-hec"
View Source
const SPLUNK_HEC_SOURCE_TYPE_NAME = "splunk_hec"
View Source
const SQS_SOURCE_NODE_NAME = SQS_SOURCE_TYPE_NAME
View Source
const SQS_SOURCE_TYPE_NAME = "sqs"
View Source
const WEBHOOK_SOURCE_NODE_NAME = WEBHOOK_SOURCE_TYPE_NAME
View Source
const WEBHOOK_SOURCE_TYPE_NAME = "webhook"

Variables

View Source
var AgentSourceResourceSchema = schema.Schema{
	Description: "Represents a Mezmo agent source.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var AzureEventHubSourceResourceSchema = schema.Schema{
	Description: "Represents an Azure Event Hub source.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"decoding": schema.StringAttribute{
			Required:    false,
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("bytes"),
			Description: "Configures how events are decoded from raw bytes",
			Validators: []validator.String{
				stringvalidator.OneOf("bytes", "json"),
			},
		},
		"connection_string": schema.StringAttribute{
			Required:    true,
			Description: "The Connection String as it appears in hub consumer SAS Policy",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.LengthAtMost(512),
			},
		},
		"namespace": schema.StringAttribute{
			Required:    true,
			Description: "The Event Hub Namespace",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.LengthAtMost(256),
			},
		},
		"group_id": schema.StringAttribute{
			Required:    true,
			Description: "The consumer group name that this consumer belongs to.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
				stringvalidator.LengthAtMost(256),
			},
		},
		"topics": schema.ListAttribute{
			Required:    true,
			ElementType: StringType,
			Description: "The list of Azure Event Hub name(s) to read events from.",
			Validators: []validator.List{
				listvalidator.SizeAtLeast(1),
				listvalidator.ValueStringsAre(
					stringvalidator.LengthAtLeast(1),
					stringvalidator.LengthAtMost(256),
				),
			},
		},
	}, nil),
}
View Source
var DatadogSourceResourceSchema = schema.Schema{
	Description: "Send logs and metrics data directly from an installed Datadog agent",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var DemoSourceResourceSchema = schema.Schema{
	Description: "Represents a demo logs source.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"format": schema.StringAttribute{
			Required:    true,
			Description: "The format of the events",
			Validators: []validator.String{
				stringvalidator.OneOf(
					"env_sensor", "financial", "nginx", "json", "apache_common",
					"apache_error", "bsd_syslog", "syslog", "http_metrics", "generic_metrics"),
			},
		},
	}, nil),
}
View Source
var FluentSourceResourceSchema = schema.Schema{
	Description: "Receive data from Fluentd or Fluent Bit",
	Attributes: ExtendBaseAttributes(
		map[string]schema.Attribute{
			"decoding": schema.StringAttribute{
				Optional:    true,
				Computed:    true,
				Default:     stringdefault.StaticString("json"),
				Description: "The decoding method for converting frames into data events",
				Validators: []validator.String{
					stringvalidator.OneOf("bytes", "json", "ndjson"),
				},
			},
		},
		[]string{"capture_metadata", "shared_source_id"},
	),
}
View Source
var HttpSourceResourceSchema = schema.Schema{
	Description: "Represents an HTTP source.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"decoding": schema.StringAttribute{
			Required:    false,
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("auto"),
			Description: "The decoding method for converting frames into data events.",
			Validators: []validator.String{
				stringvalidator.OneOf("bytes", "json", "ndjson", "auto"),
			},
		},
	}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var KafkaSourceResourceSchema = schema.Schema{
	Description: "Represents a Kafka source.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"brokers": schema.ListNestedAttribute{
			Required:    true,
			Description: "The Kafka brokers to connect to.",
			NestedObject: schema.NestedAttributeObject{
				Attributes: map[string]schema.Attribute{
					"host": schema.StringAttribute{
						Required:    true,
						Description: "The host of the Kafka broker.",
						Validators: []validator.String{
							stringvalidator.LengthAtLeast(1),
							stringvalidator.LengthAtMost(255),
						},
					},
					"port": schema.Int64Attribute{
						Required:    true,
						Description: "The port of the Kafka broker.",
						Validators: []validator.Int64{
							int64validator.Between(1, 65535),
						},
					},
				},
			},
			Validators: []validator.List{
				listvalidator.UniqueValues(),
				listvalidator.SizeAtLeast(1),
			},
		},
		"topics": schema.ListAttribute{
			Required:    true,
			Description: "The Kafka topics to consume from.",
			ElementType: StringType,
			Validators: []validator.List{
				listvalidator.UniqueValues(),
				listvalidator.SizeAtLeast(1),
				listvalidator.ValueStringsAre(
					stringvalidator.LengthBetween(1, 256),
				),
			},
		},
		"group_id": schema.StringAttribute{
			Required:    true,
			Description: "The Kafka consumer group ID to use.",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(1),
			},
		},
		"tls_enabled": schema.BoolAttribute{
			Optional:    true,
			Computed:    true,
			Default:     booldefault.StaticBool(true),
			Description: "Whether to use TLS when connecting to Kafka.",
		},
		"sasl": schema.SingleNestedAttribute{
			Optional:    true,
			Description: "The SASL configuration to use when connecting to Kafka.",
			Attributes: map[string]schema.Attribute{
				"mechanism": schema.StringAttribute{
					Optional:    true,
					Computed:    true,
					Description: "The SASL mechanism to use when connecting to Kafka.",
					Validators: []validator.String{
						stringvalidator.OneOf("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"),
					},
				},
				"username": schema.StringAttribute{
					Required:    true,
					Description: "The SASL username to use when connecting to Kafka.",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
					},
				},
				"password": schema.StringAttribute{
					Required:    true,
					Sensitive:   true,
					Description: "The SASL password to use when connecting to Kafka.",
					Validators: []validator.String{
						stringvalidator.LengthAtLeast(1),
					},
				},
			},
		},
		"decoding": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("json"),
			Description: "The decoding method for converting frames into data events.",
			Validators: []validator.String{
				stringvalidator.OneOf("bytes", "json"),
			},
		},
	}, nil),
}
View Source
var KinesisFirehoseSourceResourceSchema = schema.Schema{
	Description: "Receive Kinesis Firehose data",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"decoding": schema.StringAttribute{
			Optional: true,
			Computed: true,
			Default:  stringdefault.StaticString("json"),
			Description: "This specifies what the data format will be after it is base64 decoded. " +
				"If it is JSON, it will be automatically parsed.",
			Validators: []validator.String{
				stringvalidator.OneOf("text", "json"),
			},
		},
	}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var LogAnalysisSourceResourceSchema = schema.Schema{
	Description: "Receive data directly from your Mezmo Log Analysis account",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, nil),
}
View Source
var LogStashSourceResourceSchema = schema.Schema{
	Description: "Receive Logstash data",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"format": schema.StringAttribute{
			Computed:    true,
			Optional:    true,
			Default:     stringdefault.StaticString("json"),
			Description: "The format of the logstash data",
			Validators:  []validator.String{stringvalidator.OneOf("json", "text")},
		},
	}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var OpenTelemetryLogsSourceResourceSchema = schema.Schema{
	Description: "Represents an Open Telemetry Logs source.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var OpenTelemetryMetricsSourceResourceSchema = schema.Schema{
	Description: "Represents an Open Telemetry Metrics source.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var OpenTelemetryTracesSourceResourceSchema = schema.Schema{
	Description: "Represents an Open Telemetry Traces source.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var PrometheusRemoteWriteSourceResourceSchema = schema.Schema{
	Description: "Represents a Prometheus Remote Write source.",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}
View Source
var S3SourceResourceSchema = schema.Schema{
	Description: "Represents an S3 pull source.",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"auth": schema.SingleNestedAttribute{
			Required:    true,
			Description: "Configures AWS authentication",
			Attributes: map[string]schema.Attribute{
				"access_key_id": schema.StringAttribute{
					Required:    true,
					Description: "The AWS access key id",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"secret_access_key": schema.StringAttribute{
					Required:    true,
					Sensitive:   true,
					Description: "The AWS secret access key",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
		"region": schema.StringAttribute{
			Required:    true,
			Description: "The name of the AWS region",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
		"sqs_queue_url": schema.StringAttribute{
			Required:    true,
			Description: "The URL of an AWS SQS queue configured to receive S3 bucket notifications",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(7)},
		},
		"compression": schema.StringAttribute{
			Optional:    true,
			Computed:    true,
			Default:     stringdefault.StaticString("auto"),
			Description: "The compression format of the S3 objects",
			Validators:  []validator.String{stringvalidator.OneOf([]string{"auto", "gzip", "none", "zstd"}...)},
		},
	}, nil),
}
View Source
var SQSSourceResourceSchema = schema.Schema{
	Description: "Collect messages from AWS SQS",
	Attributes: ExtendBaseAttributes(map[string]schema.Attribute{
		"queue_url": schema.StringAttribute{
			Required:    true,
			Description: "The URL of an AWS SQS queue",
			Validators: []validator.String{
				stringvalidator.LengthAtLeast(7),
				stringvalidator.LengthAtMost(128),
			},
		},
		"auth": schema.SingleNestedAttribute{
			Required:    true,
			Description: "Configures AWS authentication",
			Attributes: map[string]schema.Attribute{
				"access_key_id": schema.StringAttribute{
					Required:    true,
					Description: "The AWS access key id",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
				"secret_access_key": schema.StringAttribute{
					Required:    true,
					Sensitive:   true,
					Description: "The AWS secret access key",
					Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
				},
			},
		},
		"region": schema.StringAttribute{
			Required:    true,
			Description: "The name of the source's AWS region",
			Validators:  []validator.String{stringvalidator.LengthAtLeast(1)},
		},
	}, nil),
}
View Source
var SplunkHecSourceResourceSchema = schema.Schema{
	Description: "Receive Splunk logs",
	Attributes: ExtendBaseAttributes(
		map[string]schema.Attribute{},
		[]string{"capture_metadata", "shared_source_id"},
	),
}
View Source
var WebhookSourceResourceSchema = schema.Schema{
	Description: "Receive data from incoming webhooks using the WebSub protocol",
	Attributes:  ExtendBaseAttributes(map[string]schema.Attribute{}, []string{"capture_metadata", "shared_source_id"}),
}

Functions

func AgentSourceFromModel

func AgentSourceFromModel(plan *AgentSourceModel, previousState *AgentSourceModel) (*Source, diag.Diagnostics)

func AgentSourceToModel

func AgentSourceToModel(plan *AgentSourceModel, component *Source)

func AzureEventHubSourceFromModel

func AzureEventHubSourceFromModel(plan *AzureEventHubSourceModel, previousState *AzureEventHubSourceModel) (*Source, diag.Diagnostics)

func AzureEventHubSourceToModel

func AzureEventHubSourceToModel(plan *AzureEventHubSourceModel, component *Source)

func DatadogSourceFromModel

func DatadogSourceFromModel(plan *DatadogSourceModel, previousState *DatadogSourceModel) (*Source, diag.Diagnostics)

func DatadogSourceToModel

func DatadogSourceToModel(plan *DatadogSourceModel, component *Source)

func DemoSourceFromModel

func DemoSourceFromModel(plan *DemoSourceModel, previousState *DemoSourceModel) (*Source, diag.Diagnostics)

func DemoSourceToModel

func DemoSourceToModel(plan *DemoSourceModel, component *Source)

func FluentSourceFromModel

func FluentSourceFromModel(plan *FluentSourceModel, previousState *FluentSourceModel) (*Source, diag.Diagnostics)

func FluentSourceToModel

func FluentSourceToModel(plan *FluentSourceModel, component *Source)

func HttpSourceFromModel

func HttpSourceFromModel(plan *HttpSourceModel, previousState *HttpSourceModel) (*Source, diag.Diagnostics)

func HttpSourceToModel

func HttpSourceToModel(plan *HttpSourceModel, component *Source)

func KafkaSourceFromModel

func KafkaSourceFromModel(plan *KafkaSourceModel, previousState *KafkaSourceModel) (*Source, diag.Diagnostics)

func KafkaSourceToModel

func KafkaSourceToModel(plan *KafkaSourceModel, component *Source)

func KinesisFirehoseSourceFromModel

func KinesisFirehoseSourceFromModel(plan *KinesisFirehoseSourceModel, previousState *KinesisFirehoseSourceModel) (*Source, diag.Diagnostics)

func KinesisFirehoseSourceToModel

func KinesisFirehoseSourceToModel(plan *KinesisFirehoseSourceModel, component *Source)

func LogAnalysisSourceFromModel

func LogAnalysisSourceFromModel(plan *LogAnalysisSourceModel, previousState *LogAnalysisSourceModel) (*Source, diag.Diagnostics)

func LogAnalysisSourceToModel

func LogAnalysisSourceToModel(plan *LogAnalysisSourceModel, component *Source)

func LogStashSourceFromModel

func LogStashSourceFromModel(plan *LogStashSourceModel, previousState *LogStashSourceModel) (*Source, diag.Diagnostics)

func LogStashSourceToModel

func LogStashSourceToModel(plan *LogStashSourceModel, component *Source)

func OpenTelemetryLogsSourceFromModel

func OpenTelemetryLogsSourceFromModel(plan *OpenTelemetryLogsSourceModel, previousState *OpenTelemetryLogsSourceModel) (*Source, diag.Diagnostics)

func OpenTelemetryLogsSourceToModel

func OpenTelemetryLogsSourceToModel(plan *OpenTelemetryLogsSourceModel, component *Source)

func OpenTelemetryMetricsSourceFromModel

func OpenTelemetryMetricsSourceFromModel(plan *OpenTelemetryMetricsSourceModel, previousState *OpenTelemetryMetricsSourceModel) (*Source, diag.Diagnostics)

func OpenTelemetryMetricsSourceToModel

func OpenTelemetryMetricsSourceToModel(plan *OpenTelemetryMetricsSourceModel, component *Source)

func OpenTelemetryTracesSourceFromModel

func OpenTelemetryTracesSourceFromModel(plan *OpenTelemetryTracesSourceModel, previousState *OpenTelemetryTracesSourceModel) (*Source, diag.Diagnostics)

func OpenTelemetryTracesSourceToModel

func OpenTelemetryTracesSourceToModel(plan *OpenTelemetryTracesSourceModel, component *Source)

func PrometheusRemoteWriteSourceFromModel

func PrometheusRemoteWriteSourceFromModel(plan *PrometheusRemoteWriteSourceModel, previousState *PrometheusRemoteWriteSourceModel) (*Source, diag.Diagnostics)

func PrometheusRemoteWriteSourceToModel

func PrometheusRemoteWriteSourceToModel(plan *PrometheusRemoteWriteSourceModel, component *Source)

func S3SourceFromModel

func S3SourceFromModel(plan *S3SourceModel, previousState *S3SourceModel) (*Source, diag.Diagnostics)

func S3SourceToModel

func S3SourceToModel(plan *S3SourceModel, component *Source)

func SQSSourceFromModel

func SQSSourceFromModel(plan *SQSSourceModel, previousState *SQSSourceModel) (*Source, diag.Diagnostics)

func SQSSourceToModel

func SQSSourceToModel(plan *SQSSourceModel, component *Source)

func SplunkHecSourceFromModel

func SplunkHecSourceFromModel(plan *SplunkHecSourceModel, previousState *SplunkHecSourceModel) (*Source, diag.Diagnostics)

func SplunkHecSourceToModel

func SplunkHecSourceToModel(plan *SplunkHecSourceModel, component *Source)

func WebhookSourceFromModel

func WebhookSourceFromModel(plan *WebhookSourceModel, previousState *WebhookSourceModel) (*Source, diag.Diagnostics)

func WebhookSourceToModel

func WebhookSourceToModel(plan *WebhookSourceModel, component *Source)

Types

type AgentSourceModel

type AgentSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type AzureEventHubSourceModel

type AzureEventHubSourceModel struct {
	Id               String `tfsdk:"id"`
	PipelineId       String `tfsdk:"pipeline_id"`
	Title            String `tfsdk:"title"`
	Description      String `tfsdk:"description"`
	GenerationId     Int64  `tfsdk:"generation_id"`
	Decoding         String `tfsdk:"decoding" user_config:"true"`
	ConnectionString String `tfsdk:"connection_string" user_config:"true"`
	Namespace        String `tfsdk:"namespace" user_config:"true"`
	GroupId          String `tfsdk:"group_id" user_config:"true"`
	Topics           List   `tfsdk:"topics" user_config:"true"`
}

type DatadogSourceModel

type DatadogSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type DemoSourceModel

type DemoSourceModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	Format       String `tfsdk:"format"`
	GenerationId Int64  `tfsdk:"generation_id"`
}

type FluentSourceModel

type FluentSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	Decoding        String `tfsdk:"decoding" user_config:"true"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type HttpSourceModel

type HttpSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	Decoding        String `tfsdk:"decoding" user_config:"true"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type KafkaSourceModel

type KafkaSourceModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Brokers      List   `tfsdk:"brokers" user_config:"true"`
	Topics       List   `tfsdk:"topics" user_config:"true"`
	GroupId      String `tfsdk:"group_id" user_config:"true"`
	TLSEnabled   Bool   `tfsdk:"tls_enabled" user_config:"true"`
	SASL         Object `tfsdk:"sasl" user_config:"true"`
	Decoding     String `tfsdk:"decoding" user_config:"true"`
}

type KinesisFirehoseSourceModel

type KinesisFirehoseSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	Decoding        String `tfsdk:"decoding" user_config:"true"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type LogAnalysisSourceModel

type LogAnalysisSourceModel struct {
	Id           StringValue `tfsdk:"id"`
	PipelineId   StringValue `tfsdk:"pipeline_id"`
	Title        StringValue `tfsdk:"title"`
	Description  StringValue `tfsdk:"description"`
	GenerationId Int64Value  `tfsdk:"generation_id"`
}

type LogStashSourceModel

type LogStashSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	Format          String `tfsdk:"format" user_config:"true"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type OpenTelemetryLogsSourceModel

type OpenTelemetryLogsSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type OpenTelemetryMetricsSourceModel

type OpenTelemetryMetricsSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type OpenTelemetryTracesSourceModel

type OpenTelemetryTracesSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type PrometheusRemoteWriteSourceModel

type PrometheusRemoteWriteSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type S3SourceModel

type S3SourceModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	GenerationId Int64  `tfsdk:"generation_id"`
	Auth         Object `tfsdk:"auth" user_config:"true"`
	Region       String `tfsdk:"region" user_config:"true"`
	SqsQueueUrl  String `tfsdk:"sqs_queue_url" user_config:"true"`
	Compression  String `tfsdk:"compression" user_config:"true"`
}

type SQSSourceModel

type SQSSourceModel struct {
	Id           String `tfsdk:"id"`
	PipelineId   String `tfsdk:"pipeline_id"`
	Title        String `tfsdk:"title"`
	Description  String `tfsdk:"description"`
	GenerationId Int64  `tfsdk:"generation_id"`
	QueueUrl     String `tfsdk:"queue_url" user_config:"true"`
	Auth         Object `tfsdk:"auth" user_config:"true"`
	Region       String `tfsdk:"region" user_config:"true"`
}

type SchemaAttributes

type SchemaAttributes map[string]schema.Attribute

func ExtendBaseAttributes

func ExtendBaseAttributes(target SchemaAttributes, addons []string) SchemaAttributes

type SplunkHecSourceModel

type SplunkHecSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

type WebhookSourceModel

type WebhookSourceModel struct {
	Id              String `tfsdk:"id"`
	PipelineId      String `tfsdk:"pipeline_id"`
	Title           String `tfsdk:"title"`
	Description     String `tfsdk:"description"`
	GenerationId    Int64  `tfsdk:"generation_id"`
	SharedSourceId  String `tfsdk:"shared_source_id"`
	CaptureMetadata Bool   `tfsdk:"capture_metadata" user_config:"true"`
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL