package messaging v0.17.0

Published: Jan 13, 2021 License: MIT Imports: 18 Imported by: 19

Documentation

Constants

This section is empty.

Variables

var (
	// ErrMessageSizeLimit indicates that a message was rejected by the server due to the size limit
	ErrMessageSizeLimit = errors.New("message was too large, server rejected it to avoid allocation error")
)

Functions

func CreateTLSConfig added in v0.6.0

func CreateTLSConfig(tlsConfig auth.TLS) (*tls.Config, error)

CreateTLSConfig returns a *tls.Config built from the given auth.TLS settings
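
For example, the resulting *tls.Config can be wired into a sarama configuration before dialing the brokers. A minimal sketch, assuming the auth.TLS settings come from a parsed KafkaConfig; imports and error plumbing around the caller are omitted:

// newSaramaConfig is a sketch: it builds a sarama.Config whose TLS
// settings come from this package's CreateTLSConfig helper.
func newSaramaConfig(kafkaCfg *messaging.KafkaConfig) (*sarama.Config, error) {
	tlsCfg, err := messaging.CreateTLSConfig(kafkaCfg.TLS)
	if err != nil {
		return nil, err
	}
	cfg := sarama.NewConfig()
	if tlsCfg != nil {
		cfg.Net.TLS.Enable = true
		cfg.Net.TLS.Config = tlsCfg
	}
	return cfg, nil
}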

Types

type Client

type Client interface {
	NewConsumer(appName, consumerName string, concurrency int) (Consumer, error)
	NewProducer(appName string) (Producer, error)
}

Client is the interface used to abstract out interaction with the messaging system for replication

func NewKafkaClient added in v0.4.0

func NewKafkaClient(kc *KafkaConfig, metricsClient metrics.Client, zLogger *zap.Logger, logger log.Logger, metricScope tally.Scope,
	checkApp bool) Client

NewKafkaClient is used to create an instance of KafkaClient
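
A sketch of typical wiring: the metrics client, loggers, and tally scope are assumed to be constructed by the surrounding service, and "my-app" is a placeholder application name that would need to be declared under KafkaConfig.Applications.

// setupMessaging is a sketch that creates a Kafka-backed Client and
// obtains a consumer and a producer for one application.
func setupMessaging(
	kafkaCfg *messaging.KafkaConfig,
	metricsClient metrics.Client,
	zLogger *zap.Logger,
	logger log.Logger,
	scope tally.Scope,
) (messaging.Consumer, messaging.Producer, error) {
	client := messaging.NewKafkaClient(kafkaCfg, metricsClient, zLogger, logger, scope, true /* checkApp */)

	consumer, err := client.NewConsumer("my-app", "my-app-consumer", 10)
	if err != nil {
		return nil, nil, err
	}
	producer, err := client.NewProducer("my-app")
	if err != nil {
		return nil, nil, err
	}
	return consumer, producer, nil
}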

type CloseableProducer added in v0.9.3

type CloseableProducer interface {
	Producer
	Close() error
}

CloseableProducer is a Producer that can be closed

type ClusterConfig

type ClusterConfig struct {
	Brokers []string `yaml:"brokers"`
}

ClusterConfig describes the configuration for a single Kafka cluster

type Consumer added in v0.4.0

type Consumer interface {
	// Start starts the consumer
	Start() error
	// Stop stops the consumer
	Stop()
	// Messages returns the message channel for this consumer
	Messages() <-chan Message
}

Consumer is the unified interface for both internal and external Kafka clients

type KafkaConfig

type KafkaConfig struct {
	TLS      auth.TLS                 `yaml:"tls"`
	Clusters map[string]ClusterConfig `yaml:"clusters"`
	Topics   map[string]TopicConfig   `yaml:"topics"`
	// Applications describes the applications that will use the Kafka topics
	Applications map[string]TopicList `yaml:"applications"`
}

KafkaConfig describes the configuration needed to connect to all Kafka clusters

func (*KafkaConfig) Validate added in v0.4.0

func (k *KafkaConfig) Validate(checkApp bool)

Validate validates the Kafka configuration

type Message added in v0.4.0

type Message interface {
	// Value is a mutable reference to the message's value
	Value() []byte
	// Partition is the ID of the partition from which the message was read.
	Partition() int32
	// Offset is the message's offset.
	Offset() int64
	// Ack marks the message as successfully processed.
	Ack() error
	// Nack marks the message processing as failed; the message will be retried or sent to the DLQ.
	Nack() error
}

Message is the unified interface for a Kafka message
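
Together with Consumer, this supports a simple receive loop. A sketch that acks messages a (hypothetical) handler processes successfully and nacks the rest so they are retried or routed to the DLQ:

// consumeLoop is a sketch: it starts the consumer, processes each
// message with the supplied handler, and acks or nacks accordingly.
// It returns when the Messages channel is closed (e.g. after Stop).
func consumeLoop(consumer messaging.Consumer, handle func(payload []byte) error) error {
	if err := consumer.Start(); err != nil {
		return err
	}
	defer consumer.Stop()

	for msg := range consumer.Messages() {
		if err := handle(msg.Value()); err != nil {
			_ = msg.Nack() // failed; message will be retried or sent to the DLQ
			continue
		}
		_ = msg.Ack() // processed successfully
	}
	return nil
}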

type Producer

type Producer interface {
	Publish(ctx context.Context, message interface{}) error
}

Producer is the interface used to send replication tasks to other clusters through the replicator
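
A sketch of publishing a single task with a bounded deadline; the task value is a placeholder, and the concrete type accepted by Publish depends on the producer implementation:

// publishWithTimeout is a sketch that bounds a single Publish call.
func publishWithTimeout(p messaging.Producer, task interface{}) error {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	return p.Publish(ctx, task)
}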

func NewKafkaProducer

func NewKafkaProducer(topic string, producer sarama.SyncProducer, logger log.Logger) Producer

NewKafkaProducer is used to create the Kafka-based producer implementation
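
A sketch of constructing the underlying sarama.SyncProducer and handing it to NewKafkaProducer; the broker list and topic are placeholders, and the logger is assumed to come from the surrounding service:

// newKafkaProducer is a sketch: it dials the brokers with sarama and
// wraps the resulting SyncProducer in this package's Producer.
func newKafkaProducer(brokers []string, topic string, logger log.Logger) (messaging.Producer, error) {
	cfg := sarama.NewConfig()
	cfg.Producer.Return.Successes = true // required by sarama.SyncProducer

	syncProducer, err := sarama.NewSyncProducer(brokers, cfg)
	if err != nil {
		return nil, err
	}
	return messaging.NewKafkaProducer(topic, syncProducer, logger), nil
}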

func NewMetricProducer added in v0.5.0

func NewMetricProducer(
	producer Producer,
	metricsClient metrics.Client,
) Producer

NewMetricProducer wraps an existing producer in one that emits metrics
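
For example, the Kafka producer above could be decorated so that each publish also emits metrics. A sketch; the metrics client is assumed to be provided by the surrounding service:

// withMetrics is a sketch that wraps any Producer with metric emission.
func withMetrics(p messaging.Producer, metricsClient metrics.Client) messaging.Producer {
	return messaging.NewMetricProducer(p, metricsClient)
}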

func NewNoopProducer added in v0.15.0

func NewNoopProducer() Producer

NewNoopProducer returns a no-op message producer

type TopicConfig

type TopicConfig struct {
	Cluster string `yaml:"cluster"`
}

TopicConfig describes the mapping from topic to Kafka cluster

type TopicList added in v0.3.12

type TopicList struct {
	Topic      string `yaml:"topic"`
	RetryTopic string `yaml:"retry-topic"`
	DLQTopic   string `yaml:"dlq-topic"`
}

TopicList describes the topic names for each cluster
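
Putting the configuration types together: a sketch of a hand-built KafkaConfig equivalent to what would normally be unmarshalled from YAML, validated before use. All cluster, topic, and application names are placeholders.

// exampleKafkaConfig is a sketch of the cluster/topic/application wiring.
func exampleKafkaConfig() *messaging.KafkaConfig {
	cfg := &messaging.KafkaConfig{
		Clusters: map[string]messaging.ClusterConfig{
			"local": {Brokers: []string{"127.0.0.1:9092"}},
		},
		Topics: map[string]messaging.TopicConfig{
			"my-app-topic":       {Cluster: "local"},
			"my-app-topic-retry": {Cluster: "local"},
			"my-app-topic-dlq":   {Cluster: "local"},
		},
		Applications: map[string]messaging.TopicList{
			"my-app": {
				Topic:      "my-app-topic",
				RetryTopic: "my-app-topic-retry",
				DLQTopic:   "my-app-topic-dlq",
			},
		},
	}
	cfg.Validate(true /* checkApp */)
	return cfg
}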
