package scraper

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Config

type Config struct {
	Enabled bool `toml:"enabled" override:"enabled"`
	// The job name; the job label is set to this value by default.
	Name string `toml:"name" override:"name"`
	// Type of the scraper
	Type string `toml:"type" override:"type"`
	// Database this data will be associated with
	Database string `toml:"db" override:"db"`
	// RetentionPolicy this data will be associated with
	RetentionPolicy string `toml:"rp" override:"rp"`
	// The URL scheme with which to fetch metrics from targets.
	Scheme string `toml:"scheme" override:"scheme"`
	// The HTTP resource path on which to fetch metrics from targets.
	MetricsPath string `toml:"metrics-path" override:"metrics-path"`
	// A set of query parameters with which the target is scraped.
	Params url.Values `toml:"params" override:"params"`
	// How frequently to scrape the targets of this scrape config.
	ScrapeInterval toml.Duration `toml:"scrape-interval" override:"scrape-interval"`
	// The timeout for scraping targets of this config.
	ScrapeTimeout toml.Duration `toml:"scrape-timeout" override:"scrape-timeout"`
	// The HTTP basic authentication credentials for the targets.
	Username string `toml:"username" override:"username"`
	Password string `toml:"password" override:"password,redact"`

	// Path to CA file
	SSLCA string `toml:"ssl-ca" override:"ssl-ca"`
	// Path to host cert file
	SSLCert string `toml:"ssl-cert" override:"ssl-cert"`
	// Path to cert key file
	SSLKey string `toml:"ssl-key" override:"ssl-key"`
	// SSLServerName is used to verify the hostname for the targets.
	SSLServerName string `toml:"ssl-server-name" override:"ssl-server-name"`
	// Use SSL but skip chain & host verification
	InsecureSkipVerify bool `toml:"insecure-skip-verify" override:"insecure-skip-verify"`

	// The bearer token for the targets.
	BearerToken string `toml:"bearer-token" override:"bearer-token,redact"`
	// HTTP proxy server to use to connect to the targets.
	ProxyURL *url.URL `toml:"proxy-url" override:"proxy-url"`
	// DiscoverID is the id of the discoverer that generates hosts for the scraper
	DiscoverID string `toml:"discoverer-id" override:"discoverer-id"`
	// DiscoverService is the type of the discoverer that generates hosts for the scraper
	DiscoverService string `toml:"discoverer-service" override:"discoverer-service"`

	// Blacklist is a list of hosts to ignore and not scrape
	Blacklist []string `toml:"blacklist" override:"blacklist"`
}

Config is the scraper configuration

func (*Config) Init

func (c *Config) Init()

Init adds default values to the scraper Config

func (*Config) Prom

func (c *Config) Prom() *config.ScrapeConfig

Prom generates the Prometheus scrape configuration for the scraper

func (*Config) Validate

func (c *Config) Validate() error

Validate validates the configuration of the scraper
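
As a hedged sketch, the snippet below builds a Config, fills in defaults with Init, validates it, and converts it to a Prometheus scrape configuration with Prom. The import path and the field values ("node", "prometheus", "telegraf", "autogen") are illustrative assumptions, not defaults of the package.

package main

import (
	"log"

	"github.com/influxdata/kapacitor/services/scraper"
)

func main() {
	c := scraper.Config{
		Enabled:         true,
		Name:            "node",       // job label applied to scraped data (assumed value)
		Type:            "prometheus", // scraper type (assumed value)
		Database:        "telegraf",   // database the data is written to (assumed value)
		RetentionPolicy: "autogen",    // retention policy (assumed value)
	}
	c.Init() // fill in default scheme, metrics path, and scrape interval/timeout

	if err := c.Validate(); err != nil {
		log.Fatalf("invalid scraper config: %v", err)
	}

	promCfg := c.Prom() // *config.ScrapeConfig consumed by the Prometheus target manager
	_ = promCfg
}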

type Diagnostic added in v1.4.0

type Diagnostic interface {
	plog.Logger
	Log(...interface{}) error
}

Diagnostic is the Prometheus-compatible logger used by the scraper service

type Discoverer

type Discoverer interface {
	// Service returns the service type of the Discoverer
	Service() string
	// ServiceID returns the unique ID of this specific discoverer
	ServiceID() string
	// Prom creates a prometheus scrape configuration.
	// TODO: replace when reimplement TargetManager
	Prom(c *config.ScrapeConfig)
}

Discoverer represents a service that discovers hosts to scrape
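
A minimal Discoverer only needs the three methods above. The sketch below is an assumption-heavy illustration: the staticDiscoverer type is hypothetical, the Prometheus config import path may differ from the version vendored by Kapacitor, and a real discoverer would attach actual service-discovery configuration (static targets, DNS, etc.) inside Prom.

package main

import (
	"fmt"

	"github.com/influxdata/kapacitor/services/scraper"
	"github.com/prometheus/prometheus/config"
)

// staticDiscoverer is a hypothetical discoverer with a fixed service name and ID.
type staticDiscoverer struct {
	id string
}

// Service returns the service type of the discoverer.
func (d staticDiscoverer) Service() string { return "static" }

// ServiceID returns the unique ID of this specific discoverer.
func (d staticDiscoverer) ServiceID() string { return d.id }

// Prom adjusts the generated Prometheus scrape configuration. A real
// implementation would attach its discovered targets here.
func (d staticDiscoverer) Prom(c *config.ScrapeConfig) {
	c.JobName = c.JobName + "-" + d.id
}

func main() {
	var d scraper.Discoverer = staticDiscoverer{id: "static-0"}
	fmt.Println(d.Service(), d.ServiceID())
}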

type Pair

type Pair struct {
	Discoverer Discoverer
	Scraper    Config
}

Pair is the linked discovery/scraper pair

type Registry

type Registry interface {
	// Commit finishes the update to the registry configuration
	Commit() error
	// AddDiscoverer adds discoverers to the registry
	AddDiscoverer(Discoverer)
	// RemoveDiscoverer removes discoverers from the registry
	RemoveDiscoverer(Discoverer)
	// AddScrapers adds scrapers to the registry
	AddScrapers([]Config)
	// RemoveScrapers removes scrapers from the registry
	RemoveScrapers([]Config)
	// Pairs returns the linked scraper/discovery combinations
	Pairs() []Pair
}

Registry represents the combined configuration state of discoverers and scrapers
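
Since *Service provides matching Commit, AddDiscoverer, RemoveDiscoverer, AddScrapers, RemoveScrapers, and Pairs methods, it appears to satisfy Registry. The helper below is a hypothetical sketch (assumed import path) of staging changes on a Registry and applying them in one Commit.

package main

import (
	"github.com/influxdata/kapacitor/services/scraper"
)

// reconfigure stages a new discoverer and scraper configs on a Registry and
// applies them with Commit. Hypothetical helper for illustration only.
func reconfigure(r scraper.Registry, d scraper.Discoverer, cfgs []scraper.Config) error {
	r.AddDiscoverer(d)
	r.AddScrapers(cfgs)
	return r.Commit()
}

func main() {
	// A *Service created with NewService can be passed as the Registry here.
	_ = reconfigure
}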

type Service

type Service struct {
	PointsWriter interface {
		WriteKapacitorPoint(edge.PointMessage) error
	}
	// contains filtered or unexported fields
}

Service represents the scraper manager

func NewService

func NewService(c []Config, d Diagnostic) *Service

NewService creates a new scraper service
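
The sketch below wires up a Service end to end. The import paths and the nopWriter type are assumptions, and the nil Diagnostic is only a placeholder; in Kapacitor itself the Diagnostic is supplied by the diagnostic service.

package main

import (
	"log"

	"github.com/influxdata/kapacitor/edge"
	"github.com/influxdata/kapacitor/services/scraper"
)

// nopWriter is a hypothetical PointsWriter that discards scraped points.
type nopWriter struct{}

func (nopWriter) WriteKapacitorPoint(edge.PointMessage) error { return nil }

func main() {
	cfg := scraper.Config{Enabled: true, Name: "node", Type: "prometheus", Database: "telegraf"}
	cfg.Init()

	var diag scraper.Diagnostic // placeholder; provided by Kapacitor's diagnostic service in practice

	s := scraper.NewService([]scraper.Config{cfg}, diag)
	s.PointsWriter = nopWriter{} // receives the scraped samples as Kapacitor points

	if err := s.Open(); err != nil {
		log.Fatalf("open scraper service: %v", err)
	}
	defer s.Close()
}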

func (*Service) AddDiscoverer

func (s *Service) AddDiscoverer(discoverer Discoverer)

AddDiscoverer adds a discoverer to the registry

func (*Service) AddScrapers

func (s *Service) AddScrapers(scrapers []Config)

AddScrapers adds scrapers to the registry

func (*Service) Append

func (s *Service) Append(_ uint64, labels labels.Labels, timestamp int64, value float64) (uint64, error)

Append transforms Prometheus samples and inserts the data into the tasks pipeline

func (*Service) Close

func (s *Service) Close() error

Close stops the scraper service

func (*Service) Commit

func (s *Service) Commit() error

Commit applies the configuration to the scraper

func (*Service) Open

func (s *Service) Open() error

Open starts the scraper service

func (*Service) Pairs

func (s *Service) Pairs() []Pair

Pairs returns all named pairs of scrapers and discoverers from the registry. The registry must be locked.

func (*Service) RemoveDiscoverer

func (s *Service) RemoveDiscoverer(rm Discoverer)

RemoveDiscoverer removes a discoverer from the registry

func (*Service) RemoveScrapers

func (s *Service) RemoveScrapers(scrapers []Config)

RemoveScrapers removes scrapers from the registry

func (*Service) Test

func (s *Service) Test(options interface{}) error

Test tests the options for the scrapers

func (*Service) TestOptions

func (s *Service) TestOptions() interface{}

TestOptions returns options that are allowed for the Test

func (*Service) Update

func (s *Service) Update(newConfigs []interface{}) error

Update will replace all scraper configurations and apply the configuration to the target manager

type ServiceAppenderAdapter added in v1.6.0

type ServiceAppenderAdapter struct {
	Wrapped *Service
}

ServiceAppenderAdapter exposes the service's Append method while hiding the Commit method that the registry uses to commit configuration
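
A short sketch of wrapping a Service in the adapter so Prometheus code can call Append without seeing the registry's Commit; the Service value is assumed to have been created elsewhere with NewService.

package main

import (
	"github.com/influxdata/kapacitor/services/scraper"
)

func main() {
	var s *scraper.Service // assumed to be created elsewhere with scraper.NewService

	// The adapter forwards Append to the wrapped Service; its Commit and
	// Rollback are no-ops because transactions are not supported.
	adapter := &scraper.ServiceAppenderAdapter{Wrapped: s}
	_ = adapter
}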

func (*ServiceAppenderAdapter) Append added in v1.6.0

func (s *ServiceAppenderAdapter) Append(ref uint64, labels labels.Labels, timestamp int64, value float64) (uint64, error)

Append transforms Prometheus samples and inserts the data into the tasks pipeline

func (*ServiceAppenderAdapter) AppendExemplar added in v1.6.0

AppendExemplar conforms to the Appender interface

func (*ServiceAppenderAdapter) Commit added in v1.6.0

func (s *ServiceAppenderAdapter) Commit() error

Commit conforms to the Appender interface; transactions are not supported

func (*ServiceAppenderAdapter) NeedsThrottling added in v1.6.0

func (s *ServiceAppenderAdapter) NeedsThrottling() bool

NeedsThrottling conforms to the Appender interface and currently always returns false.

func (*ServiceAppenderAdapter) Rollback added in v1.6.0

func (s *ServiceAppenderAdapter) Rollback() error

Rollback conforms to the Appender interface; transactions are not supported
