Documentation ¶
Constants ¶
This section is empty.
Variables ¶
var DefaultArguments = Arguments{
	MetricsPath:      "/metrics",
	Scheme:           "http",
	HonorLabels:      false,
	HonorTimestamps:  true,
	HTTPClientConfig: component_config.DefaultHTTPClientConfig,
	ScrapeInterval:   1 * time.Minute,
	ScrapeTimeout:    10 * time.Second,
}
DefaultArguments defines the default settings for a scrape job.
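As an illustration, the hedged Go sketch below starts from DefaultArguments and overrides only the fields a caller cares about. The import paths, and the assumption that discovery.Target behaves as a plain label map, are not taken from this page.

package main

import (
	"fmt"
	"time"

	"github.com/grafana/agent/component/discovery"
	"github.com/grafana/agent/component/prometheus/scrape"
)

func main() {
	// Copy the package defaults, then override individual fields.
	args := scrape.DefaultArguments
	args.JobName = "example"
	args.ScrapeInterval = 30 * time.Second
	args.Targets = []discovery.Target{
		{"__address__": "localhost:9090"}, // assumes Target is a label map
	}

	// Untouched fields keep their defaults: "/metrics" and "http".
	fmt.Println(args.MetricsPath, args.Scheme)
}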
Functions ¶
This section is empty.
Types ¶
type Arguments ¶
type Arguments struct {
	Targets   []discovery.Target   `river:"targets,attr"`
	ForwardTo []storage.Appendable `river:"forward_to,attr"`

	// The job name to override the job label with.
	JobName string `river:"job_name,attr,optional"`
	// Indicator whether the scraped metrics should remain unmodified.
	HonorLabels bool `river:"honor_labels,attr,optional"`
	// Indicator whether the scraped timestamps should be respected.
	HonorTimestamps bool `river:"honor_timestamps,attr,optional"`
	// A set of query parameters with which the target is scraped.
	Params url.Values `river:"params,attr,optional"`
	// How frequently to scrape the targets of this scrape config.
	ScrapeInterval time.Duration `river:"scrape_interval,attr,optional"`
	// The timeout for scraping targets of this config.
	ScrapeTimeout time.Duration `river:"scrape_timeout,attr,optional"`
	// The HTTP resource path on which to fetch metrics from targets.
	MetricsPath string `river:"metrics_path,attr,optional"`
	// The URL scheme with which to fetch metrics from targets.
	Scheme string `river:"scheme,attr,optional"`
	// An uncompressed response body larger than this many bytes will cause the
	// scrape to fail. 0 means no limit.
	BodySizeLimit units.Base2Bytes `river:"body_size_limit,attr,optional"`
	// More than this many samples post metric-relabeling will cause the scrape
	// to fail.
	SampleLimit uint `river:"sample_limit,attr,optional"`
	// More than this many targets after the target relabeling will cause the
	// scrapes to fail.
	TargetLimit uint `river:"target_limit,attr,optional"`
	// More than this many labels post metric-relabeling will cause the scrape
	// to fail.
	LabelLimit uint `river:"label_limit,attr,optional"`
	// More than this label name length post metric-relabeling will cause the
	// scrape to fail.
	LabelNameLengthLimit uint `river:"label_name_length_limit,attr,optional"`
	// More than this label value length post metric-relabeling will cause the
	// scrape to fail.
	LabelValueLengthLimit uint `river:"label_value_length_limit,attr,optional"`

	HTTPClientConfig component_config.HTTPClientConfig `river:",squash"`

	// Scrape Options
	ExtraMetrics bool       `river:"extra_metrics,attr,optional"`
	Clustering   Clustering `river:"clustering,block,optional"`
}
Arguments holds values which are used to configure the prometheus.scrape component.
func (*Arguments) UnmarshalRiver ¶
UnmarshalRiver implements river.Unmarshaler.
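For context, here is a minimal sketch of the conventional river.Unmarshaler pattern as it might look inside this package, assuming the signature UnmarshalRiver(f func(interface{}) error) error. It is not the component's actual implementation, which may add validation (for example, that ScrapeTimeout does not exceed ScrapeInterval).

func (arg *Arguments) UnmarshalRiver(f func(interface{}) error) error {
	// Start from the documented defaults so omitted attributes keep them.
	*arg = DefaultArguments

	// Decode through a local alias type so the decoder does not call
	// UnmarshalRiver again and recurse forever.
	type arguments Arguments
	return f((*arguments)(arg))
}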
type Clustering ¶ added in v0.33.0
type Clustering struct {
	// TODO(@tpaschalis) Move this block to a shared place for all components
	// using clustering.
	Enabled bool `river:"enabled,attr"`
}
Clustering holds values that configure clustering-specific behavior.
type Component ¶
type Component struct {
// contains filtered or unexported fields
}
Component implements the prometheus.scrape component.
func (*Component) ClusterUpdatesRegistration ¶ added in v0.33.0
ClusterUpdatesRegistration implements component.ClusterComponent.
func (*Component) DebugInfo ¶
func (c *Component) DebugInfo() interface{}
DebugInfo implements component.DebugComponent.
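A hedged usage sketch follows. It assumes the value returned by DebugInfo can be type-asserted to ScraperStatus (an assumption based on the types documented below) and prints any targets that are not reporting as healthy; the import path is also an assumption.

package example

import (
	"fmt"

	"github.com/grafana/agent/component/prometheus/scrape"
)

func printUnhealthyTargets(c *scrape.Component) {
	info, ok := c.DebugInfo().(scrape.ScraperStatus)
	if !ok {
		return // debug info was not the expected type
	}
	for _, t := range info.TargetStatus {
		if t.Health != "up" {
			fmt.Printf("%s %s: %s\n", t.JobName, t.URL, t.LastError)
		}
	}
}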
type ScraperStatus ¶
type ScraperStatus struct {
TargetStatus []TargetStatus `river:"target,block,optional"`
}
ScraperStatus reports the status of the scraper's jobs.
type TargetStatus ¶
type TargetStatus struct {
	JobName            string            `river:"job,attr"`
	URL                string            `river:"url,attr"`
	Health             string            `river:"health,attr"`
	Labels             map[string]string `river:"labels,attr"`
	LastError          string            `river:"last_error,attr,optional"`
	LastScrape         time.Time         `river:"last_scrape,attr"`
	LastScrapeDuration time.Duration     `river:"last_scrape_duration,attr,optional"`
}
TargetStatus reports on the status of the latest scrape for a target.
func BuildTargetStatuses ¶ added in v0.33.0
func BuildTargetStatuses(targets map[string][]*scrape.Target) []TargetStatus
BuildTargetStatuses transforms the targets from a scrape manager into our internal status type for debug info.
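As a hedged usage sketch, the snippet below feeds the active targets of a Prometheus scrape manager (scrape.Manager.TargetsActive returns map[string][]*scrape.Target) into BuildTargetStatuses and prints the result. The import paths and package aliases are assumptions.

package example

import (
	"fmt"

	agentscrape "github.com/grafana/agent/component/prometheus/scrape"
	promscrape "github.com/prometheus/prometheus/scrape"
)

func report(mgr *promscrape.Manager) {
	// TargetsActive is keyed by job name, matching BuildTargetStatuses' input.
	statuses := agentscrape.BuildTargetStatuses(mgr.TargetsActive())
	for _, s := range statuses {
		fmt.Printf("%s %s health=%s last_scrape=%s\n",
			s.JobName, s.URL, s.Health, s.LastScrape)
	}
}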