promql

package
v0.37.0
Published: Jun 6, 2022 License: Apache-2.0 Imports: 38 Imported by: 0

Documentation


Constants

This section is empty.

Variables

var (
	ErrValidationAtModifierDisabled     = errors.New("@ modifier is disabled")
	ErrValidationNegativeOffsetDisabled = errors.New("negative offset is disabled")
)

var AtModifierUnsafeFunctions = map[string]struct{}{
	// Functions whose result depends on the evaluation time even when the
	// arguments are step invariant.
	"days_in_month": {}, "day_of_month": {}, "day_of_week": {}, "day_of_year": {},
	"hour": {}, "minute": {}, "month": {}, "year": {},
	"predict_linear": {}, "time": {},
	// Uses the timestamps of the passed instant vector argument.
	"timestamp": {},
}

AtModifierUnsafeFunctions are the functions whose result can vary if the evaluation time is changed while the arguments are step invariant. It also includes functions that use the timestamps of the passed instant vector argument to calculate a result, since that can also change with a change in evaluation time.
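A minimal sketch of consulting this map; the function names are ordinary PromQL functions and the check is a plain map membership test:

import (
	"fmt"

	"github.com/prometheus/prometheus/promql"
)

// reportAtModifierSafety prints, for each given PromQL function name,
// whether it is considered unsafe to use with the @ modifier.
func reportAtModifierSafety(names ...string) {
	for _, name := range names {
		_, unsafe := promql.AtModifierUnsafeFunctions[name]
		fmt.Printf("%s unsafe with @ modifier: %v\n", name, unsafe)
	}
}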

var FunctionCalls = map[string]FunctionCall{
	"abs":                funcAbs,
	"absent":             funcAbsent,
	"absent_over_time":   funcAbsentOverTime,
	"acos":               funcAcos,
	"acosh":              funcAcosh,
	"asin":               funcAsin,
	"asinh":              funcAsinh,
	"atan":               funcAtan,
	"atanh":              funcAtanh,
	"avg_over_time":      funcAvgOverTime,
	"ceil":               funcCeil,
	"changes":            funcChanges,
	"clamp":              funcClamp,
	"clamp_max":          funcClampMax,
	"clamp_min":          funcClampMin,
	"cos":                funcCos,
	"cosh":               funcCosh,
	"count_over_time":    funcCountOverTime,
	"days_in_month":      funcDaysInMonth,
	"day_of_month":       funcDayOfMonth,
	"day_of_week":        funcDayOfWeek,
	"day_of_year":        funcDayOfYear,
	"deg":                funcDeg,
	"delta":              funcDelta,
	"deriv":              funcDeriv,
	"exp":                funcExp,
	"floor":              funcFloor,
	"histogram_quantile": funcHistogramQuantile,
	"holt_winters":       funcHoltWinters,
	"hour":               funcHour,
	"idelta":             funcIdelta,
	"increase":           funcIncrease,
	"irate":              funcIrate,
	"label_replace":      funcLabelReplace,
	"label_join":         funcLabelJoin,
	"ln":                 funcLn,
	"log10":              funcLog10,
	"log2":               funcLog2,
	"last_over_time":     funcLastOverTime,
	"max_over_time":      funcMaxOverTime,
	"min_over_time":      funcMinOverTime,
	"minute":             funcMinute,
	"month":              funcMonth,
	"pi":                 funcPi,
	"predict_linear":     funcPredictLinear,
	"present_over_time":  funcPresentOverTime,
	"quantile_over_time": funcQuantileOverTime,
	"rad":                funcRad,
	"rate":               funcRate,
	"resets":             funcResets,
	"round":              funcRound,
	"scalar":             funcScalar,
	"sgn":                funcSgn,
	"sin":                funcSin,
	"sinh":               funcSinh,
	"sort":               funcSort,
	"sort_desc":          funcSortDesc,
	"sqrt":               funcSqrt,
	"stddev_over_time":   funcStddevOverTime,
	"stdvar_over_time":   funcStdvarOverTime,
	"sum_over_time":      funcSumOverTime,
	"tan":                funcTan,
	"tanh":               funcTanh,
	"time":               funcTime,
	"timestamp":          funcTimestamp,
	"vector":             funcVector,
	"year":               funcYear,
}

FunctionCalls maps the name of every function supported by PromQL to its implementation.

Functions

func NewOriginContext

func NewOriginContext(ctx context.Context, data map[string]interface{}) context.Context

NewOriginContext returns a new context with data about the origin attached.
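A hedged illustration of attaching origin data; the helper name and map keys below are invented for the example and are not part of the package:

import (
	"context"

	"github.com/prometheus/prometheus/promql"
)

// attachOrigin is a hypothetical helper that annotates a context with
// information about where a query came from, e.g. for later logging.
func attachOrigin(ctx context.Context, clientIP, path string) context.Context {
	return promql.NewOriginContext(ctx, map[string]interface{}{
		"clientIP": clientIP,
		"path":     path,
	})
}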

func PreprocessExpr

func PreprocessExpr(expr parser.Expr, start, end time.Time) parser.Expr

PreprocessExpr wraps all possible step invariant parts of the given expression with StepInvariantExpr. It also resolves the preprocessors.
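A small sketch combining this with the parser package, assuming parser.ParseExpr as the usual entry point for turning a query string into a parser.Expr:

import (
	"time"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
)

// wrapStepInvariant parses a query string and wraps its step-invariant
// parts for the given evaluation window.
func wrapStepInvariant(qs string, start, end time.Time) (parser.Expr, error) {
	expr, err := parser.ParseExpr(qs)
	if err != nil {
		return nil, err
	}
	return promql.PreprocessExpr(expr, start, end), nil
}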

Types

type ActiveQueryTracker

type ActiveQueryTracker struct {
	// contains filtered or unexported fields
}

func NewActiveQueryTracker

func NewActiveQueryTracker(localStoragePath string, maxConcurrent int, logger log.Logger) *ActiveQueryTracker

func (ActiveQueryTracker) Delete

func (tracker ActiveQueryTracker) Delete(insertIndex int)

func (ActiveQueryTracker) GetMaxConcurrent

func (tracker ActiveQueryTracker) GetMaxConcurrent() int

func (ActiveQueryTracker) Insert

func (tracker ActiveQueryTracker) Insert(ctx context.Context, query string) (int, error)

type Engine

type Engine struct {
	// contains filtered or unexported fields
}

Engine handles the lifetime of queries from beginning to end. It is connected to a querier.

func NewEngine

func NewEngine(opts EngineOpts) *Engine

NewEngine returns a new engine.

func (*Engine) NewInstantQuery

func (ng *Engine) NewInstantQuery(q storage.Queryable, opts *QueryOpts, qs string, ts time.Time) (Query, error)

NewInstantQuery returns an evaluation query for the given expression at the given time.

func (*Engine) NewRangeQuery

func (ng *Engine) NewRangeQuery(q storage.Queryable, opts *QueryOpts, qs string, start, end time.Time, interval time.Duration) (Query, error)

NewRangeQuery returns an evaluation query for the given time range and with the resolution set by the interval.
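A sketch of both constructors in use; queryable stands in for whatever storage.Queryable the engine should read from, and the query strings are arbitrary examples:

import (
	"context"
	"time"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/storage"
)

// runQueries creates, executes, and closes an instant query and a range query.
func runQueries(ctx context.Context, ng *promql.Engine, queryable storage.Queryable) error {
	now := time.Now()

	iq, err := ng.NewInstantQuery(queryable, &promql.QueryOpts{}, `sum(rate(http_requests_total[5m]))`, now)
	if err != nil {
		return err
	}
	defer iq.Close()
	if res := iq.Exec(ctx); res.Err != nil {
		return res.Err
	}

	rq, err := ng.NewRangeQuery(queryable, &promql.QueryOpts{}, `up`, now.Add(-time.Hour), now, 15*time.Second)
	if err != nil {
		return err
	}
	defer rq.Close()
	return rq.Exec(ctx).Err
}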

func (*Engine) SetQueryLogger

func (ng *Engine) SetQueryLogger(l QueryLogger)

SetQueryLogger sets the query logger.

type EngineOpts

type EngineOpts struct {
	Logger             log.Logger
	Reg                prometheus.Registerer
	MaxSamples         int
	Timeout            time.Duration
	ActiveQueryTracker QueryTracker
	// LookbackDelta determines the time since the last sample after which a time
	// series is considered stale.
	LookbackDelta time.Duration

	// NoStepSubqueryIntervalFn is the default evaluation interval of
	// a subquery in milliseconds if no step in range vector was specified `[30m:<step>]`.
	NoStepSubqueryIntervalFn func(rangeMillis int64) int64

	// EnableAtModifier if true enables @ modifier. Disabled otherwise. This
	// is supposed to be enabled for regular PromQL (as of Prometheus v2.33)
	// but the option to disable it is still provided here for those using
	// the Engine outside of Prometheus.
	EnableAtModifier bool

	// EnableNegativeOffset if true enables negative (-) offset
	// values. Disabled otherwise. This is supposed to be enabled for
	// regular PromQL (as of Prometheus v2.33) but the option to disable it
	// is still provided here for those using the Engine outside of
	// Prometheus.
	EnableNegativeOffset bool

	// EnablePerStepStats if true allows for per-step stats to be computed on request. Disabled otherwise.
	EnablePerStepStats bool
}

EngineOpts contains configuration options used when creating a new Engine.
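A minimal sketch of constructing an Engine from these options; the limits, timeout, lookback delta, and data path are arbitrary illustrative values, not recommendations:

import (
	"time"

	"github.com/go-kit/log"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/prometheus/promql"
)

// newEngine builds an Engine with illustrative settings.
func newEngine() *promql.Engine {
	logger := log.NewNopLogger()
	return promql.NewEngine(promql.EngineOpts{
		Logger:             logger,
		Reg:                prometheus.NewRegistry(),
		MaxSamples:         50000000,
		Timeout:            2 * time.Minute,
		ActiveQueryTracker: promql.NewActiveQueryTracker("./data", 20, logger),
		LookbackDelta:      5 * time.Minute,
		// Subqueries without an explicit step fall back to this interval.
		NoStepSubqueryIntervalFn: func(rangeMillis int64) int64 {
			return time.Minute.Milliseconds()
		},
		EnableAtModifier:     true,
		EnableNegativeOffset: true,
	})
}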

type Entry

type Entry struct {
	Query     string `json:"query"`
	Timestamp int64  `json:"timestamp_sec"`
}

type ErrQueryCanceled

type ErrQueryCanceled string

ErrQueryCanceled is returned if a query was canceled during processing.

func (ErrQueryCanceled) Error

func (e ErrQueryCanceled) Error() string

type ErrQueryTimeout

type ErrQueryTimeout string

ErrQueryTimeout is returned if a query timed out during processing.

func (ErrQueryTimeout) Error

func (e ErrQueryTimeout) Error() string

type ErrStorage

type ErrStorage struct{ Err error }

ErrStorage is returned if an error was encountered in the storage layer during query handling.

func (ErrStorage) Error

func (e ErrStorage) Error() string

type ErrTooManySamples

type ErrTooManySamples string

ErrTooManySamples is returned if a query would load more than the maximum allowed samples into memory.

func (ErrTooManySamples) Error

func (e ErrTooManySamples) Error() string

type EvalNodeHelper

type EvalNodeHelper struct {
	// Evaluation timestamp.
	Ts int64
	// Vector that can be used for output.
	Out Vector

	// Caches.
	// DropMetricName and label_*.
	Dmn map[uint64]labels.Labels
	// contains filtered or unexported fields
}

EvalNodeHelper stores extra information and caches for evaluating a single node across steps.

func (*EvalNodeHelper) DropMetricName

func (enh *EvalNodeHelper) DropMetricName(l labels.Labels) labels.Labels

DropMetricName is a cached version of the package-level dropMetricName helper, memoizing results in the Dmn map.

type EvalSeriesHelper

type EvalSeriesHelper struct {
	// contains filtered or unexported fields
}

EvalSeriesHelper stores extra information about a series.

type FunctionCall

type FunctionCall func(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector

FunctionCall is the type of a PromQL function implementation.

vals is a list of the evaluated arguments for the function call.

For range vectors it will be a Matrix with one series, for instant vectors a Vector, for scalars a Vector with one series whose value is the scalar value, and nil for strings.

args are the original arguments to the function, where you can access matrixSelectors, vectorSelectors, and StringLiterals.

enh.Out is a pre-allocated empty vector that you may use to accumulate output before returning it. The vectors in vals should not be returned.

Range vector functions need only return a vector with the right value; the metric and timestamp are not needed.

Instant vector functions need only return a vector with the right values and metrics; the timestamp is not needed.

Scalar results should be returned as the value of a sample in a Vector.
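A sketch of what an instant-vector function of this type looks like, modelled loosely on the built-in abs; it is not the package's own implementation, just an illustration of the calling convention described above:

import (
	"math"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
)

// absSketch mirrors the shape of an instant-vector function: it reads the
// evaluated argument from vals, drops the metric name, and appends results
// to enh.Out instead of returning the input vector.
func absSketch(vals []parser.Value, args parser.Expressions, enh *promql.EvalNodeHelper) promql.Vector {
	vec := vals[0].(promql.Vector)
	for _, s := range vec {
		enh.Out = append(enh.Out, promql.Sample{
			Metric: enh.DropMetricName(s.Metric),
			Point:  promql.Point{V: math.Abs(s.V)},
		})
	}
	return enh.Out
}

// Compile-time check that the sketch matches the FunctionCall type.
var _ promql.FunctionCall = absSketch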

type LazyLoader

type LazyLoader struct {
	testutil.T

	SubqueryInterval time.Duration
	// contains filtered or unexported fields
}

LazyLoader lazily loads samples into storage. This is specifically implemented for unit testing of rules.

func NewLazyLoader

func NewLazyLoader(t testutil.T, input string, opts LazyLoaderOpts) (*LazyLoader, error)

NewLazyLoader returns an initialized empty LazyLoader.

func (*LazyLoader) Close

func (ll *LazyLoader) Close()

Close closes resources associated with the LazyLoader.

func (*LazyLoader) Context

func (ll *LazyLoader) Context() context.Context

Context returns the LazyLoader's context.

func (*LazyLoader) QueryEngine

func (ll *LazyLoader) QueryEngine() *Engine

QueryEngine returns the LazyLoader's query engine.

func (*LazyLoader) Queryable

func (ll *LazyLoader) Queryable() storage.Queryable

Queryable allows querying the LazyLoader's data. Note: only samples up to the maximum timestamp used in `WithSamplesTill` can be queried.

func (*LazyLoader) Storage

func (ll *LazyLoader) Storage() storage.Storage

Storage returns the LazyLoader's storage.

func (*LazyLoader) WithSamplesTill

func (ll *LazyLoader) WithSamplesTill(ts time.Time, fn func(error))

WithSamplesTill loads samples up to the given timestamp and then executes the given function.

type LazyLoaderOpts

type LazyLoaderOpts struct {
	// Both of these must be set to true for regular PromQL (as of
	// Prometheus v2.33). They can still be disabled here for legacy and
	// other uses.
	EnableAtModifier, EnableNegativeOffset bool
}

LazyLoaderOpts are options for the lazy loader.
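A sketch of LazyLoader in a unit test; the load block uses the package's test script notation, and the metric name and timestamps are invented for the example:

import (
	"testing"
	"time"

	"github.com/prometheus/prometheus/promql"
)

// TestLazyLoaderSketch ingests a synthetic series lazily and queries it.
func TestLazyLoaderSketch(t *testing.T) {
	ll, err := promql.NewLazyLoader(t, `
load 10s
  some_metric{job="demo"} 0+1x100
`, promql.LazyLoaderOpts{EnableAtModifier: true, EnableNegativeOffset: true})
	if err != nil {
		t.Fatal(err)
	}
	defer ll.Close()

	// Only samples up to this timestamp are loaded into storage.
	ts := time.Unix(300, 0)
	ll.WithSamplesTill(ts, func(loadErr error) {
		if loadErr != nil {
			t.Fatal(loadErr)
		}
		q, err := ll.QueryEngine().NewInstantQuery(ll.Queryable(), &promql.QueryOpts{}, `some_metric`, ts)
		if err != nil {
			t.Fatal(err)
		}
		defer q.Close()
		if res := q.Exec(ll.Context()); res.Err != nil {
			t.Fatal(res.Err)
		}
	})
}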

type Matrix

type Matrix []Series

Matrix is a slice of Series that implements sort.Interface and has a String method.

func (Matrix) ContainsSameLabelset

func (m Matrix) ContainsSameLabelset() bool

ContainsSameLabelset checks if a matrix has samples with the same labelset. Such a behavior is semantically undefined. https://github.com/prometheus/prometheus/issues/4562

func (Matrix) Len

func (m Matrix) Len() int

func (Matrix) Less

func (m Matrix) Less(i, j int) bool

func (Matrix) String

func (m Matrix) String() string

func (Matrix) Swap

func (m Matrix) Swap(i, j int)

func (Matrix) TotalSamples

func (m Matrix) TotalSamples() int

TotalSamples returns the total number of samples in the series within a matrix.
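Since Matrix satisfies sort.Interface, it can be sorted with the standard library and then, for instance, report its sample count; a tiny sketch:

import (
	"sort"

	"github.com/prometheus/prometheus/promql"
)

// sortAndCount sorts a Matrix in place by series labels and returns the
// total number of points it holds.
func sortAndCount(m promql.Matrix) int {
	sort.Sort(m)
	return m.TotalSamples()
}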

func (Matrix) Type

func (Matrix) Type() parser.ValueType

type Point

type Point struct {
	T int64
	V float64
}

Point represents a single data point for a given timestamp.

func (Point) MarshalJSON

func (p Point) MarshalJSON() ([]byte, error)

MarshalJSON implements json.Marshaler.

JSON marshaling is only needed for the HTTP API. Since Point is such a frequently marshaled type, it gets an optimized treatment directly in web/api/v1/api.go. Therefore, this method is unused within Prometheus. It is still provided here as convenience for debugging and for other users of this code. Also note that the different marshaling implementations might lead to slightly different results in terms of formatting and rounding of the timestamp.

func (Point) String

func (p Point) String() string

type Query

type Query interface {
	// Exec processes the query. Can only be called once.
	Exec(ctx context.Context) *Result
	// Close recovers memory used by the query result.
	Close()
	// Statement returns the parsed statement of the query.
	Statement() parser.Statement
	// Stats returns statistics about the lifetime of the query.
	Stats() *stats.Statistics
	// Cancel signals that a running query execution should be aborted.
	Cancel()
	// String returns the original query string.
	String() string
}

A Query is derived from a raw query string and can be run against the engine it is associated with.

type QueryLogger

type QueryLogger interface {
	Log(...interface{}) error
	Close() error
}

QueryLogger is an interface that can be used to log all the queries executed by the engine.
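A minimal sketch of a QueryLogger that writes each log call to stderr; the type name is invented for the example:

import (
	"fmt"
	"os"

	"github.com/prometheus/prometheus/promql"
)

// stderrQueryLogger writes each log call as a single line to stderr.
type stderrQueryLogger struct{}

func (stderrQueryLogger) Log(kv ...interface{}) error {
	_, err := fmt.Fprintln(os.Stderr, kv...)
	return err
}

func (stderrQueryLogger) Close() error { return nil }

// Compile-time check that the type satisfies the interface.
var _ promql.QueryLogger = stderrQueryLogger{}

It could then be installed on an engine with ng.SetQueryLogger(stderrQueryLogger{}).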

type QueryOpts

type QueryOpts struct {
	// Enables recording per-step statistics if the engine has it enabled as well. Disabled by default.
	EnablePerStepStats bool
}

type QueryOrigin

type QueryOrigin struct{}

type QueryTracker

type QueryTracker interface {
	// GetMaxConcurrent returns maximum number of concurrent queries that are allowed by this tracker.
	GetMaxConcurrent() int

	// Insert inserts query into query tracker. This call must block if maximum number of queries is already running.
	// If Insert doesn't return error then returned integer value should be used in subsequent Delete call.
	// Insert should return error if context is finished before query can proceed, and integer value returned in this case should be ignored by caller.
	Insert(ctx context.Context, query string) (int, error)

	// Delete removes query from activity tracker. InsertIndex is value returned by Insert call.
	Delete(insertIndex int)
}

QueryTracker provides access to two features:

1) Tracking of active queries. If the PromQL engine crashes while executing a query, that query should still be present in the tracker on restart and can hence be logged. After this logging on restart, the tracker is emptied.

2) Enforcement of the maximum number of concurrent queries.
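A hedged sketch covering only the second responsibility: a tracker that limits concurrency with a buffered channel but, unlike the package's ActiveQueryTracker, does not persist queries and therefore cannot log them after a crash. The type and constructor names are invented for the example:

import (
	"context"

	"github.com/prometheus/prometheus/promql"
)

// semTracker enforces a concurrency limit with a semaphore channel.
type semTracker struct {
	sem chan struct{}
}

func newSemTracker(maxConcurrent int) *semTracker {
	return &semTracker{sem: make(chan struct{}, maxConcurrent)}
}

func (t *semTracker) GetMaxConcurrent() int { return cap(t.sem) }

// Insert blocks until a slot is free or the context is done.
func (t *semTracker) Insert(ctx context.Context, query string) (int, error) {
	select {
	case t.sem <- struct{}{}:
		return 0, nil // The index is unused by this simple implementation.
	case <-ctx.Done():
		return 0, ctx.Err()
	}
}

// Delete releases the slot acquired by Insert.
func (t *semTracker) Delete(insertIndex int) { <-t.sem }

var _ promql.QueryTracker = (*semTracker)(nil)

The package's own ActiveQueryTracker satisfies the same interface and additionally records queries on disk under the configured localStoragePath so they can be reported after a crash.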

type Result

type Result struct {
	Err      error
	Value    parser.Value
	Warnings storage.Warnings
}

Result holds the resulting value of an execution or an error if any occurred.

func (*Result) Matrix

func (r *Result) Matrix() (Matrix, error)

Matrix returns a Matrix. An error is returned if the result was an error or the result value is not a Matrix.

func (*Result) Scalar

func (r *Result) Scalar() (Scalar, error)

Scalar returns a Scalar value. An error is returned if the result was an error or the result value is not a Scalar.

func (*Result) String

func (r *Result) String() string

func (*Result) Vector

func (r *Result) Vector() (Vector, error)

Vector returns a Vector if the result value is one. An error is returned if the result was an error or the result value is not a Vector.
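A sketch of consuming a Result, assuming the query is known to produce an instant vector:

import (
	"context"
	"fmt"

	"github.com/prometheus/prometheus/promql"
)

// printVector executes a query and prints the resulting samples.
func printVector(ctx context.Context, q promql.Query) error {
	res := q.Exec(ctx)
	if res.Err != nil {
		return res.Err
	}
	vec, err := res.Vector()
	if err != nil {
		return err
	}
	for _, s := range vec {
		fmt.Printf("%s => %g @ %d\n", s.Metric, s.V, s.T)
	}
	return nil
}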

type Sample

type Sample struct {
	Point

	Metric labels.Labels
}

Sample is a single sample belonging to a metric.

func (Sample) MarshalJSON

func (s Sample) MarshalJSON() ([]byte, error)

func (Sample) String

func (s Sample) String() string

type Scalar

type Scalar struct {
	T int64
	V float64
}

Scalar is a data point that's explicitly not associated with a metric.

func (Scalar) MarshalJSON

func (s Scalar) MarshalJSON() ([]byte, error)

func (Scalar) String

func (s Scalar) String() string

func (Scalar) Type

func (Scalar) Type() parser.ValueType

type Series

type Series struct {
	Metric labels.Labels `json:"metric"`
	Points []Point       `json:"values"`
}

Series is a stream of data points belonging to a metric.

func (Series) String

func (s Series) String() string

type StorageSeries

type StorageSeries struct {
	// contains filtered or unexported fields
}

StorageSeries simulates promql.Series as storage.Series.

func NewStorageSeries

func NewStorageSeries(series Series) *StorageSeries

NewStorageSeries returns a StorageSeries from a Series.

func (*StorageSeries) Iterator

func (ss *StorageSeries) Iterator() chunkenc.Iterator

Iterator returns a new iterator of the data of the series.
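A small sketch wrapping an in-memory Series and walking its samples; the metric labels and values are invented for the example:

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

// dumpSeries wraps a promql.Series as a storage.Series and iterates its points.
func dumpSeries() error {
	ss := promql.NewStorageSeries(promql.Series{
		Metric: labels.FromStrings("__name__", "demo_metric", "job", "demo"),
		Points: []promql.Point{{T: 0, V: 1}, {T: 15000, V: 2}},
	})

	it := ss.Iterator()
	for it.Next() {
		t, v := it.At()
		fmt.Printf("%d => %g\n", t, v)
	}
	return it.Err()
}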

func (*StorageSeries) Labels

func (ss *StorageSeries) Labels() labels.Labels

type String

type String struct {
	T int64
	V string
}

String represents a string value.

func (String) MarshalJSON

func (s String) MarshalJSON() ([]byte, error)

func (String) String

func (s String) String() string

func (String) Type

func (String) Type() parser.ValueType

type Test

type Test struct {
	testutil.T
	// contains filtered or unexported fields
}

Test is a sequence of read and write commands that are run against a test storage.

func NewTest

func NewTest(t testutil.T, input string) (*Test, error)

NewTest returns an initialized empty Test.
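A sketch of the test script format as used with NewTest; the series and expected value are invented for the example (0+10x10 generates 0, 10, ..., 100 at 5-minute steps, so the sum at 50m is 100):

import (
	"testing"

	"github.com/prometheus/prometheus/promql"
)

// TestScriptSketch loads a series and checks an instant evaluation against it.
func TestScriptSketch(t *testing.T) {
	test, err := promql.NewTest(t, `
load 5m
  http_requests_total{job="api", instance="0"} 0+10x10

eval instant at 50m sum(http_requests_total)
  {} 100
`)
	if err != nil {
		t.Fatal(err)
	}
	defer test.Close()

	if err := test.Run(); err != nil {
		t.Fatal(err)
	}
}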

func (*Test) Close

func (t *Test) Close()

Close closes resources associated with the Test.

func (*Test) Context

func (t *Test) Context() context.Context

Context returns the test's context.

func (*Test) ExemplarQueryable

func (t *Test) ExemplarQueryable() storage.ExemplarQueryable

func (*Test) ExemplarStorage

func (t *Test) ExemplarStorage() storage.ExemplarStorage

ExemplarStorage returns the test's exemplar storage.

func (*Test) QueryEngine

func (t *Test) QueryEngine() *Engine

QueryEngine returns the test's query engine.

func (*Test) Queryable

func (t *Test) Queryable() storage.Queryable

Queryable allows querying the test data.

func (*Test) Run

func (t *Test) Run() error

Run executes the command sequence of the test. Until the maximum error number is reached, evaluation errors do not terminate execution.

func (*Test) Storage

func (t *Test) Storage() storage.Storage

Storage returns the test's storage.

func (*Test) TSDB

func (t *Test) TSDB() *tsdb.DB

TSDB returns the test's TSDB.

type Vector

type Vector []Sample

Vector is basically only an alias for model.Samples, but the contract is that in a Vector, all Samples have the same timestamp.

func (Vector) ContainsSameLabelset

func (vec Vector) ContainsSameLabelset() bool

ContainsSameLabelset checks if a vector has samples with the same labelset. Such a behavior is semantically undefined. https://github.com/prometheus/prometheus/issues/4562

func (Vector) String

func (vec Vector) String() string

func (Vector) Type

func (Vector) Type() parser.ValueType

