Documentation ¶
Index ¶
- Constants
- Variables
- func EvaluatorUnsupportedType(expr Expr, ev Evaluator) error
- func IsComparisonOperator(op string) bool
- func IsLogicalBinOp(op string) bool
- func ParseLabels(lbs string) (labels.Labels, error)
- func ParseMatchers(input string) ([]*labels.Matcher, error)
- func PopulateMatrixFromScalar(data promql.Scalar, params Params) promql.Matrix
- func QueryType(query string) (string, error)
- func RecordMetrics(ctx context.Context, p Params, status string, stats stats.Result, ...)
- func ResultIterator(res Result, params Params) (iter.EntryIterator, error)
- type BinOpOptions
- type ConcatLogSelectorExpr
- type ConcatSampleExpr
- type DefaultEvaluator
- type DownstreamEvaluator
- func (ev DownstreamEvaluator) Downstream(ctx context.Context, queries []DownstreamQuery) ([]Result, error)
- func (ev *DownstreamEvaluator) Iterator(ctx context.Context, expr LogSelectorExpr, params Params) (iter.EntryIterator, error)
- func (ev *DownstreamEvaluator) StepEvaluator(ctx context.Context, nextEv SampleEvaluator, expr SampleExpr, params Params) (StepEvaluator, error)
- type DownstreamLogSelectorExpr
- type DownstreamQuery
- type DownstreamSampleExpr
- type Downstreamable
- type Downstreamer
- type Engine
- type EngineOpts
- type EntryEvaluator
- type Evaluator
- type Expr
- type Limits
- type LiteralParams
- func (p LiteralParams) Copy() LiteralParams
- func (p LiteralParams) Direction() logproto.Direction
- func (p LiteralParams) End() time.Time
- func (p LiteralParams) Interval() time.Duration
- func (p LiteralParams) Limit() uint32
- func (p LiteralParams) Query() string
- func (p LiteralParams) Shards() []string
- func (p LiteralParams) Start() time.Time
- func (p LiteralParams) Step() time.Duration
- type LogSelectorExpr
- type MatrixStepper
- type MockDownstreamer
- type MockQuerier
- type MultiStageExpr
- type Params
- type ParseError
- type Pipeline
- type PipelineExpr
- type Querier
- type Query
- type QueryParams
- type QueryRangeType
- type RangeVectorAggregator
- type RangeVectorIterator
- type Result
- type SampleEvaluator
- type SampleEvaluatorFunc
- type SampleExpr
- type SampleExtractor
- type SelectLogParams
- type SelectSampleParams
- type ShardMapper
- type ShardedEngine
- type ShardingMetrics
- type Shards
- type StageExpr
- type StepEvaluator
- type Streams
Constants ¶
const (
    // vector ops
    OpTypeSum     = "sum"
    OpTypeAvg     = "avg"
    OpTypeMax     = "max"
    OpTypeMin     = "min"
    OpTypeCount   = "count"
    OpTypeStddev  = "stddev"
    OpTypeStdvar  = "stdvar"
    OpTypeBottomK = "bottomk"
    OpTypeTopK    = "topk"

    // range vector ops
    OpRangeTypeCount     = "count_over_time"
    OpRangeTypeRate      = "rate"
    OpRangeTypeBytes     = "bytes_over_time"
    OpRangeTypeBytesRate = "bytes_rate"
    OpRangeTypeAvg       = "avg_over_time"
    OpRangeTypeSum       = "sum_over_time"
    OpRangeTypeMin       = "min_over_time"
    OpRangeTypeMax       = "max_over_time"
    OpRangeTypeStdvar    = "stdvar_over_time"
    OpRangeTypeStddev    = "stddev_over_time"
    OpRangeTypeQuantile  = "quantile_over_time"
    OpRangeTypeAbsent    = "absent_over_time"

    // binops - logical/set
    OpTypeOr     = "or"
    OpTypeAnd    = "and"
    OpTypeUnless = "unless"

    // binops - operations
    OpTypeAdd = "+"
    OpTypeSub = "-"
    OpTypeMul = "*"
    OpTypeDiv = "/"
    OpTypeMod = "%"
    OpTypePow = "^"

    // binops - comparison
    OpTypeCmpEQ = "=="
    OpTypeNEQ   = "!="
    OpTypeGT    = ">"
    OpTypeGTE   = ">="
    OpTypeLT    = "<"
    OpTypeLTE   = "<="

    // parsers
    OpParserTypeJSON   = "json"
    OpParserTypeLogfmt = "logfmt"
    OpParserTypeRegexp = "regexp"
    OpParserTypeUnpack = "unpack"

    OpFmtLine  = "line_format"
    OpFmtLabel = "label_format"

    OpPipe   = "|"
    OpUnwrap = "unwrap"

    // conversion Op
    OpConvBytes           = "bytes"
    OpConvDuration        = "duration"
    OpConvDurationSeconds = "duration_seconds"

    OpLabelReplace = "label_replace"
)
const (
    QueryTypeMetric  = "metric"
    QueryTypeFilter  = "filter"
    QueryTypeLimited = "limited"
)
const (
    StreamsKey = "streams"
    MetricsKey = "metrics"
    SuccessKey = "success"
    FailureKey = "failure"
    NoopKey    = "noop"
)
keys used in metrics
const ABSENT_OVER_TIME = 57400
const ADD = 57412
const AND = 57404
const AVG = 57372
const AVG_OVER_TIME = 57390
const BOOL = 57382
const BOTTOMK = 57378
const BY = 57367
const BYTES = 57346
const BYTES_CONV = 57397
const BYTES_OVER_TIME = 57380
const BYTES_RATE = 57381
const CLOSE_BRACE = 57358
const CLOSE_BRACKET = 57360
const CLOSE_PARENTHESIS = 57366
const CMP_EQ = 57406
const COMMA = 57361
const COUNT = 57375
const COUNT_OVER_TIME = 57369
const DIV = 57415
const DOT = 57362
const DURATION = 57350
const DURATION_CONV = 57398
const DURATION_SECONDS_CONV = 57399
const EQ = 57354
const GT = 57410
const GTE = 57411
const IDENTIFIER = 57347
const JSON = 57383
const LABELS = 57353
const LABEL_FMT = 57388
const LABEL_REPLACE = 57401
const LINE_FMT = 57387
const LOGFMT = 57385
const LT = 57408
const LTE = 57409
const MATCHERS = 57352
const MAX = 57373
const MAX_OVER_TIME = 57393
const MIN = 57374
const MIN_OVER_TIME = 57392
const MOD = 57416
const MUL = 57414
const NEQ = 57407
const NRE = 57356
const NUMBER = 57349
const OPEN_BRACE = 57357
const OPEN_BRACKET = 57359
const OPEN_PARENTHESIS = 57365
const OR = 57403
const PIPE = 57386
const PIPE_EXACT = 57364
const PIPE_MATCH = 57363
const POW = 57417
const QUANTILE_OVER_TIME = 57396
const RANGE = 57351
const RATE = 57370
const RE = 57355
const REGEXP = 57384
const STDDEV = 57376
const STDDEV_OVER_TIME = 57395
const STDVAR = 57377
const STDVAR_OVER_TIME = 57394
const STRING = 57348
const SUB = 57413
const SUM = 57371
const SUM_OVER_TIME = 57391
const TOPK = 57379
const UNLESS = 57405
const UNPACK = 57402
const UNWRAP = 57389
const ValueTypeStreams = "streams"
ValueTypeStreams promql.ValueType for log streams
const WITHOUT = 57368
Variables ¶
var (
    ErrParse    = errors.New("failed to parse the log query")
    ErrPipeline = errors.New("failed execute pipeline")
    ErrLimit    = errors.New("limit reached while evaluating the query")
)
These errors are useful for comparing errors returned by the engine, e.g. errors.Is(err, logql.ErrParse) tells you whether the failure is an AST parsing error.
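A minimal sketch of that pattern, assuming this package is imported as logql:

    func isParseFailure(err error) bool {
        // true when the failure came from LogQL parsing rather than execution
        return errors.Is(err, logql.ErrParse)
    }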
var (
NoLimits = &fakeLimits{maxSeries: math.MaxInt32}
)
Functions ¶
func EvaluatorUnsupportedType ¶
EvaluatorUnsupportedType is a helper for signaling that an evaluator does not support an Expr type
func IsComparisonOperator ¶
func IsLogicalBinOp ¶
IsLogicalBinOp tests whether an operation is a logical/set binary operation
func ParseLabels ¶
ParseLabels parses labels from a string using the LogQL parser.
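A hedged usage sketch (the label string is illustrative):

    func parseLabelsExample() {
        lbs, err := logql.ParseLabels(`{app="foo", env="prod"}`)
        if err != nil {
            return // the input was not a valid label set
        }
        fmt.Println(lbs)
    }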
func ParseMatchers ¶
ParseMatchers parses a string and returns label matchers; if the expression contains anything other than matchers, it returns an error.
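A hedged usage sketch (the matcher string is illustrative); anything beyond plain matchers, such as line filters or stages, causes an error:

    func parseMatchersExample() {
        matchers, err := logql.ParseMatchers(`{app="foo", env!="dev"}`)
        if err != nil {
            return // the input contained more than label matchers
        }
        for _, m := range matchers {
            fmt.Println(m.Name, m.Type, m.Value)
        }
    }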
func RecordMetrics ¶
func ResultIterator ¶
func ResultIterator(res Result, params Params) (iter.EntryIterator, error)
ResultIterator coerces a downstream streams result into an iter.EntryIterator
Types ¶
type BinOpOptions ¶
type BinOpOptions struct {
ReturnBool bool
}
type ConcatLogSelectorExpr ¶
type ConcatLogSelectorExpr struct {
    DownstreamLogSelectorExpr
    // contains filtered or unexported fields
}
ConcatLogSelectorExpr is an expr for concatenating multiple LogSelectorExpr
func (ConcatLogSelectorExpr) String ¶
func (c ConcatLogSelectorExpr) String() string
type ConcatSampleExpr ¶
type ConcatSampleExpr struct {
    DownstreamSampleExpr
    // contains filtered or unexported fields
}
ConcatSampleExpr is an expr for concatenating multiple SampleExprs. Contract: the embedded SampleExprs within a linked list of ConcatSampleExprs must be of the same structure. This makes special implementations of SampleExpr.Associative() unnecessary.
func (ConcatSampleExpr) String ¶
func (c ConcatSampleExpr) String() string
type DefaultEvaluator ¶
type DefaultEvaluator struct {
// contains filtered or unexported fields
}
func NewDefaultEvaluator ¶
func NewDefaultEvaluator(querier Querier, maxLookBackPeriod time.Duration) *DefaultEvaluator
NewDefaultEvaluator constructs a DefaultEvaluator
func (*DefaultEvaluator) Iterator ¶
func (ev *DefaultEvaluator) Iterator(ctx context.Context, expr LogSelectorExpr, q Params) (iter.EntryIterator, error)
func (*DefaultEvaluator) StepEvaluator ¶
func (ev *DefaultEvaluator) StepEvaluator(
    ctx context.Context,
    nextEv SampleEvaluator,
    expr SampleExpr,
    q Params,
) (StepEvaluator, error)
type DownstreamEvaluator ¶
type DownstreamEvaluator struct {
    Downstreamer
    // contains filtered or unexported fields
}
DownstreamEvaluator is an evaluator which handles shard aware AST nodes
func NewDownstreamEvaluator ¶
func NewDownstreamEvaluator(downstreamer Downstreamer) *DownstreamEvaluator
func (DownstreamEvaluator) Downstream ¶
func (ev DownstreamEvaluator) Downstream(ctx context.Context, queries []DownstreamQuery) ([]Result, error)
Downstream runs queries and collects stats from the embedded Downstreamer
func (*DownstreamEvaluator) Iterator ¶
func (ev *DownstreamEvaluator) Iterator(
    ctx context.Context,
    expr LogSelectorExpr,
    params Params,
) (iter.EntryIterator, error)
Iterator returns the iter.EntryIterator for a given LogSelectorExpr
func (*DownstreamEvaluator) StepEvaluator ¶
func (ev *DownstreamEvaluator) StepEvaluator(
    ctx context.Context,
    nextEv SampleEvaluator,
    expr SampleExpr,
    params Params,
) (StepEvaluator, error)
StepEvaluator returns a StepEvaluator for a given SampleExpr
type DownstreamLogSelectorExpr ¶
type DownstreamLogSelectorExpr struct {
    LogSelectorExpr
    // contains filtered or unexported fields
}
DownstreamLogSelectorExpr is a LogSelectorExpr which signals downstream computation
func (DownstreamLogSelectorExpr) String ¶
func (d DownstreamLogSelectorExpr) String() string
type DownstreamQuery ¶
type DownstreamSampleExpr ¶
type DownstreamSampleExpr struct {
    SampleExpr
    // contains filtered or unexported fields
}
DownstreamSampleExpr is a SampleExpr which signals downstream computation
func (DownstreamSampleExpr) String ¶
func (d DownstreamSampleExpr) String() string
type Downstreamable ¶
type Downstreamable interface {
Downstreamer() Downstreamer
}
type Downstreamer ¶
type Downstreamer interface {
Downstream(context.Context, []DownstreamQuery) ([]Result, error)
}
Downstreamer is an interface for deferring responsibility for query execution. It is decoupled from, but consumed by, a DownstreamEvaluator to dispatch ASTs.
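A stub sketch of the contract (names are illustrative): a Downstreamer that rejects every query, shown only to show how an implementation plugs into NewDownstreamEvaluator; a real implementation executes the queries and returns their Results.

    type rejectingDownstreamer struct{}

    func (rejectingDownstreamer) Downstream(_ context.Context, queries []logql.DownstreamQuery) ([]logql.Result, error) {
        // a real Downstreamer would dispatch each DownstreamQuery and collect Results
        return nil, fmt.Errorf("refusing to execute %d downstream queries", len(queries))
    }

    var ev = logql.NewDownstreamEvaluator(rejectingDownstreamer{})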
type Engine ¶
type Engine struct {
// contains filtered or unexported fields
}
Engine is the LogQL engine.
type EngineOpts ¶
type EngineOpts struct {
    // Timeout for queries execution
    Timeout time.Duration `yaml:"timeout"`

    // MaxLookBackPeriod is the maximum amount of time to look back for log lines.
    // only used for instant log queries.
    MaxLookBackPeriod time.Duration `yaml:"max_look_back_period"`
}
EngineOpts is the list of options to use with the LogQL query engine.
func (*EngineOpts) RegisterFlagsWithPrefix ¶
func (opts *EngineOpts) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet)
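A hedged wiring sketch; the prefix and FlagSet name are arbitrary, and the exact flag names registered are determined by the method rather than listed here:

    func setupEngineOpts(args []string) (logql.EngineOpts, error) {
        var opts logql.EngineOpts
        fs := flag.NewFlagSet("example", flag.ContinueOnError)
        opts.RegisterFlagsWithPrefix("querier", fs) // flag names are derived from the given prefix
        err := fs.Parse(args)
        return opts, err
    }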
type EntryEvaluator ¶
type EntryEvaluator interface {
    // Iterator returns the iter.EntryIterator for a given LogSelectorExpr
    Iterator(context.Context, LogSelectorExpr, Params) (iter.EntryIterator, error)
}
type Evaluator ¶
type Evaluator interface {
    SampleEvaluator
    EntryEvaluator
}
Evaluator is an interface for iterating over data at different nodes in the AST
type Expr ¶
type Expr interface {
    Shardable() bool // A recursive check on the AST to see if it's shardable.
    fmt.Stringer
    // contains filtered or unexported methods
}
Expr is the root expression which can be a SampleExpr or LogSelectorExpr
type LiteralParams ¶
type LiteralParams struct {
// contains filtered or unexported fields
}
LiteralParams implements Params.
func NewLiteralParams ¶
func (LiteralParams) Copy ¶
func (p LiteralParams) Copy() LiteralParams
func (LiteralParams) Direction ¶
func (p LiteralParams) Direction() logproto.Direction
Direction implements Params.
func (LiteralParams) Interval ¶
func (p LiteralParams) Interval() time.Duration
Interval implements Params.
type LogSelectorExpr ¶
type LogSelectorExpr interface {
    Matchers() []*labels.Matcher
    PipelineExpr
    HasFilter() bool
    Expr
}
LogSelectorExpr is a LogQL expression filtering and returning logs.
func AddFilterExpr ¶
func AddFilterExpr(expr LogSelectorExpr, ty labels.MatchType, match string) (LogSelectorExpr, error)
AddFilterExpr adds a filter expression to a logselector expression.
func ParseLogSelector ¶
func ParseLogSelector(input string) (LogSelectorExpr, error)
ParseLogSelector parses a log selector expression `{app="foo"} |= "filter"`
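A usage sketch with the query from the doc comment:

    func parseLogSelectorExample() {
        expr, err := logql.ParseLogSelector(`{app="foo"} |= "filter"`)
        if err != nil {
            return // a ParseError describing what went wrong
        }
        fmt.Println(expr.Matchers())  // label matchers from the selector
        fmt.Println(expr.HasFilter()) // true: the query carries a line filter
    }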
type MatrixStepper ¶
type MatrixStepper struct {
// contains filtered or unexported fields
}
MatrixStepper exposes a promql.Matrix as a StepEvaluator. Ensure that the resulting StepEvaluator maintains the same shape that the parameters expect. For example, it's possible that a downstream query matches no log streams and thus returns an empty matrix. However, we still need to ensure that it can be merged effectively with another leg that may match series. Therefore, we determine our steps from the parameters and not the underlying Matrix.
func NewMatrixStepper ¶
func (*MatrixStepper) Close ¶
func (m *MatrixStepper) Close() error
func (*MatrixStepper) Error ¶
func (m *MatrixStepper) Error() error
type MockDownstreamer ¶
type MockDownstreamer struct {
*Engine
}
func (MockDownstreamer) Downstream ¶
func (m MockDownstreamer) Downstream(ctx context.Context, queries []DownstreamQuery) ([]Result, error)
func (MockDownstreamer) Downstreamer ¶
func (m MockDownstreamer) Downstreamer() Downstreamer
type MockQuerier ¶
type MockQuerier struct {
// contains filtered or unexported fields
}
Shard-aware mock querier.
func NewMockQuerier ¶
func NewMockQuerier(shards int, streams []logproto.Stream) MockQuerier
func (MockQuerier) SelectLogs ¶
func (q MockQuerier) SelectLogs(ctx context.Context, req SelectLogParams) (iter.EntryIterator, error)
func (MockQuerier) SelectSamples ¶
func (q MockQuerier) SelectSamples(ctx context.Context, req SelectSampleParams) (iter.SampleIterator, error)
type MultiStageExpr ¶
type MultiStageExpr []StageExpr
MultiStageExpr is multiple stages which implement a PipelineExpr.
func (MultiStageExpr) String ¶
func (m MultiStageExpr) String() string
type Params ¶
type Params interface {
    Query() string
    Start() time.Time
    End() time.Time
    Step() time.Duration
    Interval() time.Duration
    Limit() uint32
    Direction() logproto.Direction
    Shards() []string
}
Params details the parameters associated with a vali request
type ParseError ¶
type ParseError struct {
// contains filtered or unexported fields
}
ParseError is what is returned when we failed to parse.
func (ParseError) Error ¶
func (p ParseError) Error() string
func (ParseError) Is ¶
func (p ParseError) Is(target error) bool
Is allows the use of errors.Is(err, ErrParse) on this error.
type PipelineExpr ¶
PipelineExpr is an expression defining a log pipeline.
type Querier ¶
type Querier interface {
    SelectLogs(context.Context, SelectLogParams) (iter.EntryIterator, error)
    SelectSamples(context.Context, SelectSampleParams) (iter.SampleIterator, error)
}
Querier allows a LogQL expression to fetch an EntryIterator for a set of matchers and filters
type QueryParams ¶
type QueryRangeType ¶
type QueryRangeType string
var (
    InstantType QueryRangeType = "instant"
    RangeType   QueryRangeType = "range"
)
func GetRangeType ¶
func GetRangeType(q Params) QueryRangeType
GetRangeType returns whether a query is an instant query or range query
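A small sketch, assuming p is any value implementing Params:

    func evaluate(p logql.Params) {
        switch logql.GetRangeType(p) {
        case logql.InstantType:
            // single evaluation at p.Start()
        case logql.RangeType:
            // evaluate from p.Start() to p.End() every p.Step()
        }
    }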
type RangeVectorAggregator ¶
RangeVectorAggregator aggregates samples for a given range of samples. It receives the current millisecond timestamp and the list of points within the range.
type RangeVectorIterator ¶
type RangeVectorIterator interface {
    Next() bool
    At(aggregator RangeVectorAggregator) (int64, promql.Vector)
    Close() error
    Error() error
}
RangeVectorIterator iterates through a range of samples. To fetch the current vector use `At` with a `RangeVectorAggregator`.
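A consumption sketch, assuming it is a RangeVectorIterator and agg is a RangeVectorAggregator appropriate for the expression being evaluated:

    func drainRangeVector(it logql.RangeVectorIterator, agg logql.RangeVectorAggregator) error {
        defer it.Close()
        for it.Next() {
            ts, vec := it.At(agg)
            _ = ts  // timestamp for this step
            _ = vec // aggregated promql.Vector at this step
        }
        return it.Error()
    }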
type Result ¶
type Result struct {
    Data       promql_parser.Value
    Statistics stats.Result
}
Result is the result of a query execution.
type SampleEvaluator ¶
type SampleEvaluator interface {
    // StepEvaluator returns a StepEvaluator for a given SampleExpr. It's explicitly passed another StepEvaluator
    // in order to enable arbitrary computation of embedded expressions. This allows more modular & extensible
    // StepEvaluator implementations which can be composed.
    StepEvaluator(ctx context.Context, nextEvaluator SampleEvaluator, expr SampleExpr, p Params) (StepEvaluator, error)
}
type SampleEvaluatorFunc ¶
type SampleEvaluatorFunc func(ctx context.Context, nextEvaluator SampleEvaluator, expr SampleExpr, p Params) (StepEvaluator, error)
func (SampleEvaluatorFunc) StepEvaluator ¶
func (s SampleEvaluatorFunc) StepEvaluator(ctx context.Context, nextEvaluator SampleEvaluator, expr SampleExpr, p Params) (StepEvaluator, error)
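A composition sketch: SampleEvaluatorFunc lets an ordinary function satisfy SampleEvaluator, here wrapping an assumed existing evaluator (inner) so every evaluation is printed before delegating.

    func traced(inner logql.SampleEvaluator) logql.SampleEvaluator {
        return logql.SampleEvaluatorFunc(func(ctx context.Context, next logql.SampleEvaluator, expr logql.SampleExpr, p logql.Params) (logql.StepEvaluator, error) {
            fmt.Printf("evaluating %s\n", expr.String())
            return inner.StepEvaluator(ctx, next, expr, p)
        })
    }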
type SampleExpr ¶
type SampleExpr interface {
    // Selector is the LogQL selector to apply when retrieving logs.
    Selector() LogSelectorExpr
    Extractor() (SampleExtractor, error)
    Expr
}
SampleExpr is a LogQL expression filtering logs and returning metric samples.
func ParseSampleExpr ¶
func ParseSampleExpr(input string) (SampleExpr, error)
ParseSampleExpr parses a string and returns the SampleExpr.
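A usage sketch with an illustrative metric query:

    func parseSampleExprExample() {
        expr, err := logql.ParseSampleExpr(`sum(rate({app="foo"}[1m]))`)
        if err != nil {
            return // a ParseError describing what went wrong
        }
        fmt.Println(expr.Selector()) // the underlying log selector
    }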
type SampleExtractor ¶
type SampleExtractor = log.SampleExtractor
Type alias for backward compatibility
type SelectLogParams ¶
type SelectLogParams struct {
*logproto.QueryRequest
}
SelectLogParams specifies parameters passed to data selections.
func (SelectLogParams) LogSelector ¶
func (s SelectLogParams) LogSelector() (LogSelectorExpr, error)
LogSelector returns the LogSelectorExpr from the SelectLogParams. The `LogSelectorExpr` can then return all matchers and filters to use for that request.
type SelectSampleParams ¶
type SelectSampleParams struct {
*logproto.SampleQueryRequest
}
func (SelectSampleParams) Expr ¶
func (s SelectSampleParams) Expr() (SampleExpr, error)
Expr returns the SampleExpr from the SelectSampleParams. The `SampleExpr` can then return all matchers and filters to use for that request.
func (SelectSampleParams) LogSelector ¶
func (s SelectSampleParams) LogSelector() (LogSelectorExpr, error)
LogSelector returns the LogSelectorExpr from the SelectSampleParams. The `LogSelectorExpr` can then return all matchers and filters to use for that request.
type ShardMapper ¶
type ShardMapper struct {
// contains filtered or unexported fields
}
func NewShardMapper ¶
func NewShardMapper(shards int, metrics *ShardingMetrics) (ShardMapper, error)
type ShardedEngine ¶
type ShardedEngine struct {
// contains filtered or unexported fields
}
ShardedEngine is an Engine implementation that can split queries into more parallelizable forms via querying the underlying backend shards individually and reaggregating them.
func NewShardedEngine ¶
func NewShardedEngine(opts EngineOpts, downstreamable Downstreamable, metrics *ShardingMetrics, limits Limits) *ShardedEngine
NewShardedEngine constructs a *ShardedEngine
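A hedged wiring sketch using the test helpers exposed by this package (MockDownstreamer, NoLimits); eng is assumed to be an *Engine built elsewhere, and production code would supply its own Downstreamable and Limits:

    func newShardedEngine(eng *logql.Engine) *logql.ShardedEngine {
        metrics := logql.NewShardingMetrics(prometheus.DefaultRegisterer)
        downstreamable := logql.MockDownstreamer{Engine: eng}
        return logql.NewShardedEngine(logql.EngineOpts{}, downstreamable, metrics, logql.NoLimits)
    }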
type ShardingMetrics ¶
type ShardingMetrics struct {
// contains filtered or unexported fields
}
ShardingMetrics is the metrics wrapper used in shard mapping
func NewShardingMetrics ¶
func NewShardingMetrics(registerer prometheus.Registerer) *ShardingMetrics
type Shards ¶
type Shards []astmapper.ShardAnnotation
func ParseShards ¶
ParseShards parses a list of string encoded shards
type StepEvaluator ¶
type StepEvaluator interface {
    // while Next returns a promql.Value, the only acceptable types are Scalar and Vector.
    Next() (bool, int64, promql.Vector)
    // Close all resources used.
    Close() error
    // Reports any error
    Error() error
}
StepEvaluator evaluates a single step of a query.
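A draining sketch, assuming se is a StepEvaluator obtained from an Evaluator or from ResultStepEvaluator:

    func drainSteps(se logql.StepEvaluator) error {
        defer se.Close()
        for {
            ok, ts, vec := se.Next()
            if !ok {
                break
            }
            _ = ts  // step timestamp
            _ = vec // promql.Vector produced for this step
        }
        return se.Error()
    }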
func ConcatEvaluator ¶
func ConcatEvaluator(evaluators []StepEvaluator) (StepEvaluator, error)
ConcatEvaluator joins multiple StepEvaluators. Contract: They must be of identical start, end, and step values.
func ResultStepEvaluator ¶
func ResultStepEvaluator(res Result, params Params) (StepEvaluator, error)
ResultStepEvaluator coerces a downstream vector or matrix into a StepEvaluator
Source Files ¶
Directories ¶
Path | Synopsis
---|---
logfmt | Adapted from https://github.com/go-logfmt/logfmt/ but with []byte as parameter instead. Original license is MIT.
marshal | Package marshal converts internal objects to loghttp model objects.
marshal/legacy | Package marshal converts internal objects to loghttp model objects.
stats | Package stats provides primitives for recording metrics across the query path.