Documentation ¶
Index ¶
- Constants
- Variables
- type Details
- type Elect
- type ElectLine
- type ElectPathVar
- type ElectRegMeta
- type ElectRegexp
- type ExecuteRule
- type From
- type FromLog
- type FromLogMultiline
- type FromLogParse
- type FromLogPath
- type FromLogPaths
- type FromProcessPerf
- type Gateway
- type Group
- type GroupBy
- type LeftRight
- type LogAnalysisConf
- type LogAnalysisPatternConf
- type LogParseGrok
- type LogParseRegexp
- type LogParseSeparator
- type LogSamples
- type LogStructure
- type MContains
- type MContainsAny
- type MIn
- type MNumberBetween
- type MNumberOp
- type MRegexp
- type MetricConfig
- type Output
- type RefIndex
- type RefName
- type RefVar
- type SQLTask
- type Select
- type SelectOne
- type SlsConfig
- type TimeConf
- type TransformConf
- type TransformFilterAppendV1
- type TransformFilterCleanUrlV1
- type TransformFilterCompositeV1
- type TransformFilterConf
- type TransformFilterConstV1
- type TransformFilterMappingV1
- type TransformFilterRegexpReplaceV1
- type TransformFilterSubstringV1
- type TransformFilterSwitchCaseV1
- type TransformFilterSwitchCaseV1Case
- type Var
- type Vars
- type Where
- type Window
Constants ¶
const (
    EElectLine      = "line"
    EElectRefIndex  = "refIndex"
    EElectRefName   = "refName"
    EElectLeftRight = "leftRight"
    EElectRegexp    = "regexp"
    EElectRefMeta   = "refMeta"
    EElectPathVar   = "pathvar"
    EElectContext   = "context"
    EElectRefVar    = "refVar"
)
const (
    ElectRefMetaTypePodLabels      = "labels"
    ElectRefMetaTypePodAnnotations = "annotations"
)
Variables ¶
var (
    // Instantaneous value
    MetricTypeGauge = "GAUGE"
    // Increment (delta)
    MetricTypeCounter = "COUNTER"
    // Increment divided by the collection period, which yields a rate (increment/s)
    MetricTypeCounterByTime = "COUNTER_BY_TIME"

    CElectLine = &Elect{
        Type: EElectLine,
    }
    // CElectContext is a const for EElectContext
    CElectContext = &Elect{
        Type: EElectContext,
    }
)
Functions ¶
This section is empty.
Types ¶
type Details ¶
type Details struct {
    // If Enabled is true, the elect results will be reported as details
    Enabled bool `json:"enabled"`
}
type Elect ¶
type Elect struct {
    // refIndex/refName: reference an existing field
    // leftRight: cut a field out using "starting from the left ... up to the right ..." anchors
    // regexp: extract a field using a regular expression
    // context: fetch string value from context
    Type      string        `json:"type,omitempty"`
    Line      *ElectLine    `json:"line,omitempty"`
    RefIndex  *RefIndex     `json:"refIndex,omitempty"`
    RefName   *RefName      `json:"refName,omitempty"`
    RefVar    *RefVar       `json:"refVar,omitempty"`
    LeftRight *LeftRight    `json:"leftRight,omitempty"`
    Regexp    *ElectRegexp  `json:"regexp,omitempty"`
    RefMeta   *ElectRegMeta `json:"refMeta,omitempty"`
    PathVar   *ElectPathVar `json:"pathVar,omitempty"`

    // Every elect can have its own transform, called an ad-hoc transform.
    // It is executed every time the elect is evaluated.
    Transform *TransformConf `json:"transform,omitempty"`
}
Elect describes how to extract a desired field from the current data (which may be a raw log line or already-structured data). The elected result is a string by default; in a few cases it is a float64 or another type, and each such type must state this explicitly.
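For illustration, a minimal sketch of an Elect that cuts out the value between two anchors; the anchor strings are hypothetical and the literal assumes it is written against this package:

// Extract the substring between "cost=" and "ms", e.g. "cost=123ms" -> "123".
elect := &Elect{
    Type: EElectLeftRight,
    LeftRight: &LeftRight{
        Left:  "cost=",
        Right: "ms",
    },
}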
type ElectPathVar ¶
type ElectPathVar struct {
Name string `json:"name"`
}
type ElectRegMeta ¶
type ElectRegMeta struct {
Name string `json:"name"`
}
type ElectRegexp ¶
type ElectRegexp struct {
    // Regular expression
    Expression string `json:"expression"`
    // Capture group index
    Index int `json:"index"`
    // Capture group name; when non-empty it takes precedence over Index
    Name string `json:"name"`
}
For performance reasons, this approach is generally not recommended unless the regular expression is fairly simple.
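A minimal sketch of a regexp-based Elect that picks a named capture group; the pattern below is only an example:

// Elect the "status" capture group, e.g. from "status=200 rt=5".
elect := &Elect{
    Type: EElectRegexp,
    Regexp: &ElectRegexp{
        Expression: `status=(?P<status>\d+)`,
        Name:       "status", // non-empty Name takes precedence over Index
    },
}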
type ExecuteRule ¶
type From ¶
type From struct {
    Type        string           `json:"type"`
    Log         *FromLog         `json:"log"`
    ProcessPerf *FromProcessPerf `json:"processPerf"`
}
type FromLog ¶
type FromLog struct {
    Path []*FromLogPath `json:"path"`
    // defaults to UTF8
    Charset string        `json:"charset"`
    Parse   *FromLogParse `json:"parse"`
    // Time defines how the timestamp is parsed
    Time      *TimeConf         `json:"time"`
    Multiline *FromLogMultiline `json:"multiline"`
    // Vars define vars for log processing
    Vars *Vars  `json:"vars,omitempty"`
    Mode string `json:"mode,omitempty"`
}
type FromLogMultiline ¶
type FromLogMultiline struct {
    // Whether multiline log handling is enabled
    Enabled bool `json:"enabled"`
    // The condition that identifies the start of a log record: any line satisfying it is treated as a line start.
    // For example, one common approach is to assert that the line begins with a ^yyyy-MM-dd pattern.
    Where *Where `json:"where"`
    // limit max logs in a log group
    MaxLines int    `json:"maxLines"`
    What     string `json:"what"`
}
TODO Follow the logstash style
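A small sketch of a multiline config whose line-start condition matches a leading yyyy-MM-dd timestamp; the regular expression and limit are illustrative:

multiline := &FromLogMultiline{
    Enabled: true,
    // A line starts a new record if it begins with a date such as "2024-01-31".
    Where: &Where{
        Regexp: &MRegexp{
            Elect:      CElectLine,
            Expression: `^\d{4}-\d{2}-\d{2}`,
        },
    },
    MaxLines: 100,
}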
type FromLogParse ¶
type FromLogParse struct {
    // Some parse types are expensive, so a filter can be applied before parsing to reduce the amount of data parsed.
    // Only leftRight-style elects can be used inside this Where.
    Where *Where `json:"where,omitempty"`
    // free/separator/regexp/json/leftRight
    Type      string             `json:"type,omitempty"`
    Separator *LogParseSeparator `json:"separator,omitempty"`
    Regexp    *LogParseRegexp    `json:"regexp,omitempty"`
    Grok      *LogParseGrok      `json:"grok,omitempty"`
}
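A sketch of a separator-based parse with a cheap pre-filter, using a leftRight elect as required above; the anchors, pattern, and separator are examples:

parse := &FromLogParse{
    // Only parse lines whose "level=" field is ERROR or WARN.
    Where: &Where{
        Regexp: &MRegexp{
            Elect: &Elect{
                Type:      EElectLeftRight,
                LeftRight: &LeftRight{Left: "level=", Right: " "},
            },
            Expression: "ERROR|WARN",
        },
    },
    Type:      "separator",
    Separator: &LogParseSeparator{Separator: ","},
}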
type FromLogPath ¶
type FromLogPath struct {
    // path/glob/regexp/'glob&regexp'
    // When type==regexp, a dir must be specified; every descendant file of that dir matching the regexp is included in collection.
    // This approach is error-prone:
    // 1. recursive traversal is expensive / symlink loops / symlinks causing duplicate collection
    // When type=='glob&regexp', glob is executed first and then regexp, so glob can cheaply narrow down a set of files
    // which regexp then matches more precisely.
    Type string `json:"type"`
    // used when type==path
    // used when type==format
    // /home/admin/logs/foo/{time:yyyy}/{time:MM}/{time:dd}/{time:HH}/foo.log
    // /home/admin/logs/foo/{time:yy}/{time:MM}/{time:dd}/{time:HH}/foo.log
    // used when type==glob
    // used when type==regexp
    Pattern string `json:"pattern"`
    // used when type==regexp
    Dir string `json:"dir"`
    // Limit how many files can be matched using this FromLogPath object.
    // 0 means agent defaults (maybe 10)
    MaxMatched int `json:"maxMatched"`
}
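Two path sketches, one plain absolute path and one glob; the patterns and limit are illustrative only:

paths := []*FromLogPath{
    // A single absolute path (the common case).
    {Type: "path", Pattern: "/home/admin/logs/foo/foo.log"},
    // A glob matching rotated files in the same directory.
    {Type: "glob", Pattern: "/home/admin/logs/foo/foo-*.log", MaxMatched: 10},
}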
type FromLogPaths ¶
type FromLogPaths struct {
    // In 99% of cases Paths should contain exactly one entry with type==simple holding an absolute path.
    // All matched files are processed with the same rules, so they should share some common trait, such as the same format.
    Paths []*FromLogPath `json:"paths"`
    // How often to watch the file tree for changes; effective for glob/regexp
    WatchInterval string `json:"watchInterval"`
}
TODO Used to support multiple files
type FromProcessPerf ¶
type Gateway ¶
type Gateway struct {
    // Users can override this; otherwise tableName is used by default
    MetricName string `json:"metricName"`
}
type GroupBy ¶
type GroupBy struct {
    Groups      []*Group         `json:"groups"`
    MaxKeySize  int              `json:"maxKeySize"`
    LogAnalysis *LogAnalysisConf `json:"logAnalysis"`
    Details     *Details         `json:"details"`
}
type LeftRight ¶
type LeftRight struct {
    LeftIndex             int    `json:"leftIndex,omitempty"`
    Left                  string `json:"left,omitempty"`
    Right                 string `json:"right,omitempty"`
    MatchToEndIfMissRight bool   `json:"matchToEndIfMissRight,omitempty"`
    // If the left/right anchors fail to cut out a result, this default is used.
    // If the left anchor cannot be found, this value is returned.
    DefaultValue *string `json:"defaultValue,omitempty"`
}
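A sketch showing the fallback behavior: match to the end of the string when the right anchor is missing, and fall back to a default when the left anchor is absent; the values are illustrative:

defaultValue := "unknown"
lr := &LeftRight{
    Left:                  "uid=",
    Right:                 "&",
    MatchToEndIfMissRight: true,
    DefaultValue:          &defaultValue,
}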
type LogAnalysisConf ¶
type LogAnalysisConf struct {
    // patterns to match, visited in order, breaking at the first match
    Patterns []*LogAnalysisPatternConf `json:"patterns"`
    // max count of generated unknown patterns, defaults to 64
    MaxUnknownPatterns int `json:"maxUnknownPatterns"`
    // truncate the log if its length (bytes) exceeds MaxLogLength, defaults to 300
    MaxLogLength int `json:"maxLogLength"`
}
type LogAnalysisPatternConf ¶
type LogParseGrok ¶
type LogParseGrok struct {
    // Grok expression
    Expression string `json:"expression"`
}
type LogParseRegexp ¶
type LogParseRegexp struct {
    // Regular expression
    Expression string `json:"expression"`
}
type LogParseSeparator ¶
type LogParseSeparator struct {
    // Simple separator
    Separator string `json:"separator"`
}
type LogSamples ¶
type LogStructure ¶
Log structuring:

1. pre-where: optional; only leftRight-style elects are supported
2. structure: optional; turns the data into structured form
3. regular processing: select [...] from {} where {} group by [...] having {} window {} output {}
type MContainsAny ¶
type MNumberBetween ¶
type MNumberBetween struct {
    Elect       *Elect  `json:"elect" yaml:"elect"`
    Min         float64 `json:"min" yaml:"min"`
    Max         float64 `json:"max" yaml:"max"`
    MinIncluded bool    `json:"minIncluded" yaml:"minIncluded"`
    MaxIncluded bool    `json:"maxIncluded" yaml:"maxIncluded"`
    // Whether the numeric value should be parsed as an integer
    ParseNumberToInt bool `json:"parseNumberToInt"`
}
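A sketch of a Where that checks whether an elected latency value falls in [0, 500); the anchors and bounds are examples:

where := &Where{
    NumberBetween: &MNumberBetween{
        // Elect the value between "rt=" and "ms" and test it against [0, 500).
        Elect: &Elect{
            Type:      EElectLeftRight,
            LeftRight: &LeftRight{Left: "rt=", Right: "ms"},
        },
        Min:         0,
        Max:         500,
        MinIncluded: true,
        MaxIncluded: false,
    },
}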
type MRegexp ¶
type MRegexp struct {
    Elect      *Elect `json:"elect" yaml:"elect"`
    Expression string `json:"expression" yaml:"expression"`
    Multiline  bool   `json:"multiline" yaml:"multiline"`
    // CatchGroups indicates whether to store the capture groups in the processing context
    CatchGroups bool `json:"catchGroups" yaml:"catchGroups"`
}
type MetricConfig ¶
type SQLTask ¶
type SQLTask struct {
    Select      *Select     `json:"select"`
    From        *From       `json:"from"`
    Where       *Where      `json:"where"`
    GroupBy     *GroupBy    `json:"groupBy"`
    Window      *Window     `json:"window"`
    Output      *Output     `json:"output"`
    ExecuteRule ExecuteRule `json:"executeRule"`
}
SQL style task
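To show how the pieces fit together, a rough sketch of a SQLTask that tails a log file, keeps only lines containing an error marker, and groups results. Select, Window, Output, and the group definitions are left out because their sub-types are documented elsewhere; the From.Type value and all literals are assumptions for illustration:

task := &SQLTask{
    From: &From{
        Type: "log", // assumed to select the Log branch
        Log: &FromLog{
            Path: []*FromLogPath{
                {Type: "path", Pattern: "/home/admin/logs/foo/foo.log"},
            },
            Charset: "UTF8",
            Parse: &FromLogParse{
                Type:      "separator",
                Separator: &LogParseSeparator{Separator: ","},
            },
        },
    },
    // Keep only lines containing "ERROR".
    Where: &Where{
        Regexp: &MRegexp{
            Elect:      CElectLine,
            Expression: "ERROR",
        },
    },
    GroupBy: &GroupBy{
        MaxKeySize: 100,
        Details:    &Details{Enabled: true},
    },
}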
type Select ¶
type Select struct {
    Values     []*SelectOne `json:"values"`
    LogSamples *LogSamples  `json:"logSamples"`
}
type TransformConf ¶
type TransformConf struct {
Filters []*TransformFilterConf `json:"filters" yaml:"filters"`
}
TransformConf represents transform conf
type TransformFilterAppendV1 ¶
type TransformFilterAppendV1 struct {
    Value           string `json:"value" yaml:"value"`
    AppendIfMissing bool   `json:"appendIfMissing" yaml:"appendIfMissing"`
}
TransformFilterAppendV1 represents appending a suffix to the current value
type TransformFilterCleanUrlV1 ¶
type TransformFilterCleanUrlV1 struct { }
type TransformFilterCompositeV1 ¶
type TransformFilterCompositeV1 struct {
    // Filters sub filters
    Filters []*TransformFilterConf `json:"filters" yaml:"filters"`
    // BreaksWhenError indicates whether to break the execution process when an error is encountered.
    // If it is false, the error is logged and ignored, and execution continues.
    BreaksWhenError bool `json:"breaksWhenError" yaml:"breaksWhenError"`
}
TransformFilterCompositeV1 is a container of filters. It executes filters in order.
type TransformFilterConf ¶
type TransformFilterConf struct {
    SwitchCaseV1 *TransformFilterSwitchCaseV1 `json:"switchCaseV1" yaml:"switchCaseV1"`
    SubstringV1  *TransformFilterSubstringV1  `json:"substringV1" yaml:"substringV1"`
    AppendV1     *TransformFilterAppendV1     `json:"appendV1" yaml:"appendV1"`
    CompositeV1  *TransformFilterCompositeV1  `json:"compositeV1" yaml:"compositeV1"`
    MappingV1    *TransformFilterMappingV1    `json:"mappingV1" yaml:"mappingV1"`
    // avoid using 'const' as field name or json field name, because 'const' is a keyword in some languages
    ConstV1         *TransformFilterConstV1         `json:"constV1" yaml:"constV1"`
    RegexpReplaceV1 *TransformFilterRegexpReplaceV1 `json:"regexpReplaceV1" yaml:"regexpReplaceV1"`
    DiscardV1       *struct{}                       `json:"discardV1" yaml:"discardV1"`
    CleanUrlV1      *TransformFilterCleanUrlV1      `json:"cleanUrlV1" yaml:"cleanUrlV1"`
}
TransformFilterConf transform filter
type TransformFilterConstV1 ¶
type TransformFilterConstV1 struct {
Value string `json:"value" yaml:"value"`
}
type TransformFilterMappingV1 ¶
type TransformFilterMappingV1 struct {
    Mappings map[string]string `json:"mappings" yaml:"mappings"`
    // DefaultValue is used when Mappings doesn't match the source value.
    // If DefaultValue is the empty string "", the source value is returned unchanged.
    DefaultValue string `json:"defaultValue" yaml:"defaultValue"`
}
TransformFilterMappingV1 represents mapping a value from one to another
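A sketch mapping HTTP status codes to coarse categories, with pass-through for anything unmatched; the mapping values are hypothetical:

mapping := &TransformFilterConf{
    MappingV1: &TransformFilterMappingV1{
        Mappings: map[string]string{
            "200": "ok",
            "404": "not_found",
            "500": "server_error",
        },
        // Empty DefaultValue means unmatched values are returned unchanged.
        DefaultValue: "",
    },
}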
type TransformFilterSubstringV1 ¶
type TransformFilterSubstringV1 struct {
    // Begin represents the beginning offset of the substring
    Begin int `json:"begin" yaml:"begin"`
    // End represents the end offset of the substring; -1 means match to the string length
    End int `json:"end" yaml:"end"`
    // EmptyIfError represents returning "" if any error occurs (e.g. begin >= len(str))
    EmptyIfError bool `json:"emptyIfError" yaml:"emptyIfError"`
}
TransformFilterSubstringV1 represents extracting substring from the current value
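A sketch that keeps the first 8 characters of the current value and returns "" instead of erroring on short strings; the offsets are examples:

substr := &TransformFilterConf{
    SubstringV1: &TransformFilterSubstringV1{
        Begin:        0,
        End:          8,
        EmptyIfError: true,
    },
}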
type TransformFilterSwitchCaseV1 ¶
type TransformFilterSwitchCaseV1 struct {
    // Cases are tested in order; when the first matching case is found, its action is executed and the filter terminates.
    Cases []*TransformFilterSwitchCaseV1Case `json:"cases" yaml:"cases"`
    // DefaultAction is executed if no case matches.
    DefaultAction *TransformFilterConf `json:"defaultAction" yaml:"defaultAction"`
}
TransformFilterSwitchCaseV1 represents `switch/case/default` control flow.
type TransformFilterSwitchCaseV1Case ¶
type TransformFilterSwitchCaseV1Case struct {
    Case   *Where               `json:"caseWhere" yaml:"caseWhere"`
    Action *TransformFilterConf `json:"action" yaml:"action"`
}
TransformFilterSwitchCaseV1Case represents one case and its action
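A sketch of a switch/case filter: values containing "timeout" are rewritten to a constant, everything else falls back to the default action. How the case's Where binds to the current value is not spelled out in this section, so the elect choice below is a guess, and all literals are illustrative:

switchCase := &TransformFilterConf{
    SwitchCaseV1: &TransformFilterSwitchCaseV1{
        Cases: []*TransformFilterSwitchCaseV1Case{
            {
                // Elect choice is an assumption for this example.
                Case: &Where{
                    Regexp: &MRegexp{
                        Elect:      CElectContext,
                        Expression: "timeout",
                    },
                },
                Action: &TransformFilterConf{
                    ConstV1: &TransformFilterConstV1{Value: "TIMEOUT"},
                },
            },
        },
        DefaultAction: &TransformFilterConf{
            ConstV1: &TransformFilterConstV1{Value: "OTHER"},
        },
    },
}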
type Where ¶
type Where struct {
    And           []*Where        `json:"and,omitempty" yaml:"and"`
    Or            []*Where        `json:"or,omitempty" yaml:"or"`
    Not           *Where          `json:"not,omitempty" yaml:"not"`
    Contains      *MContains      `json:"contains,omitempty" yaml:"contains"`
    ContainsAny   *MContainsAny   `json:"containsAny,omitempty" yaml:"containsAny"`
    In            *MIn            `json:"in,omitempty" yaml:"in"`
    NumberBetween *MNumberBetween `json:"numberBetween,omitempty" yaml:"numberBetween"`
    Regexp        *MRegexp        `json:"regexp,omitempty" yaml:"regexp"`
    NumberOp      *MNumberOp      `json:"numberOp,omitempty" yaml:"numberOp"`
}
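Conditions compose through And/Or/Not. A sketch matching lines that contain "ERROR" but not "ignorable"; the patterns are examples:

where := &Where{
    And: []*Where{
        {Regexp: &MRegexp{Elect: CElectLine, Expression: "ERROR"}},
        {Not: &Where{Regexp: &MRegexp{Elect: CElectLine, Expression: "ignorable"}}},
    },
}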