Documentation ¶
Index ¶
- Constants
- Variables
- func Time1Day(t int64) string
- func Time1DayInt(t int64) int64
- func Time5Min(t int64) string
- func Time5MinInt(t int64) int64
- type CsvParser
- type CsvType
- type EmptyParser
- type GrokParser
- type JsonParser
- type KafaRestlogParser
- func (krp *KafaRestlogParser) Name() string
- func (krp *KafaRestlogParser) Parse(lines []string) ([]sender.Data, error)
- func (krp *KafaRestlogParser) ParseCode(fields []string) int
- func (krp *KafaRestlogParser) ParseDuration(fields []string) int
- func (krp *KafaRestlogParser) ParseIp(fields []string) string
- func (krp *KafaRestlogParser) ParseLogTime(fields []string) int64
- func (krp *KafaRestlogParser) ParseMethod(fields []string) string
- func (krp *KafaRestlogParser) ParseRespCL(fields []string) int
- func (krp *KafaRestlogParser) ParseTopic(fields []string) string
- type Label
- type LogParser
- func NewCsvParser(c conf.MapConf) (LogParser, error)
- func NewEmptyParser(c conf.MapConf) (LogParser, error)
- func NewGrokParser(c conf.MapConf) (LogParser, error)
- func NewJsonParser(c conf.MapConf) (LogParser, error)
- func NewKafaRestlogParser(c conf.MapConf) (LogParser, error)
- func NewNginxParser(c conf.MapConf) (LogParser, error)
- func NewQiniulogParser(c conf.MapConf) (LogParser, error)
- func NewRawlogParser(c conf.MapConf) (LogParser, error)
- type NginxParser
- type ParserRegistry
- type QiniulogParser
- type RawlogParser
Constants ¶
const (
	KeyCSVSchema   = "csv_schema"   // name and type of each CSV column: long/string/float/date
	KeyCSVSplitter = "csv_splitter" // CSV delimiter
	KeyCSVLabels   = "csv_labels"   // extra labels to attach, e.g. machine information
)
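For illustration, the keys above can be used to build a CSV parser configuration. This is a minimal sketch, assuming the package lives at github.com/qiniu/logkit/parser, that conf.MapConf is a plain map of string keys to string values, and that the parser name and schema shown are made-up values.

package main

import (
	"fmt"

	"github.com/qiniu/logkit/conf"
	"github.com/qiniu/logkit/parser"
)

func main() {
	// Assumption: conf.MapConf maps option keys to string option values.
	c := conf.MapConf{
		parser.KeyParserName:  "my_csv_parser",
		parser.KeyCSVSchema:   "service string,code long,latency float", // "<column> <type>" pairs
		parser.KeyCSVSplitter: ",",
	}
	p, err := parser.NewCsvParser(c)
	if err != nil {
		panic(err)
	}
	fmt.Println(p.Name())
}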
const (
	KeyGrokMode               = "grok_mode"     // whether to replace \n so that multiple lines can be matched
	KeyGrokPatterns           = "grok_patterns" // names of the grok patterns to use
	KeyGrokCustomPatternFiles = "grok_custom_pattern_files"
	KeyGrokCustomPatterns     = "grok_custom_patterns"
	KeyTimeZoneOffset         = "timezone_offset"
)
const (
	LONG   = "long"
	FLOAT  = "float"
	STRING = "string"
	DATE   = "date"
	DROP   = "drop"
)
const (
	KEY_SRC_IP   = "source_ip"
	KEY_METHOD   = "method"
	KEY_TOPIC    = "topic"
	KEY_CODE     = "code"
	KEY_RESP_LEN = "resp_len"
	KEY_DURATION = "duration"
	KEY_LOG_TIME = "log_time"
	KEY_ERROR    = "error"
	KEY_WARN     = "warn"
	EMPTY_STRING = ""
)
const (
	NginxSchema      string = "nginx_schema"
	NginxConfPath           = "nginx_log_format_path"
	NginxLogFormat          = "nginx_log_format_name"
	NginxFormatRegex        = "nginx_log_format_regex"
)
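Similarly, a hedged sketch of an nginx parser configuration (same imports and assumptions as the CSV example above; the conf path, format name, and schema values here are illustrative, mirroring the defaults listed in ModeKeyOptions below):

// Fragment only; reuses the setup from the CSV example above.
c := conf.MapConf{
	parser.KeyParserName:  "my_nginx_parser",
	parser.NginxConfPath:  "/opt/nginx/conf/nginx.conf", // file that defines the log_format
	parser.NginxLogFormat: "main",                       // which log_format to use
	parser.NginxSchema:    "status long,request_time float",
}
p, err := parser.NewNginxParser(c)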
const (
	KeyParserName = utils.GlobalKeyName
	KeyParserType = "type"
	KeyRunnerName = "runner_name"
	KeyLabels     = "labels" // extra labels to attach, e.g. machine information
)
conf fields
const (
	TypeCSV        = "csv"
	TypeLogv1      = "qiniulog"
	TypeKafkaRest  = "kafkarest"
	TypeRaw        = "raw"
	TypeEmpty      = "empty"
	TypeGrok       = "grok"
	TypeInnerSQL   = "_sql"
	TypeInnerMysql = "_mysql"
	TypeJson       = "json"
	TypeNginx      = "nginx"
)
parser types
const (
	LogHeadPrefix string = "prefix"
	LogHeadDate   string = "date"
	LogHeadTime   string = "time"
	LogHeadReqid  string = "reqid"
	LogHeadLevel  string = "level"
	LogHeadFile   string = "file"
	LogHeadLog    string = "log" // last by default; the order must not be changed
)
const (
	KeyQiniulogPrefix = "qiniulog_prefix" // log prefix of qiniulog
	KeyLogHeaders     = "qiniulog_log_headers"
)
conf fields
const (
	KeyRaw       = "raw"
	KeyTimestamp = "timestamp"
)
const DEFAULT_PATTERNS = `` /* 2684-byte string literal not displayed */
const MaxGrokMultiLineBuffer = 64 * 1024 * 1024 // 64MB
const MaxParserSchemaErrOutput = 5
const (
ModeMulti = "multi"
)
const SECOND_PER_5MIN = 5 * 60
const SECOND_PER_DAY = 24 * 60 * 60
Variables ¶
var (
	HeaderPattern = map[string]string{
		LogHeadDate:  "^[1-9]\\d{3}/[0-1]\\d/[0-3]\\d$",
		LogHeadTime:  "^[0-2]\\d:[0-6]\\d:[0-6]\\d(\\.\\d{6})?$",
		LogHeadReqid: "^\\[\\w+\\]\\[\\w+\\]$",
		LogHeadLevel: "^\\[[A-Z]+\\]$",
		LogHeadFile:  ":\\d+:$",
	}
	CompliedPatterns map[string]*regexp.Regexp
)
var ModeKeyOptions = map[string][]utils.Option{
	TypeNginx: {
		{KeyName: NginxConfPath, ChooseOnly: false, Default: "/opt/nginx/conf/nginx.conf", DefaultNoUse: true, Description: "path to the nginx configuration file"},
		{KeyName: NginxLogFormat, ChooseOnly: false, Default: "main", DefaultNoUse: true, Description: "name of the nginx log format"},
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: NginxSchema, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "types of the nginx fields"},
		{KeyName: NginxFormatRegex, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "parse directly with a regular expression"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
	},
	TypeGrok: {
		{KeyName: KeyGrokPatterns, ChooseOnly: false, Default: "%{COMMON_LOG_FORMAT}", DefaultNoUse: true, Description: "grok expression used to match the log"},
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: KeyGrokMode, ChooseOnly: true, ChooseOptions: []string{"oneline", ModeMulti}, Default: "oneline", DefaultNoUse: false, Description: "grok single-line or multi-line mode"},
		{KeyName: KeyGrokCustomPatternFiles, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "paths of custom grok pattern files"},
		{KeyName: KeyGrokCustomPatterns, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "custom grok pattern string"},
		{KeyName: KeyTimeZoneOffset, ChooseOnly: true, Default: "0", ChooseOptions: []string{"0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9", "-10", "-11", "-12", "1", "2", "3", "4", "5", "6", "7", "8", "9", "11", "12"}, DefaultNoUse: false, Description: "timezone offset"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
	},
	TypeJson: {
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
	},
	TypeCSV: {
		{KeyName: KeyCSVSchema, ChooseOnly: false, Default: "abc string,xyz long,data1 string,data2 float", DefaultNoUse: true, Description: "field types of the csv format"},
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: KeyCSVSplitter, ChooseOnly: false, Default: ",", DefaultNoUse: false, Description: "csv delimiter"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
		{KeyName: KeyTimeZoneOffset, ChooseOnly: true, Default: "0", ChooseOptions: []string{"0", "-1", "-2", "-3", "-4", "-5", "-6", "-7", "-8", "-9", "-10", "-11", "-12", "1", "2", "3", "4", "5", "6", "7", "8", "9", "11", "12"}, DefaultNoUse: false, Description: "timezone offset"},
	},
	TypeRaw: {
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
	},
	TypeLogv1: {
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
		{KeyName: KeyQiniulogPrefix, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "log prefix"},
		{KeyName: KeyLogHeaders, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "field order of the qiniu log format"},
	},
	TypeKafkaRest: {
		{KeyName: KeyParserName, ChooseOnly: false, Default: "parser", DefaultNoUse: false, Description: "parser name"},
		{KeyName: KeyLabels, ChooseOnly: false, Default: "", DefaultNoUse: false, Description: "extra label information"},
	},
	TypeEmpty: {},
}
var ModeUsages = []utils.KeyValue{
	{TypeNginx, "nginx log parsing"},
	{TypeGrok, "grok-style parsing"},
	{TypeJson, "json format parsing"},
	{TypeCSV, "csv format log parsing"},
	{TypeRaw, "raw logs parsed line by line"},
	{TypeLogv1, "qiniulog, the qiniu logging library format"},
	{TypeKafkaRest, "kafkarest log format parsing"},
	{TypeEmpty, "empty, clears data during parsing"},
}
ModeUsages describes the purpose of each parser mode.
var SampleLogs = map[string]string{
TypeNginx: `110.110.101.101 - - [21/Mar/2017:18:14:17 +0800] "GET /files/yyyysx HTTP/1.1" 206 607 1 "-" "Apache-HttpClient/4.4.1 (Java/1.7.0_80)" "-" "122.121.111.222, 122.121.111.333, 192.168.90.61" "192.168.42.54:5000" www.qiniu.com llEAAFgmnoIa3q0U "0.040" 0.040 760 "-" "-" - - QCloud
1.11.1.1 - - [25/Mar/2017:18:14:17 +0800] "GET /files HTTP/1.1" 200 607 1 "-" "Apache-HttpClient/4.4.1 (Java/1.7.0_80)" "-" "122.121.111.222, 122.121.111.333, 192.168.90.61" "192.168.42.54:5000" www.qiniu.com sfvfv123axs "0.040" 0.040 760 "-" "-" - - QCloud`,
TypeGrok: `127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326
123.45.12.1 user-identifier bob [10/Oct/2013:13:55:36 -0700] "GET /hello.gif HTTP/1.0" 200 2326`,
TypeJson: `{"a":"b","c":1,"d":1.1}`,
TypeCSV: `a,123,bcd,1.2
xsxs,456,asv,5.12`,
TypeRaw: `raw log1[05-May-2017 13:44:39] [pool log] pid 4109
script_filename = /data/html/
[0x00007fec119d1720] curl_exec() /data/html/xyframework/base.go:123
[05-May-2017 13:45:39] [pool log] pid 4108`,
TypeLogv1: `2017/08/11 10:24:17 [WARN][github.com/qiniu/logkit/mgr] runner.go:398: Runner[byds] sender pandora.sender.31 closed`,
TypeKafkaRest: `[2016-12-05 03:35:20,682] INFO 172.16.16.191 - - [05/Dec/2016:03:35:20 +0000] "POST /topics/VIP_VvBVy0tuMPPspm1A_0000000000 HTTP/1.1" 200 101640 46 (io.confluent.rest-utils.requests)`,
TypeEmpty: "empty: clears data during parsing",
}
SampleLogs provides sample logs for trying out parsers in the frontend UI.
Functions ¶
func Time1Day ¶
func Time1Day(t int64) string
func Time1DayInt ¶
func Time1DayInt(t int64) int64
func Time5Min ¶
func Time5Min(t int64) string
func Time5MinInt ¶
func Time5MinInt(t int64) int64
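The bodies of these helpers are not shown on this page. As a minimal sketch of the idea, assuming the *Int variants truncate a Unix timestamp (in seconds) to the start of its bucket, a 5-minute bucketing function could be written like this (a hypothetical re-implementation, not the package's actual code):

// secondPerFiveMin mirrors the SECOND_PER_5MIN constant above.
const secondPerFiveMin = 5 * 60

// fiveMinBucket truncates a Unix timestamp in seconds to the start of its 5-minute bucket.
func fiveMinBucket(t int64) int64 {
	return t - t%secondPerFiveMin
}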
Types ¶
type EmptyParser ¶
type EmptyParser struct {
// contains filtered or unexported fields
}
func (*EmptyParser) Name ¶
func (p *EmptyParser) Name() string
type GrokParser ¶
type GrokParser struct {
	Patterns           []string // names of the official patterns to use
	CustomPatterns     string
	CustomPatternFiles []string
	// contains filtered or unexported fields
}
func (*GrokParser) Name ¶
func (gp *GrokParser) Name() string
type JsonParser ¶
type JsonParser struct {
// contains filtered or unexported fields
}
func (*JsonParser) Name ¶
func (im *JsonParser) Name() string
type KafaRestlogParser ¶
type KafaRestlogParser struct {
// contains filtered or unexported fields
}
func (*KafaRestlogParser) Name ¶
func (krp *KafaRestlogParser) Name() string
func (*KafaRestlogParser) Parse ¶
func (krp *KafaRestlogParser) Parse(lines []string) ([]sender.Data, error)
func (*KafaRestlogParser) ParseCode ¶
func (krp *KafaRestlogParser) ParseCode(fields []string) int
func (*KafaRestlogParser) ParseDuration ¶
func (krp *KafaRestlogParser) ParseDuration(fields []string) int
func (*KafaRestlogParser) ParseIp ¶
func (krp *KafaRestlogParser) ParseIp(fields []string) string
func (*KafaRestlogParser) ParseLogTime ¶
func (krp *KafaRestlogParser) ParseLogTime(fields []string) int64
func (*KafaRestlogParser) ParseMethod ¶
func (krp *KafaRestlogParser) ParseMethod(fields []string) string
func (*KafaRestlogParser) ParseRespCL ¶
func (krp *KafaRestlogParser) ParseRespCL(fields []string) int
func (*KafaRestlogParser) ParseTopic ¶
func (krp *KafaRestlogParser) ParseTopic(fields []string) string
type LogParser ¶
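The interface body is not rendered on this page. Based on the constructors listed above, which all return LogParser, and on the methods of the concrete parsers documented here, it presumably looks roughly like the following; this is an assumption, not the verbatim definition:

// Assumed shape of the LogParser interface.
type LogParser interface {
	Name() string
	Parse(lines []string) ([]sender.Data, error)
}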
type NginxParser ¶ added in v1.2.1
type NginxParser struct {
// contains filtered or unexported fields
}
func NewNginxAccParser ¶ added in v1.2.1
func NewNginxAccParser(c conf.MapConf) (p *NginxParser, err error)
func (*NginxParser) Name ¶ added in v1.2.1
func (p *NginxParser) Name() string
type ParserRegistry ¶
type ParserRegistry struct {
// contains filtered or unexported fields
}
func NewParserRegistry ¶
func NewParserRegistry() *ParserRegistry
func (*ParserRegistry) NewLogParser ¶
func (ps *ParserRegistry) NewLogParser(conf conf.MapConf) (p LogParser, err error)
func (*ParserRegistry) RegisterParser ¶
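A hedged usage sketch for the registry (same assumptions as the earlier examples: the github.com/qiniu/logkit/parser import path, conf.MapConf as a string map, and a Parse method on LogParser as sketched above): build a registry, select a parser type through the generic conf keys, and feed it one of the SampleLogs.

package main

import (
	"fmt"

	"github.com/qiniu/logkit/conf"
	"github.com/qiniu/logkit/parser"
)

func main() {
	registry := parser.NewParserRegistry()

	c := conf.MapConf{
		parser.KeyParserType: parser.TypeJson, // selects the json parser
		parser.KeyParserName: "sample_json_parser",
	}
	p, err := registry.NewLogParser(c)
	if err != nil {
		panic(err)
	}

	// Parse one of the sample logs shipped with the package.
	datas, err := p.Parse([]string{parser.SampleLogs[parser.TypeJson]})
	if err != nil {
		panic(err)
	}
	fmt.Println(p.Name(), datas)
}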
type QiniulogParser ¶
type QiniulogParser struct {
// contains filtered or unexported fields
}
func (*QiniulogParser) Name ¶
func (p *QiniulogParser) Name() string
type RawlogParser ¶
type RawlogParser struct {
// contains filtered or unexported fields
}
func (*RawlogParser) Name ¶
func (p *RawlogParser) Name() string