Documentation ¶
Overview ¶
This package provides building blocks for collecting, transporting and persisting application logs: a Collector with a logrus hook that forwards entries to queue producers (Kafka-backed), a Logstash-oriented Kafka front end with a JSON formatter for logrus, a GORM logger adapter, and queue consumer/producer plus SQL writer components for processing and storing the collected records.
Index ¶
- func LogrusWithFields(logger *logrus.Logger, endTime, beginTime time.Time, url, src string, ...) *logrus.Entry
- func NewLoggerGorm(slowThreshold time.Duration) logger.Interface
- type Collector
- type Hook
- type Log
- type LogLevel
- type LoggerGorm
- func (l LoggerGorm) Error(ctx context.Context, s string, i ...interface{})
- func (l LoggerGorm) Info(ctx context.Context, s string, i ...interface{})
- func (l *LoggerGorm) LogMode(level logger.LogLevel) logger.Interface
- func (l LoggerGorm) Trace(ctx context.Context, begin time.Time, fc func() (string, int64), err error)
- func (l LoggerGorm) Warn(ctx context.Context, s string, i ...interface{})
- type LogrusHook
- type LogstashFrontKafka
- type LogstashFrontKafkaConfig
- type Processor
- type QueueConsumer
- type QueueConsumerKafka
- type QueueProducer
- type QueueProducerKafka
- type Writer
- type WriterSQL
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func LogrusWithFields ¶
Types ¶
type Collector ¶
type Collector struct {
// contains filtered or unexported fields
}
Collector is the log collection type.
func NewCollector ¶
func NewCollector() *Collector
func (*Collector) AddReceiver ¶
func (c *Collector) AddReceiver(receiver QueueProducer)
func (*Collector) GetLogrusHook ¶
func (c *Collector) GetLogrusHook() *LogrusHook
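A minimal sketch of wiring a Collector into logrus, based only on the signatures above. The import path example.com/zr/log is a placeholder for this module's real path, and it is assumed that the *LogrusHook returned by GetLogrusHook satisfies logrus.Hook:

package main

import (
	"github.com/sirupsen/logrus"

	zrlog "example.com/zr/log" // placeholder: replace with this module's real import path
)

func main() {
	logger := logrus.New()

	// Collect entries and forward them to a Kafka-backed producer.
	c := zrlog.NewCollector()
	c.AddReceiver(zrlog.NewReceiverKafka([]string{"127.0.0.1:9092"}, "app-log"))

	// Assumption: *LogrusHook implements the full logrus.Hook interface.
	logger.AddHook(c.GetLogrusHook())

	logger.WithField("trace", "demo").Info("hello from the collector")
}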
type Log ¶
type Log struct {
	Time      time.Time `gorm:"index:idx_log_time"`
	Trace     string    `gorm:"type:varchar(200);index:idx_log_name"`
	Level     LogLevel  `gorm:"index:idx_log_level"`
	Code      int       `gorm:"type:int;index:idx_log_code"`
	OptUserId int
	ExecTime  int
	Msg       string `gorm:"type:varchar(2000)"`
	// contains filtered or unexported fields
}
type LoggerGorm ¶ added in v1.3.3
type LoggerGorm struct {
	LogLevel      logger.LogLevel
	SlowThreshold time.Duration
	// contains filtered or unexported fields
}
func (LoggerGorm) Error ¶ added in v1.3.3
func (l LoggerGorm) Error(ctx context.Context, s string, i ...interface{})
func (LoggerGorm) Info ¶ added in v1.3.3
func (l LoggerGorm) Info(ctx context.Context, s string, i ...interface{})
func (*LoggerGorm) LogMode ¶ added in v1.3.3
func (l *LoggerGorm) LogMode(level logger.LogLevel) logger.Interface
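Since NewLoggerGorm returns a logger.Interface, it can be plugged directly into gorm.Config. A sketch, again using a placeholder import path for this package:

package main

import (
	"time"

	"gorm.io/driver/mysql"
	"gorm.io/gorm"

	zrlog "example.com/zr/log" // placeholder: replace with this module's real import path
)

func main() {
	// Queries slower than 200ms are reported as slow queries.
	gormLogger := zrlog.NewLoggerGorm(200 * time.Millisecond)

	dsn := "user:password@tcp(127.0.0.1:3306)/dbname?charset=utf8mb4&parseTime=True&loc=Local"
	db, err := gorm.Open(mysql.Open(dsn), &gorm.Config{Logger: gormLogger})
	if err != nil {
		panic(err)
	}
	_ = db
}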
type LogrusHook ¶
type LogrusHook struct {
// contains filtered or unexported fields
}
func (LogrusHook) Levels ¶
func (l LogrusHook) Levels() []logrus.Level
type LogstashFrontKafka ¶ added in v1.1.0
func NewLogstashFrontKafka ¶ added in v1.1.0
func NewLogstashFrontKafka(config *LogstashFrontKafkaConfig) *LogstashFrontKafka
func (*LogstashFrontKafka) GetLogrusFormatter ¶ added in v1.1.0
func (l *LogstashFrontKafka) GetLogrusFormatter() *logrus.JSONFormatter
func (*LogstashFrontKafka) GetLogrusHook ¶ added in v1.1.0
func (l *LogstashFrontKafka) GetLogrusHook() Hook
func (*LogstashFrontKafka) RestartKafka ¶ added in v1.2.1
func (l *LogstashFrontKafka) RestartKafka() (err error)
func (*LogstashFrontKafka) SetKafkaAddrs ¶ added in v1.2.1
func (l *LogstashFrontKafka) SetKafkaAddrs(kafkaAddrs []string)
type LogstashFrontKafkaConfig ¶ added in v1.1.5
type LogstashFrontKafkaConfig struct {
	// Application name, used to create the Elasticsearch index.
	AppName string
	// Path where log records are stored when the Kafka connection fails.
	FileLogPath string
	// Preset prefix for the trace field.
	TracePrefix string
	KafkaAddrs  []string
}
func NewLogstashFrontKafkaConfig ¶ added in v1.1.5
func NewLogstashFrontKafkaConfig() *LogstashFrontKafkaConfig
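A sketch of setting up the Logstash-oriented Kafka front end for logrus, using only the exported config fields above. The import path is a placeholder, and it is assumed that the Hook returned by GetLogrusHook can be registered with logrus.AddHook:

package main

import (
	"github.com/sirupsen/logrus"

	zrlog "example.com/zr/log" // placeholder: replace with this module's real import path
)

func main() {
	cfg := zrlog.NewLogstashFrontKafkaConfig()
	cfg.AppName = "demo-app" // used to create the Elasticsearch index
	cfg.KafkaAddrs = []string{"127.0.0.1:9092"}
	cfg.FileLogPath = "/var/log/demo-app-kafka-fallback" // fallback storage when Kafka is unreachable
	cfg.TracePrefix = "demo"

	front := zrlog.NewLogstashFrontKafka(cfg)

	logger := logrus.New()
	logger.SetFormatter(front.GetLogrusFormatter())
	// Assumption: the returned Hook satisfies logrus.Hook.
	logger.AddHook(front.GetLogrusHook())

	logger.WithField("trace", "demo-startup").Info("service started")
}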
type Processor ¶ added in v1.0.1
type Processor struct {
// contains filtered or unexported fields
}
Processor is the log processing type.
func NewProcessor ¶ added in v1.0.1
func NewProcessor(consumer QueueConsumer) *Processor
type QueueConsumer ¶ added in v1.0.1
type QueueConsumerKafka ¶ added in v1.0.1
type QueueConsumerKafka struct {
// contains filtered or unexported fields
}
func NewQueueConsumerKafka ¶ added in v1.0.1
func NewQueueConsumerKafka(addrs []string, topic string) (q *QueueConsumerKafka, err error)
func (*QueueConsumerKafka) Get ¶ added in v1.0.1
func (q *QueueConsumerKafka) Get() (log *Log, err error)
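On the consuming side, a Kafka consumer can either be handed to a Processor or polled directly with Get. How a Processor is driven after construction is not visible from the exported API, so this sketch (placeholder import path) only shows the wiring:

package main

import (
	"log"

	zrlog "example.com/zr/log" // placeholder: replace with this module's real import path
)

func main() {
	consumer, err := zrlog.NewQueueConsumerKafka([]string{"127.0.0.1:9092"}, "app-log")
	if err != nil {
		log.Fatal(err)
	}

	// Option 1: hand the consumer to a Processor (running it is not shown
	// by the exported API listed here).
	_ = zrlog.NewProcessor(consumer)

	// Option 2: poll records manually.
	record, err := consumer.Get()
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("level=%v code=%d msg=%s", record.Level, record.Code, record.Msg)
}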
type QueueProducer ¶ added in v1.0.1
type QueueProducerKafka ¶ added in v1.0.1
type QueueProducerKafka struct {
// contains filtered or unexported fields
}
func NewReceiverKafka ¶
func NewReceiverKafka(addrs []string, topic string) *QueueProducerKafka
func (*QueueProducerKafka) Receive ¶ added in v1.0.1
func (l *QueueProducerKafka) Receive(logRecord *Log) (err error)
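Log records can also be pushed onto the queue directly through a producer. A sketch with a placeholder import path; only exported Log fields are set:

package main

import (
	"log"
	"time"

	zrlog "example.com/zr/log" // placeholder: replace with this module's real import path
)

func main() {
	producer := zrlog.NewReceiverKafka([]string{"127.0.0.1:9092"}, "app-log")

	// Level is left at its zero value because the LogLevel constants are
	// not documented in this listing.
	rec := &zrlog.Log{
		Time:     time.Now(),
		Trace:    "demo-trace",
		Code:     200,
		ExecTime: 15,
		Msg:      "manually produced log record",
	}
	if err := producer.Receive(rec); err != nil {
		log.Fatal(err)
	}
}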
type WriterSQL ¶ added in v1.0.1
type WriterSQL struct {
// contains filtered or unexported fields
}
func NewWriterSQL ¶ added in v1.0.1
Open initializes a new db connection; the database driver needs to be imported first, e.g.:

import _ "github.com/go-sql-driver/mysql"

func main() {
	db, err := gorm.Open("mysql", "user:password@/dbname?charset=utf8&parseTime=True&loc=Local")
}

GORM wraps some drivers to make their import paths easier to remember, so the mysql driver can be imported with:

import _ "github.com/jinzhu/gorm/dialects/mysql"
// import _ "gorm.io/gorm/postgres"
// import _ "gorm.io/gorm/sqlite"
// import _ "gorm.io/gorm/mssql"
func (*WriterSQL) SetLogAliveTime ¶ added in v1.0.8
Source Files ¶
Directories ¶