功能实现
1.将error以下级别添加到info.log文件中
2.将error及以上添加到error.log文件中
3.将日志写入到kafka服务中
4.按指定时间段记录日志
下载:
go get -u go.uber.org/zap
go get gopkg.in/Shopify/sarama.v1
go get github.com/robfig/cron
cron相关内容可参考:https://blog.csdn.net/skh2015java/article/details/78223951
示例如下:
// main demonstrates the cron-rotated, Kafka-mirrored zap logger:
// it builds a logger and emits debug/info/error records in a slow loop
// so that the rotation schedules can be observed.
func main() {
	logger, err := newLogger("D:/GoProject/src/zap_study/prod/", "test", "0 */2 * * * *", "0 */5 * * * *")
	if err != nil {
		log.Fatal(err)
	}
	for id := 0; id < 100; id++ {
		logger.Debug("i am debug", zap.String("key", "debug"), zap.Int("id", id))
		logger.Info("i am info", zap.String("key", "info"), zap.Int("id", id))
		logger.Error("i am error", zap.String("key", "error"), zap.Int("id", id))
		time.Sleep(10 * time.Second)
	}
}
// newKafkaProducer builds a synchronous Kafka producer bound to the given
// topic, connected to the single broker at addr. It waits for the local
// broker to acknowledge each message (WaitForLocal).
func newKafkaProducer(topic, addr string) (*kafkaProducer, error) {
	cfg := sarama.NewConfig()
	cfg.Producer.RequiredAcks = sarama.WaitForLocal
	// SyncProducer requires Return.Successes to be enabled.
	cfg.Producer.Return.Successes = true

	producer, err := sarama.NewSyncProducer([]string{addr}, cfg)
	if err != nil {
		return nil, err
	}
	kp := &kafkaProducer{topic: topic, syncProducer: producer}
	return kp, nil
}
// kafkaProducer adapts a sarama.SyncProducer to the io.Writer interface so
// zap can use Kafka as a log sink: each Write publishes one message to topic.
type kafkaProducer struct {
topic string // Kafka topic every log record is published to
syncProducer sarama.SyncProducer
}
// Write implements io.Writer by publishing p as a single Kafka message on
// kp.topic. On success it reports the full length of p so zap considers the
// entry fully written.
//
// The previous version both logged and returned the error (double
// reporting) and printed every payload to stdout on the hot path; the
// error is now returned once to the caller and the debug print is removed.
func (kp *kafkaProducer) Write(p []byte) (n int, err error) {
	msg := &sarama.ProducerMessage{
		Topic: kp.topic,
		Value: sarama.ByteEncoder(p),
	}
	if _, _, err = kp.syncProducer.SendMessage(msg); err != nil {
		return 0, err
	}
	return len(p), nil
}
// Close shuts down the underlying sync producer. The p parameter is unused
// and is kept only to preserve the existing call signature.
// NOTE(review): consider changing the signature to Close() error so the
// type satisfies io.Closer — all call sites must be updated together.
func (kp *kafkaProducer) Close(p []byte) {
	// Report a failed shutdown instead of silently dropping the error,
	// which the previous version did.
	if err := kp.syncProducer.Close(); err != nil {
		log.Println(err)
	}
}
// newLogger wires up the full logging pipeline: error-and-above records go
// to a rotating error file, below-error records to a rotating info file,
// and all records are mirrored to Kafka.
//
// dir is the directory log files are created in, prefix the file-name
// prefix, and infoCron/errorCron are cron specs controlling when the info
// and error files roll over to freshly timestamped files.
func newLogger(dir, prefix, infoCron, errorCron string) (*zap.Logger, error) {
	c := cron.New()
	cronLogger := &autoDailyLoger{
		dir:       dir,
		prefix:    prefix,
		infoFile:  &fileWrite{},
		errorFile: &fileWrite{},
		c:         c,
	}
	// Open the initial files before the first cron tick fires.
	cronLogger.changeInfoWiter()
	cronLogger.changeErrorWiter()
	// AddFunc errors were previously ignored: a bad cron spec silently
	// disabled rotation forever. Fail fast instead.
	if err := c.AddFunc(infoCron, cronLogger.changeInfoWiter); err != nil {
		return nil, err
	}
	if err := c.AddFunc(errorCron, cronLogger.changeErrorWiter); err != nil {
		return nil, err
	}
	c.Start()
	return cronLogger.getLoggerWithCron()
}
// fileWrite is a swappable file-backed sink: the cron rotation jobs replace
// the inner *os.File while zap keeps writing through the same fileWrite
// pointer, so the logger never needs to be rebuilt on rotation.
// NOTE(review): file is reassigned by the cron goroutine while zap's
// goroutine calls Write — that is a data race; guard file with a mutex or
// swap it atomically. Confirm with `go test -race`.
type fileWrite struct {
file *os.File
}
// Write forwards p to the currently open log file. A nil inner file is
// tolerated: os.File methods return ErrInvalid on a nil receiver rather
// than panicking.
func (f *fileWrite) Write(p []byte) (n int, err error) {
	n, err = f.file.Write(p)
	return
}
// Close closes the currently open log file and reports any error from the
// underlying os.File.
func (f *fileWrite) Close() error {
	err := f.file.Close()
	return err
}
// autoDailyLoger owns the two rotating file sinks and the cron scheduler
// that triggers their rotation.
// NOTE(review): the name is misspelled ("Loger" → "Logger"); renaming would
// touch every method and call site, so it is left as-is in this pass.
type autoDailyLoger struct {
dir string // directory log files are created in
prefix string // file-name prefix, e.g. "test" -> test_info_*.log
infoFile *fileWrite // sink for records below error level
errorFile *fileWrite // sink for error-and-above records
c *cron.Cron // scheduler driving changeInfoWiter/changeErrorWiter
}
// getLoggerWithCron assembles the zap.Logger: error-and-above records go to
// the error file, below-error records to the info file, and every record is
// mirrored to the Kafka topic "klog" on localhost:9092.
func (l *autoDailyLoger) getLoggerWithCron() (*zap.Logger, error) {
	kp, err := newKafkaProducer("klog", "localhost:9092")
	if err != nil {
		// Previously log.Fatal, which killed the whole process from a
		// constructor that already returns an error; propagate instead
		// and let the caller decide.
		return nil, err
	}
	kws := zapcore.AddSync(kp)

	// Error and above -> error file.
	highPriority := zap.LevelEnablerFunc(func(lev zapcore.Level) bool {
		return lev >= zap.ErrorLevel
	})
	// Below error (down to debug) -> info file.
	lowPriority := zap.LevelEnablerFunc(func(lev zapcore.Level) bool {
		return lev < zap.ErrorLevel && lev >= zap.DebugLevel
	})
	// Everything -> Kafka.
	kafkaPriority := zap.LevelEnablerFunc(func(lev zapcore.Level) bool {
		return lev >= zap.DebugLevel
	})

	prodEncoder := zap.NewProductionEncoderConfig()
	prodEncoder.EncodeTime = zapcore.ISO8601TimeEncoder

	highCore := zapcore.NewCore(zapcore.NewJSONEncoder(prodEncoder), zapcore.AddSync(l.errorFile), highPriority)
	lowCore := zapcore.NewCore(zapcore.NewJSONEncoder(prodEncoder), zapcore.AddSync(l.infoFile), lowPriority)
	kafkaCore := zapcore.NewCore(zapcore.NewJSONEncoder(prodEncoder), kws, kafkaPriority)
	return zap.New(zapcore.NewTee(highCore, lowCore, kafkaCore)), nil
}
// changeInfoWiter rotates the info log: it opens a fresh timestamped file
// and swaps it in, closing the previous one. Invoked once at startup and
// then by the cron schedule.
//
// The previous version closed the old file BEFORE checking whether the new
// open succeeded, so a failed open both silently discarded the error and
// left the sink pointing at a closed file. Now the old file is kept on
// failure and the error is reported.
func (l *autoDailyLoger) changeInfoWiter() {
	infoOutput, err := os.OpenFile(getPath(filepath.Join(l.dir, l.prefix+"_info")), os.O_CREATE|os.O_RDWR|os.O_APPEND, 0766)
	if err != nil {
		log.Println("rotate info log:", err)
		return
	}
	if l.infoFile.file != nil {
		l.infoFile.file.Close()
	}
	l.infoFile.file = infoOutput
}
// changeErrorWiter rotates the error log: it opens a fresh timestamped file
// and swaps it in, closing the previous one. Invoked once at startup and
// then by the cron schedule.
//
// Like changeInfoWiter, the old file used to be closed before the new
// open's error was checked, losing both the error and a usable sink; the
// swap now happens only after a successful open.
func (l *autoDailyLoger) changeErrorWiter() {
	errOutput, err := os.OpenFile(getPath(filepath.Join(l.dir, l.prefix+"_error")), os.O_CREATE|os.O_RDWR|os.O_APPEND, 0766)
	if err != nil {
		log.Println("rotate error log:", err)
		return
	}
	if l.errorFile.file != nil {
		l.errorFile.file.Close()
	}
	l.errorFile.file = errOutput
}
func getPath(path string) string {
return path + "_" + time.Now().Format("200601021504") +".log"
}