ELK: Filebeat Configuration for Collecting Multiple Logs of Different Types

Without further ado, here is the configuration.

1. Filebeat configuration:

filebeat.inputs:
    - type: log
      enabled: true
      paths:
        - /opt/mall/logs/*/catalina.log
      fields:
        server: server1
        filetype: logfile  # the key part: a custom field used to tell the two log types apart
      fields_under_root: true  # put the custom fields at the top level so Logstash can reference them directly

    - type: log
      enabled: true
      paths:
        - /opt/mall/testlogs/test.log
      fields:
        server: server1
        filetype: logjson  # the key part: a custom field used to tell the two log types apart
      fields_under_root: true
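
The post does not show Filebeat's output section. A minimal sketch, assuming Filebeat ships events to Logstash over the default Beats port 5044 (the host and port here are placeholders, not taken from the post):

output.logstash:
  hosts: ["192.168.19.133:5044"]  # placeholder: replace with your actual Logstash host:port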

2. Logstash configuration:

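The post likewise omits the Logstash input section. A minimal sketch, assuming the Beats input plugin on the default port 5044:

input {
    beats {
        port => 5044  # must match the port configured in Filebeat's output.logstash
    }
}
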
filter {
    if [filetype] == "logjson" { # matches the custom filetype field set in the Filebeat config
        grok {
            # extract the timestamp from the raw line into a field named logdate
            match => ["message", "%{TIMESTAMP_ISO8601:logdate}"]
        }
        date {
            # parse the logdate field captured above, using the pattern yyyy-MM-dd HH:mm:ss.SSS
            match => ["logdate", "yyyy-MM-dd HH:mm:ss.SSS"]
            # write the parsed time into @timestamp
            target => "@timestamp"
            remove_field => ["logdate"]
        }
        json {
            source => "message"
            remove_field => ["host","@version","ecs","agent","tags"]
        }
    } else {
        grok {
            # extract the timestamp from the raw line into a field named logdate
            match => ["message", "%{TIMESTAMP_ISO8601:logdate}"]
        }
        date {
            # parse the logdate field captured above, using the pattern yyyy-MM-dd HH:mm:ss.SSS
            match => ["logdate", "yyyy-MM-dd HH:mm:ss.SSS"]
            # write the parsed time into @timestamp
            target => "@timestamp"
            remove_field => ["logdate"]
        }
        mutate {
            remove_field => ["host","@version","ecs","agent","tags"]
        }
    }
}
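
For reference, here are hypothetical log lines (not taken from the post) that the two branches would handle. The plain-text branch expects a line that starts with an ISO8601 timestamp; the logjson branch additionally parses the whole message body as JSON, so it expects a JSON object that also contains such a timestamp:

2024-01-15 10:23:45.123 INFO  Server startup in 5320 ms
{"time": "2024-01-15 10:23:45.123", "level": "INFO", "event": "track"}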

output {
    if [filetype] == "logjson" { # matches the custom filetype field set in the Filebeat config
        elasticsearch {
            hosts => ["192.168.19.133:9200"]
            index => "malltrackdev"
        }
    } else {
        elasticsearch {
            hosts => ["192.168.19.133:9200"]
            index => "openaccount"
        }
    }
}
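
To confirm that documents are arriving in both indices, you can query Elasticsearch directly; the command below is only an illustration, reusing the host from the config above:

curl -s "http://192.168.19.133:9200/_cat/indices/malltrackdev,openaccount?v"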
