filebeat(8.9.0)采集日志到logstash,由logstash发送到es

filebeat采集日志到logstash,由logstash发送到es

  • 下载并配置filebeat
    • 下载
    • 配置
    • logback.xml
  • logstash配置

下载并配置filebeat

下载

参考
filebeat(8.9.0)采集日志到logstash,由logstash发送的es_第1张图片

配置

filebeat.inputs:
- type: filestream
  enabled: true
  paths:
    # Log file location: one level of app sub-directories, *.log inside each
    - D:\modellog\elkdemo\*\*.log
  parsers:
  # Merge multi-line log entries (e.g. Java exception stack traces)
  - multiline:
      type: pattern
      # A new event starts with '['; any line NOT matching is appended
      # to the previous event (negate: true + match: after)
      pattern: '^\['
      negate: true
      match: after

# Split the custom log line into fields (written under the "dissect.*" prefix)
# NOTE(review): the tokenizer expects -parentTraceId -traceId right after the
# timestamp, but the logback FILE_LOG_PATTERN below does not emit those two
# fields — confirm which side is authoritative.
processors:
- dissect:
    tokenizer: '[%{timestamp}] -%{parentTraceId} -%{traceId} -%{host} -%{port} -%{appName} -%{pid} -[%{level}] -[%{thread}] -%{class} -%{line} - %{message}'
    field: "message"

# Ship events to logstash
# NOTE(review): the logstash input below listens on 5044; 21069 is presumably
# a docker port mapping onto 5044 — verify against the compose file.
output.logstash:
  enabled: true
  hosts: ["192.168.0.100:21069"]

logback.xml


<configuration>

    <!-- Spring Boot converters: colored console output and compact stack traces -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
    <conversionRule conversionWord="wex"
                    converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
    <conversionRule conversionWord="wEx"
                    converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>

    <!-- hostAddress comes from a project-local property definer; appName/port from Spring config -->
    <define name="hostAddress" class="top.mingempty.commons.definer.IPLogDefiner"/>
    <springProperty scope="context" name="appName" source="spring.application.name"/>
    <springProperty scope="context" name="port" source="server.port"/>

    <!-- Console pattern (human-readable, colored) -->
    <property name="CONSOLE_LOG_PATTERN"
              value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint}  %clr([${LOG_LEVEL_PATTERN:-%5p}]) %clr(${PID:- }){magenta} %clr(-){faint} %clr([%t]){faint} %clr(%logger){cyan} %clr(:){magenta} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>

    <!-- File pattern: must stay in sync with the filebeat dissect tokenizer.
         NOTE(review): the filebeat tokenizer expects -parentTraceId -traceId
         after the timestamp; this pattern does not emit them — confirm. -->
    <property name="FILE_LOG_PATTERN"
              value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] -${hostAddress} -${port} -${appName} -${PID:-} -[%-5level] -[%thread] -%logger -%-3L - %msg%n"/>

    <contextName>fanxlxs</contextName>

    <!-- Base directory watched by filebeat -->
    <property name="LOG_PATH" value="/modellog"/>
    <property name="APPDIR" value="/filebeat"/>

    <!-- ERROR-only rolling file appender -->
    <appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Roll daily and by size; %i is the size-based index within a day -->
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/error-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <maxFileSize>10MB</maxFileSize>
        </rollingPolicy>
        <append>true</append>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <!-- Accept exactly ERROR, drop everything else -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>error</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- WARN-only rolling file appender -->
    <appender name="WARN" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/warn-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <maxFileSize>10MB</maxFileSize>
        </rollingPolicy>
        <append>true</append>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>warn</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- INFO-only rolling file appender -->
    <appender name="INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/info-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <maxFileSize>10MB</maxFileSize>
        </rollingPolicy>
        <append>true</append>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>info</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- DEBUG-only rolling file appender (root level INFO means this file
         stays empty unless a logger is lowered to DEBUG) -->
    <appender name="DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/debug-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <maxFileSize>10MB</maxFileSize>
        </rollingPolicy>
        <append>true</append>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>debug</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- Console appender -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>debug</level>
        </filter>
    </appender>

    <!-- Quiet noisy frameworks -->
    <logger name="org.springframework" level="WARN"/>
    <logger name="com.alibaba.nacos" level="WARN"/>
    <logger name="top.mingempty.metadata" level="INFO"/>

    <root level="INFO">
        <appender-ref ref="ERROR"/>
        <appender-ref ref="WARN"/>
        <appender-ref ref="INFO"/>
        <appender-ref ref="DEBUG"/>
        <appender-ref ref="STDOUT"/>
    </root>

</configuration>

logstash配置

# Receive events from filebeat (beats protocol).
# NOTE(review): filebeat above ships to port 21069 — presumably a docker port
# mapping onto 5044; confirm against the compose file.
input {
  beats  {
    port => 5044
  }
}

filter {
  # NOTE(review): filebeat's dissect processor already extracts these fields
  # (renamed from [dissect][*] below), so this grok re-parses the same line.
  # Also note the pattern omits the parentTraceId/traceId fields that the
  # filebeat tokenizer emits — confirm which format is authoritative.
  grok {
    ## Log line format (must correspond to the logback FILE_LOG_PATTERN)
    match => { "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\] -%{IP:host} -%{NUMBER:port} -%{DATA:appName} -%{NUMBER:pid} -\[%{DATA:level}\] -\[%{DATA:thread}\] -%{DATA:class} -%{NUMBER:line} - %{GREEDYDATA:message}" }
  }

  # Flatten the filebeat dissect output to top-level fields; @timestamp is
  # dropped here and re-created by the date filter below from the log line.
  mutate {
    remove_field => [ "[@timestamp]"]
    rename => {
        "[dissect][parentTraceId]" => "parentTraceId"
        "[dissect][traceId]" => "traceId"
        "[dissect][host]" => "host"
        "[dissect][port]" => "port"
        "[dissect][appName]" => "appName"
        "[dissect][pid]" => "pid"
        "[dissect][level]" => "level"
        "[dissect][thread]" => "thread"
        "[dissect][class]" => "class"
        "[dissect][line]" => "line"
        "[dissect][message]" => "message"
        "[dissect][timestamp]" => "timestamp"
        "[log][file][path]" => "path"
      }
  }

  date {
    match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSS" ] # format of the timestamp inside the log line
    target => "@timestamp" # store the parsed time into the @timestamp field
    timezone => "Asia/Shanghai" # China Standard Time
  }

  # Drop beats/ECS bookkeeping fields that are no longer needed
  mutate {
    remove_field => [ "timestamp","event", "agent", "ecs", "input", "tags", "flags", "log", "dissect" ,"@version"]
  }
}


output {
  elasticsearch {
    hosts => ["https://172.27.0.2:9200"]
    index => "beats3-mingempty-top-%{+YYYY.MM.dd}"
    # ES has x-pack security enabled; the two values below are placeholders
    user => "帐号"
    password => "密码"
    ssl_enabled => true
    ssl_certificate_authorities => ["/usr/share/logstash/config/logstash.pem"]
  }
}

docker-compose部署elk(8.9.0)并开启ssl认证

你可能感兴趣的:(#,elastic,elasticsearch,filebeat,logstash)