logback输出json格式日志(包括mdc)发送到kafka

1,pom.xml

               
        <dependency>
            <groupId>com.github.danielwegener</groupId>
            <artifactId>logback-kafka-appender</artifactId>
            <version>0.2.0-RC1</version>
            <scope>runtime</scope>
        </dependency>

        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.2.3</version>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>5.0</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-access</artifactId>
        </dependency>
                    

2, spring-logback.xml

<?xml version="1.0" encoding="UTF-8"?>

<configuration debug="false" scan="true" scanPeriod="600000">

    <property name="LOG_HOME" value="/var/log" />
    <contextName>${HOSTNAME}</contextName>
    <springProperty scope="context" name="appName"
        source="spring.application.name" />
    <springProperty scope="context" name="ip"
        source="spring.cloud.client.ipAddress" />

    <property name="CONSOLE_LOG_PATTERN"
        value="[%d{yyyy-MM-dd HH:mm:ss.SSS} ${ip} ${appName} %highlight(%-5level) %yellow(%X{X-B3-TraceId}),%green(%X{X-B3-SpanId}),%blue(%X{X-B3-ParentSpanId}) %yellow(%thread) %green(%logger) %msg%n" />


    


    
    
    
    
    

    
    <!-- NOTE(review): the logger element takes a logger/package name; the
         "logging.level." prefix belongs to Spring property keys, not here. -->
    <logger name="com.italktv.platform" level="info" />

    
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>debug</level>
        </filter>
    </appender>

    
    <appender name="FILE"
        class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_HOME}/bigdata/data-api.log</file>
        <rollingPolicy
            class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_HOME}/bigdata/data-api.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <MaxHistory>30</MaxHistory>
            <maxFileSize>1MB</maxFileSize>
            <totalSizeCap>10MB</totalSizeCap>
        </rollingPolicy>
        <encoder
            class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        "level": "%level",
                        "trace": "%X{X-B3-TraceId:-}",
                        "requestId": "%X{requestId}",
                        "remoteIp": "%X{remoteIp}",
                        "span": "%X{X-B3-SpanId:-}",
                        "parent": "%X{X-B3-ParentSpanId:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "message": "%message",
                        "stack_trace": "%exception{10}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>
    
        
    

    
    <appender name="kafkaAppender"
        class="com.github.danielwegener.logback.kafka.KafkaAppender">

        <encoder charset="UTF-8"
            class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">

            <providers>
                <mdc />
                <context />
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        "level": "%level",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "parent": "%X{X-B3-ParentSpanId:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "message": "%message",
                        "stack_trace": "%exception{10}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>

        <topic>tv_server_logstash_log</topic>
        <keyingStrategy
            class="com.github.danielwegener.logback.kafka.keying.HostNameKeyingStrategy" />
        <deliveryStrategy
            class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy" />
        <producerConfig>bootstrap.servers=127.0.0.1:9092</producerConfig>
        <producerConfig>acks=0</producerConfig>
        <producerConfig>linger.ms=1000</producerConfig>
        <producerConfig>block.on.buffer.full=false</producerConfig>
        <!-- fallback appender used when kafka delivery fails -->
        <appender-ref ref="STDOUT" />
    </appender>



    <appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <appender-ref ref="kafkaAppender" />
    </appender>

    <root level="INFO">
        <appender-ref ref="STDOUT" />
        <appender-ref ref="ASYNC" />
    </root>

</configuration>

3, 添加一个mdc在logback

import java.util.UUID;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;


@Component
public class LogInterceptor implements HandlerInterceptor {

    /** MDC key under which the per-request correlation id is stored. */
    private static final String REQUEST_ID = "requestId";
    /** MDC key under which the caller's address is stored. */
    private static final String REMOTE_IP = "remoteIp";
    private static final Logger LOGGER = LoggerFactory.getLogger(LogInterceptor.class);

    /**
     * Seeds the MDC with a fresh request id and the client address before the
     * handler runs, so every log line emitted while serving this request can
     * reference them (e.g. as %X{requestId} / %X{remoteIp} in logback).
     *
     * @return always {@code true} — processing continues to the handler
     */
    @Override
    public boolean preHandle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o) throws Exception {
        String xForwardedForHeader = httpServletRequest.getHeader("X-Forwarded-For");
        String remoteIp = httpServletRequest.getRemoteAddr();
        String uuid = UUID.randomUUID().toString();
        // Put into MDC *before* logging, so these lines already carry the id.
        MDC.put(REQUEST_ID, uuid);
        MDC.put(REMOTE_IP, remoteIp);
        LOGGER.info("put requestId ({}) to logger", uuid);
        LOGGER.info("request id:{}, client ip:{}, X-Forwarded-For:{}", uuid, remoteIp, xForwardedForHeader);
        return true;
    }

    @Override
    public void postHandle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o,
            ModelAndView modelAndView) throws Exception {
        // Intentionally empty: MDC cleanup happens in afterCompletion, which
        // is invoked even when the handler throws; postHandle is not.
    }

    /**
     * Clears the MDC entries set in {@link #preHandle}. Doing this here (and
     * not in postHandle) guarantees cleanup on the exception path too, so a
     * stale requestId/remoteIp cannot leak onto the next request served by
     * the same pooled thread.
     */
    @Override
    public void afterCompletion(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o, Exception e)
            throws Exception {
        String uuid = MDC.get(REQUEST_ID);
        LOGGER.info("remove requestId ({}) from logger", uuid);
        MDC.remove(REQUEST_ID);
        MDC.remove(REMOTE_IP);
    }
}

4,添加切面 intercept

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;

@Configuration
public class WebMvcConfigurer extends WebMvcConfigurerAdapter {

    /** Interceptor that seeds/clears the per-request MDC keys. */
    @Autowired
    private LogInterceptor mdcInterceptor;

    /**
     * Registers the MDC logging interceptor so that it wraps every
     * Spring MVC request handled by this application.
     */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(this.mdcInterceptor);
        super.addInterceptors(registry);
    }
}

 

参考:

logback 手册:https://logback.qos.ch/manual/layouts.html

http://www.importnew.com/28541.html

https://www.jianshu.com/p/a26da0c55255

https://blog.csdn.net/Soinice/article/details/84033382

https://examples.javacodegeeks.com/enterprise-java/logback/logback-kafka-appender-example/

http://stevetarver.github.io/2016/04/20/whole-product-logging.html 讲解详细

转载于:https://www.cnblogs.com/bigben0123/p/10613257.html

你可能感兴趣的:(json,大数据,python)