转自:https://blog.csdn.net/appearb...
目录
1.需求
记录服务接口调用情况,包括通用字段:域名、IP、接口路径、查询参数、是否成功、请求时间、耗时、错误信息、接口名;以及业务自定义字段
做成通用组件的形式,最小化的侵入业务系统
2.设计思路
自定义Spring拦截器,获取通用字段,以及业务自定义字段放到ThreadLocal,接口调用完成后异步发送到Kafka,然后消费到ClickHouse进行统计
需要统计的服务只需要依赖本组件,加一个注解,并配置指定的kafka地址即可
3.架构图
4.具体实现
1.定义拦截器
在接口调用前初始化并从request里获取ip、域名、请求时间、请求参数等
接口调用后设置接口耗时、错误信息等;需要删除该线程的ThreadLocal对象,因为spring的请求线程会在线程池里复用,如果不删掉,后续请求会用到该线程的ThreadLocal数据
package cn.xianbin.apistat.interceptor;
import cn.xianbin.apistat.service.ApiStatService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@Component
@Slf4j
public class ApiStatInterceptor implements HandlerInterceptor {

    @Autowired
    private ApiStatService apiStatService;

    /**
     * Captures request metadata (ip, domain, path, query string, start time)
     * into the per-thread stat bean before the handler executes.
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        apiStatService.before(request);
        return true;
    }

    /**
     * Finalizes the stat record (cost time, success flag, error message) and
     * publishes it. The ThreadLocal must be cleared unconditionally: servlet
     * containers pool and reuse request threads, so any path that skips
     * remove() leaks this request's data into a later request on the same
     * thread. Wrapping in try/finally guarantees cleanup even when after()
     * throws (e.g. a Kafka send failure).
     */
    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) {
        try {
            apiStatService.after(ex);
        } finally {
            ApiStatService.threadLocal.remove();
        }
    }
}
2.获取接口参数服务
package cn.xianbin.apistat.service;
import cn.xianbin.apistat.bean.ApiStatBean;
import cn.xianbin.apistat.utils.IpUtil;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
@Slf4j
@Service
public class ApiStatService {

    // DateTimeFormatter is immutable and thread-safe; cache it as a constant.
    private static final DateTimeFormatter DATE_TIME_FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");

    /**
     * Per-request stat bean. Declared with a type parameter: the original raw
     * {@code ThreadLocal} made {@code get()} return {@code Object}, so the
     * assignments to {@code ApiStatBean} below would not compile without casts.
     * Cleared by the interceptor's afterCompletion.
     */
    public static ThreadLocal<ApiStatBean> threadLocal = new ThreadLocal<>();

    @Resource(name = "apiStatKafkaTemplate")
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Called from business code to tag the current request with a logical API
     * name. No-op (with a warning) when the interceptor has not initialized
     * the bean for this thread.
     */
    public static void apiName(String apiName) {
        ApiStatBean apiStatBean = threadLocal.get();
        if (apiStatBean == null) {
            log.warn("api stat not init");
            return;
        }
        apiStatBean.setApi_name(apiName);
    }

    /**
     * Initializes the stat bean from the incoming request and binds it to the
     * current thread. Records both a wall-clock start timestamp (formatted)
     * and a millis value used later for cost-time calculation.
     */
    public void before(HttpServletRequest request) {
        LocalDateTime now = LocalDateTime.now();
        ApiStatBean apiStatBean = ApiStatBean.builder()
                .ip(IpUtil.getIP(request))
                .domain(domain(request))
                .path(request.getRequestURI())
                .query_param(request.getQueryString())
                .startTime(System.currentTimeMillis())
                .start_time(now.format(DATE_TIME_FORMATTER))
                .build();
        threadLocal.set(apiStatBean);
    }

    /**
     * Completes the record (cost time, success flag, error message) and sends
     * it to Kafka. Null-checks the bean so afterCompletion does not NPE when
     * preHandle never ran for this thread (e.g. an earlier interceptor aborted
     * the chain).
     *
     * @param ex the handler exception, or null on success
     */
    public void after(Exception ex) {
        ApiStatBean apiStatBean = threadLocal.get();
        if (apiStatBean == null) {
            log.warn("api stat not init, skip reporting");
            return;
        }
        apiStatBean.setCost_time(System.currentTimeMillis() - apiStatBean.getStartTime());
        if (ex == null) {
            apiStatBean.setIs_success(1);
        } else {
            apiStatBean.setError(ex.getMessage());
            apiStatBean.setIs_success(0);
        }
        log();
    }

    /** Serializes the current thread's bean to JSON and sends it asynchronously. */
    public void log() {
        String invokeLog = JSONObject.toJSONString(threadLocal.get());
        log.debug("asyncSend={}", invokeLog);
        kafkaTemplate.send("api_stat_test", invokeLog);
    }

    /** Builds e.g. "http://example.com:8080" from the request. */
    private String domain(HttpServletRequest request) {
        return String.format("%s://%s:%s", request.getScheme(), request.getServerName(), request.getServerPort());
    }
}
3.对外需要导入的注解
用spring的@import注解导入我们的配置类,用@ComponentScan扫描我们的类
package cn.xianbin.apistat;
import cn.xianbin.apistat.config.ApiStatConfig;
import org.springframework.context.annotation.Import;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Enables API call statistics for the annotated application.
 * Importing {@link ApiStatConfig} registers the stat interceptor and its
 * supporting beans via component scanning, so consumers only add this
 * annotation plus the Kafka configuration.
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Import(ApiStatConfig.class)
public @interface EnableApiStat {
}
package cn.xianbin.apistat.config;
import cn.xianbin.apistat.interceptor.ApiStatInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
 * Pulled in via {@code @EnableApiStat}'s {@code @Import}; registers the
 * component beans and wires the stat interceptor onto every request path.
 */
// FIX: scan the package this library actually lives in. The original scanned
// "cn.dgg.bigdata.apistat", which contains none of these classes, so
// ApiStatInterceptor/ApiStatService would never be registered and the
// @Autowired injection below would fail at startup.
@ComponentScan("cn.xianbin.apistat")
@Configuration
public class ApiStatConfig implements WebMvcConfigurer {

    @Autowired
    private ApiStatInterceptor apiStatHandlerInterceptor;

    /** Apply the stat interceptor to all request paths. */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(apiStatHandlerInterceptor).addPathPatterns("/**");
    }
}
5.同步到ClickHouse
1.创建kafka引擎表
-- Kafka engine table: consumes JSONEachRow messages produced by ApiStatService.
-- FIX: the original declared `api_name` twice (duplicate column definition is
-- rejected by ClickHouse); the duplicate has been removed.
CREATE TABLE api_stat.kafka_api_stat (
	`ip` String,
	`api_name` String,
	`domain` String,
	`path` String,
	`query_param` String,
	`start_time` DateTime64 ( 3, 'Asia/Shanghai' ),
	`is_success` UInt8,
	`cost_time` Int32,
	`error` String
) ENGINE = Kafka('localhost:9092', 'topic', 'group1', 'JSONEachRow');
2.创建ClickHouse物理表
-- Storage table: monthly partitions, ordered for per-endpoint time queries.
-- FIX: the original declared `api_name` twice (duplicate column definition is
-- rejected by ClickHouse); the duplicate has been removed.
CREATE TABLE api_stat.api_stat (
	`ip` String,
	`api_name` String,
	`domain` String,
	`path` String,
	`query_param` String,
	`start_time` DateTime64 ( 3, 'Asia/Shanghai' ),
	`is_success` UInt8,
	`cost_time` Int32,
	`error` String
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(start_time)
ORDER BY (domain, path, start_time);
3.创建kafka到物理表的视图
-- Materialized view moving rows from the Kafka engine table into storage.
-- FIX: `api_name` was selected twice in the original, which would not match
-- the target table's column list; the duplicate has been removed.
CREATE MATERIALIZED VIEW api_stat.consumer_api_stat TO api_stat.api_stat
AS SELECT ip,api_name,domain,path,query_param,start_time,is_success,cost_time,error FROM api_stat.kafka_api_stat;
6.外部系统使用
1.添加依赖:
<dependency>
    <groupId>cn.dgg.bigdata</groupId>
    <artifactId>apistat</artifactId>
    <version>1.0-SNAPSHOT</version>
</dependency>
2.配置kafka地址
apiStat:
kafka:
bootstrap-servers: localhost:9092
3.加入注解:@EnableApiStat