设计思路:
1、通过openresty-1.15.8.1 引入lua-resty-kafka 插件(自带lua脚本支持),定义采集客户端URL返回的response_body具体信息(前后端分离项目,只采集ajax请求即可)的.conf文件
2、将采集信息以日志形式输出,同时生产消息至KafkaMQ
3、SpringBoot 开发KafkaMQ消费端,消费消息(获取自定义的规则信息,进行匹配。将匹配结果存储至Mysql)
具体实现nginx.conf
#user nobody;
worker_processes 1;
#error_log logs/error.log;
#error_log logs/error.log notice;
#error_log logs/error.log info;
#pid logs/nginx.pid;
# run in the foreground (console mode — typical for the Windows build / dev use)
daemon off;
events {
worker_connections 1024;
}
# JSON access-log format; escape=json makes nginx escape variable values
# so they stay valid inside the JSON string literals.
# Fixes: removed stray quotes in the host/remote_addr entries (they produced
# invalid JSON), dropped duplicate body_bytes_sent/remote_addr keys, and
# renamed "req_body" -> "resp_body" since the value is $resp_body (the
# response body captured by body_filter_by_lua), not the request body.
log_format main escape=json '{ "@timestamp": "$time_local", '
'"x_requested_with":"$http_x_requested_with", '
'"remote_addr": "$remote_addr", '
'"upstream_addr": "$upstream_addr",'
'"remote_user": "$remote_user", '
'"body_bytes_sent": "$body_bytes_sent", '
'"request_time": "$request_time", '
'"status": "$status", '
'"request": "$request", '
'"request_method": "$request_method", '
'"http_referrer": "$http_referer", '
'"http_x_forwarded_for": "$http_x_forwarded_for", '
'"host":"$host",'
'"http_user_agent": "$http_user_agent",'
'"http_uri": "$uri",'
'"http_host":"$http_host",'
'"resp_body":"$resp_body" }';
include mime.types;
default_type application/octet-stream;
charset utf-8;
server_names_hash_bucket_size 128;
client_header_buffer_size 32k;
large_client_header_buffers 4 64k;
client_max_body_size 80m;
sendfile on;
tcp_nopush on;
keepalive_timeout 65;
# hide the nginx version in responses and error pages
server_tokens off;
fastcgi_connect_timeout 600;
fastcgi_send_timeout 600;
fastcgi_read_timeout 600;
fastcgi_buffer_size 16k;
fastcgi_buffers 16 16k;
fastcgi_busy_buffers_size 16k;
fastcgi_temp_file_write_size 16k;
tcp_nodelay on;
gzip on;
gzip_min_length 1k;
gzip_buffers 4 16k;
gzip_http_version 1.0;
gzip_comp_level 2;
gzip_types text/plain text/javascript application/x-javascript text/css application/xml image/jpeg image/gif image/png;
gzip_vary on;
# keep processing even if the client aborts, so the log phase (and the
# Kafka publish in log_by_lua) still runs for the request
proxy_ignore_client_abort on;
# Lua dependency search path. NOTE(review): require "resty.kafka.producer"
# is resolved by the default OpenResty paths appended via the trailing ";;",
# not by the kafka/?.lua entry — confirm this path is actually needed.
lua_package_path "F:/cg-test/openresty-1.15.8.1-win64/lualib/kafka/?.lua;;";
# pulls in base.conf (the server block below)
include ../conf.d/*.conf;
#log_format main '$remote_addr - $remote_user [$time_local] "$request" '
# '$status $body_bytes_sent "$http_referer" '
# '"$http_user_agent" "$http_x_forwarded_for"';
#access_log logs/access.log main;
}
base.conf
server {
listen 80;
server_name localhost;
#access_log logs/access.log main ;
error_log logs/error.log;
# $resp_body is filled by body_filter_by_lua below and read by the
# log_format / log_by_lua publishers
set $resp_body "";
# make $request_body available to Lua without an explicit ngx.req.read_body()
lua_need_request_body on;
location / {
#root /usr/local/openresty/nginx/html/;
alias F:/chengangGit/fintech-DP/JY-pangu/pangu-html/;
index index.html index.htm;
error_page 405 =200 http://$host$request_uri;
}
location ^~ /api/{
# NOTE(review): log_escape_non_ascii is a Tengine directive — confirm this
# OpenResty build actually accepts it
log_escape_non_ascii off;
proxy_pass http://127.0.0.1:8092/api/;
access_log logs/bizaccess.log main ;
# Capture the upstream response body into $resp_body (published only for
# AJAX requests once the last chunk arrives).
# Fix: the original concatenated every chunk with no bound, so one large
# response could hold arbitrary memory for the whole request; cap the buffer
# (the commented-out string.sub line showed truncation was intended anyway).
body_filter_by_lua '
local chunk = ngx.arg[1]
if chunk and chunk ~= "" then
local buffered = ngx.ctx.buffered or ""
-- keep at most 64KB per request to bound memory use
if #buffered < 65536 then
ngx.ctx.buffered = buffered .. chunk
end
end
-- ngx.arg[2] is the eof flag: publish once the body is complete
if ngx.arg[2] and "XMLHttpRequest" == ngx.var.http_x_requested_with then
ngx.var.resp_body = ngx.ctx.buffered or ""
end
';
# log_by_lua runs at the very end of the request and does not interfere with
# proxy_pass; the async kafka producer flushes through ngx.timer, which is
# permitted in the log phase.
# Fixes: skip non-AJAX requests before doing any work (the original built and
# encoded the JSON for every request), and collapse the redundant
# "if not ok ... / if ok ..." double branch that logged a nil err on success.
log_by_lua '
-- only AJAX requests are shipped to Kafka
if "XMLHttpRequest" ~= ngx.var.http_x_requested_with then
return
end
local cjson = require "cjson"
local producer = require "resty.kafka.producer"
-- broker host must match the kafka host.name / advertised listener
local broker_list = {
{ host = "172.18.101.128", port = 9092 },
}
-- snapshot of the request/response for downstream rule matching;
-- nil values (e.g. empty args) simply drop out of the encoded JSON,
-- same as the original per-key assignments
local log_json = {
uri = ngx.var.uri,
args = ngx.var.args,
host = ngx.var.host,
request_body = ngx.var.request_body,
remote_addr = ngx.var.remote_addr,
remote_user = ngx.var.remote_user,
time_local = ngx.var.time_local,
status = ngx.var.status,
body_bytes_sent = ngx.var.body_bytes_sent,
http_referer = ngx.var.http_referer,
http_user_agent = ngx.var.http_user_agent,
http_x_forwarded_for = ngx.var.http_x_forwarded_for,
upstream_response_time = ngx.var.upstream_response_time,
request_time = ngx.var.request_time,
resp_body = ngx.var.resp_body,
}
local message = cjson.encode(log_json)
-- producer_type = "async": lua-resty-kafka caches async producer instances
-- per cluster, so calling new() per request does not open new connections
local bp = producer:new(broker_list, { producer_type = "async" })
-- key = nil: the client batches to one partition per time window;
-- pass a key instead to route by hash
local ok, err = bp:send("openresty-log", nil, message)
if not ok then
ngx.log(ngx.ERR, "kafka send err:", err)
else
ngx.log(ngx.INFO, "kafka send ok")
end
';
}
location /healthz {
access_log off;
# empty 200 — health-probe endpoint
return 200;
}
error_page 500 502 503 504 /50x.html;
location = /50x.html {
}
}
SpringBoot MQ消费端代码参考:
KafkaConsumer.java
/**
* className: KafkaConsumer
* description: MQ消费者 设置项目启动时就开始消费
*
* @author CG
* @version 1.0.0
* @date 2019-08-27 10:00
*/
// Eager singleton so consumption starts as soon as the application boots.
@Lazy(false)
@Component("com.mq.kafka.KafkaConsumer")
public class KafkaConsumer {
private static Logger logger = LoggerFactory.getLogger(KafkaConsumer.class);
// NOTE(review): property key is spelled "kakfa" (sic); kept as-is because
// existing config files use it — confirm before renaming.
@Value("${kakfa.mq.topic:openresty-log}")
private String mqTopic ;
@Autowired
private ISysDictDataService dictDataService;
@Autowired
private IBizLogMqService bizLogMqService;
@KafkaListener(topics= {"${kakfa.mq.topic:openresty-log}"})
public void consumerMsg(ConsumerRecord,?> record){
logger.info("------------------------------------------consumerMsg---------------------------------");
SysDictData dictData = new SysDictData();
dictData.setDictType("my_rule");
List rules = dictDataService.selectDictDataList(dictData);
Optional> kafkaMsg = Optional.ofNullable(record.value());
if(kafkaMsg.isPresent()){
Object msg = kafkaMsg.get();
logger.info("-------record:{}",record);
logger.info("-------msg:{}",msg);
String msgStr = msg.toString();
JSONObject msgObj = JSONObject.parseObject(msgStr);
//TODO 1 通过消息获取 cookie获取用户信息
//2 将消息内容写入 DB 支持可视化查询 消息内容
BizLogMq bizLogMq = new BizLogMq();
bizLogMq.setStatus("1");
//消息内容大小
bizLogMq.setExt1(String.valueOf(record.serializedValueSize()));
if(record.serializedValueSize() >0){
bizLogMq.setUri(msgObj.getString("http_referer"));
bizLogMq.setRemoteUser(msgObj.getString("remote_user"));
bizLogMq.setArgs(msgObj.getString("request_body"));
bizLogMq.setCreateBy("0");
bizLogMq.setMqTitle(String.valueOf(msgObj.getString("host")));
bizLogMq.setExt2(msgObj.getString("remote_addr"));
}
//3 支持规则匹配 并完成消息自定义的标识
StringBuffer ruleRemark = new StringBuffer(String.valueOf(record.offset()));
if(rules != null && rules.size() > 0){
for (int i =0;i < rules.size(); i++){
SysDictData rule = rules.get(i);
String dictVal = rule.getDictValue();
if(StringUtils.isNotEmpty(dictVal) ){
ruleRemark.append(",").append(this.matchRexgex(dictVal,msgStr));
}
}
}
//内配符合规则
if(ruleRemark.toString().contains(",1")){
bizLogMq.setMqType("1");
}
bizLogMq.setRemark(ruleRemark.toString());
if(msgStr.length() >= 4000){
msgStr = msgStr.substring(0, 3800);
}
bizLogMq.setMqContent(msgStr);
bizLogMqService.insertBizLogMq(bizLogMq);
//4 支持可视化规则配置,并可以自动历史匹配并完成数据标识
}
}
/**
 * Tests whether the whole of src matches the regular expression rex.
 *
 * @param rex regular expression (java.util.regex syntax)
 * @param src candidate string
 * @return 1 when src matches rex completely, otherwise 0
 */
public int matchRexgex(String rex, String src) {
    return Pattern.matches(rex, src) ? 1 : 0;
}
/**
 * NOTE(review): unfinished stub — rex is hard-coded to the empty string, so
 * this returns true only when src is itself empty. The intended default
 * pattern (see the candidates listed in main) was never filled in; confirm
 * before relying on this overload.
 *
 * @param src candidate string
 * @return whether src fully matches the (currently empty) pattern
 */
public boolean matchRexgex(String src){
boolean exists = false;
String rex = "";
exists = Pattern.matches(rex, src);
return exists;
}
/** Ad-hoc smoke test: runs the e-mail rule against a captured log message. */
public static void main(String [] args) {
// mobile-phone rule: .*((17[0-9])|(14[0-9])|(13[0-9])|(15[^4,\D])|(18[0,5-9]))\d{8}.*
// e-mail rule: .*([a-z0-9A-Z]+[-|\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\.)+[a-zA-Z]{2,}.*
// national-ID rule: .*(\d{17}).*
// customer name: TODO
String rex = ".*([a-z0-9A-Z]+[-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}.*";
// other patterns tried:
//".*(\\d{17}).*";//".*((17[0-9])|(14[0-9])|(13[0-9])|(15[^4,\\D])|(18[0,5-9]))\\d{8}.*";//".*[0-9]{11}.*";
String src = "{\"host\":\"127.0.0.1\",\"time_local\":\"10\\/Sep\\/2019:14:22:19 +0800\",\"resp_body\":\"{\\r\\n \\\"retCode\\\" : \\\"0000\\\",\\r\\n \\\"retDesc\\\" : \\\"成功\\\",\\r\\n \\\"responseBody\\\" : {\\r\\n \\\"pageParameter\\\" : {\\r\\n \\\"pageSize\\\" : 10,\\r\\n \\\"currentPage\\\" : 1,\\r\\n \\\"totalPage\\\" : 1,\\r\\n \\\"totalCount\\\" : 1\\r\\n },\\r\\n \\\"result\\\" : [ {\\r\\n \\\"id\\\" : 4,\\r\\n \\\"configName\\\" : \\\"1\\\",\\r\\n \\\"configCode\\\" : \\\"1\\\",\\r\\n \\\"configValue\\\" : \\\"[email protected]\\\",\\r\\n \\\"configType\\\" : \\\"1\\\",\\r\\n \\\"validateState\\\" : \\\"1\\\"\\r\\n } ]\\r\\n }\\r\\n}\",\"http_referer\":\"http:\\/\\/127.0.0.1\\/pangu\\/platform\\/sysConfig\\/querySysConfig.html\",\"status\":\"200\",\"request_time\":\"0.026\",\"remote_addr\":\"127.0.0.1\",\"request_body\":\"{\\\"pageParameter\\\":{\\\"currentPage\\\":\\\"1\\\",\\\"pageSize\\\":\\\"10\\\",\\\"selectRowIndex\\\":\\\"undefined\\\"},\\\"searchParams\\\":{\\\"dto\\\":{\\\"configName\\\":\\\"\\\",\\\"configCode\\\":\\\"\\\",\\\"configValue\\\":\\\"[email protected]\\\",\\\"configType\\\":\\\"\\\"}}}\",\"uri\":\"\\/api\\/sysConfig\\/searchByPage\\/v1\",\"args\":\"reqMsgPage\",\"http_user_agent\":\"Mozilla\\/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit\\/537.36 (KHTML, like Gecko) Chrome\\/75.0.3770.100 Safari\\/537.36\",\"body_bytes_sent\":\"421\",\"upstream_response_time\":\"0.026\"}";
KafkaConsumer test = new KafkaConsumer();
System.out.println("==========="+test.matchRexgex(rex,src));
}