These are my notes from setting up ELK for log analysis. The installation of Kafka and Docker is not covered here.
Setting up Elasticsearch
Elasticsearch has an official image, so the simplest way to start it is:
docker run -d elasticsearch
I run it with the ports published, a restart policy, and the config directory mounted from the host, using the image from my private registry:
docker run -d -p 9200:9200 -p 9300:9300 --restart always --name es -v /home/docker/esdata/config:/usr/share/elasticsearch/config 192.168.187.108:5000/es:5.5.1-alpine
192.168.187.108 is the IP of my private registry. If you don't have one, you can pull the image straight from Docker Hub. With that, ES is done; next up is Logstash, which needs the most configuration.
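To confirm ES is reachable before moving on, you can hit port 9200 on the host (a quick sanity check; adjust the IP if yours differs, and it should return the cluster name and version info):
curl http://192.168.187.108:9200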
Setting up Logstash
Logstash also has an official image. Since I start it with an external config file mounted as a data volume, I first created the config file on the host and named it logstash-kafka.conf.
The API log lines that land in Kafka are pipe-delimited and look like this:
2017-09-26 14:26:02|server1|192.168.187.108|{"id":"123456","token":"3fa0343fc93a4d7999e28d202edc8c11"}|check_app_version|okhttp/3.8.0|0|成功|2|
logstash-kafka.conf then looks like this:
input {
  kafka {
    group_id => "log-api-1"
    topics => ["log-api"]
    bootstrap_servers => "192.168.187.108:9092,192.168.187.109:9092,192.168.187.110:9092"
  }
}
filter {
  # Split the pipe-delimited line into named fields
  grok {
    match => { "message" => "%{DATA:timestamp}\|%{DATA:serverHost}\|%{IP:clientIp}\|%{DATA:auth}\|%{DATA:uri}\|%{DATA:userAgent}\|%{NUMBER:resultCode}\|%{DATA:resultMsg}\|%{NUMBER:costime}\|"}
  }
  # Use the log's own timestamp as the event time
  date {
    match => [ "timestamp", "yyyy-MM-dd HH:mm:ss" ]
    target => "@timestamp"
  }
  # Resolve the client IP to a geo location
  geoip {
    source => "clientIp"
  }
  # Parse the User-Agent string when one is present
  if [userAgent] != "-" {
    useragent {
      target => "ua"
      source => "userAgent"
    }
  }
  # Store the numeric fields as integers instead of strings
  mutate {
    convert => ["costime", "integer"]
    convert => ["resultCode", "integer"]
  }
  # Expand the JSON auth field into an object and drop the raw string
  json {
    source => "auth"
    target => "auth_content"
    remove_field => ["auth"]
  }
}
output {
  elasticsearch {
    hosts => ["192.168.187.108:9200"]
    index => "logstash-api-%{+YYYY.MM.dd}"
  }
  # Also print each event to stdout for debugging
  stdout {
    codec => rubydebug
  }
}
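Before starting Logstash for real, you can ask it to validate the config and exit. This is a quick sanity check of my own, assuming the image passes flags through to the logstash binary the way the official image does; -t is short for --config.test_and_exit in Logstash 5.x:
docker run -it --rm -v "$PWD":/config-dir 10.45.53.221:5000/logstash -f /config-dir/logstash-kafka.conf -t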
Then start Logstash with the config directory mounted into the container:
docker run -it --rm -v "$PWD":/config-dir 10.45.53.221:5000/logstash -f /config-dir/logstash-kafka.conf
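To verify the pipeline end to end, you can push a sample line into the log-api topic with Kafka's console producer. This is just a sanity check run from one of the broker hosts; kafka-console-producer.sh ships with Kafka, and on newer Kafka versions the flag is --bootstrap-server instead of --broker-list:
bin/kafka-console-producer.sh --broker-list 192.168.187.108:9092 --topic log-api
Then paste a log line in the format shown above and press Enter.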
With the rubydebug stdout output enabled, each parsed event is also printed to the console, for example:
{
    "geoip" => {
        "city_name" => "Shenzhen",
        "timezone" => "Asia/Shanghai",
        "ip" => "192.168.187.108",
        "latitude" => 22.5333,
        "country_name" => "China",
        "country_code2" => "CN",
        "continent_code" => "AS",
        "country_code3" => "CN",
        "region_name" => "Guangdong",
        "location" => {
            "lon" => 114.1333,
            "lat" => 22.5333
        },
        "region_code" => "44",
        "longitude" => 114.1333
    },
    "resultCode" => 0,
    "costime" => 11,
    "userAgent" => "okhttp/3.8.0",
    "message" => "2017-09-26 11:53:46|server1|192.168.187.108|{\"id\":\"123456\",\"token\":\"e684379baeca4efb9ca3ec4dbe35b9b5\"}|/get_symp_dict|okhttp/3.8.0|0|成功|11|",
    "ua" => {
        "patch" => "0",
        "os" => "Other",
        "major" => "3",
        "minor" => "8",
        "build" => "",
        "name" => "okhttp",
        "os_name" => "Other",
        "device" => "Other"
    },
    "uri" => "get_symp_dict",
    "serverHost" => "iZ238h51mijZ",
    "resultMsg" => "成功",
    "auth_content" => {
        "id" => "59b90e24d7e29b7b4af80b4e",
        "token" => "e684379baeca4efb9ca3ec4dbe35b9b5"
    },
    "@timestamp" => 2017-09-26T11:53:46.000Z,
    "clientIp" => "183.12.64.236",
    "@version" => "1",
    "timestamp" => "2017-09-26 11:53:46"
}
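The events should now be landing in the daily index in ES. As a quick check, you can list the indices on the ES node with the standard _cat API and look for one named like logstash-api-2017.09.26:
curl "http://192.168.187.108:9200/_cat/indices?v"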
Finally, start Kibana and point it at the ES node:
docker run --name kibana -e ELASTICSEARCH_URL=http://192.168.187.108:9200 -p 5601:5601 -d kibana
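Kibana is then available on port 5601. Open http://192.168.187.108:5601 in a browser and create an index pattern matching the index name set in the Logstash output, e.g. logstash-api-*, after which the parsed fields (clientIp, uri, costime, geoip and so on) can be searched and visualized.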