# --- 1. Install Docker CE 18.06 on CentOS 7 ---
yum install -y yum-utils device-mapper-persistent-data lvm2
yum install -y wget vim
# Use the Aliyun mirror for the docker-ce repo
wget https://mirrors.aliyun.com/docker-ce/linux/centos/docker-ce.repo -O /etc/yum.repos.d/docker-ce.repo
yum install -y docker-ce-18.06.1.ce-3.el7
docker --version
# -p: do not fail if the directory already exists (safe to re-run)
mkdir -p /etc/docker
# Configure a registry mirror BEFORE the daemon is started
cat > /etc/docker/daemon.json << EOF
{
  "registry-mirrors": ["https://jo6348gu.mirror.aliyuncs.com"]
}
EOF
systemctl enable docker && systemctl start docker
# --- Kernel / user limits required by Elasticsearch ---
# Original notes said: open limits.conf in vi and append by hand.
# Append the limits for the dedicated 'es' user non-interactively instead.
cat >> /etc/security/limits.conf << 'EOF'
es soft nofile 65536
es hard nofile 65536
es soft nproc 65536
es hard nproc 65536
EOF
# Original notes said: edit 20-nproc.conf and change '*' to the 'es' user.
# Do the replacement with sed so it is scriptable.
sed -i 's/^\*\([[:space:]]\+soft[[:space:]]\+nproc\)/es\1/' /etc/security/limits.d/20-nproc.conf
# Expected result in 20-nproc.conf:
#   es   soft nproc 4096
#   root soft nproc unlimited
# Elasticsearch needs a large mmap count
echo 'vm.max_map_count=655360' >> /etc/sysctl.conf
sysctl -p
# --- Elasticsearch 7.8.0 in Docker, config/data kept on the host ---
# Create the data dir too: it is bind-mounted into the container below.
mkdir -p /data/es/config /data/es/data
cd /data
wget https://mirrors.huaweicloud.com/elasticsearch/7.8.0/elasticsearch-7.8.0-linux-x86_64.tar.gz
tar -zxvf elasticsearch-7.8.0-linux-x86_64.tar.gz
# Seed the host config dir from the stock distribution
cp -r elasticsearch-7.8.0/config/* /data/es/config
# Append single-node settings (original: edit elasticsearch.yml in vi)
cat >> /data/es/config/elasticsearch.yml << 'EOF'
discovery.type: single-node
network.host: 0.0.0.0
EOF
# Dedicated user; the mounted dirs must be writable from inside the container
useradd es
passwd es
cd /data
chown -R es:es es
chmod -R 777 es   # NOTE(review): 777 is overly permissive; 770 or chown alone is usually enough
docker run -d --name es \
  -p 9200:9200 -p 9300:9300 \
  -v /data/es/config/:/usr/share/elasticsearch/config \
  -v /data/es/data/:/usr/share/elasticsearch/data \
  elasticsearch:7.8.0
# 2. 部署 Kibana (Deploy Kibana)
# --- Kibana 7.8.0 in Docker ---
mkdir -p /data/kibana
cd /data
# Write the Kibana config (original: create kibana/kibana.yml with vim).
# Quoted 'EOF' keeps the content literal.
cat > /data/kibana/kibana.yml << 'EOF'
server.host: 0.0.0.0
elasticsearch.hosts: ["http://192.168.0.34:9200"]
i18n.locale: "zh-CN"
EOF
docker run -d --name kibana -p 5601:5601 \
  -v /data/kibana/kibana.yml:/usr/share/kibana/config/kibana.yml:ro \
  kibana:7.8.0
# 3. logstash 部署 (Deploy Logstash)
# --- Logstash 7.8.0 in Docker ---
mkdir -p /data/logstash/config
cd /data
wget https://artifacts.elastic.co/downloads/logstash/logstash-7.8.0.tar.gz
tar -zxvf logstash-7.8.0.tar.gz
cp -r /data/logstash-7.8.0/config/* /data/logstash/config/
# Pipeline: receive from Filebeat on 5044, dissect "[Time] LogLevel message",
# then route to an ES index based on the tag set in filebeat.yml.
# (Original: written by hand in vi.)
cat > /data/logstash/config/logstash.conf << 'EOF'
input {
  beats {
    port => 5044
  }
}
filter {
  dissect {
    mapping => { "message" => "[%{Time}] %{LogLevel} %{message}" }
  }
}
output {
  if "secure.log" in [tags] {
    elasticsearch {
      hosts => ["http://192.168.0.34:9200"]
      index => "secure.log"
    }
  }
  else if "logstash.log" in [tags] {
    elasticsearch {
      hosts => ["http://192.168.0.34:9200"]
      index => "logstash.log"
    }
  }
}
EOF
# Expose the HTTP API on all interfaces and ship monitoring data to ES
cat >> /data/logstash/config/logstash.yml << 'EOF'
http.host: "0.0.0.0"
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.hosts: [ "http://192.168.0.34:9200" ]
EOF
# Point the pipeline at logstash.conf — the path is the CONTAINER path,
# because the whole config dir is bind-mounted below.
cat > /data/logstash/config/pipelines.yml << 'EOF'
- pipeline.id: docker
  path.config: "/usr/share/logstash/config/logstash.conf"
EOF
docker run -d -p 5044:5044 -p 9600:9600 --name logstash \
  -v /data/logstash/config:/usr/share/logstash/config \
  logstash:7.8.0
docker logs -f -t --tail 100 logstash   # follow the last 100 container log lines
# 4. filebeat 日志收集 (Filebeat log collection)
# --- Filebeat 7.8.0 (runs directly on the host, not in Docker) ---
cd /data
wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.8.0-linux-x86_64.tar.gz
tar -zxvf filebeat-7.8.0-linux-x86_64.tar.gz
# Two inputs, each tagged so the Logstash pipeline can route them to
# different indices. Quoted 'EOF' keeps ${path.config} literal for
# Filebeat to expand. (Original: edited in vi; YAML indentation restored.)
cat > /data/filebeat-7.8.0-linux-x86_64/filebeat.yml << 'EOF'
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /var/log/*.log
  fields:
    level: debug
  tags: ["secure.log"]
- type: log
  enabled: true
  paths:
    - /data/logstash-7.8.0/logs/*.log
  fields:
    level: debug
  tags: ["logstash.log"]
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.template.settings:
  index.number_of_shards: 1
setup.kibana:
output.logstash:
  hosts: ["192.168.0.34:5044"]
processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~
EOF
cd /data/filebeat-7.8.0-linux-x86_64
# Foreground (debug) start — use this variant only for first-run verification;
# it blocks the shell:
#   sudo ./filebeat -e -c filebeat.yml -d "publish"
# Background start:
nohup ./filebeat -e -c filebeat.yml >/dev/null 2>&1 &
# 5. 搭建 Redis (Set up Redis)
# --- Redis 5.0 in Docker (buffer between Filebeat and Logstash) ---
mkdir -p /data/redis/data
# Write the config (original: edited with vim from a cwd-relative path)
cat > /data/redis/data/redis.conf << 'EOF'
bind 0.0.0.0
daemonize no
pidfile "/var/run/redis.pid"
port 6380
timeout 300
loglevel warning
logfile "redis.log"
databases 16
rdbcompression yes
dbfilename "redis.rdb"
dir "/data"
requirepass "123456"
masterauth "123456"
maxclients 10000
maxmemory 1000mb
maxmemory-policy allkeys-lru
appendonly yes
appendfsync always
EOF
# Absolute mount path instead of `pwd` so the command works from any cwd.
# The conf lands in /data (the image's workdir), so "redis-server redis.conf"
# finds it; the server then listens on 6380 inside the container too.
docker run -d --name redis -p 6380:6380 \
  -v /data/redis/data/:/data \
  redis:5.0 redis-server redis.conf
# 6. 增加启用 Redis 配置 (Switch the pipeline to go through Redis)
# --- Replace the Logstash beats input with a Redis input ---
# Overwrites the earlier pipeline: Filebeat now LPUSHes events onto the
# Redis list "localhost" (db 0) and Logstash pops from it.
# (Original: edited in vi.)
cat > /data/logstash/config/logstash.conf << 'EOF'
input {
  redis {
    host => "192.168.0.34"
    port => 6380
    db => 0
    key => "localhost"
    password => "123456"
    data_type => "list"
    threads => 4
    # tags => "localhost"
  }
}
filter {
  dissect {
    mapping => { "message" => "[%{Time}] %{LogLevel} %{message}" }
  }
}
output {
  if "secure.log" in [tags] {
    elasticsearch {
      hosts => ["http://192.168.0.34:9200"]
      index => "secure.log-%{+YYYY.MM.dd}"
    }
  }
}
EOF
# NOTE(review): run `docker restart logstash` afterwards so the container
# picks up the rewritten pipeline.
# --- Point Filebeat at Redis instead of Logstash ---
# (Original: edited in vi; YAML indentation restored. Quoted 'EOF' keeps
# ${path.config} literal for Filebeat.)
cat > /data/filebeat-7.8.0-linux-x86_64/filebeat.yml << 'EOF'
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /home/*/logs/*.log
  fields:
    level: debug
  tags: ["secure.log"]
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.template.settings:
  index.number_of_shards: 1
setup.kibana:
#output.logstash:
#  hosts: ["192.168.0.34:5044"]
output.redis:
  enabled: true
  hosts: ["192.168.0.34:6380"]
  password: "123456"
  db: 0
  key: localhost          # Redis list key; must match the Logstash redis-input "key"
  worker: 4
  timeout: 5
  max_retries: 3
  datatype: list
processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~
EOF