Notes:
   All of my packages are on this server: ftp://192.168.10.250/pub/package/elk软件包/

Lab environment:
   Two Linux servers with 4 GB of RAM each, plus one Apache (httpd) server
   192.168.1.101 node1           + kibana
   192.168.1.102 node2             # secondary elasticsearch node
   192.168.1.103 httpd-server  + logstash

# Add hostname resolution (on node1 and node2)
vim /etc/hosts
192.168.1.101 node1
192.168.1.102 node2
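A quick, optional sanity check that the names resolve:
ping -c 1 node1     # should answer from 192.168.1.101
ping -c 1 node2     # should answer from 192.168.1.102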

yum -y install java                 # install the Java runtime
First, obtain the elasticsearch RPM package.

rpm -ivh elasticsearch-5.5.0.rpm         # install the elasticsearch RPM package
systemctl daemon-reload
systemctl enable elasticsearch.service   # enable elasticsearch at boot

# Back up the configuration file
cp /etc/elasticsearch/elasticsearch.yml /etc/elasticsearch/elasticsearch.yml.bak

vim /etc/elasticsearch/elasticsearch.yml    # the leading numbers below are line numbers inside the file

17/ cluster.name: my-elk-cluster
23/ node.name: node1
33/ path.data: /data/elk_data
37/ path.logs: /var/log/elasticsearch/
43/ bootstrap.memory_lock: false
55/ network.host: 0.0.0.0
59/ http.port: 9200
68/ discovery.zen.ping.unicast.hosts: ["node1", "node2"]
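The same file is edited on node2; the only line that should differ there is the node name (line 23), for example:
node.name: node2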

grep -v "^#" /etc/elasticsearch/elasticsearch.yml

mkdir -p /data/elk_data                                  # create the elk data directory
chown -R elasticsearch:elasticsearch /data/elk_data/     # change the owner and group of the data directory

systemctl start elasticsearch.service
netstat -ntap | grep 9200       # start the service and check that port 9200 is listening

Open in a browser:
http://192.168.1.101:9200     shows the node information

http://192.168.1.101:9200/_cluster/health?pretty    check whether the node is healthy
http://192.168.1.102:9200/_cluster/health?pretty    check whether the node is healthy
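The same health check can be run from the command line; once both nodes have joined the cluster, the status should report "green":
curl -s http://192.168.1.101:9200/_cluster/health?pretty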

# Obtain the Node.js source package

tar zxvf node-v8.2.1.tar.gz     # unpack the source package
cd node-v8.2.1/
./configure
make -j 2 && make install       # compile and install
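A quick check that the build installed correctly (both commands should print a version):
node -v
npm -v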

### Install phantomjs ### (front-end framework)
Obtain the phantomjs front-end framework package.

tar xjvf phantomjs-2.1.1-linux-x86_64.tar.bz2                   # unpack the archive
cp phantomjs-2.1.1-linux-x86_64/bin/phantomjs /usr/local/bin/   # copy the binary into /usr/local/bin
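Verify that the binary is on the PATH (it should print 2.1.1):
phantomjs --version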

### Install elasticsearch-head ### (data visualization tool)
Obtain the package.
tar zxvf elasticsearch-head.tar.gz -C /usr/local/src/
cd /usr/local/src/elasticsearch-head/
npm install

### Modify the main configuration file ###
vim /etc/elasticsearch/elasticsearch.yml
Append the following two lines at the end of the file:
http.cors.enabled: true
http.cors.allow-origin: "*"

systemctl restart elasticsearch.service

### Start the elasticsearch-head server ###
cd /usr/local/src/elasticsearch-head/
npm run start &




########## The above completes the elasticsearch cluster configuration ##########
########## Next: set up the logstash + httpd services ##########
!!!!! Create an index (for example in the elasticsearch-head web UI)
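An index can also be created from the command line instead of the head UI; a minimal sketch (the index name index-demo and the sample document are arbitrary examples):
curl -XPUT 'http://192.168.1.101:9200/index-demo?pretty'
curl -XPUT 'http://192.168.1.101:9200/index-demo/test/1?pretty' -H 'Content-Type: application/json' -d '{"user":"test","message":"hello world"}'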

Apache server (192.168.1.103)
yum -y install httpd   # install Apache
Disable the firewall and SELinux, as shown below.
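A minimal sketch of that step, assuming a CentOS 7 host with firewalld and SELinux enabled:
systemctl stop firewalld
systemctl disable firewalld
setenforce 0                     # switch SELinux to permissive for the current session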

### Obtain the logstash RPM package and install it
yum -y install logstash-5.5.1.rpm      # install the logstash RPM package
systemctl start logstash.service
systemctl enable logstash.service      # enable logstash at boot


ln -s /usr/share/logstash/bin/logstash /usr/local/bin/    # put the logstash command on the PATH

# Plain stdin/stdout test
logstash -e 'input { stdin{} } output { stdout{} }'

logstash -e 'input { stdin{} } output { stdout { codec=>rubydebug } }'   # formatted (rubydebug) output

logstash -e 'input { stdin{} } output { elasticsearch { hosts=>["192.168.1.101:9200"] } }'   # write stdin input into elasticsearch (port 9200, the elasticsearch HTTP port)
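To confirm the test data reached elasticsearch, list the indices; a logstash-YYYY.MM.dd index should appear:
curl 'http://192.168.1.101:9200/_cat/indices?v'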

vim /etc/logstash/conf.d/system.conf    # write the system-log collection config file
input {
    file {
        path => "/var/log/messages"            # system log to collect
        type => "system"
        start_position => "beginning"          # read the file from the start
    }
}
output {
    elasticsearch {
        hosts => ["192.168.1.101:9200"]
        index => "system-%{+YYYY.MM.dd}"       # one index per day
    }
}
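Optionally validate the file before restarting; the -t flag asks logstash to test the configuration and exit:
logstash -f /etc/logstash/conf.d/system.conf -t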

systemctl restart logstash.service          # restart logstash so it picks up the new config

rpm -ivh kibana-5.5.1-x86_64.rpm            # install the kibana package (on node1)
cp /etc/kibana/kibana.yml /etc/kibana/kibana.yml.bak

vim /etc/kibana/kibana.yml
 2 server.port: 5601
 7 server.host: "0.0.0.0"
 21 elasticsearch.url: "http://192.168.1.101:9200"
 30 kibana.index: ".kibana"
 
systemctl start kibana.service       # start the kibana service
systemctl enable kibana.service      # enable kibana at boot
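Check that kibana is listening on its port (5601, as set in kibana.yml):
netstat -ntap | grep 5601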

Open in a browser: http://192.168.1.101:5601/

chmod o+r /var/log/messages     # make the file world-readable so logstash can read it

### Collect the Apache host's Apache log files (access and error)
vim /etc/logstash/conf.d/apache_log.conf

input {
    file {
        path => "/etc/httpd/logs/access_log"   # Apache access log
        type => "access"
        start_position => "beginning"
    }
    file {
        path => "/etc/httpd/logs/error_log"    # Apache error log
        type => "error"
        start_position => "beginning"
    }
}
output {
    if [type] == "access" {
        elasticsearch {
            hosts => ["192.168.1.101:9200"]
            index => "apache_access-%{+YYYY.MM.dd}"
        }
    }
    if [type] == "error" {
        elasticsearch {
            hosts => ["192.168.1.101:9200"]
            index => "apache_error-%{+YYYY.MM.dd}"
        }
    }
}

chmod -R o+r /etc/httpd/logs/                       # make the Apache log files readable
logstash -f /etc/logstash/conf.d/apache_log.conf    # run logstash with the Apache log config
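After generating a few requests against the Apache server (for example by opening http://192.168.1.103/ in a browser), the new indices should appear in elasticsearch; a quick check:
curl 'http://192.168.1.101:9200/_cat/indices?v' | grep apache    # look for apache_access-... and apache_error-... entries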