1. Install elasticsearch:6.4.0
1) Pull the elasticsearch:6.4.0 image
[root@pinyoyougou-docker ~]# docker pull elasticsearch:6.4.0
2) Error on startup:
[1]: max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]
2.1) Fix: set vm.max_map_count = 262144
[root@pinyoyougou-docker ~]# sudo sysctl -w vm.max_map_count=262144
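Note that sysctl -w only takes effect until the next reboot; to keep the setting across reboots, append it to /etc/sysctl.conf and reload:
[root@pinyoyougou-docker ~]# echo 'vm.max_map_count=262144' >> /etc/sysctl.conf
[root@pinyoyougou-docker ~]# sysctl -p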
3) Create the container
[root@pinyoyougou-docker ~]# docker run -di -p 9200:9200 -p 9300:9300 --name es -e ES_JAVA_OPTS="-Xms512m -Xmx512m" -e NETWORK_HOST="0.0.0.0" elasticsearch:6.4.0
3.1) Check the logs
[root@pinyoyougou-docker ~]# docker logs -f 3253a4da8ffc
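The container was named es above, so the logs can also be followed by name instead of by container ID:
[root@pinyoyougou-docker ~]# docker logs -f es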
4) Test:
After confirming from the logs that it started normally, access the HTTP port:
http://192.168.117.130:9200/
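The same check can be done from the command line with curl; the root endpoint returns basic cluster and version information (adjust the IP to your own host):
[root@pinyoyougou-docker ~]# curl http://192.168.117.130:9200/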
2. Kibana
1) Pull the kibana image
docker pull kibana:6.4.0
2) Run kibana
docker run -di -p 5601:5601 -e SERVER_HOST="0.0.0.0" -e ELASTICSEARCH_URL="http://192.168.117.130:9200" --name kibana kibana:6.4.0
[root@pinyoyougou-docker ~]# docker logs -f kibana
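Once the logs show Kibana is ready, open it in a browser on the mapped port (again, adjust the IP to your host):
http://192.168.117.130:5601/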
3. IK analyzer
1) Use docker exec to enter the elasticsearch container
docker exec -it es /bin/bash
2) Install IK: in the ES root directory, download it with elasticsearch-plugin (supported from version v5.5.1)
# If the connection times out, retry a few times
./bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v6.4.0/elasticsearch-analysis-ik-6.4.0.zip
3) Restart elasticsearch
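Exit the container and restart it so the newly installed plugin is loaded:
exit
[root@pinyoyougou-docker ~]# docker restart es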
4. Test the IK analyzer
Run the following request in the Kibana Dev Tools console; the response below shows how ik_max_word segments the text:
POST _analyze
{
  "analyzer": "ik_max_word",
  "text": "我是中国人"
}
Response:
{
  "tokens": [
    {
      "token": "我",
      "start_offset": 0,
      "end_offset": 1,
      "type": "CN_CHAR",
      "position": 0
    },
    {
      "token": "是",
      "start_offset": 1,
      "end_offset": 2,
      "type": "CN_CHAR",
      "position": 1
    },
    {
      "token": "中国人",
      "start_offset": 2,
      "end_offset": 5,
      "type": "CN_WORD",
      "position": 2
    },
    {
      "token": "中国",
      "start_offset": 2,
      "end_offset": 4,
      "type": "CN_WORD",
      "position": 3
    },
    {
      "token": "国人",
      "start_offset": 3,
      "end_offset": 5,
      "type": "CN_WORD",
      "position": 4
    }
  ]
}
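If you are not using the Kibana console, the same analysis request can be sent with curl directly against the ES HTTP port (a minimal sketch, assuming the host IP used above; ES 6.x requires the Content-Type header):
[root@pinyoyougou-docker ~]# curl -H 'Content-Type: application/json' -XPOST 'http://192.168.117.130:9200/_analyze?pretty' -d '{"analyzer":"ik_max_word","text":"我是中国人"}'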