# Create a dedicated bridge network so Kibana can reach Elasticsearch
# by container name ("elasticsearch") instead of an IP address.
docker network create es-net

docker pull elasticsearch:7.17.6

# Host directory that will hold Elasticsearch data and plugins.
# Change ES_HOME to your own local path if needed.
ES_HOME="$HOME/elasticsearch"
mkdir -p "$ES_HOME/plugins"
mkdir -p "$ES_HOME/data"

# Run a single-node Elasticsearch with a 512 MB heap.
#   -v : persist data and plugins on the host (same dirs created above)
#   --network es-net : join the shared network used by Kibana
docker run -d \
  --name elasticsearch \
  -e "ES_JAVA_OPTS=-Xms512m -Xmx512m" \
  -e "discovery.type=single-node" \
  -v "$ES_HOME/data":/usr/share/elasticsearch/data \
  -v "$ES_HOME/plugins":/usr/share/elasticsearch/plugins \
  --privileged \
  --network es-net \
  -p 9200:9200 \
  elasticsearch:7.17.6
⚠️:对于 Apple M1(arm64)芯片,可能出现 no matching manifest for linux/arm64/v8 in the manifest list entries 错误。可以去 Docker 官网(Docker Hub)搜索查看哪个镜像版本支持 arm64 架构。
# Kibana version must match the Elasticsearch version exactly.
docker pull kibana:7.17.6

# Start Kibana on the same network; it resolves Elasticsearch by its
# container name via ELASTICSEARCH_HOSTS.
docker run -d \
  --name kibana \
  -e "ELASTICSEARCH_HOSTS=http://elasticsearch:9200" \
  --network es-net \
  -p 5601:5601 \
  kibana:7.17.6
## Enter the running Elasticsearch container
docker exec -it elasticsearch /bin/bash
## Download and install the IK analyzer plugin online
## (plugin version must match the Elasticsearch version: 7.17.6)
./bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v7.17.6/elasticsearch-analysis-ik-7.17.6.zip
# Leave the container
exit
# Restart the container so the newly installed plugin is loaded
docker restart elasticsearch
GET /_analyze
{
"analyzer": "ik_smart",
"text": "青春猪头G7人马文不会梦到JK黑丝兔女郎铁驭艾许"
}
{
"tokens" : [
{
"token" : "青春",
"start_offset" : 0,
"end_offset" : 2,
"type" : "CN_WORD",
"position" : 0
},
{
"token" : "猪头",
"start_offset" : 2,
"end_offset" : 4,
"type" : "CN_WORD",
"position" : 1
},
{
"token" : "g7",
"start_offset" : 4,
"end_offset" : 6,
"type" : "LETTER",
"position" : 2
},
{
"token" : "人",
"start_offset" : 6,
"end_offset" : 7,
"type" : "COUNT",
"position" : 3
},
{
"token" : "马文",
"start_offset" : 7,
"end_offset" : 9,
"type" : "CN_WORD",
"position" : 4
},
{
"token" : "不会",
"start_offset" : 9,
"end_offset" : 11,
"type" : "CN_WORD",
"position" : 5
},
{
"token" : "梦到",
"start_offset" : 11,
"end_offset" : 13,
"type" : "CN_WORD",
"position" : 6
},
{
"token" : "jk",
"start_offset" : 13,
"end_offset" : 15,
"type" : "ENGLISH",
"position" : 7
},
{
"token" : "黑",
"start_offset" : 15,
"end_offset" : 16,
"type" : "CN_CHAR",
"position" : 8
},
{
"token" : "丝",
"start_offset" : 16,
"end_offset" : 17,
"type" : "CN_CHAR",
"position" : 9
},
{
"token" : "兔女郎",
"start_offset" : 17,
"end_offset" : 20,
"type" : "CN_WORD",
"position" : 10
},
{
"token" : "铁",
"start_offset" : 20,
"end_offset" : 21,
"type" : "CN_CHAR",
"position" : 11
},
{
"token" : "驭",
"start_offset" : 21,
"end_offset" : 22,
"type" : "CN_CHAR",
"position" : 12
},
{
"token" : "艾",
"start_offset" : 22,
"end_offset" : 23,
"type" : "CN_CHAR",
"position" : 13
},
{
"token" : "许",
"start_offset" : 23,
"end_offset" : 24,
"type" : "CN_CHAR",
"position" : 14
}
]
}
GET /_analyze
{
"analyzer": "ik_max_word",
"text": "青春猪头G7人马文不会梦到JK黑丝兔女郎铁驭艾许"
}
{
"tokens" : [
{
"token" : "青春",
"start_offset" : 0,
"end_offset" : 2,
"type" : "CN_WORD",
"position" : 0
},
{
"token" : "猪头",
"start_offset" : 2,
"end_offset" : 4,
"type" : "CN_WORD",
"position" : 1
},
{
"token" : "g7",
"start_offset" : 4,
"end_offset" : 6,
"type" : "LETTER",
"position" : 2
},
{
"token" : "g",
"start_offset" : 4,
"end_offset" : 5,
"type" : "ENGLISH",
"position" : 3
},
{
"token" : "7",
"start_offset" : 5,
"end_offset" : 6,
"type" : "ARABIC",
"position" : 4
},
{
"token" : "人马",
"start_offset" : 6,
"end_offset" : 8,
"type" : "CN_WORD",
"position" : 5
},
{
"token" : "人",
"start_offset" : 6,
"end_offset" : 7,
"type" : "COUNT",
"position" : 6
},
{
"token" : "马文",
"start_offset" : 7,
"end_offset" : 9,
"type" : "CN_WORD",
"position" : 7
},
{
"token" : "不会",
"start_offset" : 9,
"end_offset" : 11,
"type" : "CN_WORD",
"position" : 8
},
{
"token" : "梦到",
"start_offset" : 11,
"end_offset" : 13,
"type" : "CN_WORD",
"position" : 9
},
{
"token" : "jk",
"start_offset" : 13,
"end_offset" : 15,
"type" : "ENGLISH",
"position" : 10
},
{
"token" : "黑",
"start_offset" : 15,
"end_offset" : 16,
"type" : "CN_CHAR",
"position" : 11
},
{
"token" : "丝",
"start_offset" : 16,
"end_offset" : 17,
"type" : "CN_CHAR",
"position" : 12
},
{
"token" : "兔女郎",
"start_offset" : 17,
"end_offset" : 20,
"type" : "CN_WORD",
"position" : 13
},
{
"token" : "女郎",
"start_offset" : 18,
"end_offset" : 20,
"type" : "CN_WORD",
"position" : 14
},
{
"token" : "铁",
"start_offset" : 20,
"end_offset" : 21,
"type" : "CN_CHAR",
"position" : 15
},
{
"token" : "驭",
"start_offset" : 21,
"end_offset" : 22,
"type" : "CN_CHAR",
"position" : 16
},
{
"token" : "艾",
"start_offset" : 22,
"end_offset" : 23,
"type" : "CN_CHAR",
"position" : 17
},
{
"token" : "许",
"start_offset" : 23,
"end_offset" : 24,
"type" : "CN_CHAR",
"position" : 18
}
]
}
## Enter the container to edit the IK analyzer configuration
docker exec -it elasticsearch /bin/bash
# IK plugin config directory (holds IKAnalyzer.cfg.xml and dictionary files)
cd /usr/share/elasticsearch/config/analysis-ik
IK Analyzer 扩展配置