安装过程略
vi /etc/sysconfig/network-scripts/ifcfg-ens33
修改:ONBOOT=yes
保存退出(:wq)
重启:reboot
查看ip地址:ip addr
将jdk安装到/usr/local/java目录下
1.将jdk安装包移动到/usr/local/java目录下
mkdir /usr/local/java/
mv jdk-8u171-linux-x64.tar.gz /usr/local/java/
2.tar -zxvf jdk-8u171-linux-x64.tar.gz 解压jdk的安装包
3.yum -y install vim
4.vim /etc/profile
5.添加以下三项
export JAVA_HOME=/usr/local/java/jdk1.8.0_171
export PATH=$PATH:$JAVA_HOME/bin
export CLASSPATH=.
6.保存一下:wq
7.source /etc/profile 使配置立即生效
8.java -version 查看一下环境变量是否配置成功
1.创建用户组
groupadd es
2.创建其他用户
useradd es -g es
3.修改es用户的密码
passwd es
4.新建一个连接,用es用户登录
5.用root用户将elasticsearch文件移动到es用户下
mv elasticsearch-6.2.4 /home/es
6.用root用户将elasticsearch文件的所有者更改为es
cd /home/es/ 进入到elasticsearch所在的目录
chown -R es:es elasticsearch-6.2.4 更改所有者
7.用es用户启动elasticsearch
cd elasticsearch-6.2.4/bin/
./elasticsearch
出现started表示启动成功
8.测试es是否启动成功
在命令终端中执行:curl http://localhost:9200
9.开启es的远程访问权限
cd elasticsearch-6.2.4/config/
vim elasticsearch.yml
network.host: 0.0.0.0(55行)
切换到root用户修改
vim /etc/security/limits.conf
在最后面追加以下内容
* soft nofile 65536
* hard nofile 65536
* soft nproc 4096
* hard nproc 4096
检测修改是否正确,三个连接exit再重连
ulimit -Hn
ulimit -Sn
ulimit -Hu
ulimit -Su
切换到root用户修改
vim /etc/sysctl.conf
在最后面追加以下内容
vm.max_map_count=655360
使刚才添加的配置生效
sysctl -p
用root用户关闭防火墙
systemctl stop firewalld
用es用户启动elasticsearch
cd elasticsearch-6.2.4/bin/
./elasticsearch
在浏览器中输入 http://192.168.181.134:9200
使用root用户安装
rpm -ivh kibana-6.2.4-x86_64.rpm
查找kibana的安装位置
find / -name kibana
编辑kibana的配置文件
cd /etc/kibana
vim kibana.yml
修改
server.host: "192.168.181.134" #es服务器的主机ip(第7行)
elasticsearch.url: "http://192.168.181.134:9200" #es服务器地址(第21行)
启动kibana
systemctl start kibana
systemctl stop kibana
systemctl status kibana
用浏览器访问kibana的可视化界面
http://192.168.181.134:5601 #kibana默认端口号为5601
1.使用es用户将本地的ik分词器安装包上传到es服务器
2.使用root用户安装unzip
yum install -y unzip
3.使用es用户解压ik分词器的安装包
unzip elasticsearch-analysis-ik-6.2.4.zip
4.使用es用户将解压的ik包移动到es的plugins目录下
mv elasticsearch elasticsearch-6.2.4/plugins/
5.使用es用户重启es服务生效
./elasticsearch-6.2.4/bin/elasticsearch
注意:词典的编码必须是UTF-8,否则无法生效,注意CRT的编码格式
1.修改ik分词器的配置文件
vim IKAnalyzer.cfg.xml
<properties>
<comment>IK Analyzer 扩展配置</comment>
<entry key="ext_dict">ext_dict.dic</entry>
<entry key="ext_stopwords">ext_stopwords.dic</entry>
</properties>
2.在ik分词器目录下config目录中创建ext_dict.dic文件 编码一定要为UTF-8才能生效
vim ext_dict.dic 加入扩展词即可
3. 在ik分词器目录下config目录中创建ext_stopwords.dic文件
vim ext_stopwords.dic 加入停用词即可
4.重启es生效
PUT /ems
{
"mappings":{
"emp":{
"properties":{
"name":{
"type":"text",
"analyzer": "ik_max_word",
"search_analyzer": "ik_max_word"
},
"age":{
"type":"integer"
},
"bir":{
"type":"date"
},
"content":{
"type":"text",
"analyzer": "ik_max_word",
"search_analyzer": "ik_max_word"
},
"address":{
"type":"keyword"
}
}
}
}
}
PUT /ems/emp/_bulk
{"index":{}}
{"name":"小黑","age":23,"bir":"2012-12-12","content":"为开发团队选择一款优秀的MVC框架是件难事儿,在众多可行的方案中决择需要很高的经验和水平","address":"北京"}
{"index":{}}
{"name":"王小黑","age":24,"bir":"2012-12-12","content":"Spring 框架是一个分层架构,由 7 个定义良好的模块组成。Spring 模块构建在核心容器之上,核心容器定义了创建、配置和管理 bean 的方式","address":"上海"}
{"index":{}}
{"name":"张小五","age":8,"bir":"2012-12-12","content":"Spring Cloud 作为Java 语言的微服务框架,它依赖于Spring Boot,有快速开发、持续交付和容易部署等特点。Spring Cloud 的组件非常多,涉及微服务的方方面面,井在开源社区Spring 和Netflix 、Pivotal 两大公司的推动下越来越完善","address":"无锡"}
{"index":{}}
{"name":"win7","age":9,"bir":"2012-12-12","content":"Spring的目标是致力于全方位的简化Java开发。 这势必引出更多的解释, Spring是如何简化Java开发的?","address":"南京"}
{"index":{}}
{"name":"梅超风","age":43,"bir":"2012-12-12","content":"Redis是一个开源的使用ANSI C语言编写、支持网络、可基于内存亦可持久化的日志型、Key-Value数据库,并提供多种语言的API","address":"杭州"}
{"index":{}}
{"name":"张无忌","age":59,"bir":"2012-12-12","content":"ElasticSearch是一个基于Lucene的搜索服务器。它提供了一个分布式多用户能力的全文搜索引擎,基于RESTful web接口","address":"北京"}
GET /ems/emp/_search
{
"query":{
"term":{
"content":"框架"
}
},
"highlight": {
"pre_tags": ["<span style='color:red'>"],
"post_tags": ["</span>"],
"require_field_match":true,
"fields": {
"*":{}
}
}
}
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
配置文件
spring:
data:
elasticsearch:
cluster-nodes: 192.168.93.135:9300
实体类添加注解
@Document(indexName = "cmfz",type = "article")
public class Article {}
创建articleRepository
// Spring Data Elasticsearch repository for Article documents.
// Type parameters were missing (raw type): the entity is Article and the id is String
// (Article.setId is fed a String in the service implementation).
public interface ArticleRepository extends ElasticsearchRepository<Article, String> {
}
业务层代码(核心代码)
// Repository used for the unfiltered "return everything" path.
@Autowired
private ArticleRepository articleRepository;
// Template used for the native query path so highlight fragments can be mapped manually.
@Autowired
private ElasticsearchTemplate elasticsearchTemplate;
/**
 * Full-text search over articles. Blank/null input returns all articles;
 * otherwise runs a query-string search on title/author/content, sorted by score,
 * with highlighting applied to the matched fields.
 */
@Override
public List<Article> selectArticleByContent(String content) {
// Guard: empty or null search text -> fetch every document, no query needed.
if("".equals(content) || content==null){
Iterable<Article> all = articleRepository.findAll();
List<Article> list = IterableUtils.toList(all);
return list;
}else{
// Highlight every field ("*"); requireFieldMatch(false) lets fragments come from
// any highlighted field, not only the one the query matched on.
// NOTE(review): preTags("")/postTags("") means fragments carry NO visible markers —
// the original tags (e.g. "<span style='color:red'>") look stripped by copy/paste; confirm and restore.
HighlightBuilder.Field highlightBuilder = new HighlightBuilder
.Field("*")
.preTags("")
.postTags("")
.requireFieldMatch(false);
// Query-string search across the three text fields, ranked by relevance score.
NativeSearchQuery query = new NativeSearchQueryBuilder()
.withQuery(QueryBuilders.queryStringQuery(content).field("title").field("author").field("content"))
.withSort(SortBuilders.scoreSort())
.withHighlightFields(highlightBuilder)
.build();
// Custom mapper: copy _source into an Article, then overwrite title/author/content
// with their first highlight fragment when one exists.
AggregatedPage<Article> articles = elasticsearchTemplate.queryForPage(query, Article.class, new SearchResultMapper() {
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> aClass, Pageable pageable) {
SearchHits searchHits = response.getHits();
SearchHit[] hits = searchHits.getHits();
List<Article> list = new ArrayList<>();
for (SearchHit hit : hits) {
Article article = new Article();
Map<String, Object> map = hit.getSourceAsMap();
// NOTE(review): map.get(...).toString() throws NPE if a field is absent from _source — verify all docs carry these fields.
article.setId(map.get("id").toString());
article.setTitle(map.get("title").toString());
article.setAuthor(map.get("author").toString());
article.setContent(map.get("content").toString());
// createDate arrives as a numeric string and is parsed as epoch milliseconds.
String date = map.get("createDate").toString();
article.setCreateDate(new Date(Long.valueOf(date)));
// Highlight: prefer the first highlighted fragment over the raw source value.
Map<String, HighlightField> fieldMap = hit.getHighlightFields();
if(fieldMap.get("title") != null){
article.setTitle(fieldMap.get("title").getFragments()[0].toString());
}
if(fieldMap.get("author") != null){
article.setAuthor(fieldMap.get("author").getFragments()[0].toString());
}
if(fieldMap.get("content") != null){
article.setContent(fieldMap.get("content").getFragments()[0].toString());
}
list.add(article);
}
return new AggregatedPageImpl<T>((List<T>)list);
}
});
List<Article> list = articles.getContent();
return list;
}
}
articleRepository.save(primaryKey);
articleRepository.deleteById(article.getId());
}
return new AggregatedPageImpl((List)list);
}
});
List list = articles.getContent();
return list;
}
}
~~~java
articleRepository.save(primaryKey);
articleRepository.deleteById(article.getId());