Elasticsearch is a distributed, scalable, near real-time search and analytics engine built on top of Lucene. It provides a distributed, multi-tenant full-text search engine exposed through a RESTful web interface. Written in Java and released as open source under the Apache License, it has become one of the most popular enterprise search engines. It is widely used in cloud environments because it delivers fast search with good stability and reliability, and it is easy to install and operate. Official clients are available for Java, PHP, Python, Ruby and many other languages. This article does not cover basic Elasticsearch usage; follow the author for detailed hands-on Elasticsearch tutorials later. Now let's walk through the project.
This article builds a simple search system capable of serving data sets in the hundreds of millions of records, based on Spring Boot 2.2.5.RELEASE and Elasticsearch 6.7.0. The project structure is shown below:
Overview of the main project directories
That covers the project's directory layout. The project models an e-commerce scenario in which users search for the goods and merchants they want, and the system returns the products that best match the query. The application configuration is as follows:
# Application port
server.port=8080
# Application name
spring.application.name=datatuanyuan-search
# Elasticsearch connection settings (host, port and credentials)
elasticsearch.host=192.168.3.239
elasticsearch.port=9200
elasticsearch.username=admin
elasticsearch.password=11111
# Goods index alias
search.goods.alias=search_goods
# Goods document type name
search.goods.type=goods
# Goods search fields and their boost weights
search.goods.fields=goods_name_smart^4,goods_name_standard^3,goods_name_ik^2,merchants_name_ik^1
# Merchants index alias
search.merchants.alias=search_merchants
# Merchants document type name
search.merchants.type=merchants
# Merchants search fields and their boost weights
search.merchants.fields=merchants_name_ik^4,merchants_name_smart^3,merchants_name_standard^2,merchants_service_feature^1
# Search-keyword index alias
search.searchkey.alias=search_key
# Search-keyword document type name
search.searchkey.type=keysval
# Custom split symbols for the simple_pattern_split tokenizer (comma and underscore)
search.simple.pattern.split.symbol=comma,underline
# Fields used for keyword suggestion search
search.popup.fields=goods_name^2,merchants_name^1
In the configuration file, the settings to focus on are the index aliases, the document type names and the search fields. The goods-related configuration, for example, looks like this:
# Goods index alias
search.goods.alias=search_goods
# Goods document type name
search.goods.type=goods
# Goods search fields and their boost weights
search.goods.fields=goods_name_smart^4,goods_name_standard^3,goods_name_ik^2,merchants_name_ik^1
Note that in the search.goods.fields setting, each field name is followed by a "^" symbol and a number. That symbol and number matter: the number is the field's boost weight, which directly influences how hits are ranked. The larger the number, the higher the weight; a document that matches the query on a heavily boosted field scores higher and appears closer to the top of the result list.
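As a rough illustration (this snippet is not part of the project; the class name and sample keyword are made up), the configured weights end up as per-field boosts on a multi_match query, which you can verify by printing the generated query DSL:
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
public class FieldBoostDemo {
    public static void main(String[] args) {
        // Each "field^weight" pair from search.goods.fields becomes a per-field boost.
        MultiMatchQueryBuilder query = QueryBuilders.multiMatchQuery("啤酒") // sample keyword
                .field("goods_name_smart", 4.0f)
                .field("goods_name_standard", 3.0f)
                .field("goods_name_ik", 2.0f)
                .field("merchants_name_ik", 1.0f);
        // Prints the equivalent query DSL JSON; higher boosts raise the relevance score
        // of documents matching on that field, so they rank closer to the top.
        System.out.println(query);
    }
}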
package com.datuanyuan.annotation;
import java.lang.annotation.*;
/**
* Marks an entity class that is mapped to an ES document type
*
* @author weiyuan
* @version 1.0
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
public @interface ESMappingType {
/**
* The document type name used in the mapping
*
* @return
*/
String typeName() default "";
}
This annotation marks an entity class that is mapped to an ES document type, much like a Hibernate entity class maps to a database table.
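Its field-level counterpart, ESMappingField, marks the individual entity properties. The listing below is a minimal sketch reconstructed from how the annotation is used in the entity classes that follow; the attribute defaults are assumptions:
package com.datuanyuan.annotation;
import java.lang.annotation.*;
/**
 * Marks an entity field that is mapped to a field of an ES document.
 */
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface ESMappingField {
    /** Field name in the ES mapping */
    String fieldName() default "";
    /** ES data type of the field, e.g. keyword, text, long, integer (default is an assumption) */
    String dataType() default "keyword";
    /** Whether the field is analyzed (default is an assumption) */
    boolean isAnalyze() default false;
    /** Analyzer to use when isAnalyze is true (default is an assumption) */
    String setAnalyzer() default "";
}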
This annotation marks the entity fields that are mapped to ES document fields, much like Hibernate field mappings map to table columns.
package com.datuanyuan.bean;
import com.datuanyuan.annotation.ESMappingField;
import com.datuanyuan.annotation.ESMappingType;
/**
* 商品实体类
*
* @author weiyuan
* @version 1.0
*/
@ESMappingType(typeName = "goods")
public class SearchGoodsBean implements java.io.Serializable {
/**
*
*/
private static final long serialVersionUID = -3407466587287002039L;
/**
* 商家ID
*/
@ESMappingField(fieldName = "merchants_id", dataType = "keyword")
private String merchants_id;
/**
* 商家名称
*/
@ESMappingField(fieldName = "merchants_name", dataType = "keyword")
private String merchants_name;
/**
* 商家名称,使用ik_max_word进行分词
*/
@ESMappingField(fieldName = "merchants_name_ik", dataType = "text", isAnalyze = true, setAnalyzer = "ik_max_word")
private String merchants_name_ik;
/**
* 商品ID
*/
@ESMappingField(fieldName = "goods_id", dataType = "keyword")
private String goods_id;
/**
* 商品名称-不分词
*/
@ESMappingField(fieldName = "goods_name", dataType = "text", isAnalyze = true, setAnalyzer = "keyword")
private String goods_name;
/**
* 商品名称-使用ik_max_word分词
*/
@ESMappingField(fieldName = "goods_name_ik", dataType = "text", isAnalyze = true, setAnalyzer = "ik_max_word")
private String goods_name_ik;
/**
* 商品名称-使用smartcn分词
*/
@ESMappingField(fieldName = "goods_name_smart", dataType = "text", isAnalyze = true, setAnalyzer = "smartcn")
private String goods_name_smart;
/**
* 商品名称-使用标准分词器
*/
@ESMappingField(fieldName = "goods_name_standard", dataType = "text", isAnalyze = true, setAnalyzer = "standard")
private String goods_name_standard;
/**
* 商品分类ID
*/
@ESMappingField(fieldName = "goods_class_id", dataType = "keyword")
private String goods_class_id;
/**
* 商品分类名称
*/
@ESMappingField(fieldName = "goods_class_name", dataType = "keyword")
private String goods_class_name;
/**
* 商品图片
*/
@ESMappingField(fieldName = "goods_img", dataType = "keyword")
private String goods_img;
/**
* 商品价格
*/
@ESMappingField(fieldName = "goods_price", dataType = "long")
private Long goods_price;
/**
* 商品状态,1正常,0下架,-1已删除
*/
@ESMappingField(fieldName = "goods_status", dataType = "keyword")
private String goods_status;
/**
* 商品销量
*/
@ESMappingField(fieldName = "goods_sales", dataType = "long")
private Long goods_sales;
/**
* 商品库存
*/
@ESMappingField(fieldName = "goods_num", dataType = "integer")
private Integer goods_num;
/**
* 搜索结果匹配度
*/
private float score;
}
The goods entity class is annotated with ESMappingType, which declares the document type name used in ES, and each field is annotated with ESMappingField, which declares the ES field name and data type. Note in particular that goods_name is indexed three more times under suffixed field names, each using a different analyzer, so that as many query variants as possible can be matched. The ik and smartcn analyzers mentioned above are provided by plugins that must be installed separately; a short installation note follows.
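Assuming a standard Elasticsearch 6.7.0 installation, the two plugins can usually be installed with the elasticsearch-plugin tool followed by a node restart; the IK download URL below points at the community release on medcl's GitHub repository, so verify it against your ES version:
# smartcn ships as an official analysis plugin
bin/elasticsearch-plugin install analysis-smartcn
# IK analyzer is a community plugin; the plugin version must match the ES version exactly
bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v6.7.0/elasticsearch-analysis-ik-6.7.0.zip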
package com.datuanyuan.bean;
import com.datuanyuan.annotation.ESMappingField;
import com.datuanyuan.annotation.ESMappingType;
import java.util.List;
/**
* 商家信息
*
* @author weiyuan
* @version 1.0
*/
@ESMappingType(typeName = "merchants")
public class SearchMerchantsBean implements java.io.Serializable {
/**
*
*/
private static final long serialVersionUID = 6762419767390093593L;
/**
* 商家ID
*/
@ESMappingField(fieldName = "merchants_id", dataType = "keyword")
private String merchants_id;
/**
* 商家名称-不分词
*/
@ESMappingField(fieldName = "merchants_name", dataType = "keyword")
private String merchants_name;
/**
* 商家名称-使用IK分词
*/
@ESMappingField(fieldName = "merchants_name_ik", dataType = "text", isAnalyze = true, setAnalyzer = "ik_max_word")
private String merchants_name_ik;
/**
* 商家名称-使用smart分词
*/
@ESMappingField(fieldName = "merchants_name_smart", dataType = "text", isAnalyze = true, setAnalyzer = "smartcn")
private String merchants_name_smart;
/**
* 商家名称-使用标准分词器
*/
@ESMappingField(fieldName = "merchants_name_standard", dataType = "text", isAnalyze = true, setAnalyzer = "standard")
private String merchants_name_standard;
/**
* 商家logo
*/
@ESMappingField(fieldName = "merchants_logo", dataType = "keyword")
private String merchants_logo;
/**
* 商家总销量
*/
@ESMappingField(fieldName = "merchants_total_sales_num", dataType = "long")
private Long merchants_total_sales_num;
/**
* 商家在售商品数量
*/
@ESMappingField(fieldName = "merchants_on_saling_goods_num", dataType = "long")
private Long merchants_on_saling_goods_num;
/**
* 商家特色
*/
@ESMappingField(fieldName = "merchants_service_feature", dataType = "text", isAnalyze = true, setAnalyzer = "ik_max_word")
private String merchants_service_feature;
/**
* 商家所在地区
*/
@ESMappingField(fieldName = "merchants_province", dataType = "keyword")
private String merchants_province;
/**
* 商家状态,1正常,0关闭,-1删除
*/
@ESMappingField(fieldName = "merchants_status", dataType = "keyword")
private String merchants_status;
/**
* 搜索结果匹配度
*/
private float score;
}
The merchant entity class; its mapping annotations follow the same pattern as the goods entity class.
package com.datuanyuan.config;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* ES配置
*
* @author weiyuan
* @version 1.0
*/
@Configuration
public class ElasticsearchConfig {
@Value("${elasticsearch.host}")
private String host;
@Value("${elasticsearch.port}")
private int port;
@Value("${elasticsearch.username}")
private String username;
@Value("${elasticsearch.password}")
private String password;
@Bean
public RestHighLevelClient restHighLevelClient() {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(username, password));
return new RestHighLevelClient(RestClient.builder(new HttpHost(host, port))
.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
}));
}
}
This configuration class initializes the Elasticsearch connection and registers the high-level REST client as a Spring bean.
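A quick way to check that the client bean is wired up correctly (a sketch, not part of the project; the class name is illustrative) is to ping the cluster once the application context has started:
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
@Component
public class ElasticsearchHealthCheck implements CommandLineRunner { // illustrative class name
    @Autowired
    private RestHighLevelClient client;
    @Override
    public void run(String... args) throws Exception {
        // ping() issues a lightweight request and returns true when the cluster answers
        boolean reachable = client.ping(RequestOptions.DEFAULT);
        System.out.println("Elasticsearch reachable: " + reachable);
    }
}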
package com.datuanyuan.controller;
import java.io.IOException;
import com.datuanyuan.service.ElasticsearchService;
import com.datuanyuan.util.StringHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
/**
* 搜索引擎基础数据维护
*
* @author weiyuan
* @version 1.0
*/
@RestController
@RequestMapping(value = "/elasticsearch/v1.0/")
public class ElasticsearchController {
@Autowired
private ElasticsearchService elasticsearchService;
/**
* 创建索引
*
* @param indexName
* @param aliasName
* @param shards
* @param replicas
* @return
* @throws IOException
*/
@RequestMapping(value = "/creatindex", method = RequestMethod.POST)
public Object creatindex(String indexName, String aliasName, Integer shards, Integer replicas) throws IOException {
try {
if (elasticsearchService.createIndex(indexName, aliasName, shards, replicas)) {
return "creatindex success";
}
} catch (Exception e) {
e.printStackTrace();
}
return "creatindex fail";
}
/**
* 添加数据映射
*
* @param indexName
* @param typeName
* @param classname
* @return
* @throws IOException
*/
@RequestMapping(value = "/addmapping", method = RequestMethod.POST)
public Object addmapping(String indexName, String typeName, String classname) throws IOException {
try {
Class<?> clazz = Class.forName(classname);
if (elasticsearchService.addMapping(indexName, typeName, clazz)) {
return "createmapping success";
}
} catch (Exception e) {
e.printStackTrace();
}
return "createmapping fail";
}
/**
* 重建索引
*
* @param old_indexname
* @param new_indexname
* @return
* @throws IOException
*/
@RequestMapping(value = "/reindex", method = RequestMethod.POST)
public Object reindex(String old_indexname, String new_indexname) throws IOException {
elasticsearchService.reindex(old_indexname, new_indexname);
return "success";
}
/**
* 重建索引后修改别名
*
* @param aliasname
* @param old_indexname
* @param new_indexname
* @return
* @throws IOException
*/
@RequestMapping(value = "/changeAliasAfterReindex", method = RequestMethod.POST)
public Object changeAliasAfterReindex(String aliasname, String old_indexname, String new_indexname)
throws IOException {
if (elasticsearchService.changeAliasAfterReindex(aliasname, old_indexname, new_indexname)) {
return "changeAlias success";
}
return "changeAlias fail";
}
/**
* 删除索引
*
* @param indexName
* @return
* @throws IOException
*/
@RequestMapping(value = "/deleteindex", method = RequestMethod.POST)
public Object deleteindex(String indexName) throws IOException {
if (elasticsearchService.deleteIndex(indexName)) {
return "deleteindex success";
}
return "deleteindex fail";
}
/**
* 删除数据
*
* @param aliasename
* @param type
* @param id
* @return
*/
@RequestMapping(value = "/deletedata", method = RequestMethod.POST)
public Object deletedata(String aliasename, String type, String id) {
if (elasticsearchService.deleteData(aliasename, type, id)) {
return "deletedata success";
}
return "deletedata fail";
}
/**
* 批量删除数据
*
* @param aliasename
* @param type
* @param ids
* @return
*/
@RequestMapping(value = "/deleteBatchdata", method = RequestMethod.POST)
public Object deleteBatchdata(String aliasename, String type, String ids) {
if (StringHelper.isEmpty(ids)) {
return "no data";
}
if (elasticsearchService.deleteBatchData(aliasename, type, ids.split(","))) {
return "deleteBatchdata success";
}
return "deleteBatchdata fail";
}
}
ElasticsearchController exposes the maintenance endpoints: creating, deleting and rebuilding indices, creating aliases, initializing the document type mappings, and so on. The individual endpoints are described below, and a sample call sequence follows the list.
creatindex creates an index.
addmapping adds a document mapping. The classname parameter takes the fully qualified class name, for example com.datuanyuan.bean.SearchGoodsBean.
reindex rebuilds an index. It is mainly used to migrate the data of one index into another.
changeAliasAfterReindex moves an alias from the old index to the new one after a reindex. In practice it is strongly recommended to always address indices through aliases: an index alias is similar to a Windows shortcut, a Linux symlink or a MySQL view. An alias can point to one or more indices and can be used in any API that expects an index name, which provides a great deal of flexibility.
deleteindex deletes an index.
deletedata deletes a single document.
deleteBatchdata deletes documents in bulk.
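Assuming the application runs locally on port 8080, a typical bootstrap sequence might look like the following; the concrete index names (search_goods_v1, search_goods_v2) are only examples, and the request parameters are bound by name from the controller method signatures:
curl -X POST "http://localhost:8080/elasticsearch/v1.0/creatindex" -d "indexName=search_goods_v1&aliasName=search_goods&shards=3&replicas=1"
curl -X POST "http://localhost:8080/elasticsearch/v1.0/addmapping" -d "indexName=search_goods_v1&typeName=goods&classname=com.datuanyuan.bean.SearchGoodsBean"
# later, migrate the data into a new index and switch the alias over in one step
curl -X POST "http://localhost:8080/elasticsearch/v1.0/reindex" -d "old_indexname=search_goods_v1&new_indexname=search_goods_v2"
curl -X POST "http://localhost:8080/elasticsearch/v1.0/changeAliasAfterReindex" -d "aliasname=search_goods&old_indexname=search_goods_v1&new_indexname=search_goods_v2"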
package com.datuanyuan.controller;
import java.util.List;
import java.util.Map;
import com.datuanyuan.bean.*;
import com.datuanyuan.service.ElasticsearchService;
import com.datuanyuan.service.SearchService;
import com.datuanyuan.util.CollectionHelp;
import com.datuanyuan.util.StringHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
* 搜索引擎搜索请求入口
*
* @author weiyuan
* @version 1.0
*/
@RestController
@RequestMapping("/search/v1.0")
public class SearchController {
@Autowired
private SearchService searchService;
@Autowired
private ElasticsearchService elasticsearchService;
@Value("${search.searchkey.alias}")
private String searchKeyAlias;
@Value("${search.searchkey.type}")
private String searchKeyType;
@Value("${search.goods.alias}")
private String searchGoodsAlias;
@Value("${search.goods.type}")
private String searchGoodsType;
@Value("${search.goods.fields}")
private String searchGoodsField;
@Value("${search.merchants.alias}")
private String searchMerchantsAlias;
@Value("${search.merchants.type}")
private String searchMerchantsType;
@Value("${search.merchants.fields}")
private String searchMerchantsField;
/**
* 搜索商品入口
* @param searchRequestBean
* @return
*/
@RequestMapping(value = "/searchGoods", method = RequestMethod.POST)
public Object searchGoods(SearchRequestBean searchRequestBean) {
if (StringHelper.isNotEmpty(searchRequestBean.getSearchKey())) {//保存用户搜索的关键字
elasticsearchService.saveOrUpdateEntity(new SearchKeyBean(searchRequestBean.getSearchKey(), "goods"),
searchKeyAlias, searchKeyType, "");
}
//搜索商品
SearchResponseBean response = searchService.searchGoods(searchGoodsAlias, searchGoodsType, searchGoodsField, searchRequestBean);
SearchScreenBean searchScreenBean = new SearchScreenBean();
searchScreenBean.setGoodsClassNameBeans(searchService.getSreens(searchGoodsAlias, searchGoodsType, searchGoodsField, searchRequestBean,
"goods_class_name", OperatorType.AND));
searchScreenBean.setMerchantsNameBeans(searchService.getSreens(searchGoodsAlias, searchGoodsType, searchGoodsField, searchRequestBean,
"merchants_name", OperatorType.AND));
response.setSearchScreenBean(searchScreenBean);
return response;
}
/**
* 搜索商家
*
* @param searchRequestBean
* @return
*/
@RequestMapping(value = "/searchMerchants", method = RequestMethod.POST)
public Object searchMerchants(SearchRequestBean searchRequestBean) {
if (StringHelper.isNotEmpty(searchRequestBean.getSearchKey())) {//保存搜索关键字
elasticsearchService.saveOrUpdateEntity(new SearchKeyBean(searchRequestBean.getSearchKey(), "merchants"),
searchKeyAlias, searchKeyType, "");
}
//搜索商家
SearchResponseBean searchResponseBean = searchService.searchMerchants(searchMerchantsAlias, searchMerchantsType, searchMerchantsField, searchRequestBean);
/**
* 聚合商家名称搜索条件
*/
SearchScreenBean searchScreenBean = new SearchScreenBean();
searchScreenBean.setMerchantsNameBeans(searchService.getSreens(searchMerchantsAlias, searchMerchantsType, searchMerchantsField,
searchRequestBean, "merchants_name", OperatorType.AND));
searchResponseBean.setSearchScreenBean(searchScreenBean);
return searchResponseBean;
}
/**
* 关键词联想功能
*
* @param searchRequestBean
* @return
*/
@RequestMapping(value = "/searchKeyWordPopup", method = RequestMethod.POST)
public Object searchKeyWordPopup(SearchRequestBean searchRequestBean) {
SearchResponseBean searchResponseBean = searchService.searchKeyWordPopup(searchGoodsAlias, searchGoodsType, searchGoodsField, searchRequestBean);
return searchResponseBean;
}
/**
* 获取关键字搜索排名
*
* @param returnDataSize
* @return
*/
@RequestMapping(value = "/statisticSearchKey", method = RequestMethod.POST)
public Object statisticSearchKey(Integer returnDataSize) {
Map<String, Long> map = CollectionHelp.mapSortByValueDesc(elasticsearchService
.statisticSearchKey(returnDataSize != null && returnDataSize > 0 ? returnDataSize : 50));
List<SearchKeyBean> list = Lists.newArrayList();
for (String key : map.keySet()) {
list.add(new SearchKeyBean(key, map.get(key)));
}
return list;
}
}
SearchController is the entry point for user search requests. It provides goods search, merchant search, dynamic filter retrieval, keyword suggestion and keyword-frequency statistics. The individual methods are described below:
searchGoods is the goods-search entry point. It first stores the user's search keyword so that keyword frequencies can be analysed later, then runs the actual search, and finally assembles the dynamic filter conditions; a sample request is shown after this method list. The dynamic filters are illustrated by the figure below:
In the figure, searching for "beer" on a site such as JD.com shows filter groups for category, domestic/imported, type and place of origin. Those filter groups, and the terms inside them (for example craft beer, pale ale and wheat beer under "type"), apply only to the keyword "beer"; not every product has the same filters. In other words, the filter conditions change dynamically with the search keyword.
searchMerchants is the merchant-search endpoint.
searchKeyWordPopup implements keyword suggestion: it completes the partial keyword the user has typed and recommends full search keywords, as illustrated in the figure below:
statisticSearchKey returns a ranking of the keywords users search for most often, which helps decide which related products to push to users.
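For illustration, and again assuming a local instance, a goods search and a keyword-ranking query could be issued like this (the keyword and parameter values are examples; the parameter names come from the request beans used above):
curl -X POST "http://localhost:8080/search/v1.0/searchGoods" -d "searchKey=啤酒&pageNum=1&size=10&sort_field=goods_sales&sort_order=desc"
curl -X POST "http://localhost:8080/search/v1.0/statisticSearchKey" -d "returnDataSize=20"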
package com.datuanyuan.service;
import com.datuanyuan.bean.OperatorType;
import com.datuanyuan.bean.SearchRequestBean;
import com.datuanyuan.bean.SearchResponseBean;
import java.util.List;
import java.util.Map;
/**
* 搜索引擎服务类
*
* @author weiyuan
* @version 1.0
*/
public interface SearchService {
/**
* 搜索商品
*
* @param aliasName
* @param type
* @param searchFields
* @param searchRequestBean
* @return
*/
public SearchResponseBean searchGoods(String aliasName, String type, String searchFields, SearchRequestBean searchRequestBean);
/**
* 搜索商家
*
* @param aliasName
* @param type
* @param searchFields
* @param searchRequestBean
* @return
*/
public SearchResponseBean searchMerchants(String aliasName, String type, String searchFields, SearchRequestBean searchRequestBean);
/**
* 关键字联想功能
*
* @param aliasName
* @param type
* @param searchFields
* @param searchRequestBean
* @return
*/
public SearchResponseBean searchKeyWordPopup(String aliasName, String type, String searchFields, SearchRequestBean searchRequestBean);
/**
* 获取结果集中筛选条件
*
* @param aliasName
* @param type
* @param searchFields
* @param searchRequestBean
* @param buketFieldName
* @param operatorType
* @return
*/
public List<String> getSreens(String aliasName, String type, String searchFields, SearchRequestBean searchRequestBean, String buketFieldName,
OperatorType operatorType);
}
The search service interface for the business-level search logic; its implementation follows:
package com.datuanyuan.service.impl;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import com.alibaba.fastjson.JSONObject;
import com.datuanyuan.bean.*;
import com.datuanyuan.service.ElasticsearchService;
import com.datuanyuan.service.SearchService;
import com.datuanyuan.util.CollectionHelp;
import com.datuanyuan.util.StringHelper;
import io.micrometer.core.instrument.util.JsonUtils;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MatchPhrasePrefixQueryBuilder;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@Service
public class SearchServiceImpl implements SearchService {
@Autowired
public RestHighLevelClient client;
@Autowired
public ElasticsearchService elasticsearchService;
@Value("${search.goods.alias}")
private String searchGoodsAlias;
@Value("${search.goods.type}")
private String searchGoodsType;
@Value("${search.goods.fields}")
private String searchGoodsField;
@Override
public SearchResponseBean searchKeyWordPopup(String aliasName, String type, String searchFields, SearchRequestBean sbean) {
SearchResponseBean searchResponseBean = new SearchResponseBean();
if (StringHelper.isEmpty(sbean.getSearchKey())) {
return searchResponseBean;
}
Map<String, Float> fields = getFieldsMap(searchFields);
MultiSearchRequest multiRequest = new MultiSearchRequest();
for (String fieldName : fields.keySet()) {
SearchRequest searchRequest = new SearchRequest(aliasName);
searchRequest.types(type);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
MatchPhrasePrefixQueryBuilder prefixQuery = QueryBuilders.matchPhrasePrefixQuery(fieldName, sbean.getSearchKey());
prefixQuery.boost(fields.get(fieldName));
prefixQuery.maxExpansions(5);
searchSourceBuilder.query(prefixQuery);
searchSourceBuilder.fetchSource(new String[]{fieldName}, null);
searchRequest.source(searchSourceBuilder);
multiRequest.add(searchRequest);
}
try {
MultiSearchResponse mulSearchResponse = client.msearch(multiRequest, RequestOptions.DEFAULT);
for (MultiSearchResponse.Item item : mulSearchResponse.getResponses()) {
for (SearchHit hit : item.getResponse().getHits().getHits()) {
for (Object value : hit.getSourceAsMap().values()) {
if (value != null && StringHelper.isNotEmpty(String.valueOf(value)) && !searchResponseBean.getSetPopUp().contains(value.toString())) {
searchResponseBean.getSetPopUp().add(value.toString());
}
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
return searchResponseBean;
}
@Override
public SearchResponseBean searchMerchants(String aliasName, String type, String searchFields, SearchRequestBean sbean) {
Map<String, Float> fields = getFieldsMap(searchFields);
SearchResponseBean searchResponseBean = new SearchResponseBean();
SearchRequest searchMerchantsRequest = new SearchRequest(aliasName);
searchMerchantsRequest.types(type);
SearchSourceBuilder searchMerchantsBuilder = new SearchSourceBuilder();
if (StringHelper.isNotEmpty(sbean.getSort_field())) {
searchMerchantsBuilder.sort(sbean.getSort_field(),
StringHelper.isEmpty(sbean.getSort_order()) || "asc".equals(sbean.getSort_order()) ? SortOrder.ASC : SortOrder.DESC);
} else {
searchMerchantsBuilder.sort(SortBuilders.scoreSort().order(SortOrder.DESC));
}
if (sbean.getPageNum() != null && sbean.getSize() != null) {
searchMerchantsBuilder.from((sbean.getPageNum() - 1) * sbean.getSize());
searchMerchantsBuilder.size(sbean.getSize());
} else {
searchMerchantsBuilder.from(0);
searchMerchantsBuilder.size(10);
}
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.termQuery("merchants_status", "1"));//搜索正常的商家
if (StringHelper.isNotEmpty(sbean.getSearchKey())) {
for (String k : sbean.getSearchKey().split(Constant.BLANKSTR)) {
if (StringHelper.isNotEmpty(k)) {
MultiMatchQueryBuilder multiQuery = QueryBuilders.multiMatchQuery(StringHelper.removeBlank(k)).fields(fields);
boolQuery.must(multiQuery);
}
}
}
searchMerchantsBuilder.query(boolQuery);
searchMerchantsRequest.source(searchMerchantsBuilder);
try {
SearchResponse searchResponse = client.search(searchMerchantsRequest, RequestOptions.DEFAULT);
SearchHits hits = searchResponse.getHits();
SearchHit[] searchHits = hits.getHits();
List<String> listMerchantsId = Lists.newArrayList();
if (CollectionHelp.isArrayNotEmpty(searchHits)) {
for (SearchHit hit : searchHits) {
SearchMerchantsBean b = JSONObject.parseObject(JSONObject.toJSONString(hit.getSourceAsMap()), SearchMerchantsBean.class);
b.setScore(hit.getScore());
searchResponseBean.getListMerchantsBean().add(b);
listMerchantsId.add(b.getMerchants_id());
}
}
CountRequest countRequest = new CountRequest();
SearchSourceBuilder countSourceBuilder = new SearchSourceBuilder();
countSourceBuilder.query(boolQuery);
countRequest.source(countSourceBuilder);
CountResponse countResponse = client.count(countRequest, RequestOptions.DEFAULT);
searchResponseBean.setTotalNum(countResponse.getCount());
// guard against missing paging parameters (defaults match the from/size fallback above)
searchResponseBean.setPageNum(sbean.getPageNum() == null ? 1L : sbean.getPageNum().longValue());
searchResponseBean.setSize(sbean.getSize() == null ? 10L : sbean.getSize().longValue());
} catch (IOException e) {
e.printStackTrace();
}
return searchResponseBean;
}
@Override
public SearchResponseBean searchGoods(String aliasName, String type, String searchFields, SearchRequestBean sbean) {
Map<String, Float> fields = getFieldsMap(searchFields);
SearchResponseBean searchResponseBean = new SearchResponseBean();
SearchRequest searchRequest = new SearchRequest(aliasName);
searchRequest.types(type);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
if (StringHelper.isNotEmpty(sbean.getSort_field())) {
searchSourceBuilder.sort(sbean.getSort_field(), StringHelper.isEmpty(sbean.getSort_order()) || "asc".equals(sbean.getSort_order()) ? SortOrder.ASC : SortOrder.DESC);
} else {
searchSourceBuilder.sort(SortBuilders.scoreSort().order(SortOrder.DESC));
}
if (sbean.getPageNum() != null && sbean.getSize() != null) {
searchSourceBuilder.from((sbean.getPageNum() - 1) * sbean.getSize());
searchSourceBuilder.size(sbean.getSize());
} else {
searchSourceBuilder.from(0);
searchSourceBuilder.size(50);
}
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
boolQuery.must(QueryBuilders.termsQuery("goods_status", "1"));
if (StringHelper.isNotEmpty(sbean.getSearchKey())) {
for (String k : sbean.getSearchKey().split(Constant.BLANKSTR)) {
if (StringHelper.isNotEmpty(k)) {
MultiMatchQueryBuilder multiQuery = QueryBuilders.multiMatchQuery(StringHelper.removeBlank(k)).fields(fields);
boolQuery.must(multiQuery);
}
}
}
if (CollectionHelp.isArrayNotEmpty(sbean.getGoods_class__name())) {
boolQuery.must(QueryBuilders.termsQuery("goods_class_name", sbean.getGoods_class__name()));
}
if (CollectionHelp.isArrayNotEmpty(sbean.getMerchants_name())) {
boolQuery.must(QueryBuilders.termsQuery("merchants_name", sbean.getMerchants_name()));
}
searchSourceBuilder.query(boolQuery);
searchRequest.source(searchSourceBuilder);
try {
SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
SearchHits hits = searchResponse.getHits();
SearchHit[] searchHits = hits.getHits();
if (CollectionHelp.isArrayNotEmpty(searchHits)) {
for (SearchHit hit : searchHits) {
SearchGoodsBean searchGoodsBean = JSONObject.parseObject(JSONObject.toJSONString(hit.getSourceAsMap()), SearchGoodsBean.class);
searchGoodsBean.setScore(hit.getScore());
searchResponseBean.getListGoodsBean().add(searchGoodsBean);
}
}
CountRequest countRequest = new CountRequest();
SearchSourceBuilder countSourceBuilder = new SearchSourceBuilder();
countSourceBuilder.query(boolQuery);
countRequest.source(countSourceBuilder);
CountResponse countResponse = client.count(countRequest, RequestOptions.DEFAULT);
searchResponseBean.setTotalNum(countResponse.getCount());
// guard against missing paging parameters (defaults match the from/size fallback above)
searchResponseBean.setPageNum(sbean.getPageNum() == null ? 1L : sbean.getPageNum().longValue());
searchResponseBean.setSize(sbean.getSize() == null ? 50L : sbean.getSize().longValue());
} catch (IOException e) {
e.printStackTrace();
}
return searchResponseBean;
}
@Override
public List<String> getSreens(String aliasName, String type, String searchFields, SearchRequestBean sbean, String buketFieldName, OperatorType operatorType) {
List<String> l = Lists.newArrayList();
Map<String, Float> fields = getFieldsMap(searchFields);
SearchRequest searchRequest = new SearchRequest(aliasName);
searchRequest.types(type);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
if (StringHelper.isNotEmpty(sbean.getSearchKey())) {
for (String k : sbean.getSearchKey().split(Constant.BLANKSTR)) {
if (StringHelper.isNotEmpty(k)) {
MultiMatchQueryBuilder multiQuery = QueryBuilders.multiMatchQuery(StringHelper.removeBlank(k)).fields(fields);
if (OperatorType.AND.equals(operatorType)) {
boolQuery.must(multiQuery);
} else if (OperatorType.OR.equals(operatorType)) {
boolQuery.should(multiQuery);
}
}
}
if (OperatorType.OR.equals(operatorType)) {
boolQuery.minimumShouldMatch(1);
}
}
if ("goods".equals(type)) {
if (CollectionHelp.isArrayNotEmpty(sbean.getMerchants_name())) {
boolQuery.must(QueryBuilders.termsQuery("merchants_name", sbean.getMerchants_name()));
}
if (CollectionHelp.isArrayNotEmpty(sbean.getGoods_class__name())) {
boolQuery.must(QueryBuilders.termsQuery("goods_class_name", sbean.getGoods_class__name()));
}
}
searchSourceBuilder.query(boolQuery);
searchSourceBuilder.aggregation(AggregationBuilders.terms("aggregationList").field(buketFieldName).size(50));
searchRequest.source(searchSourceBuilder);
try {
SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
Object obj = searchResponse.getAggregations().get("aggregationList");
if (obj instanceof ParsedStringTerms) {
ParsedStringTerms terms = (ParsedStringTerms) obj;
for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket bucket : terms.getBuckets()) {
String keystring = bucket.getKeyAsString();
for (String s : keystring.split(",")) {
if (StringHelper.isNotEmpty(s) && !l.contains(s.trim())) {
l.add(s.trim());
}
}
}
} else if (obj instanceof ParsedLongTerms) {
ParsedLongTerms terms = (ParsedLongTerms) obj;
for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket bucket : terms.getBuckets()) {
String keystring = bucket.getKeyAsString();
for (String s : keystring.split(",")) {
if (StringHelper.isNotEmpty(s) && !l.contains(s.trim())) {
l.add(s.trim());
}
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
return l;
}
/**
* 解析搜索字段
*
* @param searchFields
* @return
*/
private Map<String, Float> getFieldsMap(String searchFields) {
String[] fieldsArray = searchFields.split(",");
Map<String, Float> fields = Maps.newHashMap();
for (String k : fieldsArray) {
fields.put(k.split("\\^")[0], Float.parseFloat(k.split("\\^")[1]));
}
return fields;
}
}
package com.datuanyuan.service;
import java.util.Map;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.script.Script;
/**
* 搜索引擎基础元素服务类
*
* @author weiyuan
* @version 1.0
*/
public interface ElasticsearchService {
/**
* 创建索引
*
* @param indexName
* @param aliasename
* @param shards
* @param replicas
* @return
*/
public boolean createIndex(String indexName, String aliasename, Integer shards, Integer replicas);
/**
* 添加映射
*
* @param indexName
* @param typeName
* @param clazz
* @return
*/
public boolean addMapping(String indexName, String typeName, Class<?> clazz);
/**
* 删除索引
*
* @param indexName
* @return
*/
public boolean deleteIndex(String indexName);
/**
* 重建索引
*
* @param old_indexname
* @param new_indexname
*/
public void reindex(String old_indexname, String new_indexname);
/**
* 重建索引后修改别名
*
* @param aliasname
* @param old_indexname
* @param new_indexname
* @return
*/
public boolean changeAliasAfterReindex(String aliasname, String old_indexname, String new_indexname);
/**
* 判断记录是否存在
*
* @param aliasName
* @param typeName
* @param id
* @return
*/
public boolean isExist(String aliasName, String typeName, String id);
/**
* 删除数据
*
* @param aliasName
* @param typeName
* @param id
* @return
*/
public boolean deleteData(String aliasName, String typeName, String id);
/**
* 根据查询删除数据
*
* @param aliasname
* @param type
* @param query
* @return
*/
public boolean deleteByQuery(String aliasname, String type, QueryBuilder query);
/**
* 根据查询更新数据
*
* @param aliasname
* @param type
* @param query
* @param script
* @return
*/
public boolean updateByQuery(String aliasname, String type, QueryBuilder query, Script script);
/**
* 获取各关键字搜锁频率
*
* @param returnDataSize
* @return
*/
public Map<String, Long> statisticSearchKey(Integer returnDataSize);
/**
* 获取单个记录
*
* @param aliasname
* @param type
* @param id
* @param clazz
* @param
* @return
*/
public <T> T getEntity(String aliasname, String type, String id, Class<T> clazz);
/**
* 批量删除数据
*
* @param aliasName
* @param typeName
* @param ids
* @return
*/
public boolean deleteBatchData(String aliasName, String typeName, String[] ids);
/**
* 保存或者更新数据
*
* @param entity
* @param aliasName
* @param typeName
* @param id
* @param
* @return
*/
public <T> Boolean saveOrUpdateEntity(T entity, String aliasName, String typeName, String id);
}
package com.datuanyuan.service.impl;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Map;
import com.alibaba.fastjson.JSONObject;
import com.datuanyuan.annotation.ESMappingField;
import com.datuanyuan.annotation.ESMappingType;
import com.datuanyuan.bean.SymbolType;
import com.datuanyuan.service.ElasticsearchService;
import com.datuanyuan.util.StringHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.replication.ReplicationResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.index.reindex.ScrollableHitSource;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.google.common.collect.Maps;
@Service
public class ElasticsearchServiceImpl implements ElasticsearchService {
private Log logger = LogFactory.getLog(ElasticsearchServiceImpl.class);
@Autowired
public RestHighLevelClient client;
@Value("${search.simple.pattern.split.symbol}")
public String simplePatternSplitSymbol;
@Value("${search.searchkey.alias}")
private String searchKeyAlias;
@Value("${search.searchkey.type}")
private String searchKeyType;
@Override
public boolean createIndex(String indexName, String aliasename, Integer shards, Integer replicas) {
CreateIndexRequest request = new CreateIndexRequest(indexName);
Settings.Builder builder = Settings.builder().put("index.mapper.dynamic", false)
.put("index.number_of_shards", shards == null ? 1 : shards)
.put("index.number_of_replicas", replicas == null ? 1 : replicas)
.put("index.max_result_window", 1000000);
if (StringHelper.isNotEmpty(simplePatternSplitSymbol)) {
for (String s : simplePatternSplitSymbol.split(",")) {
SymbolType symbolType = SymbolType.valueOf(s);
String analyzerName = "customer_split_" + symbolType.name() + "_analyzer";
String tokenizerName = "customer_split_" + symbolType.name() + "_tokenizer";
builder = builder.put("index.analysis.analyzer." + analyzerName + ".tokenizer", tokenizerName)
.put("index.analysis.tokenizer." + tokenizerName + ".type", "simple_pattern_split")
.put("index.analysis.tokenizer." + tokenizerName + ".pattern", symbolType.getText());
}
}
request.settings(builder);
if (StringHelper.isNotEmpty(aliasename)) {
request.alias(new Alias(aliasename));
}
request.timeout(TimeValue.timeValueMinutes(1));
request.masterNodeTimeout(TimeValue.timeValueMinutes(2));
try {
CreateIndexResponse createIndexResponse = client.indices().create(request, RequestOptions.DEFAULT);
boolean acknowledged = createIndexResponse.isAcknowledged();
boolean shardsAcknowledged = createIndexResponse.isShardsAcknowledged();
if (acknowledged && shardsAcknowledged) {
logger.info("索引创建成功");
}
return acknowledged && shardsAcknowledged;
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
@Override
public boolean addMapping(String indexName, String typeName, Class<?> clazz) {
if (clazz.getAnnotation(ESMappingType.class) == null) {
return true;
}
PutMappingRequest putMappingRequest = new PutMappingRequest(indexName);
putMappingRequest.type(typeName);
Map<String, Object> jsonMap = Maps.newHashMap();
Map<String, Object> properties = Maps.newHashMap();
Field[] fields = clazz.getDeclaredFields();
for (Field f : fields) {
ESMappingField esMappingField = f.getAnnotation(ESMappingField.class);
if (esMappingField != null) {
String fieldname = esMappingField.fieldName();
String datatype = esMappingField.dataType();
String analyzer = esMappingField.setAnalyzer();
boolean isanalye = esMappingField.isAnalyze();
Map<String, Object> m = Maps.newHashMap();
m.put("type", datatype);
if (isanalye && StringHelper.isNotEmpty(analyzer)) {
m.put("analyzer", analyzer);
m.put("search_analyzer", analyzer);
}
properties.put(fieldname, m);
}
}
jsonMap.put("properties", properties);
putMappingRequest.source(jsonMap);
putMappingRequest.timeout(TimeValue.timeValueMinutes(2));
try {
AcknowledgedResponse putMappingResponse = client.indices().putMapping(putMappingRequest,
RequestOptions.DEFAULT);
if (putMappingResponse.isAcknowledged()) {
}
return putMappingResponse.isAcknowledged();
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
@Override
public boolean deleteIndex(String indexName) {
DeleteIndexRequest request = new DeleteIndexRequest(indexName);
request.timeout(TimeValue.timeValueMinutes(2));
request.masterNodeTimeout(TimeValue.timeValueMinutes(3));
try {
AcknowledgedResponse deleteIndexResponse = client.indices().delete(request, RequestOptions.DEFAULT);
boolean acknowledged = deleteIndexResponse.isAcknowledged();
if (acknowledged) {
}
return acknowledged;
} catch (IOException e) {
e.printStackTrace();
} catch (ElasticsearchException exception) {
if (exception.status() == RestStatus.NOT_FOUND) {
}
}
return false;
}
@Override
public void reindex(String old_indexname, String new_indexname) {
ReindexRequest request = new ReindexRequest();
request.setSourceIndices(old_indexname);
request.setDestIndex(new_indexname);
request.setDestVersionType(VersionType.EXTERNAL);
request.setSourceBatchSize(1000);
request.setDestOpType("create");
request.setConflicts("proceed");
request.setScroll(TimeValue.timeValueMinutes(10));
request.setTimeout(TimeValue.timeValueMinutes(20));
request.setRefresh(true);
try {
BulkByScrollResponse bulkResponse = client.reindex(request, RequestOptions.DEFAULT);
boolean timedOut = bulkResponse.isTimedOut();
long totalDocs = bulkResponse.getTotal();
long updatedDocs = bulkResponse.getUpdated();
long createdDocs = bulkResponse.getCreated();
long deletedDocs = bulkResponse.getDeleted();
long batches = bulkResponse.getBatches();
long noops = bulkResponse.getNoops();
long versionConflicts = bulkResponse.getVersionConflicts();
long bulkRetries = bulkResponse.getBulkRetries();
long searchRetries = bulkResponse.getSearchRetries();
TimeValue throttledMillis = bulkResponse.getStatus().getThrottled();
TimeValue throttledUntilMillis = bulkResponse.getStatus().getThrottledUntil();
List<ScrollableHitSource.SearchFailure> searchFailures = bulkResponse.getSearchFailures();
List<BulkItemResponse.Failure> bulkFailures = bulkResponse.getBulkFailures();
logger.info("timedOut: " + timedOut);
logger.info("totalDocs: " + totalDocs);
logger.info("updatedDocs: " + updatedDocs);
logger.info("createdDocs: " + createdDocs);
logger.info("deletedDocs: " + deletedDocs);
logger.info("batches: " + batches);
logger.info("noops: " + noops);
logger.info("versionConflicts: " + versionConflicts);
logger.info("bulkRetries: " + bulkRetries);
logger.info("searchRetries: " + searchRetries);
logger.info("throttledMillis: " + throttledMillis.toString());
logger.info("throttledUntilMillis: " + throttledUntilMillis.toString());
logger.info("searchFailures: " + searchFailures.size());
logger.info("bulkFailures: " + bulkFailures.size());
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public boolean changeAliasAfterReindex(String aliasname, String old_indexname, String new_indexname) {
IndicesAliasesRequest.AliasActions addIndexAction = new IndicesAliasesRequest.AliasActions(
IndicesAliasesRequest.AliasActions.Type.ADD).index(new_indexname).alias(aliasname);
IndicesAliasesRequest.AliasActions removeAction = new IndicesAliasesRequest.AliasActions(
IndicesAliasesRequest.AliasActions.Type.REMOVE).index(old_indexname).alias(aliasname);
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
indicesAliasesRequest.addAliasAction(addIndexAction);
indicesAliasesRequest.addAliasAction(removeAction);
try {
AcknowledgedResponse indicesAliasesResponse = client.indices().updateAliases(indicesAliasesRequest,
RequestOptions.DEFAULT);
boolean acknowledged = indicesAliasesResponse.isAcknowledged();
if (acknowledged) {
}
return acknowledged;
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
/**
* 判断新增的记录是否已存在
*
* @return
* @throws IOException
*/
@Override
public boolean isExist(String aliasName, String typeName, String id) {
GetRequest getRequest = new GetRequest(aliasName, typeName, id);
getRequest.fetchSourceContext(new FetchSourceContext(false));
getRequest.storedFields("_none_");
try {
return client.exists(getRequest, RequestOptions.DEFAULT);
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
@Override
public boolean deleteData(String aliasName, String typeName, String id) {
DeleteRequest deleteRequest = new DeleteRequest(aliasName, typeName, id);
deleteRequest.timeout(TimeValue.timeValueMinutes(20));
deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
try {
DeleteResponse deleteResponse = client.delete(deleteRequest, RequestOptions.DEFAULT);
ReplicationResponse.ShardInfo shardInfo = deleteResponse.getShardInfo();
if (shardInfo.getTotal() != shardInfo.getSuccessful()) {
}
if (shardInfo.getFailed() > 0) {
for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
String reason = failure.reason();
}
}
} catch (Exception e) {
e.printStackTrace();
}
return true;
}
@Override
public <T> Boolean saveOrUpdateEntity(T entity, String aliasName, String typeName, String id) {
try {
if (StringHelper.isNotEmpty(id)) {
if (this.isExist(aliasName, typeName, id)) {
UpdateRequest updateRequest = new UpdateRequest(aliasName, typeName, id);
updateRequest.doc(JSONObject.toJSONString(entity), XContentType.JSON);
UpdateResponse updateResponse = client.update(updateRequest, RequestOptions.DEFAULT);
if (updateResponse.getResult() == DocWriteResponse.Result.CREATED) {
logger.info("CREATED!");
} else if (updateResponse.getResult() == DocWriteResponse.Result.UPDATED) {
logger.info("UPDATED!");
} else if (updateResponse.getResult() == DocWriteResponse.Result.DELETED) {
logger.info("DELETED!");
} else if (updateResponse.getResult() == DocWriteResponse.Result.NOOP) {
logger.info("NOOP!");
}
return true;
} else {
String jsonString = JSONObject.toJSONString(entity);
IndexRequest indexRequest = new IndexRequest(aliasName, typeName, id);
indexRequest.source(jsonString, XContentType.JSON);
IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
if (indexResponse != null && "CREATED".equals(indexResponse.getResult().toString())) {
logger.info("CREATED!");
return true;
}
}
} else {
String jsonString = JSONObject.toJSONString(entity);
IndexRequest indexRequest = new IndexRequest(aliasName, typeName);
indexRequest.source(jsonString, XContentType.JSON);
IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
if (indexResponse != null && "CREATED".equals(indexResponse.getResult().toString())) {
logger.info("CREATED!");
return true;
}
}
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
@Override
public boolean deleteByQuery(String aliasname, String type, QueryBuilder query) {
DeleteByQueryRequest request = new DeleteByQueryRequest(aliasname);
request.types(type);
request.setConflicts("proceed");
request.setQuery(query);
request.setTimeout(TimeValue.timeValueMinutes(10));
request.setRefresh(true);
request.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);
try {
BulkByScrollResponse bulkResponse = client.deleteByQuery(request, RequestOptions.DEFAULT);
boolean timedOut = bulkResponse.isTimedOut();
long totalDocs = bulkResponse.getTotal();
long deletedDocs = bulkResponse.getDeleted();
long batches = bulkResponse.getBatches();
long noops = bulkResponse.getNoops();
long versionConflicts = bulkResponse.getVersionConflicts();
long bulkRetries = bulkResponse.getBulkRetries();
long searchRetries = bulkResponse.getSearchRetries();
logger.info("timedOut: " + timedOut);
logger.info("totalDocs: " + totalDocs);
logger.info("deletedDocs: " + deletedDocs);
logger.info("batches: " + batches);
logger.info("noops: " + noops);
logger.info("versionConflicts: " + versionConflicts);
logger.info("bulkRetries: " + bulkRetries);
logger.info("searchRetries: " + searchRetries);
return true;
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
@Override
public boolean updateByQuery(String aliasname, String type, QueryBuilder query, Script script) {
UpdateByQueryRequest request = new UpdateByQueryRequest(aliasname);
request.setDocTypes(type);
request.setConflicts("proceed");
request.setQuery(query);
request.setScript(script);
request.setTimeout(TimeValue.timeValueMinutes(10));
request.setRefresh(true);
try {
BulkByScrollResponse bulkResponse = client.updateByQuery(request, RequestOptions.DEFAULT);
boolean timedOut = bulkResponse.isTimedOut();
long totalDocs = bulkResponse.getTotal();
long deletedDocs = bulkResponse.getDeleted();
long batches = bulkResponse.getBatches();
long noops = bulkResponse.getNoops();
long versionConflicts = bulkResponse.getVersionConflicts();
long bulkRetries = bulkResponse.getBulkRetries();
long searchRetries = bulkResponse.getSearchRetries();
logger.info("timedOut: " + timedOut);
logger.info("totalDocs: " + totalDocs);
logger.info("deletedDocs: " + deletedDocs);
logger.info("batches: " + batches);
logger.info("noops: " + noops);
logger.info("versionConflicts: " + versionConflicts);
logger.info("bulkRetries: " + bulkRetries);
logger.info("searchRetries: " + searchRetries);
return true;
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
@Override
public Map<String, Long> statisticSearchKey(Integer returnDataSize) {
Map<String, Long> map = Maps.newHashMap();
SearchRequest searchRequest = new SearchRequest(searchKeyAlias);
searchRequest.types(searchKeyType);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.aggregation(AggregationBuilders.terms("aggregationList").field("searh_key")
.size(returnDataSize));
searchRequest.source(searchSourceBuilder);
try {
SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
ParsedStringTerms terms = searchResponse.getAggregations().get("aggregationList");
for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket bucket : terms.getBuckets()) {
map.put(bucket.getKeyAsString(), bucket.getDocCount());
}
} catch (IOException e) {
e.printStackTrace();
}
return map;
}
@Override
public <T> T getEntity(String aliasname, String type, String id, Class<T> clazz) {
GetRequest getRequest = new GetRequest(aliasname, type, id);
try {
GetResponse getResponse = client.get(getRequest, RequestOptions.DEFAULT);
if (getResponse.isExists()) {
return JSONObject.parseObject(JSONObject.toJSONString(getResponse.getSourceAsMap()), clazz);
}
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Override
public boolean deleteBatchData(String aliasName, String typeName, String[] ids) {
BulkRequest bulkRequest = new BulkRequest();
for (String id : ids) {
DeleteRequest del = new DeleteRequest(aliasName, typeName, id);
bulkRequest.add(del);
}
bulkRequest.timeout(TimeValue.timeValueMinutes(40));
try {
BulkResponse bulkResponse = client.bulk(bulkRequest, RequestOptions.DEFAULT);
if (bulkResponse.hasFailures()) {
} else {
return true;
}
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
}
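To tie the pieces together, the sketch below (not part of the original project; the class name, index name and shard counts are illustrative) shows how the service layer can be used to bootstrap the goods index and push a document into it programmatically:
import com.datuanyuan.bean.SearchGoodsBean;
import com.datuanyuan.service.ElasticsearchService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
@Component
public class GoodsIndexBootstrap implements CommandLineRunner { // illustrative class name
    @Autowired
    private ElasticsearchService elasticsearchService;
    @Override
    public void run(String... args) {
        String indexName = "search_goods_v1"; // concrete index name is an example
        // 1. create the physical index together with its alias (shard/replica counts are arbitrary here)
        elasticsearchService.createIndex(indexName, "search_goods", 3, 1);
        // 2. derive the field mapping from the annotated entity class
        elasticsearchService.addMapping(indexName, "goods", SearchGoodsBean.class);
        // 3. index documents through the alias; an empty id lets ES generate one
        SearchGoodsBean goods = new SearchGoodsBean();
        // ... populate the bean from the business database here ...
        elasticsearchService.saveOrUpdateEntity(goods, "search_goods", "goods", "");
    }
}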
With that, the search engine system is complete. In the author's own tests it handles data sets of over a hundred million records without problems, with millisecond-level response times. I hope you find it useful; feel free to leave a comment and discuss. Follow the official WeChat account and reply "源码" (source code) to receive the complete source code.