elasticsearch下载
elasticsearch-analysis-ik分词器下载
解压之后,直接找到 elasticsearch-6.5.2\bin
目录下的elasticsearch.bat
双击运行,然后打开postman
输入 http://localhost:9200,如能出现以下表示安装成功
{
"name": "O-hhQXI",
"cluster_name": "elasticsearch",
"cluster_uuid": "p7YnSZiHSISj2TxBBwQYOg",
"version": {
"number": "6.5.2",
"build_flavor": "default",
"build_type": "zip",
"build_hash": "9434bed",
"build_date": "2018-11-29T23:58:20.891072Z",
"build_snapshot": false,
"lucene_version": "7.5.0",
"minimum_wire_compatibility_version": "5.6.0",
"minimum_index_compatibility_version": "5.0.0"
},
"tagline": "You Know, for Search"
}
添加
添加一条数据,使用 post
方式,以下两种地址均可:
http://localhost:9200/test/abc(不指定 id,由 es 自动生成)
http://localhost:9200/test/abc/testid(指定 id 为 testid)
注意:test 表示索引,abc 表示类型,testid 表示文档 id。
数据,json格式
提交:
{
"name":"小三",
"age":180
}
结果出现:
{
"_index": "test",
"_type": "abc",
"_id": "BhmHeW8BOAkBt3zm6zuy",
"_version": 1,
"result": "created",
"_shards": {
"total": 2,
"successful": 1,
"failed": 0
},
"_seq_no": 0,
"_primary_term": 1
}
添加成功。
查询
查询数据,使用 get
方式,输入:
http://localhost:9200/test/abc/_search
结果出现
{
"took": 84,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"skipped": 0,
"failed": 0
},
"hits": {
"total": 3,
"max_score": 1,
"hits": [
{
"_index": "test",
"_type": "abc",
"_id": "BRmEeW8BOAkBt3zm2TtI",
"_score": 1,
"_source": {
"name": "小二",
"age": 18
}
}
]
}
}
分词
post方式
,json格式
数据,输入地址:
# 地址
http://localhost:9200/test/_analyze
# 数据
{
"analyzer":"chinese", # chinese 默认分词器
"text":"我有一只小毛驴"
}
结果如下:
{
"tokens": [
{
"token": "我",
"start_offset": 0,
"end_offset": 1,
"type": "",
"position": 0
},
{
"token": "有",
"start_offset": 1,
"end_offset": 2,
"type": "",
"position": 1
},
{
"token": "一",
"start_offset": 2,
"end_offset": 3,
"type": "",
"position": 2
},
{
"token": "只",
"start_offset": 3,
"end_offset": 4,
"type": "",
"position": 3
},
{
"token": "小",
"start_offset": 4,
"end_offset": 5,
"type": "",
"position": 4
},
{
"token": "毛",
"start_offset": 5,
"end_offset": 6,
"type": "",
"position": 5
},
{
"token": "驴",
"start_offset": 6,
"end_offset": 7,
"type": "",
"position": 6
}
]
}
可以看到,默认分词器分成了一个个字,并不是我们想要的,所以需要安装一个 ik分词器
,来识别分词。
ik分词器下载地址文章开始已经提供了。ik分词器安装:新建一个目录 ik
,将压缩包解压到 ik
的目录下,安装完成,然后重启 elasticsearch
,测试 ik分词器
。
ik分词器
提供了两种算法,ik_smart(最少切分)
与ik_max_word(最细切分)
如下:
# 地址
http://localhost:9200/test/_analyze
# 数据
{
"analyzer":"ik_smart", # ik 分词器最少算法
"text":"我有一只小毛驴"
}
结果如下:
{
"tokens": [
{
"token": "我",
"start_offset": 0,
"end_offset": 1,
"type": "CN_CHAR",
"position": 0
},
{
"token": "有",
"start_offset": 1,
"end_offset": 2,
"type": "CN_CHAR",
"position": 1
},
{
"token": "一只",
"start_offset": 2,
"end_offset": 4,
"type": "CN_WORD",
"position": 2
},
{
"token": "小毛驴",
"start_offset": 4,
"end_offset": 7,
"type": "CN_WORD",
"position": 3
}
]
}
可以看到,已经出现分词了,到此流程大致如此。
ik自定义词库
在 elasticsearch-6.5.2\plugins\ik\config
目录下新建一个my.dic
词库文件
我你他
然后在 elasticsearch-6.5.2\plugins\ik\config
找到IKAnalyzer.cfg.xml
文件,在其中添加扩展词典配置(原文 XML 标签在排版时丢失,此处按 IK 插件默认配置格式还原):
<properties>
    <comment>IK Analyzer 扩展配置</comment>
    <entry key="ext_dict">my.dic</entry>
</properties>
保存重启,到此ok了。
elasticsearch JavaRest 高级客户端
官方文档
导包(Maven 依赖,原文 XML 标签在排版时丢失,此处还原):
<dependency>
    <groupId>org.elasticsearch.client</groupId>
    <artifactId>elasticsearch-rest-high-level-client</artifactId>
    <version>6.2.1</version>
</dependency>
注意:客户端版本 6.2.1 与前文服务端 6.5.2 不一致,建议保持与服务端版本一致——待确认。
使用方式:
package com.qingcheng.service.impl;
import com.alibaba.dubbo.config.annotation.Service;
import com.qingcheng.dao.BrandMapper;
import com.qingcheng.dao.SpecMapper;
import com.qingcheng.service.goods.SkuSearchService;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.*;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class SkuSearchServiceImpl implements SkuSearchService {
@Autowired
private RestHighLevelClient restHighLevelClient;
@Autowired
private BrandMapper brandMapper;
@Autowired
private SpecMapper specMapper;
public Map search(Map searchMap) {
//1.封装查询请求
SearchRequest searchRequest=new SearchRequest("sku");
searchRequest.types("doc"); //设置查询的类型
SearchSourceBuilder searchSourceBuilder=new SearchSourceBuilder();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();//布尔查询构建器
//1.1 关键字搜索
MatchQueryBuilder matchQueryBuilder= QueryBuilders.matchQuery("name",searchMap.get("keywords"));
boolQueryBuilder.must(matchQueryBuilder);
//1.2 商品分类过滤
if(searchMap.get("category")!=null){
TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("categoryName", searchMap.get("category"));
boolQueryBuilder.filter(termQueryBuilder);
}
//1.3 品牌过滤
if(searchMap.get("brand")!=null){
TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("brandName", searchMap.get("brand"));
boolQueryBuilder.filter(termQueryBuilder);
}
//1.4 规格过滤
for(String key: searchMap.keySet() ){
if( key.startsWith("spec.") ){//如果是规格参数
TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery(key+".keyword", searchMap.get(key));
boolQueryBuilder.filter(termQueryBuilder);
}
}
//1.5 价格过滤
if(searchMap.get("price")!=null ){
String[] price = searchMap.get("price").split("-");
if(!price[0].equals("0")){ //最低价格不等于0
RangeQueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery("price").gte(price[0] + "00");
boolQueryBuilder.filter(rangeQueryBuilder);
}
if(!price[1].equals("*")){ //如果价格由上限
RangeQueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery("price").lte(price[1] + "00");
boolQueryBuilder.filter(rangeQueryBuilder);
}
}
searchSourceBuilder.query(boolQueryBuilder);
//分页
Integer pageNo = Integer.parseInt( searchMap.get("pageNo") );//页码
Integer pageSize=30;//页大小
int fromIndex= (pageNo-1) *pageSize; //计算开始索引
searchSourceBuilder.from(fromIndex);//开始索引设置
searchSourceBuilder.size(pageSize);//每页记录数设置
//排序
String sort = searchMap.get("sort");//排序字段
String sortOrder = searchMap.get("sortOrder");//排序规则
if(!"".equals(sort)){
searchSourceBuilder.sort(sort, SortOrder.valueOf(sortOrder));
}
//高亮设置
HighlightBuilder highlightBuilder=new HighlightBuilder();
highlightBuilder.field("name").preTags("").postTags("");
searchSourceBuilder.highlighter(highlightBuilder);
searchRequest.source(searchSourceBuilder);
//聚合查询(商品分类)
TermsAggregationBuilder termsAggregationBuilder = AggregationBuilders.terms("sku_category").field("categoryName");
searchSourceBuilder.aggregation(termsAggregationBuilder);
//2.封装查询结果
Map resultMap=new HashMap();
try {
SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
SearchHits searchHits = searchResponse.getHits();
long totalHits = searchHits.getTotalHits();
System.out.println("记录数:"+totalHits);
SearchHit[] hits = searchHits.getHits();
//2.1 商品列表
List