第一步引入 maven
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.elasticsearch</groupId>
    <artifactId>demo</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>

    <name>demo</name>
    <description>Demo project for Spring Boot</description>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.5.RELEASE</version>
        <relativePath/>
    </parent>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>mybatis-plus-boot-starter</artifactId>
            <version>3.0.2</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <version>1.1.10</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.46</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
2 配置文件
spring:
  data:
    elasticsearch:
      cluster-name: my-application
      cluster-nodes: 127.0.0.1:9300
      properties:
        path:
          logs: ./elasticsearch/log
          data: ./elasticsearch/data
  datasource:
    name: oss-service
    url: jdbc:mysql://localhost:3306/es?useSSL=false&characterEncoding=utf8
    username: root
    password: root
    # 使用druid数据源
    type: com.alibaba.druid.pool.DruidDataSource
    driver-class-name: com.mysql.jdbc.Driver
    druid:
      query-timeout: 2400
      filters: stat
      maxActive: 20
      initialSize: 1
      maxWait: 60000
      minIdle: 1
      timeBetweenEvictionRunsMillis: 60000
      minEvictableIdleTimeMillis: 300000
      validationQuery: select 'x'
      testWhileIdle: true
      testOnBorrow: false
      testOnReturn: false
      poolPreparedStatements: true
      maxOpenPreparedStatements: 20
3 config配置
@Configuration
public class ElasticSearchConfig {

    /**
     * Exposes an {@link ElasticsearchTemplate} bean built on the
     * auto-configured transport {@code Client}, so it can be injected
     * wherever index/search operations are needed.
     */
    @Bean
    public ElasticsearchTemplate elasticsearchTemplate(Client transportClient) {
        ElasticsearchTemplate template = new ElasticsearchTemplate(transportClient);
        return template;
    }
}
@Configuration
@MapperScan("com.elasticsearch.demo.dao")
public class MybatisPlusConfig {

    /** Registers MyBatis-Plus physical pagination support. */
    @Bean
    public PaginationInterceptor paginationInterceptor() {
        PaginationInterceptor pagination = new PaginationInterceptor();
        return pagination;
    }

    /**
     * SQL execution-efficiency (performance analysis) plugin.
     * Active only under the {@code dev} and {@code test} profiles.
     */
    @Bean
    @Profile({"dev", "test"})
    public PerformanceInterceptor performanceInterceptor() {
        PerformanceInterceptor performance = new PerformanceInterceptor();
        return performance;
    }
}
4 java Bean
// Entity mapped to Elasticsearch: index "es" (≈ database), type "posts" (≈ table).
// refreshInterval = "-1" disables automatic refresh; indexStoreType "fs" keeps the
// index on the file system.
@Document(indexName = "es", type = "posts", shards = 5, replicas = 1, indexStoreType = "fs", refreshInterval = "-1")
public class Posts {
}
@Document注解里面的几个属性,类比mysql的话是这样:
index –> DB
type –> Table
Document –> row
@Id注解加上后,在Elasticsearch里与之相对应的列就是主键了,在查询时就可以直接用主键查询,后面一篇会讲到。其实和mysql非常类似,基本就是一个数据库。
// Definition of Spring Data Elasticsearch's @Document annotation, reproduced here for reference.
@Persistent
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface Document {
String indexName();// name of the index (≈ a database in MySQL terms); naming it after the project is recommended
String type() default "";// mapping type (≈ a table); naming it after the entity is recommended
short shards() default 5;// default number of primary shards
short replicas() default 1;// default number of replicas per shard
String refreshInterval() default "1s";// index refresh interval
String indexStoreType() default "fs";// storage type of the index files
}
加上了@Document注解之后,默认情况下这个实体中所有的属性都会被建立索引、并且分词。
我们通过@Field注解来进行详细的指定,如果没有特殊需求,那么只需要添加@Document即可。
@Field注解的定义如下:
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Documented
@Inherited
public @interface Field {
FieldType type() default FieldType.Auto;#自动检测属性的类型
FieldIndex index() default FieldIndex.analyzed;#默认情况下分词
DateFormat format() default DateFormat.none;
String pattern() default "";
boolean store() default false;#默认情况下不存储原文
String searchAnalyzer() default "";#指定字段搜索时使用的分词器
String indexAnalyzer() default "";#指定字段建立索引时指定的分词器
String[] ignoreFields() default {};#如果某个字段需要被忽略
boolean includeInParent() default false;
}
@RestController
public class IndexController {

    /** Source of truth: posts rows loaded from MySQL via MyBatis-Plus. */
    @Autowired
    private PostsService postsService;

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    /**
     * Bulk-indexes every row of the posts table into Elasticsearch.
     *
     * @return "success" once all rows have been submitted for indexing
     */
    @GetMapping("save")
    public String save() {
        List<Posts> list = postsService.list(null);
        list.forEach(posts -> {
            IndexQuery indexQuery = new IndexQueryBuilder()
                    .withId(String.valueOf(posts.getId()))
                    .withObject(posts)
                    .build();
            elasticsearchTemplate.index(indexQuery);
        });
        return "success";
    }

    /**
     * Full-text search on {@code postTitle}, returning a page whose titles are
     * replaced by the highlighted fragments.
     *
     * @param word     the search keyword matched against {@code postTitle}
     * @param pageable page request (defaults applied by {@code @PageableDefault})
     * @return a page of {@link Posts} with highlighted titles
     */
    @GetMapping("get")
    public Page<Posts> test(String word, @PageableDefault Pageable pageable) {
        SearchQuery searchQuery = new NativeSearchQueryBuilder()
                .withQuery(matchQuery("postTitle", word))
                .withPageable(pageable)
                .withHighlightFields(new HighlightBuilder.Field("postTitle").preTags("").postTags(""))
                .build();
        return elasticsearchTemplate.queryForPage(searchQuery, Posts.class, new DefaultResultMapper() {
            @Override
            // FIX: declared as a generic method — the original referenced an
            // undeclared type variable T and would not compile.
            public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pg) {
                // FIX: the original empty-result check lived INSIDE the for loop,
                // where it could never run (an empty hit array means zero
                // iterations). Check once, up front, and return an empty page
                // instead of null so callers never NPE.
                SearchHit[] hits = response.getHits().getHits();
                List<T> chunk = new ArrayList<>();
                if (hits.length == 0) {
                    return new AggregatedPageImpl<>(chunk);
                }
                for (SearchHit searchHit : hits) {
                    try {
                        // Deserialize the source document, then overlay the
                        // highlighted title fragment via reflection.
                        T t = getEntityMapper().mapToObject(searchHit.getSourceAsString(), clazz);
                        // FIX: guard against a missing highlight entry — the
                        // original dereferenced get("postTitle") unconditionally
                        // and could throw NullPointerException.
                        if (searchHit.getHighlightFields().get("postTitle") != null) {
                            Text[] fragments = searchHit.getHighlightFields().get("postTitle").fragments();
                            if (fragments.length > 0) {
                                Field postTitle = clazz.getDeclaredField("postTitle");
                                postTitle.setAccessible(true);
                                postTitle.set(t, fragments[0].toString());
                            }
                        }
                        chunk.add(t);
                    } catch (IOException | IllegalAccessException | NoSuchFieldException e) {
                        // Best-effort per-hit mapping: skip hits that fail to map,
                        // matching the original printStackTrace behavior.
                        e.printStackTrace();
                    }
                }
                return new AggregatedPageImpl<>(chunk);
            }
        });
    }
}
转载 http://www.51csdn.cn/article/165.html