Integrating Redis, Kafka, and Elasticsearch with Spring Boot in IntelliJ IDEA
package com.example.demokafkaredises;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class DemoKafkaRedisEsApplication {

    public static void main(String[] args) {
        // Avoid Netty's "availableProcessors is already set" error, which occurs when
        // both the Elasticsearch transport client and the embedded web server initialize Netty
        System.setProperty("es.set.netty.runtime.available.processors", "false");
        SpringApplication.run(DemoKafkaRedisEsApplication.class, args);
    }
}
package com.example.demokafkaredises;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.github.pagehelper.PageInfo;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.SearchResultMapper;
import org.springframework.data.elasticsearch.core.aggregation.AggregatedPage;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;
import org.springframework.data.elasticsearch.core.query.GetQuery;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.IndexQueryBuilder;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
public class EsUtils {

    // Index (save or overwrite) a document under the given id
    public static void saveObject(ElasticsearchTemplate elasticsearchTemplate, String id, Object object) {
        IndexQuery query = new IndexQueryBuilder().withId(id).withObject(object).build();
        elasticsearchTemplate.index(query);
    }

    // Delete the documents with the given ids, one request per id
    public static void deleteObject(ElasticsearchTemplate elasticsearchTemplate, Class<?> clazz, Integer[] ids) {
        for (Integer id : ids) {
            elasticsearchTemplate.delete(clazz, String.valueOf(id));
        }
    }

    // Look a document up by id and map it back onto the entity class
    public static Object selectById(ElasticsearchTemplate elasticsearchTemplate, Class<?> clazz, Integer id) {
        GetQuery query = new GetQuery();
        query.setId(String.valueOf(id));
        return elasticsearchTemplate.queryForObject(query, clazz);
    }
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public static PageInfo<?> findByKeyword(ElasticsearchTemplate elasticsearchTemplate, String keyword,
            Class<?> clazz, String sortField, Integer pageNum, Integer pageSize, String highLightfieldNames) {
        PageInfo<?> pageInfo = new PageInfo<>();
        AggregatedPage<?> page = null;
        // Spring Data pages are zero-based; PageInfo pages are one-based
        final Pageable pageable = PageRequest.of(pageNum - 1, pageSize, Sort.by(Sort.Direction.ASC, sortField));
        SearchQuery searchQuery = null;
        QueryBuilder queryBuilder = null;
        // Highlight tag prefix/suffix, e.g. "<em>" / "</em>"; empty here, so matches come back unwrapped
        String preTags = "";
        String postTags = "";
        String[] fieldNames = highLightfieldNames.split(",");
        HighlightBuilder.Field[] highlightFields = new HighlightBuilder.Field[fieldNames.length];
        for (int i = 0; i < fieldNames.length; i++) {
            highlightFields[i] = new HighlightBuilder.Field(fieldNames[i]).preTags(preTags).postTags(postTags);
        }
        if (keyword != null && !"".equals(keyword)) {
            // Full-text search across all the highlight fields
            queryBuilder = QueryBuilders.multiMatchQuery(keyword, fieldNames);
            searchQuery = new NativeSearchQueryBuilder()
                    .withQuery(queryBuilder)
                    .withHighlightFields(highlightFields)
                    .withPageable(pageable).build();
            // Custom mapper: copy each hit's _source onto a new entity via reflection,
            // swapping in the highlight fragment wherever one exists for a field
            page = elasticsearchTemplate.queryForPage(searchQuery, clazz, new SearchResultMapper() {
                @Override
                public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable1) {
                    List<T> contentList = new ArrayList<T>();
                    long total = 0L;
                    try {
                        SearchHits hits = response.getHits();
                        if (hits == null) {
                            return new AggregatedPageImpl<T>(contentList, pageable, total);
                        }
                        total = hits.getTotalHits();
                        SearchHit[] searchHits = hits.getHits();
                        for (SearchHit searchHit : searchHits) {
                            T entity = clazz.newInstance();
                            Field[] fields = clazz.getDeclaredFields();
                            for (Field field : fields) {
                                field.setAccessible(true);
                                String fieldName = field.getName();
                                // Skip synthetic and association fields that are not part of _source
                                if ("serialVersionUID".equals(fieldName) || "user".equals(fieldName)
                                        || "channel".equals(fieldName) || "category".equals(fieldName)) {
                                    continue;
                                }
                                Object fieldValue = searchHit.getSourceAsMap().get(fieldName);
                                Class<?> type = field.getType();
                                // Dates are stored as epoch milliseconds in _source
                                if (type == Date.class && fieldValue != null) {
                                    fieldValue = new Date(Long.valueOf(fieldValue.toString()));
                                }
                                // Prefer the highlighted fragment when one exists for this field
                                HighlightField highlightField = searchHit.getHighlightFields().get(fieldName);
                                if (highlightField != null) {
                                    fieldValue = highlightField.getFragments()[0].toString();
                                }
                                field.set(entity, fieldValue);
                            }
                            contentList.add(entity);
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    return new AggregatedPageImpl<T>(contentList, pageable, total);
                }

                @Override
                public <T> T mapSearchHit(SearchHit searchHit, Class<T> aClass) {
                    return null;
                }
            });
        } else {
            // No keyword: plain paged query over the whole index
            searchQuery = new NativeSearchQueryBuilder().withPageable(pageable).build();
            page = elasticsearchTemplate.queryForPage(searchQuery, clazz);
        }
        // Copy the paging metadata into PageHelper's PageInfo
        int totalCount = (int) page.getTotalElements();
        int pages = (totalCount + pageSize - 1) / pageSize;
        pageInfo.setTotal(page.getTotalElements());
        pageInfo.setPageNum(pageNum);
        pageInfo.setPageSize(pageSize);
        pageInfo.setPrePage(pageNum - 1);
        pageInfo.setNextPage(pageNum + 1);
        pageInfo.setPages(pages);
        pageInfo.setHasNextPage(pageNum < pages);
        pageInfo.setHasPreviousPage(pageNum > 1);
        int[] navigatepageNums = new int[pages];
        for (int i = 0; i < pages; i++) {
            navigatepageNums[i] = i + 1;
        }
        pageInfo.setNavigatepageNums(navigatepageNums);
        List content = page.getContent();
        pageInfo.setList(content);
        return pageInfo;
    }
}
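The highlight search helper above is not exercised anywhere else in this demo. Below is a minimal usage sketch; the SearchController class and its /search mapping are hypothetical additions for illustration only:

package com.example.demokafkaredises;

import com.github.pagehelper.PageInfo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class SearchController {

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    // Hypothetical endpoint: full-text search on the "name" field,
    // first page of 10 results, sorted ascending by "id"
    @RequestMapping("search")
    public PageInfo<?> search(String keyword) {
        return EsUtils.findByKeyword(elasticsearchTemplate, keyword, User.class, "id", 1, 10, "name");
    }
}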
package com.example.demokafkaredises;

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class KafkaConsumer {

    // Consume JSON messages from testTopic and deserialize them into User objects
    @KafkaListener(topics = { "testTopic" })
    public void listen(ConsumerRecord<String, Object> consumerRecord) {
        System.out.println("the Message:" + consumerRecord.value());
        Object value = consumerRecord.value();
        User user = JSON.parseObject(value.toString(), User.class);
        System.out.println(user);
    }
}
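The consumer above only receives; in this demo the matching send happens in the test class via KafkaTemplate. If application code needed to publish as well, a small producer component could look like this sketch (the KafkaProducer class and its sendUser method are hypothetical, not part of the original demo):

package com.example.demokafkaredises;

import com.alibaba.fastjson.JSON;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class KafkaProducer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Serialize the user to JSON and publish it to the topic KafkaConsumer listens on
    public void sendUser(User user) {
        kafkaTemplate.send("testTopic", JSON.toJSONString(user));
    }
}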
package com.example.demokafkaredises;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class TestController {

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    // Index a sample user, then read it back by id
    @RequestMapping("es")
    public String es() {
        User user = new User();
        user.setId(1);
        user.setName("zhangsan");
        EsUtils.saveObject(elasticsearchTemplate, "1", user);
        Object o = EsUtils.selectById(elasticsearchTemplate, User.class, 1);
        System.out.println(o);
        return "es";
    }
}
package com.example.demokafkaredises;

import org.springframework.data.elasticsearch.annotations.Document;

@Document(indexName = "user", type = "user")
public class User {

    private Integer id;
    private String name;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String toString() {
        return "User{" +
                "id=" + id +
                ", name='" + name + '\'' +
                '}';
    }
}
Configuration file (application.properties)
# Redis configuration
spring.redis.host=192.168.16.129
spring.redis.port=6379
spring.redis.database=0
# Kafka broker address(es); several brokers can be listed, comma-separated
spring.kafka.bootstrap-servers=192.168.16.129:9092
spring.kafka.producer.retries=0
# Upper bound, in bytes, on each producer batch (16 KB)
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
# Serializers for message keys and values
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
# Default consumer group id
spring.kafka.consumer.group-id=test-group
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=100
# Deserializers for message keys and values
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
#spring.datasource.url=jdbc:mysql://localhost:3306/1711d?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai
#spring.datasource.username=root
#spring.datasource.password=root
#spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
# Elasticsearch (transport client, port 9300)
spring.data.elasticsearch.cluster-name=elasticsearch
spring.data.elasticsearch.cluster-nodes=192.168.16.129:9300
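With this configuration, RedisTemplate falls back to JDK serialization, so the key "name" written below shows up in redis-cli as a binary blob. If human-readable keys and values are wanted, a serializer configuration along these lines can be added (the RedisConfig class is an optional sketch, not part of the original demo):

package com.example.demokafkaredises;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;

@Configuration
public class RedisConfig {

    // Store keys as plain strings and values as JSON instead of JDK-serialized bytes
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        template.setKeySerializer(new StringRedisSerializer());
        template.setHashKeySerializer(new StringRedisSerializer());
        template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
        template.setHashValueSerializer(new GenericJackson2JsonRedisSerializer());
        return template;
    }
}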
Test class
package com.example.demokafkaredises;

import com.alibaba.fastjson.JSON;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.HashMap;
import java.util.Map;

@SpringBootTest
class DemoKafkaRedisEsApplicationTests {

    @Autowired
    private RedisTemplate redisTemplate;

    @Autowired
    private KafkaTemplate kafkaTemplate;

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    @Test
    void contextLoads() {
        // Redis: write a key and read it back
        redisTemplate.opsForValue().set("name", "张三");
        Object name = redisTemplate.opsForValue().get("name");
        System.out.println(name);
        // Kafka: publish a JSON message that KafkaConsumer will pick up
        Map<String, Object> userMap = new HashMap<>();
        userMap.put("id", "1");
        userMap.put("name", "张三");
        String userJsonStr = JSON.toJSONString(userMap);
        kafkaTemplate.send("testTopic", userJsonStr);
        // Elasticsearch: index a document and fetch it by id
        User user = new User();
        user.setId(1);
        user.setName("zhangsan");
        EsUtils.saveObject(elasticsearchTemplate, "1", user);
        Object o = EsUtils.selectById(elasticsearchTemplate, User.class, 1);
        System.out.println(o);
    }
}
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.5.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.example</groupId>
    <artifactId>demo-kafka-redis-es</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>demo-kafka-redis-es</name>
    <description>Demo project for Spring Boot</description>
    <properties>
        <java.version>1.8</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
            <exclusions>
                <exclusion>
                    <groupId>org.junit.vintage</groupId>
                    <artifactId>junit-vintage-engine</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.54</version>
        </dependency>
        <dependency>
            <groupId>com.github.pagehelper</groupId>
            <artifactId>pagehelper</artifactId>
            <version>5.1.2</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>