Integrating Redis, Kafka, and Elasticsearch in IDEA (Spring Boot)


package com.example.demokafkaredises;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class DemoKafkaRedisEsApplication {

    public static void main(String[] args) {
        // Avoid the Netty "availableProcessors is already set" conflict between the
        // Elasticsearch transport client and other Netty users in the same JVM.
        System.setProperty("es.set.netty.runtime.available.processors", "false");
        SpringApplication.run(DemoKafkaRedisEsApplication.class, args);
    }

}

package com.example.demokafkaredises;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import com.github.pagehelper.PageInfo;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.elasticsearch.core.SearchResultMapper;
import org.springframework.data.elasticsearch.core.aggregation.AggregatedPage;
import org.springframework.data.elasticsearch.core.aggregation.impl.AggregatedPageImpl;
import org.springframework.data.elasticsearch.core.query.GetQuery;
import org.springframework.data.elasticsearch.core.query.IndexQuery;
import org.springframework.data.elasticsearch.core.query.IndexQueryBuilder;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.elasticsearch.core.query.SearchQuery;
/**
 * Elasticsearch utility class
 */
public class EsUtils {
	/**
	 * Save or update a document.
	 * @param elasticsearchTemplate
	 * @param id document id
	 * @param object entity to index
	 */
	public static void saveObject(ElasticsearchTemplate elasticsearchTemplate, String id, Object object) {
		// Build the index query for the given id and object
		IndexQuery query = new IndexQueryBuilder().withId(id).withObject(object).build();
		// Index (create or overwrite) the document
		elasticsearchTemplate.index(query);
	}

	/**
	 * Batch delete by id.
	 * @param elasticsearchTemplate
	 * @param clazz entity class (determines the index)
	 * @param ids primary keys to delete
	 */
	public static void deleteObject(ElasticsearchTemplate elasticsearchTemplate, Class<?> clazz, Integer[] ids) {
		for (Integer id : ids) {
			elasticsearchTemplate.delete(clazz, String.valueOf(id));
		}
	}

	/**
	 *
	 * @Title: selectById
	 * @Description: query a document from the ES server by id
	 * @param elasticsearchTemplate
	 * @param clazz entity class
	 * @param id primary key
	 * @return: Object
	 */
	public static Object selectById(ElasticsearchTemplate elasticsearchTemplate, Class<?> clazz, Integer id) {
		GetQuery query = new GetQuery();
		query.setId(String.valueOf(id));
		return elasticsearchTemplate.queryForObject(query, clazz);
	}

	/**
	 * Query documents by keyword, with keyword highlighting and a configurable sort field.
	 * @param elasticsearchTemplate
	 * @param keyword search keyword
	 * @param clazz entity class of the results
	 * @param sortField field to sort by (ascending)
	 * @param pageNum page number, starting at 1
	 * @param pageSize page size
	 * @param highLightfieldNames comma-separated field names to highlight
	 * @return a PageInfo wrapping the matched entities
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public static PageInfo<?> findByKeyword(ElasticsearchTemplate elasticsearchTemplate, String keyword,
			Class<?> clazz, String sortField, Integer pageNum, Integer pageSize, String highLightfieldNames) {
		/** The PageInfo to return **/
		PageInfo<?> pageInfo = new PageInfo<>();
		/** ES query result page **/
		AggregatedPage<?> page = null;
		/** Pageable: Spring Data pages are zero-based, so subtract 1 from pageNum **/
		final Pageable pageable = PageRequest.of(pageNum - 1, pageSize, Sort.by(Sort.Direction.ASC, sortField));
		/** Query object **/
		SearchQuery searchQuery = null;
		/** Query builder **/
		QueryBuilder queryBuilder = null;
		/** Markup wrapped around highlighted fragments (the original left these empty, which produced no visible highlighting) **/
		String preTags = "<em>";
		String postTags = "</em>";
		/** Build one highlight field per requested field name **/
		String[] fieldNames = highLightfieldNames.split(",");
		HighlightBuilder.Field[] highlightFields = new HighlightBuilder.Field[fieldNames.length];
		for (int i = 0; i < fieldNames.length; i++) {
			highlightFields[i] = new HighlightBuilder.Field(fieldNames[i]).preTags(preTags).postTags(postTags);
		}
		/** Run the query **/
		if (keyword != null && !"".equals(keyword)) {
			/** Multi-match the keyword against the highlighted fields **/
			queryBuilder = QueryBuilders.multiMatchQuery(keyword, fieldNames);
			searchQuery = new NativeSearchQueryBuilder()
					.withQuery(queryBuilder)
					.withHighlightFields(highlightFields)
					.withPageable(pageable).build();
			/** Map the raw SearchResponse back into entities, merging in highlight fragments **/
			page = elasticsearchTemplate.queryForPage(searchQuery, clazz, new SearchResultMapper() {

				@Override
				public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable1) {
					List<T> contentList = new ArrayList<T>();
					long total = 0L;
					try {
						/** Result hits **/
						SearchHits hits = response.getHits();
						/** Empty result **/
						if (hits == null) {
							return new AggregatedPageImpl<T>(contentList, pageable, total);
						}
						/** Total hit count **/
						total = hits.getTotalHits();
						/** Hit array **/
						SearchHit[] searchHits = hits.getHits();
						/** Convert each hit into an entity via reflection **/
						for (SearchHit searchHit : searchHits) {
							/** New entity instance **/
							T entity = clazz.newInstance();
							/** All declared fields of the entity **/
							Field[] fields = clazz.getDeclaredFields();
							for (Field field : fields) {
								/** Make private fields writable **/
								field.setAccessible(true);
								/** Field name **/
								String fieldName = field.getName();
								/** Skip fields that are not part of the ES document **/
								if ("serialVersionUID".equals(fieldName) || "user".equals(fieldName)
										|| "channel".equals(fieldName) || "category".equals(fieldName)) {
									continue;
								}
								/** Field value from the _source map **/
								Object fieldValue = searchHit.getSourceAsMap().get(fieldName);
								/** Convert stored timestamps back into Date **/
								Class<?> type = field.getType();
								if (type == Date.class) {
									fieldValue = new Date(Long.valueOf(fieldValue + ""));
								}
								/** If the field was highlighted, use the fragment wrapped in the highlight tags instead **/
								HighlightField highlightField = searchHit.getHighlightFields().get(fieldName);
								if (highlightField != null) {
									fieldValue = highlightField.getFragments()[0].toString();
								}
								field.set(entity, fieldValue);
							}
							contentList.add(entity);
						}
					} catch (Exception e) {
						e.printStackTrace();
					}

					return new AggregatedPageImpl<T>(contentList, pageable, total);
				}

				@Override
				public <T> T mapSearchHit(SearchHit searchHit, Class<T> aClass) {
					return null;
				}
			});

		} else {
			/** No keyword: page through all documents in the index **/
			searchQuery = new NativeSearchQueryBuilder().withPageable(pageable).build();
			page = elasticsearchTemplate.queryForPage(searchQuery, clazz);
		}
		/** Populate the PageInfo wrapper **/
		int totalCount = (int) page.getTotalElements();
		// Round up: number of pages = ceil(totalCount / pageSize)
		int pages = (totalCount + pageSize - 1) / pageSize;
		pageInfo.setTotal(page.getTotalElements());
		pageInfo.setPageNum(pageNum);
		pageInfo.setPageSize(pageSize);
		pageInfo.setPrePage(pageNum - 1);
		pageInfo.setNextPage(pageNum + 1);
		pageInfo.setPages(pages);
		pageInfo.setHasNextPage(pageNum < pages);
		pageInfo.setHasPreviousPage(pageNum > 1);
		int[] navigatepageNums = new int[pages];
		for (int i = 0; i < pages; i++) {
			navigatepageNums[i] = i + 1;
		}
		pageInfo.setNavigatepageNums(navigatepageNums);
		List content = page.getContent();
		pageInfo.setList(content);
		return pageInfo;
	}
}
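
The controller further down only exercises saveObject and selectById. As a minimal sketch of how findByKeyword could be wired up (this controller is not part of the original project; the endpoint path "search", the sort field "id" and the highlight field "name" are assumptions based on the User entity below):

package com.example.demokafkaredises;

import com.github.pagehelper.PageInfo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class SearchController {

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    /** Search users by keyword, highlighting "name" and sorting by "id". */
    @RequestMapping("search")
    public PageInfo<?> search(@RequestParam(defaultValue = "") String keyword,
                              @RequestParam(defaultValue = "1") Integer pageNum,
                              @RequestParam(defaultValue = "10") Integer pageSize) {
        return EsUtils.findByKeyword(elasticsearchTemplate, keyword, User.class, "id", pageNum, pageSize, "name");
    }
}

With this in place, a request like /search?keyword=zhangsan would return a PageInfo whose list entries have the matched name wrapped in the <em> highlight tags; an empty keyword falls through to the branch that pages over all documents.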

package com.example.demokafkaredises;

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class KafkaConsumer {

    @KafkaListener(topics = {"testTopic"})
    public void listen(ConsumerRecord<String, Object> consumerRecord) {
        System.out.println("the Message:" + consumerRecord.value());
        // The producer side sends the user as a JSON string; parse it back into a User
        Object value = consumerRecord.value();
        User user = JSON.parseObject(value.toString(), User.class);
        System.out.println(user);
    }
}
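
The consumer expects a JSON string on testTopic. In the original project the test class sends that message directly through KafkaTemplate; if a reusable sender is preferred, a minimal producer sketch could look like this (the class name KafkaProducer and the sendUser method are illustrative, not part of the original code):

package com.example.demokafkaredises;

import com.alibaba.fastjson.JSON;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class KafkaProducer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /** Serialize the user to JSON and publish it to the topic the consumer listens on. */
    public void sendUser(User user) {
        kafkaTemplate.send("testTopic", JSON.toJSONString(user));
    }
}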

package com.example.demokafkaredises;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class TestController {

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    @RequestMapping("es")
    public String es() {
        // Index a user document, then read it back by id
        User user = new User();
        user.setId(1);
        user.setName("zhangsan");
        EsUtils.saveObject(elasticsearchTemplate, "1", user);
        Object o = EsUtils.selectById(elasticsearchTemplate, User.class, 1);
        System.out.println(o);
        return "es";
    }
}

package com.example.demokafkaredises;

import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;

@Document(indexName = "user", type = "user")
public class User {

    /** Document id, explicitly marked for Spring Data Elasticsearch */
    @Id
    private Integer id;
    private String name;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String toString() {
        return "User{" +
                "id=" + id +
                ", name='" + name + '\'' +
                '}';
    }
}

Configuration file (application.properties)

# Redis configuration
spring.redis.host=192.168.16.129
spring.redis.port=6379
spring.redis.database=0


# Kafka broker address(es); multiple addresses can be listed, comma-separated
spring.kafka.bootstrap-servers=192.168.16.129:9092
spring.kafka.producer.retries=0
# Producer batch size in bytes (not a message count)
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
# Serializers for message keys and values
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer

# Default consumer group id
spring.kafka.consumer.group-id=test-group
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=100
# Deserializers for message keys and values
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer

#spring.datasource.url=jdbc:mysql://localhost:3306/1711d?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai
#spring.datasource.username=root
#spring.datasource.password=root
#spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver

# Elasticsearch
spring.data.elasticsearch.cluster-name=elasticsearch
spring.data.elasticsearch.cluster-nodes=192.168.16.129:9300
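
The properties above leave RedisTemplate on Spring Boot's default JDK serialization, so the keys and values written by the test below show up as binary in redis-cli. If readable String keys and JSON values are wanted, a configuration class along these lines could be added (this RedisConfig class is an optional sketch, not part of the original project):

package com.example.demokafkaredises;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;

@Configuration
public class RedisConfig {

    /** Use readable String keys and JSON values instead of JDK serialization. */
    @Bean
    public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
        RedisTemplate<String, Object> template = new RedisTemplate<>();
        template.setConnectionFactory(factory);
        template.setKeySerializer(new StringRedisSerializer());
        template.setValueSerializer(new GenericJackson2JsonRedisSerializer());
        template.setHashKeySerializer(new StringRedisSerializer());
        template.setHashValueSerializer(new GenericJackson2JsonRedisSerializer());
        return template;
    }
}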


Test class

package com.example.demokafkaredises;

import com.alibaba.fastjson.JSON;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.HashMap;
import java.util.Map;

@SpringBootTest
class DemoKafkaRedisEsApplicationTests {

    @Autowired
    private RedisTemplate redisTemplate;

    @Autowired
    private KafkaTemplate kafkaTemplate;

    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;

    @Test
    void contextLoads() {
//        System.setProperty("es.set.netty.runtime.available.processors", "false");
        // Redis: write a key and read it back
        redisTemplate.opsForValue().set("name", "张三");
        Object name = redisTemplate.opsForValue().get("name");
        System.out.println(name);

        // Kafka: send a user as a JSON string to the topic the consumer listens on
        Map<String, Object> userMap = new HashMap<>();
        userMap.put("id", "1");
        userMap.put("name", "张三");
        String userJsonStr = JSON.toJSONString(userMap);
        kafkaTemplate.send("testTopic", userJsonStr);

        // Elasticsearch: index a user document and read it back by id
        User user = new User();
        user.setId(1);
        user.setName("zhangsan");
        EsUtils.saveObject(elasticsearchTemplate, "1", user);
        Object o = EsUtils.selectById(elasticsearchTemplate, User.class, 1);
        System.out.println(o);
    }

}

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.5.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.example</groupId>
    <artifactId>demo-kafka-redis-es</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>demo-kafka-redis-es</name>
    <description>Demo project for Spring Boot</description>

    <properties>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
            <exclusions>
                <exclusion>
                    <groupId>org.junit.vintage</groupId>
                    <artifactId>junit-vintage-engine</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.54</version>
        </dependency>
        <dependency>
            <groupId>com.github.pagehelper</groupId>
            <artifactId>pagehelper</artifactId>
            <version>5.1.2</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>
