Business requirement: the frontend page submits a query request, and because the backend data volume is large, the query has to run asynchronously after the frontend submits.
After the frontend submits, Redis can serve as the message queue driving the asynchronous query. With plain multithreading, a large burst of submissions would spawn a large number of threads; with a message queue, one query finishes before the next task starts.
First create a consumer group. The group the listener subscribes to must be the same group that was created, or the listener will not receive any messages.
@GetMapping("/createGroup")
@ApiOperation("创建消费组")
public CommonResult> createGroup(String streamKey, String groupName ) {
// streamKey : redis_stream_01
// groupName : group_b
return CommonResult.success(redisStream.createGroup(streamKey, groupName));
}
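CommonResult is the project's own response wrapper and is not shown in the post; a minimal sketch of what it might look like (the field names here are assumptions, the real class likely carries more fields such as message and timestamp):
// Hypothetical minimal response envelope assumed by the controllers above.
public class CommonResult<T> {
    private int code;
    private T data;

    public static <T> CommonResult<T> success(T data) {
        CommonResult<T> result = new CommonResult<>();
        result.code = 200;
        result.data = data;
        return result;
    }

    public int getCode() { return code; }
    public T getData() { return data; }
}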
Producer
@GetMapping("/query")
@ApiOperation("query")
public CommonResult> createGroup(QueryModel queryModel) {
Map map = new LinkedHashMap<>();
map.put("params", JSON.toJSONString(queryModel));
redisStream.addMap("redis_stream_01", map);
return CommonResult.success("提交成功");
}
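QueryModel is likewise project-specific; any bean that fastjson can serialize works. A hypothetical placeholder with illustrative field names:
// Hypothetical query parameters; the real QueryModel depends on the business.
public class QueryModel {
    private String keyword;
    private String startDate;
    private String endDate;

    public String getKeyword() { return keyword; }
    public void setKeyword(String keyword) { this.keyword = keyword; }
    public String getStartDate() { return startDate; }
    public void setStartDate(String startDate) { this.startDate = startDate; }
    public String getEndDate() { return endDate; }
    public void setEndDate(String endDate) { this.endDate = endDate; }
}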
Consumer
package com.dpxdata.backend.report;
import com.alibaba.fastjson.JSON;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.RecordId;
import org.springframework.data.redis.stream.StreamListener;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.Map;
/**
 * @Author: LiSaiHang
 * @Date: 2023/2/7 10:07 AM
 */
@Slf4j
@Component
public class ConsumeListener implements StreamListener<String, MapRecord<String, String, String>> {
    @Autowired
    private RedisStream redisStream;
    // The configuration below registers this listener with `new ConsumeListener()`,
    // an instance Spring never injects, so the Spring-managed bean is exposed
    // through a static handle to make the autowired RedisStream reachable.
    private static ConsumeListener consumeListener;
    @PostConstruct
    public void init() {
        consumeListener = this;
        consumeListener.redisStream = this.redisStream;
    }
    @Override
    public void onMessage(MapRecord<String, String, String> message) {
        String stream = message.getStream();
        RecordId id = message.getId();
        Map<String, String> map = message.getValue();
        log.info("[auto] group:[group_b] received a message stream:[{}],id:[{}],value:[{}]", stream, id, map);
        // TODO actual business logic: convert the JSON payload back into an object
        final QueryModel queryModel = JSON.parseObject(map.get("params"), QueryModel.class);
        // Delete the message once it has been consumed
        consumeListener.redisStream.del(stream, id.getValue());
    }
}
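The static self-reference above only exists because of the `new ConsumeListener()` registration. A cleaner alternative (a sketch, not the author's code) is to autowire the managed bean into RedisStreamConfiguration and register that instead, after which the static indirection can be dropped:
// In RedisStreamConfiguration (sketch):
@Autowired
private ConsumeListener consumeListener;

// ...and inside streamMessageListenerContainer(), register the bean:
streamMessageListenerContainer.receiveAutoAck(Consumer.from("group_b", "consumer-a"),
        StreamOffset.create(streamKey, ReadOffset.lastConsumed()), consumeListener);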
Configuration and wrapper classes
RedisStream
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.stream.Record;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
 * @Author: LiSaiHang
 * @Date: 2023/2/7 10:08 AM
 */
@Component
public class RedisStream {
    @Autowired
    private StringRedisTemplate redisTemplate;
    /**
     * create by: Lsh
     * description: Create a consumer group
     * create time: 2022/5/11 16:45
     * @param:
     * @return java.lang.String
     */
    public String createGroup(String key, String group) {
        return redisTemplate.opsForStream().createGroup(key, group);
    }
    /**
     * create by: Lsh
     * description: Add a Map message
     * create time: 2022/5/11 16:28
     * @param: key
     * @param: value
     * @return
     */
    public String addMap(String key, Map<String, String> value) {
        return redisTemplate.opsForStream().add(key, value).getValue();
    }
    /**
     * create by: Lsh
     * description: Add a Record message
     * create time: 2022/5/11 16:30
     * @param: record
     * @return
     */
    public String addRecord(Record<String, ?> record) {
        return redisTemplate.opsForStream().add(record).getValue();
    }
    /**
     * create by: Lsh
     * description: Acknowledge consumption
     * create time: 2022/5/19 11:21
     * @param: key
     * @param: group
     * @param: recordIds
     * @return java.lang.Long
     */
    public Long ack(String key, String group, String... recordIds) {
        return redisTemplate.opsForStream().acknowledge(key, group, recordIds);
    }
    /**
     * create by: Lsh
     * description: Delete messages. When every message in a stream node has been
     * deleted, the node itself is destroyed automatically.
     * create time: 2023/02/08
     * @param: key
     * @param: recordIds
     * @return java.lang.Long
     */
    public Long del(String key, String... recordIds) {
        return redisTemplate.opsForStream().delete(key, recordIds);
    }
}
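The ack() helper goes unused in the auto-ack demo, but with a manually acknowledged group (container.receive(...) instead of receiveAutoAck(...), as in the commented-out group-a registration in the configuration below) the listener has to confirm each message itself, or the message stays in the group's pending entries list (PEL). A sketch of such a listener, assuming it is registered as a Spring bean rather than via `new`:
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.stream.StreamListener;
import org.springframework.stereotype.Component;

@Component
public class ManualAckListener implements StreamListener<String, MapRecord<String, String, String>> {
    @Autowired
    private RedisStream redisStream;

    @Override
    public void onMessage(MapRecord<String, String, String> message) {
        // ... process the payload here ...
        // Confirm for the group (must match the group used in receive()),
        // then remove the entry from the stream.
        redisStream.ack(message.getStream(), "group-a", message.getId().getValue());
        redisStream.del(message.getStream(), message.getId().getValue());
    }
}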
RedisStreamConfiguration
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.stream.Consumer;
import org.springframework.data.redis.connection.stream.MapRecord;
import org.springframework.data.redis.connection.stream.ReadOffset;
import org.springframework.data.redis.connection.stream.StreamOffset;
import org.springframework.data.redis.stream.StreamMessageListenerContainer;
import org.springframework.util.ErrorHandler;
import java.time.Duration;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * @Author: LiSaiHang
 * @Date: 2023/2/7 10:04 AM
 */
@Configuration
@Slf4j
public class RedisStreamConfiguration {
    @Autowired
    private RedisConnectionFactory redisConnectionFactory;
    @Bean(initMethod = "start", destroyMethod = "stop")
    public StreamMessageListenerContainer<String, MapRecord<String, String, String>> streamMessageListenerContainer() {
        AtomicInteger index = new AtomicInteger(1);
        int processors = Runtime.getRuntime().availableProcessors();
        ThreadPoolExecutor executor = new ThreadPoolExecutor(processors, processors, 0, TimeUnit.SECONDS,
                new LinkedBlockingDeque<>(), r -> {
            Thread thread = new Thread(r);
            thread.setName("async-stream-consumer-" + index.getAndIncrement());
            thread.setDaemon(true);
            return thread;
        });
        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, MapRecord<String, String, String>> options =
                StreamMessageListenerContainer.StreamMessageListenerContainerOptions
                        .builder()
                        // Maximum number of messages fetched per poll
                        .batchSize(10)
                        // Executor that runs the Stream poll task
                        .executor(executor)
                        // How long to block when the Stream has no messages;
                        // must be shorter than `spring.redis.timeout`
                        .pollTimeout(Duration.ofSeconds(3))
                        // Handles exceptions thrown while fetching messages or
                        // while dispatching them to the listener
                        .errorHandler(new ErrorHandler() {
                            @Override
                            public void handleError(Throwable throwable) {
                                log.error("[MQ handler exception] " + throwable.getMessage(), throwable);
                            }
                        })
                        .build();
        StreamMessageListenerContainer<String, MapRecord<String, String, String>> streamMessageListenerContainer =
                StreamMessageListenerContainer.create(redisConnectionFactory, options);
        // Independent consumption
        String streamKey = "redis_stream_01";
        // Consumer group A, manual ack:
        // start from the messages in the group not yet delivered to any consumer
        // streamMessageListenerContainer.receive(Consumer.from("group-a", "consumer-a"),
        //         StreamOffset.create(streamKey, ReadOffset.lastConsumed()), new ConsumeListener1());
        // Consumer group B, auto ack
        streamMessageListenerContainer.receiveAutoAck(Consumer.from("group_b", "consumer-a"),
                StreamOffset.create(streamKey, ReadOffset.lastConsumed()), new ConsumeListener());
        return streamMessageListenerContainer;
    }
}
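One gotcha with this setup: the consumer group must exist before the container starts polling, otherwise XREADGROUP fails with a NOGROUP error, so the /createGroup endpoint has to be called before the first message is produced. A defensive alternative is to create the group inside streamMessageListenerContainer() before the receiveAutoAck call; a sketch (Redis answers BUSYGROUP when the group already exists, and depending on the Spring Data Redis version the stream itself may need to exist first):
// Sketch: create the group at startup if it is missing.
StringRedisTemplate template = new StringRedisTemplate(redisConnectionFactory);
try {
    template.opsForStream().createGroup(streamKey, "group_b");
} catch (Exception e) {
    // "BUSYGROUP Consumer Group name already exists" lands here.
    log.info("consumer group already exists: {}", e.getMessage());
}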