集成kafka之前需要安装kafka。详见上一篇博客安装zookeeper+kafka。本文不再赘述
无网络环境的Centos7中安装Docker
pom.xml 引入依赖:
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
spring:
  kafka:
    # Custom switch (not a Spring property): the config/beans below are only
    # registered when this is true. Remove if you don't need the toggle.
    enable: false
    # ---- Producer ----
    producer:
      acks: all # "all" waits for all in-sync replicas (same as -1)
      retries: 3 # retry failed sends up to 3 times
      bootstrap-servers: 192.168.1.206:9092 # kafka broker address
      key-serializer: org.apache.kafka.common.serialization.StringSerializer # key serializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer # value serializer
    # ---- Consumer ----
    consumer:
      enable-auto-commit: false # offsets are committed manually in the listener
      bootstrap-servers: ${spring.kafka.producer.bootstrap-servers} # reuse producer brokers
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: lgDefaultGroup # consumer group
      # Raw Kafka client properties not mapped by Spring Boot
      properties:
        # Max interval between polls; exceeding it may trigger a rebalance
        max.poll.interval.ms: 500000
        # Max records returned by a single poll
        max.poll.records: 50
    template:
      # Default topic used by KafkaTemplate#sendDefault
      default-topic: my-topic
其他参数可参照 KafkaProperties 类;Spring Boot 中 Kafka 的自动配置类为 KafkaAutoConfiguration。
配置消费监听
/**
 * Kafka configuration, active only when {@code spring.kafka.enable=true}.
 *
 * <p>Registers the listener container factory backing {@code @KafkaListener}
 * methods and a string-typed template with the configured default topic.
 *
 * @author Singh
 * @since 2020-06-27
 */
@Configuration
@EnableKafka
@ConditionalOnProperty(name = "spring.kafka.enable", havingValue = "true")
public class KafkaConfig {

    /**
     * Listener container factory for {@code @KafkaListener} methods.
     *
     * <p>Fix vs. original: the generic key type was {@code Integer}, which
     * contradicts the configured {@code StringDeserializer} key deserializer;
     * both type arguments are now {@code String} to match the YAML config.
     *
     * @param kafkaConsumerFactory consumer factory auto-configured from spring.kafka.consumer.*
     * @return the configured concurrent listener container factory
     */
    @Bean
    @Primary
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>>
            kafkaListenerContainerFactory(ConsumerFactory<String, String> kafkaConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(kafkaConsumerFactory);
        // Number of concurrent consumer threads — should not exceed the
        // topic's partition count (it is NOT the partition count itself).
        factory.setConcurrency(3);
        // Timeout (ms) for each poll of the consumer.
        factory.getContainerProperties().setPollTimeout(3000);
        // Commit offsets only when the listener calls Acknowledgment#acknowledge().
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }

    /**
     * String-typed KafkaTemplate pre-wired with spring.kafka.template.default-topic.
     *
     * @param kafkaProducerFactory producer factory auto-configured from spring.kafka.producer.*
     * @param properties bound spring.kafka.* properties
     * @return the template, unless the application already defines one
     */
    @Bean
    @ConditionalOnMissingBean(StringKafkaTemplate.class)
    public StringKafkaTemplate stringKafkaTemplate(
            ProducerFactory<String, String> kafkaProducerFactory, KafkaProperties properties) {
        StringKafkaTemplate kafkaTemplate = new StringKafkaTemplate(kafkaProducerFactory);
        kafkaTemplate.setDefaultTopic(properties.getTemplate().getDefaultTopic());
        return kafkaTemplate;
    }
}
消费者监听DefaultKafkaListener
/**
* @Description 消费监听,默认主题 **此处需要手动提交**
* @Author Singh
* @Date 2020-06-27 11:11
* @Version
**/
@Slf4j
@Component
@ConditionalOnProperty(name = "spring.kafka.enable",havingValue = "true")
public class DefaultKafkaListener implements BeanPostProcessor {
@Autowired
private StringRedisTemplate redisTemplate;
//缓存自定义消费者
private Map<Integer, LgKafkaConsumer> cacheHandlers = new HashMap<>();
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
Class<?> targetCls = bean.getClass();
KafkaConsumer[] kafkaHandlers = targetCls.getAnnotationsByType(KafkaConsumer.class);
if(kafkaHandlers.length > 0){
int type = kafkaHandlers[0].type();
if(bean instanceof LgKafkaConsumer){
cacheHandlers.put(type, (LgKafkaConsumer) bean);
}
}
return bean;
}
@KafkaListener(id = "singhCustomer" ,topics = {"my-topic"})
public void listen(Consumer consumer, ConsumerRecord<String, String> record, Acknowledgment ack){
log.info("kafka consume -------------> partition={},offset={} ",record.partition(),record.offset());
String id = null;
try{
KafkaMessage kafkaMessage = JSON.parseObject(record.value(),KafkaMessage.class);
int type = kafkaMessage.getType();
id = id(kafkaMessage);
if(redisTemplate.hasKey(id)){
ack.acknowledge();
return ;
}
LgKafkaConsumer messageHandler = cacheHandlers.get(type);
if(messageHandler == null){
log.error("type = {} kafka message handler not exist!" , type);
ack.acknowledge();
return;
}
Type type0 = messageHandler.getClass().getGenericInterfaces()[0];
Type type1 = ((ParameterizedType)type0).getActualTypeArguments()[0];
Class<?> clzz = (Class<?>) type1;
KafkaMessage msg = (KafkaMessage) JSON.parseObject(record.value(),clzz);
messageHandler.handle(msg);//handle kafka message
ack.acknowledge();
redisTemplate.opsForValue().set(id,"0",1, TimeUnit.HOURS);
}catch (Exception e){
if(id == null){
return ;
}
String retryTimesStr = redisTemplate.opsForValue().get(id);
int retryTimes = retryTimesStr == null ? 0 : Integer.valueOf(retryTimesStr);
if(retryTimes < 1){//重试1次
consumer.seek(new TopicPartition(record.topic(),record.partition()),record.offset());
redisTemplate.opsForValue().increment(id,1);
}else{
ack.acknowledge();
redisTemplate.delete(id);
}
}
}
private String id(KafkaMessage message){
return CommonConstant.PREFIX_KAFKA_CACHE_ID + message.getId();
}
}
注解消费者
/**
 * Marks a Spring bean as a Kafka message handler.
 *
 * <p>Annotated beans that implement {@code LgKafkaConsumer} are picked up at
 * startup and registered under {@link #type()}, which is matched against the
 * {@code type} field of incoming {@code KafkaMessage} payloads to route each
 * record to its handler.
 *
 * @author Singh
 * @since 2020-06-28
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Component
public @interface KafkaConsumer {

    /** Message type this handler consumes. */
    int type();
}
/**
 * Contract for a typed Kafka message handler.
 *
 * <p>Implementations are discovered via the {@code @KafkaConsumer} annotation;
 * the concrete type argument {@code M} is read reflectively by the listener to
 * deserialize the record payload before dispatch.
 *
 * @param <M> concrete message payload type handled by this consumer
 * @author Singh
 * @since 2020-06-28
 */
public interface LgKafkaConsumer<M extends KafkaMessage> {

    /**
     * Processes one deserialized message.
     *
     * @param object the message payload
     */
    void handle(M object);
}
模仿redis弄一个StringKafkaTemplate.java
/**
 * String-keyed, string-valued {@link KafkaTemplate}, analogous to
 * StringRedisTemplate: a convenience subtype so the template can be injected
 * by its own concrete type without repeating the generic arguments.
 *
 * @author Singh
 * @since 2020-06-28
 */
public class StringKafkaTemplate extends KafkaTemplate<String,String>{
    /**
     * Create an instance using the supplied producer factory and autoFlush false.
     *
     * @param producerFactory the producer factory.
     */
    public StringKafkaTemplate(ProducerFactory<String, String> producerFactory) {
        super(producerFactory);
    }
}
消息实体类KafkaMessage.java
/**
 * Base class for Kafka message payloads exchanged through LgKafkaProducer.
 *
 * <p>Fix vs. original: added a protected no-arg constructor — the consumer
 * deserializes payloads with fastjson, which needs a default constructor on
 * the target (sub)class; the original only offered the 3-arg constructor.
 *
 * @author Singh
 * @since 2020-06-28
 */
@Data
public abstract class KafkaMessage {

    // Unique message id — used as the redis dedup key on the consumer side.
    private String id;
    // Retry counter carried in the payload.
    private int retryTimes;
    // Discriminator matched against @KafkaConsumer#type() to pick a handler.
    private int type;

    /** No-arg constructor required by JSON deserialization frameworks. */
    protected KafkaMessage() {
    }

    /**
     * @param type       handler discriminator
     * @param retryTimes initial retry counter
     * @param id         unique message id
     */
    public KafkaMessage(int type, int retryTimes, String id) {
        this.id = id;
        this.type = type;
        this.retryTimes = retryTimes;
    }
}
封装一个简单的生产者LgKafkaProducer.java
/**
 * Thin producer facade over {@link StringKafkaTemplate}: serializes a
 * {@code KafkaMessage} to JSON and sends it sync or async, to the default
 * topic or to an explicit one.
 *
 * <p>Bug fixed vs. original: both topic-taking overloads ignored the
 * {@code topic} argument and called {@code sendDefault}; they now call
 * {@code send(topic, payload)}.
 *
 * @author Singh
 * @since 2020-06-28
 */
@Service
@ConditionalOnProperty(name = "spring.kafka.enable", havingValue = "true")
public class LgKafkaProducer {

    @Autowired
    private StringKafkaTemplate kafkaTemplate;

    /**
     * Sends asynchronously to the default topic.
     *
     * @param message  payload, serialized to JSON
     * @param callback invoked with the send result or failure
     */
    public <T extends KafkaMessage> void send(T message,
            ListenableFutureCallback<SendResult<String, String>> callback) {
        kafkaTemplate.sendDefault(JSON.toJSONString(message)).addCallback(callback);
    }

    /**
     * Sends synchronously to the default topic.
     *
     * @param message payload, serialized to JSON
     * @return the broker-confirmed send result
     * @throws RRException if the send fails
     */
    public <T extends KafkaMessage> SendResult<String, String> send(T message) {
        return awaitResult(kafkaTemplate.sendDefault(JSON.toJSONString(message)));
    }

    /**
     * Sends asynchronously to an explicit topic.
     *
     * @param topic    destination topic (was previously ignored — fixed)
     * @param message  payload, serialized to JSON
     * @param callback invoked with the send result or failure
     */
    public <T extends KafkaMessage> void send(String topic, T message,
            ListenableFutureCallback<SendResult<String, String>> callback) {
        kafkaTemplate.send(topic, JSON.toJSONString(message)).addCallback(callback);
    }

    /**
     * Sends synchronously to an explicit topic.
     *
     * @param topic   destination topic (was previously ignored — fixed)
     * @param message payload, serialized to JSON
     * @return the broker-confirmed send result
     * @throws RRException if the send fails
     */
    public <T extends KafkaMessage> SendResult<String, String> send(String topic, T message) {
        return awaitResult(kafkaTemplate.send(topic, JSON.toJSONString(message)));
    }

    /** Blocks on the send future, restoring the interrupt flag if interrupted. */
    private SendResult<String, String> awaitResult(ListenableFuture<SendResult<String, String>> future) {
        try {
            return future.get();
        } catch (InterruptedException e) {
            // Re-assert interruption before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new RRException("发送消息失败!");
        } catch (Exception e) {
            throw new RRException("发送消息失败!");
        }
    }
}
测试
/**
 * JUnit integration test for the producer — boots the full application
 * context, so it requires a reachable Kafka broker and
 * {@code spring.kafka.enable=true}.
 *
 * @author Singh
 * @since 2020-06-24
 */
@Slf4j
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {LigoApplication.class})
public class T {

    @Autowired
    private LgKafkaProducer kafkaProducer;

    // NOTE(review): despite its name, this invokes the SYNCHRONOUS send(T)
    // overload — the async variant takes an extra callback argument.
    @Test
    public void testAsync(){
        // Anonymous subclass because KafkaMessage is abstract;
        // constructor arguments are (type, retryTimes, id).
        KafkaMessage message = new KafkaMessage(1,1,UUID.randomUUID().toString()){};
        kafkaProducer.send(message);
    }
}
欢迎大家留言多多指正!!!