配置文件的方式实现spring整合kafka:
此文主要讲述的内容:
1,连接kafka服务器的配置
2,kafka-customer:消费者配置
3,kafka-provider:提供者配置
4,KfkaUtils:根据topic发送消息
5,消费者根据topic处理消息
一,安装kafka
网上百度一下,一大堆,在这里我就不赘述了(新版本的kafka集成了zookeeper插件,所以只需配置kafka安装包下的zookeeper.properties即可,),下载地址:http://kafka.apache.org/downloads
注意在启动kafka-service之前需启动zookeeper插件
二,配置maven仓库
由于是基于spring集成的kafka的方式 ,所以在添加spring依赖的同时,还需
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>2.1.4.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>1.0.0</version>
</dependency>
三,kafka服务器配置
#brokers集群
bootstrap.servers=192.168.11.38:9092,192.168.11.40:9092
#即所有副本都同步到数据时send方法才返回, 以此来完全判断数据是否发送成功, 理论上来讲数据不会丢失.
acks=all
#发送失败重试次数
retries=10
#批处理条数:当多个记录被发送到同一个分区时,生产者会尝试将记录合并到更少的请求中。这有助于客户端和服务器的性能。
batch.size=1638
#批处理延迟时间上限:即1ms过后,不管是否达到批处理数,都直接发送一次请求
linger.ms=1
#即32MB的批处理缓冲区
buffer.memory=33554432
#消费者群组ID,发布-订阅模式,即如果一个生产者,多个消费者都要消费,那么需要定义自己的群组,同一群组内的消费者只有一个能消费到消息
group.id=order-beta
#如果为true,消费者的偏移量将在后台定期提交。
enable.auto.commit=true
#如何设置为自动提交(enable.auto.commit=true),这里设置自动提交周期
auto.commit.interval.ms=1000
#在使用Kafka的组管理时,用于检测消费者故障的超时
session.timeout.ms=15000
#消费监听器容器并发数
concurrency = 3
四,customer配置
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
       http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
       http://www.springframework.org/schema/context
       http://www.springframework.org/schema/context/spring-context.xsd">

    <context:property-placeholder location="classpath*:kafka.properties" />

    <!-- 消费者原生配置参数 -->
    <bean id="consumerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="${bootstrap.servers}" />
                <entry key="group.id" value="${group.id}" />
                <entry key="enable.auto.commit" value="${enable.auto.commit}" />
                <entry key="session.timeout.ms" value="${session.timeout.ms}" />
                <entry key="key.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer" />
                <entry key="value.deserializer" value="org.apache.kafka.common.serialization.StringDeserializer" />
            </map>
        </constructor-arg>
    </bean>

    <!-- 消费者工厂 -->
    <bean id="consumerFactory" class="org.springframework.kafka.core.DefaultKafkaConsumerFactory">
        <constructor-arg>
            <ref bean="consumerProperties" />
        </constructor-arg>
    </bean>

    <!-- 消息监听器实现类 -->
    <bean id="kafkaConsumerService" class="KafkaSendMessageServiceImpl" />

    <!-- 监听容器属性: 订阅的topic及监听器 -->
    <bean id="containerProperties" class="org.springframework.kafka.listener.config.ContainerProperties">
        <constructor-arg name="topics">
            <list>
                <value>topic1</value>
                <value>topic2</value>
            </list>
        </constructor-arg>
        <property name="messageListener" ref="kafkaConsumerService" />
    </bean>

    <!-- 并发消息监听容器 -->
    <bean id="messageListenerContainer"
          class="org.springframework.kafka.listener.ConcurrentMessageListenerContainer"
          init-method="doStart">
        <constructor-arg ref="consumerFactory" />
        <constructor-arg ref="containerProperties" />
        <property name="concurrency" value="${concurrency}" />
    </bean>
</beans>
五,provider配置
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
       http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
       http://www.springframework.org/schema/context
       http://www.springframework.org/schema/context/spring-context.xsd">

    <context:property-placeholder location="classpath*:kafka.properties" />

    <!-- 生产者原生配置参数 -->
    <bean id="producerProperties" class="java.util.HashMap">
        <constructor-arg>
            <map>
                <entry key="bootstrap.servers" value="${bootstrap.servers}" />
                <entry key="group.id" value="${group.id}" />
                <entry key="retries" value="${retries}" />
                <entry key="batch.size" value="${batch.size}" />
                <entry key="linger.ms" value="${linger.ms}" />
                <entry key="buffer.memory" value="${buffer.memory}" />
                <entry key="acks" value="${acks}" />
                <entry key="key.serializer" value="org.apache.kafka.common.serialization.StringSerializer" />
                <entry key="value.serializer" value="org.apache.kafka.common.serialization.StringSerializer" />
            </map>
        </constructor-arg>
    </bean>

    <!-- 生产者工厂 -->
    <bean id="producerFactory" class="org.springframework.kafka.core.DefaultKafkaProducerFactory">
        <constructor-arg>
            <ref bean="producerProperties" />
        </constructor-arg>
    </bean>

    <!-- KafkaTemplate: autoFlush=true 即每次发送后立即flush -->
    <bean id="kafkaTemplate" class="org.springframework.kafka.core.KafkaTemplate">
        <constructor-arg ref="producerFactory" />
        <constructor-arg name="autoFlush" value="true" />
        <property name="defaultTopic" value="default" />
    </bean>
</beans>
六,简单的KafkaUtils,发送kafka主题消息
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.kafka.support.SendResult; import org.springframework.util.concurrent.FailureCallback; import org.springframework.util.concurrent.ListenableFuture; import org.springframework.util.concurrent.SuccessCallback; public class KafkaSendMsgUtils { public static final ClassPathXmlApplicationContext CONTEXT = new ClassPathXmlApplicationContext("/kafka-provider.xml"); public static <K,T>void sendMessage(String topic, Integer partition, Long timestamp, K key, T data) { KafkaTemplate<K, T> kafkaTemplate = (KafkaTemplate<K, T>) CONTEXT.getBean("kafkaTemplate"); ListenableFutureK, T>> listenableFuture = null; if (kafkaTemplate.getDefaultTopic().equals(topic)) { listenableFuture = kafkaTemplate.sendDefault(partition,timestamp,key,data); }else { listenableFuture = kafkaTemplate.send(topic, partition, timestamp, key, data); } //发送成功回调 SuccessCallback K, T>> successCallback = new SuccessCallback K, T>>() { @Override public void onSuccess(SendResult<K, T> result) { //成功业务逻辑 System.out.println("成功"); } }; //发送失败回调 FailureCallback failureCallback = new FailureCallback() { @Override public void onFailure(Throwable ex) { //失败业务逻辑 throw new RuntimeException(ex); } }; listenableFuture.addCallback(successCallback, failureCallback); } public static void main(String[] args) { for (int i = 0; i < 5; i++) { try { Thread.sleep(10000); } catch (InterruptedException e) { e.printStackTrace(); } //KafkaTemplate kafkaTemplate = (KafkaTemplate KafkaSendMsgUtils.sendMessage("topic1",0,null,"key","kafka-test"); } } }) CONTEXT.getBean("kafkaTemplate");
七,消费者接收消息
import org.apache.kafka.clients.consumer.ConsumerRecord; import org.springframework.kafka.listener.MessageListener; public class KafkaSendMessageServiceImpl implements MessageListener,String> { @Override public void onMessage(ConsumerRecord , String> data) { //根据不同主题,消费 System.out.println("========"); if("topic1".equals(data.topic())){ //逻辑1 System.out.println(data.value()+"被消费"); }else if("topic2".equals(data.topic())){ //逻辑2 System.out.println(data.value()+"主题2 被消费"); } } }