Reposted from: http://www.cnblogs.com/yuanermen/p/5453339.html
I. Overview
Spring Integration Kafka builds on Apache Kafka and Spring Integration to integrate Kafka into Spring applications, which makes development and configuration more convenient.
II. Configuration
1. spring-kafka-consumer.xml

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka
        http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration
        http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task
        http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- Channel the inbound adapter publishes consumed messages to -->
    <int:channel id="inputFromKafka">
        <int:dispatcher task-executor="kafkaMessageExecutor" />
    </int:channel>

    <!-- Executor backing the dispatcher above (the pool size here is illustrative;
         the original definition is not shown in this excerpt) -->
    <task:executor id="kafkaMessageExecutor" pool-size="8" />

    <!-- ZooKeeper connection used by the high-level consumer -->
    <int-kafka:zookeeper-connect id="zookeeperConnect"
        zk-connect="192.168.1.237:2181" zk-connection-timeout="6000"
        zk-session-timeout="6000" zk-sync-time="2000" />

    <!-- Inbound adapter: polls Kafka and writes to inputFromKafka.
         auto-startup must be "true", otherwise nothing is received (see section VII) -->
    <int-kafka:inbound-channel-adapter
        kafka-consumer-context-ref="consumerContext" auto-startup="true" channel="inputFromKafka">
        <int:poller fixed-delay="1" time-unit="MILLISECONDS" />
    </int-kafka:inbound-channel-adapter>

    <!-- String decoder used for message keys and values -->
    <bean id="kafkaDecoder"
        class="org.springframework.integration.kafka.serializer.common.StringDecoder" />

    <!-- Additional native consumer properties (the individual entries are not shown here) -->
    <bean id="consumerProperties"
        class="org.springframework.beans.factory.config.PropertiesFactoryBean" />

    <!-- Bean that receives the consumed messages (see section 5) -->
    <bean id="kafkaConsumerService" class="com.sunney.service.impl.KafkaConsumerService" />

    <!-- Route messages from inputFromKafka to KafkaConsumerService.processMessage -->
    <int:outbound-channel-adapter channel="inputFromKafka"
        ref="kafkaConsumerService" method="processMessage" />

    <int-kafka:consumer-context id="consumerContext"
        consumer-timeout="1000" zookeeper-connect="zookeeperConnect"
        consumer-properties="consumerProperties">
        <int-kafka:consumer-configurations>
            <int-kafka:consumer-configuration
                group-id="default1" value-decoder="kafkaDecoder" key-decoder="kafkaDecoder"
                max-messages="5000">
                <int-kafka:topic id="mytopic" streams="4" />
                <int-kafka:topic id="sunneytopic" streams="4" />
            </int-kafka:consumer-configuration>
        </int-kafka:consumer-configurations>
    </int-kafka:consumer-context>
</beans>
2. spring-kafka-producer.xml

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- Serializer referenced below as "stringSerializer"; its definition is not shown
         in this excerpt, so this bean is an assumed (but conventional) choice -->
    <bean id="stringSerializer" class="org.apache.kafka.common.serialization.StringSerializer" />

    <!-- Avro encoder from the original configuration; its bean id and any constructor
         arguments are not shown (the id here is assumed), and it is not referenced by
         the topic configurations below, so it is kept lazy -->
    <bean id="avroEncoder" lazy-init="true"
        class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder" />

    <!-- Additional native producer properties (the individual entries are not shown here) -->
    <bean id="producerProperties"
        class="org.springframework.beans.factory.config.PropertiesFactoryBean" />

    <!-- Queue channel the application writes to; drained by the outbound adapter -->
    <int:channel id="kafkaTopicTest">
        <int:queue />
    </int:channel>

    <int-kafka:outbound-channel-adapter
        id="kafkaOutboundChannelAdapterTopicTest" kafka-producer-context-ref="producerContextTopicTest"
        auto-startup="true" channel="kafkaTopicTest" order="3">
        <int:poller fixed-delay="1000" time-unit="MILLISECONDS"
            receive-timeout="1" task-executor="taskExecutor" />
    </int-kafka:outbound-channel-adapter>

    <!-- Executor used by the poller above -->
    <task:executor id="taskExecutor" keep-alive="120" queue-capacity="500" />

    <int-kafka:producer-context id="producerContextTopicTest"
        producer-properties="producerProperties">
        <int-kafka:producer-configurations>
            <int-kafka:producer-configuration
                broker-list="192.168.1.237:9090,192.168.1.237:9091,192.168.1.237:9092"
                key-serializer="stringSerializer"
                value-class-type="java.lang.String"
                value-serializer="stringSerializer"
                topic="mytopic" />
            <int-kafka:producer-configuration
                broker-list="192.168.1.237:9090,192.168.1.237:9091,192.168.1.237:9092"
                key-serializer="stringSerializer"
                value-class-type="java.lang.String"
                value-serializer="stringSerializer"
                topic="sunneytopic" />
        </int-kafka:producer-configurations>
    </int-kafka:producer-context>
</beans>
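With both files in place, a minimal bootstrap is enough to try the setup. The sketch below is not part of the original post (the class name is made up, and it assumes component scanning for the com.sunney packages is configured elsewhere); it simply loads the two contexts and keeps the JVM alive so the pollers run:

package com.sunney.demo;

import org.springframework.context.support.ClassPathXmlApplicationContext;

// Hypothetical bootstrap class, not part of the original project.
public class KafkaDemoBootstrap {

    public static void main(String[] args) throws InterruptedException {
        // Loading the two files starts the adapters right away, because both
        // declare auto-startup="true".
        ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(
                "spring-kafka-producer.xml", "spring-kafka-consumer.xml");
        context.registerShutdownHook();

        // Keep the JVM alive so the inbound adapter keeps polling.
        Thread.currentThread().join();
    }
}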
3. Message-sending interface: KafkaService

package com.sunney.service;

/**
 * KafkaService: interface for sending messages.
 *
 * @author Sunney 2016-04-30 11:30:53
 */
public interface KafkaService {

    /**
     * Send a message.
     *
     * @param topic the topic to send to
     * @param obj   the message payload
     */
    public void sendUserInfo(String topic, Object obj);
}
4. Message-sending implementation: KafkaServiceImpl

package com.sunney.service.impl;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;
import org.springframework.stereotype.Service;

import com.sunney.service.KafkaService;

/**
 * KafkaServiceImpl: implementation of the message-sending interface.
 *
 * @author Sunney 2016-04-30 11:31:13
 */
@Service("kafkaService")
public class KafkaServiceImpl implements KafkaService {

    // The queue channel "kafkaTopicTest" defined in spring-kafka-producer.xml
    @Autowired
    @Qualifier("kafkaTopicTest")
    MessageChannel channel;

    public void sendUserInfo(String topic, Object obj) {
        // The KafkaHeaders.TOPIC header tells the outbound adapter which topic to publish to
        channel.send(MessageBuilder.withPayload(obj)
                .setHeader(KafkaHeaders.TOPIC, topic)
                .build());
    }
}
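A caller only needs the injected KafkaService. The class below is a hypothetical example, not part of the original post; it sends each UserDto as one JSON string per message, which matches value-class-type="java.lang.String" in the producer context and the way the consumer re-assembles the batch in the next section:

package com.sunney.service.impl;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.alibaba.fastjson.JSON;
import com.sunney.service.KafkaService;
import com.sunney.service.UserDto;

// Hypothetical caller, not part of the original project.
@Service
public class UserPublisher {

    @Autowired
    private KafkaService kafkaService;

    public void publish(List<UserDto> users) {
        for (UserDto user : users) {
            // One JSON object per Kafka message, serialized with fastjson.
            kafkaService.sendUserInfo("mytopic", JSON.toJSONString(user));
        }
    }
}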
5. Consumer class: KafkaConsumerService

package com.sunney.service.impl;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.sunney.service.UserDto;

/**
 * KafkaConsumerService: receives the consumed messages.
 *
 * @author Sunney 2016-04-30 11:46:14
 */
public class KafkaConsumerService {

    static final Logger logger = LoggerFactory.getLogger(KafkaConsumerService.class);

    /**
     * Invoked by the outbound-channel-adapter in spring-kafka-consumer.xml.
     * The payload is keyed by topic, then by partition; the inner lists hold
     * the messages decoded by the StringDecoder.
     */
    public void processMessage(Map<String, Map<Integer, List<String>>> msgs) {
        logger.info("===============processMessage===============");
        for (Map.Entry<String, Map<Integer, List<String>>> entry : msgs.entrySet()) {
            logger.info("============Topic:" + entry.getKey());
            LinkedHashMap<Integer, List<String>> messages =
                    (LinkedHashMap<Integer, List<String>>) entry.getValue();
            Set<Integer> keys = messages.keySet();
            for (Integer i : keys) {
                logger.info("======Partition:" + i);
            }
            Collection<List<String>> values = messages.values();
            for (Iterator<List<String>> iterator = values.iterator(); iterator.hasNext();) {
                // Build a JSON array string from the partition's batch and parse it into UserDto objects
                String message = "[" + iterator.next() + "]";
                logger.info("=====message:" + message);
                List<UserDto> userList = JSON.parseArray(message, UserDto.class);
                logger.info("=====userList.size:" + userList.size());
            }
        }
    }
}
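The UserDto class used above belongs to the demo project but is not shown here. A minimal sketch that works with fastjson (default constructor plus getters/setters) could look like the following; the field names are assumptions:

package com.sunney.service;

// Hypothetical sketch of UserDto; the real class is in the GitHub project linked
// below, and the field names here are assumptions.
public class UserDto {

    private String name;
    private int age;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }
}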
6. pom

The key dependency versions are spring-integration-kafka 1.3.0.RELEASE, JUnit 4.11, SLF4J 1.7.7 and fastjson 1.2.7. Only the version numbers survive in this excerpt; the library names other than spring-integration-kafka are inferred from the imports above, and the full pom.xml is in the source repository linked below.
VI. Source code: https://github.com/sunney2010/kafka-demo
VII. Problems encountered
1. The consumer side receives no messages
Set auto-startup to "true" on the inbound-channel-adapter in spring-kafka-consumer.xml.