Kafka是由Apache软件基金会开发的一个开源流处理平台,是一种高吞吐量的分布式发布订阅消息系统。
主要包含几个组件:Producer(生产者)、Consumer(消费者)、Broker(服务代理节点)和 Topic(主题)。
当前SpringBoot版本为2.0.2.RELEASE,打包工具为Maven
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.kafkatest</groupId>
    <artifactId>producer</artifactId>
    <version>1.0-SNAPSHOT</version>
    <name>kafka-producer</name>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.2.RELEASE</version>
        <relativePath/>
    </parent>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
        <joda-time.version>2.3</joda-time.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
/**
 * Producer service: publishes String-keyed byte[] messages to the
 * byteArray_topic1 topic through Spring's KafkaTemplate.
 */
@Service
public class KafkaProducerTest {

    @Autowired
    private KafkaTemplate<String, byte[]> kafkaTemplate;

    // Destination topic; key % 3 below assumes it has 3 partitions — TODO confirm.
    private final String topic = "byteArray_topic1";

    /**
     * Sends one message to the topic.
     *
     * @param key   numeric key; key % 3 selects the target partition and
     *              String.valueOf(key) becomes the record key
     * @param value payload string, sent as UTF-8 encoded bytes
     */
    public void sendMessage(int key, String value) {
        // Encode explicitly as UTF-8 rather than the platform default charset,
        // so consumers can decode the payload deterministically on any JVM/OS.
        ProducerRecord<String, byte[]> record = new ProducerRecord<>(topic,
                key % 3, String.valueOf(key),
                value.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        kafkaTemplate.send(record);
    }
}
spring:
kafka:
producer:
bootstrap-servers: 172.169.0.109:9092
batch-size: 16384
retries: 0
buffer-memory: 33554432
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.ByteArraySerializer
这里有一个非常陷阱的问题需要特别注意:序列化类的路径是:org.apache.kafka.common.serialization.StringSerializer
而不是
org.apache.kafka.config.serialization.StringSerializer
否则会出现如下错误:
2019-01-31 11:35:14.794 [main] WARN o.s.c.a.AnnotationConfigApplicationContext -
Exception encountered during context initialization - cancelling refresh attempt: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'kafkaProducerTest': Unsatisfied dependency expressed through field 'kafkaTemplate'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration': Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.boot.context.properties.ConfigurationPropertiesBindException: Error creating bean with name 'spring.kafka-org.springframework.boot.autoconfigure.kafka.KafkaProperties': Could not bind properties to 'KafkaProperties' : prefix=spring.kafka, ignoreInvalidFields=false, ignoreUnknownFields=true; nested exception is org.springframework.boot.context.properties.bind.BindException: Failed to bind properties under 'spring.kafka.producer.key-serializer' to java.lang.Class<?>
2019-01-31 11:35:14.810 [main] ERROR o.s.b.d.LoggingFailureAnalysisReporter -
***************************
APPLICATION FAILED TO START
***************************
Description:
Failed to bind properties under 'spring.kafka.producer.key-serializer' to java.lang.Class<?>:
Property: spring.kafka.producer.key-serializer
Value: org.apache.kafka.config.serialization.StringSerializer
Origin: class path resource [application.yml]:8:25
Reason: No converter found capable of converting from type [java.lang.String] to type [java.lang.Class<?>]
Action:
Update your application's configuration
如果不使用并发获取、批量获取,消费者的代码非常简单。
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.kafkatest</groupId>
    <artifactId>consumer</artifactId>
    <version>1.0-SNAPSHOT</version>
    <name>kafka-consumer</name>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.2.RELEASE</version>
        <relativePath/>
    </parent>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
        <joda-time.version>2.3</joda-time.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
/**
 * Simple (non-batch) Kafka consumer service: logs the key and the
 * UTF-8 decoded value of each received record.
 */
@Service
@Slf4j
public class Listener {

    // Topic this listener consumes; kept for reference/consistency with the producer.
    private final String topic = "byteArray_topic1";

    /**
     * Handles a single consumed record.
     *
     * @param record the consumed record with a String key and raw byte[] value
     */
    public void listen(ConsumerRecord<String, byte[]> record) {
        // Parameterized SLF4J logging avoids eager string concatenation.
        log.info("kafka的key: {}", record.key());
        // Decode explicitly as UTF-8 instead of the platform default charset,
        // matching the producer's encoding on any JVM/OS.
        log.info("kafka的value: {}", new String(record.value(), java.nio.charset.StandardCharsets.UTF_8));
    }
}
spring:
kafka:
consumer:
enable-auto-commit: true
group-id: gridMonitorGroup
auto-commit-interval: 1000
auto-offset-reset: latest
bootstrap-servers: "172.169.0.109:9092"
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.apache.kafka.common.serialization.ByteArrayDeserializer
// Manual consumer configuration enabling concurrent, batch-capable listener containers.
// NOTE(review): the @Value keys below use a top-level "kafka.consumer.*" prefix, which
// matches the second YAML sample (kafka: consumer: ...) but NOT Spring Boot's own
// "spring.kafka.consumer.*" auto-configuration keys shown earlier — confirm which
// application.yml layout is actually deployed, otherwise these placeholders fail to resolve.
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
// Kafka broker address list, e.g. "host:9092".
@Value("${kafka.consumer.bootstrap-servers}")
private String servers;
// Whether offsets are committed automatically by the consumer.
@Value("${kafka.consumer.enable-auto-commit}")
private boolean enableAutoCommit;
// Auto-commit interval in milliseconds (Kafka accepts the String form).
@Value("${kafka.consumer.auto-commit-interval}")
private String autoCommitInterval;
// Consumer group id shared by all concurrent containers created here.
@Value("${kafka.consumer.group-id}")
private String groupId;
// What to do when there is no committed offset: "latest" / "earliest".
@Value("${kafka.consumer.auto-offset-reset}")
private String autoOffsetReset;
// Number of concurrent KafkaMessageListenerContainers (should not exceed partition count).
@Value("${kafka.consumer.concurrency}")
private int concurrency;
// Container factory used by @KafkaListener endpoints: concurrent + batch mode.
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, byte[]>> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, byte[]> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
//并发数量 (number of concurrent containers)
factory.setConcurrency(concurrency);
//批量获取 (deliver records to the listener as a List batch)
factory.setBatchListener(true);
factory.getContainerProperties().setPollTimeout(1500);
return factory;
}
// Builds the underlying consumer factory from the property map below.
public ConsumerFactory<String, byte[]> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}
// Raw Kafka consumer properties; String keys / ByteArray values to match the producer.
public Map<String, Object> consumerConfigs() {
Map<String, Object> propsMap = new HashMap<>();
propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
//最多批量获取50个 (cap each poll at 50 records)
propsMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG,50);
return propsMap;
}
// NOTE(review): Listener is also annotated @Service elsewhere; registering it here
// too may create two beans of the same type — verify only one registration is active.
@Bean
public Listener listener() {
return new Listener();
}
}
/**
 * Batch Kafka consumer: receives a List of records per poll (up to
 * max.poll.records) from partitions 0-2 of byteArray_topic1 and logs each one.
 */
@Service
@Slf4j
public class Listener {

    // Must be a compile-time constant (static final) to be legal inside the
    // @TopicPartition annotation below; a plain instance field would not compile.
    private static final String topic = "byteArray_topic1";

    /**
     * Handles one polled batch of records.
     *
     * @param recordList the batch delivered by the batch-enabled container factory
     */
    @KafkaListener(id = "myListener",
            topicPartitions = {@TopicPartition(topic = topic, partitions = {"0", "1", "2"})})
    public void listen(List<ConsumerRecord<String, byte[]>> recordList) {
        for (ConsumerRecord<String, byte[]> record : recordList) {
            log.info("kafka的key: {}", record.key());
            // Decode explicitly as UTF-8 so output matches the producer's encoding
            // regardless of the platform default charset.
            log.info("kafka的value: {}", new String(record.value(), java.nio.charset.StandardCharsets.UTF_8));
        }
    }
}
kafka:
consumer:
enable-auto-commit: true
group-id: gridMonitorGroup
auto-commit-interval: 1000
auto-offset-reset: latest
bootstrap-servers: "172.169.0.109:9092"
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.apache.kafka.common.serialization.ByteArrayDeserializer
concurrency: 3