@KafkaListener的配置使用

@KafkaListener注解来自spring-kafka包。使用@KafkaListener消费消息,需要按照spring-kafka指定的格式填写kafka配置信息,即可自动装配生成相关的KafkaConsumer实例,然后使用@KafkaListener消费消息。这里需要注意,使用自动装载方式生成KafkaConsumer实例时,spring-kafka的配置参数与原生kafka的配置参数在格式上略有不同,因此,本文主要介绍了spring-kafka自动装载方式下生产者、消费者常用的配置参数,供参考使用:

1、依赖项

 
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>2.6.0</version>
</dependency>
 
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.6.0</version>
</dependency>


<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-configuration-processor</artifactId>
    <optional>true</optional>
</dependency>

2、配置文件

spring:
  kafka:
    producer:
      bootstrap-servers: 172.*.*.1:8423,172.*.*.2:8423,172.*.*.3:8423,172.*.*.4:8423,172.*.*.5:8423
      # BUGFIX: the producer must use *Serializer* classes. The original listed
      # StringDeserializer here, which fails at startup/runtime because a
      # Deserializer does not implement the Serializer interface.
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      ### Has no effect here: auto.create.topics.enable is a BROKER-side setting
      # auto.create.topics.enable: false
      # Producer security settings (passed through verbatim to the kafka client)
      properties:
        sasl.mechanism: SCRAM-SHA-512
        security.protocol: SASL_PLAINTEXT
        # JAAS option values should be double-quoted per the JAAS config syntax
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="***" password="md5(***)";
    consumer:
      bootstrap-servers: 172.*.*.1:8423,172.*.*.2:8423,172.*.*.3:8423,172.*.*.4:8423,172.*.*.5:8423
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: ***
      # Max records per poll (poll returns earlier batches or waits up to poll-timeout ms)
      max-poll-records: 200
      # Minimum bytes per fetch (broker waits up to fetch-max-wait if not satisfied)
      fetch-min-size: 1
      # Max wait time (ms) when fetch-min-size is not yet satisfied
      fetch-max-wait: 5000
      # Commit offsets manually (see listener.ack-mode below)
      enable-auto-commit: false
      # Offset reset policy when no committed offset exists: earliest / latest / none
      auto-offset-reset: earliest
      # Consumer security settings
      properties:
        sasl.mechanism: SCRAM-SHA-512
        security.protocol: SASL_PLAINTEXT
        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="***" password="md5(***)";
    listener:
      # Deliver records to the listener in batches (ConsumerRecords)
      type: batch
      # poll() keeps fetching until max-poll-records or this timeout (ms) is reached
      poll-timeout: 500
      # Number of listener container threads for higher throughput
      # (can also be set in code via the @KafkaListener concurrency attribute)
      # concurrency: 6
      ack-mode: manual_immediate
    properties:
      # Max interval (ms) between polls — must exceed batch processing time,
      # otherwise the consumer is kicked out of the group and rebalanced
      max:
        poll:
          interval:
            ms: 600000
      # Time (ms) after which the group coordinator declares a consumer dead
      session:
        timeout:
          ms: 120000  # default 10000 — TODO confirm for your broker version

3、代码块

@Slf4j
@Component
public class XxxKafkaListener {

    @Autowired
    XxxKafkaConsumer xxxKafkaConsumer;

    // @KafkaListener(topics = "#{'${topics.xxx}'.split(',')}",concurrency = "#{'${topics}'.split(',').length}")
    @KafkaListener(topics = "#{'${topics.xxx}'.split(',')}",concurrency = "#{'${concur.xxx}'}" )
    public void listenXxx(ConsumerRecords<?, ?> records, Acknowledgment ack){

        try {
		    /// 消息处理
		    /// Iterator> iterator = (Iterator)records.iterator();
			/// while(iterator.hasNext()){
			/// 	JSONObject json = JSON.parseObject((String)iterator.next().value());
			/// 	......
			/// }
			
			/// 消息处理
            xxxKafkaConsumer.processRecords(records);
        }catch (Exception e) {
            /// 上述语句抛出异常后,直接运行至切面,不会执行下述语句
            log.error("处理xxx信息异常:{}", e);
        }
        ack.acknowledge();
    }
}

你可能感兴趣的:(Kafka,kafka,spring,java)