Batch consumption with spring-kafka

When using Kafka, consuming and committing records one at a time can hurt throughput. spring-kafka provides a strategy for pulling records in batches and committing offsets manually.

The code is as follows.

Create a producer:

package test.spring.kafka.producer;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import test.spring.kafka.service.KafkaMessage;

import javax.annotation.Resource;
import java.util.UUID;


@Component
public class KafkaProducer {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    public void sendProducerRecord(){
        for(int i=0; i<10; i++) {
            KafkaMessage kafkaMessage = new KafkaMessage();
            kafkaMessage.setIndex(i);
            kafkaMessage.setId(UUID.randomUUID().toString());
            kafkaMessage.setValue("producerRecord " + i);
            ProducerRecord<String, String> producerRecord =
                    new ProducerRecord<>("topic2", "key1", JSONObject.toJSONString(kafkaMessage));
            kafkaTemplate.send(producerRecord);
        }
    }

}
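
The KafkaMessage payload class is not shown in this post; below is a minimal sketch with the fields inferred from the setters used above (the real class may look different):

package test.spring.kafka.service;

public class KafkaMessage {

    // Fields inferred from setIndex/setId/setValue calls in KafkaProducer
    private int index;
    private String id;
    private String value;

    public int getIndex() { return index; }
    public void setIndex(int index) { this.index = index; }

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }

    public String getValue() { return value; }
    public void setValue(String value) { this.value = value; }
}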

 

Create a consumer:

package test.spring.kafka.consumer;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;


@Component
public class KafkaConsumer {


    @KafkaListener(topics = "topic2")
    public void receiverProducerRecord(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment acknowledgment) {
        System.out.println(consumerRecords.size());
        for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
            System.out.println("receiverProducerRecord key is " + JSONObject.toJSONString(consumerRecord.key()));
            System.out.println("receiverProducerRecord value is " + JSONObject.toJSONString(consumerRecord.value()));
        }
        // Manually commit the offsets once the whole batch has been processed
        acknowledgment.acknowledge();
    }
}
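
To run the example end to end, something has to invoke sendProducerRecord(). Here is a minimal sketch of a Spring Boot entry point that sends the test messages on startup (the class name and wiring are illustrative, not from the original project):

package test.spring.kafka;

import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import test.spring.kafka.producer.KafkaProducer;

@SpringBootApplication
public class TestSpringKafkaApplication {

    public static void main(String[] args) {
        SpringApplication.run(TestSpringKafkaApplication.class, args);
    }

    // Send the ten test messages once the application context is ready
    @Bean
    public CommandLineRunner sendOnStartup(KafkaProducer kafkaProducer) {
        return args -> kafkaProducer.sendProducerRecord();
    }
}

With max-poll-records set to 20 (see the configuration below), all ten messages can arrive in a single poll, so the listener typically prints a batch size greater than 1.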

 

Configuration file:

server:
  port: 8083

spring:
  application:
    name: test-spring-kafka
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      auto-commit-interval: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: test-group-id
      max-poll-records: 20 # maximum number of records returned in a single poll (batch size)
      # disable auto-commit; offsets are committed manually
      enable-auto-commit: false
    listener:
      ack-mode: manual_immediate # manual acknowledgment, committed as soon as acknowledge() is called
      type: batch  # batch consumption: the listener receives a List of records



With auto-commit disabled and the batch listener type plus manual acknowledgment enabled in the configuration, the listener receives records in batches; just remember to call acknowledge() after the batch has been processed so the offsets are actually committed.
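
The same setup can also be expressed in Java configuration instead of application.yml. A rough sketch, assuming a spring-kafka version where AckMode lives in ContainerProperties (the bean below simply mirrors the listener settings above; names are illustrative):

package test.spring.kafka.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;

@Configuration
public class KafkaBatchConfig {

    // Equivalent to listener.type=batch and listener.ack-mode=manual_immediate in application.yml
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            ConsumerFactory<String, String> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        // Hand the @KafkaListener method a List of records per poll
        factory.setBatchListener(true);
        // Commit the offset immediately when Acknowledgment.acknowledge() is called
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }
}

Note that enable-auto-commit: false and max-poll-records: 20 are consumer properties, so they still come from the spring.kafka.consumer section of the configuration above.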
