kafka的生产者和消费者api代码开发

1、生产者代码开发
创建maven工程引入依赖

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>1.0.1</version>
</dependency>

代码开发
package com.kaikeba.producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;
//todu:需求:开发kafka生产者代码
public class KafkaProducerStudy {
public static void main(String[] args){
//准备配置属性
Properties props = new Properties();
props.put("bootstrap.servers","node01:9092,node02:9092,node02:9092");
//acks它代表消息确认机制
props.put("acks","all");
//重试的次数
props.put("retrie",0);
//批处理数据的大小,每次写入多少数据到topic
props.put('batch.size",16384);
//可以延长多久发送数据
props.put("linger.ms",1);
//缓冲区的大小
props.put("buffer.memory",33554432);
props.put("key.serializer","org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer","org.apache.kafka.common.serialization.StringSerializer");
Producer producer=new KafkaProducer(props);
for(int i=0;i<100;i++){
//这里需要三个参数,第一个:topic的名称,第二个参数:表示消息的key,第三个参数:消息具体内容
producer.send(new ProducerRecord("test",Integer.toString(i),"hello-kafka-"+i));
}

}

}

2、消费者代码开发
自动提交偏移量代码开发
package com.kaikeba.consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.Properties;

// TODO: requirement: develop Kafka consumer code (auto-commit offsets)
public class KafkaConsumerStudy {
    public static void main(String[] args) {
        // Prepare configuration properties.
        Properties props = new Properties();
        // Kafka cluster addresses.
        props.put("bootstrap.servers", "node01:9092,node02:9092,node03:9092");
        // Consumer group id.
        props.put("group.id", "test");
        // Commit offsets automatically.
        props.put("enable.auto.commit", "true");
        // Interval (ms) between automatic offset commits.
        props.put("auto.commit.interval.ms", "1000");
        // Default is "latest".
        // earliest: if a committed offset exists for a partition, consume from it;
        //           otherwise start from the beginning of the partition.
        // latest:   if a committed offset exists for a partition, consume from it;
        //           otherwise consume only newly produced records.
        // none:     if every partition has a committed offset, consume after it;
        //           if any partition lacks one, throw an exception.
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // Use the String/String type parameters instead of raw types so the
        // compiler checks key/value usage (matches the configured deserializers).
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        // Specify which topics to consume.
        consumer.subscribe(Arrays.asList("test"));
        while (true) {
            // Poll with a 100 ms timeout for the next batch of records.
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
            }
        }
    }
}
手动提交偏移量代码开发
package com.kaikeba.consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

//todo:需求:开发kafka消费者代码(手动提交偏移量)
public class KafkaConsumerControllerOffset {
public static void main(String[] args) {
Properties props = new Properties();
props.put("bootstrap.servers", "node01:9092,node02:9092,node03:9092");
props.put("group.id", "controllerOffset");
//关闭自动提交,改为手动提交偏移量
props.put("enable.auto.commit", "false");
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
KafkaConsumer consumer = new KafkaConsumer(props);
//指定消费者要消费的topic
consumer.subscribe(Arrays.asList("test"));

    //定义一个数字,表示消息达到多少后手动提交偏移量
    final int minBatchSize = 20;

    //定义一个数组,缓冲一批数据
    List> buffer = new ArrayList>();
    while (true) {
        ConsumerRecords records = consumer.poll(100);
        for (ConsumerRecord record : records) {
            buffer.add(record);
        }
        if (buffer.size() >= minBatchSize) {
            //insertIntoDb(buffer);  拿到数据之后,进行消费
            System.out.println("缓冲区的数据条数:"+buffer.size());
            System.out.println("我已经处理完这一批数据了...");
            consumer.commitSync();
            buffer.clear();
        }
    }
}

}

你可能感兴趣的:(kafka的生产者和消费者api代码开发)