kafka指定分区消费

关键代码

TopicPartition p = new TopicPartition("test6", 2);//只消费分区号为2的分区
consumer.assign(Arrays.asList(p));//只消费分区号为2的分区
consumer.subscribe(Arrays.asList("test6"));//消费topic 消费全部分区
consumer.seekToBeginning(Arrays.asList(p));//从头开始消费
consumer.seek(p,5);//指定从topic的分区的某个offset开始消费
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.Random;

/**
 * Demo of Kafka partition-specific consumption: a producer that writes
 * 10000 records to topic "test6", and a consumer that reads ONLY
 * partition 2 of that topic via manual assignment.
 */
public class Server {
    public static void main(String[] args) throws InterruptedException {
//        send(); // producer
        pull(); // consumer
    }

    /**
     * Producer: sends 10000 records to topic "test6".
     * Keys are the loop index, values are random doubles; partition is
     * chosen by the custom partitioner configured below.
     */
    static void send() {
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "mini1:9092,mini2:9092,mini3:9092"); // broker addresses
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("partitioner.class", "com.kafka.server.KafkaCustomPartitioner");
        prop.put("acks", "all"); // "all" is equivalent to the original "-1": wait for the full ISR to acknowledge
        Producer<String, String> producer = new KafkaProducer<>(prop); // typed instead of raw Producer
        try {
            for (int i = 0; i < 10000; i++) {
                producer.send(new ProducerRecord<>("test6", Integer.toString(i), Double.toString(Math.random())));
            }
            // Flush ONCE after the loop — the original flushed per record,
            // which defeats batching and makes every send synchronous.
            producer.flush();
        } finally {
            producer.close(); // release network resources even if a send throws
        }
    }

    /**
     * Consumer: reads records from partition 2 of topic "test6" only,
     * printing key/value/offset for each record, forever.
     *
     * @throws InterruptedException kept for interface compatibility with callers
     */
    static void pull() throws InterruptedException {
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "mini1:9092,mini2:9092,mini3:9092"); // broker addresses
        prop.put("group.id", "001");
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("client.id", "test"); // FIX: the original key "consumer-id" is not a valid Kafka config and was silently ignored
        Consumer<String, String> consumer = new KafkaConsumer<>(prop); // typed instead of raw Consumer
        TopicPartition p = new TopicPartition("test6", 2); // consume only partition number 2
        consumer.assign(Arrays.asList(p)); // manual assignment — bypasses consumer-group rebalancing
//        consumer.subscribe(Arrays.asList("test6")); // alternative: subscribe to the topic, consuming ALL partitions
        while (true) {
            // poll blocks for up to 10 seconds waiting for records.
            // The original also slept 3s per iteration, which only delayed
            // consumption; poll's timeout already paces the loop.
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(10));
            System.out.println("循环");
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(String.format("key:%s , value:%s , offset:%s", record.key(), record.value(), record.offset()));
            }
        }
    }
}

你可能感兴趣的:(kafka,java后端,大数据工具)