SpringBoot整合Kafka

SpringBoot整合Kafka

一、添加kafka依赖

 <dependency>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka-clients</artifactId>
             <version>2.4.0</version>
 </dependency>

二、获取topic列表

/**
 * Lists the names of all topics visible in the Kafka cluster.
 *
 * @param kafkaConnect Kafka bootstrap server address, e.g. {@code "host:9092"}
 * @return list of topic names (empty if the cluster has no topics)
 * @throws ExecutionException   if the broker request fails
 * @throws InterruptedException if the calling thread is interrupted while waiting
 * @Time 2022-11-09 15:37:10
 */
public static List<String> getKafkaTopicList(String kafkaConnect) throws ExecutionException, InterruptedException {
        Properties properties = new Properties();
        properties.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConnect);
        // try-with-resources: AdminClient is AutoCloseable; the original version
        // never closed it, leaking the client's network threads and connections.
        try (AdminClient adminClient = AdminClient.create(properties)) {
            // names().get() blocks until the broker responds with the topic set
            ListTopicsResult listTopicsResult = adminClient.listTopics();
            return new ArrayList<>(listTopicsResult.names().get());
        }
    }

三、通过每个topic名称获取每个topic下面的消息总量

/**
 * Computes the total number of messages in a topic by summing
 * (end offset - beginning offset) over all of its partitions.
 *
 * <p>Note: on a topic with retention/compaction this is the count of
 * currently retained messages, not of all messages ever produced.
 *
 * @param topicName    Kafka topic name (also used as the consumer group id)
 * @param kafkaConnect Kafka bootstrap server address
 * @return total message count across all partitions; 0 if the topic has no partitions
 * @Time 2022-11-09 15:45:32
 */
    public static long getKafkaTopicMessageTotal(String topicName, String kafkaConnect) {
        Properties props = new Properties();
        props.put("bootstrap.servers", kafkaConnect);
        props.put("group.id", topicName);
        // we only read offsets, never consume records, so never commit
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // The original code also created a second KafkaConsumer ("consumer1")
        // that was never used and never closed — a pure resource leak; removed.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // partitionsFor may return null for an unknown topic; treat as empty
            List<TopicPartition> tps = Optional.ofNullable(consumer.partitionsFor(topicName))
                    .orElse(Collections.<PartitionInfo>emptyList())
                    .stream()
                    .map(info -> new TopicPartition(info.topic(), info.partition()))
                    .collect(Collectors.toList());
            Map<TopicPartition, Long> beginOffsets = consumer.beginningOffsets(tps);
            Map<TopicPartition, Long> endOffsets = consumer.endOffsets(tps);
            // per-partition retained-message count, summed over the topic
            return tps.stream().mapToLong(tp -> endOffsets.get(tp) - beginOffsets.get(tp)).sum();
        }
    }

你可能感兴趣的:(java,spring,boot,kafka,kafka,spring,boot,java)