Working with Kafka in Java - Getting Started

Steps

  1. Add the Maven dependency
  2. Write the code

Add the Maven dependency

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.3.1</version>
        </dependency>
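
The tests assume the topic already exists. If the broker is not configured to auto-create topics, it can be created programmatically; below is a minimal sketch using the AdminClient from the same kafka-clients artifact (the CreateTopic class name and the single-partition settings are illustrative assumptions, not part of the original post):

package kafka;

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class CreateTopic {

    public static void main(String[] args) throws Exception {
        Properties properties = new Properties();
        properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");

        // try-with-resources closes the client and its network threads
        try (AdminClient adminClient = AdminClient.create(properties)) {
            // 1 partition, replication factor 1 -- enough for a single local broker
            NewTopic topic = new NewTopic("ahutcloud-kafka", 1, (short) 1);
            adminClient.createTopics(Collections.singleton(topic)).all().get();
        }
    }
}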

Write the code

The test class below wires up a producer that sends a batch of messages and prints the offset assigned to each record, and a consumer that polls the topic in an endless loop and prints every value.

package kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.Test;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

/**
 * desc : Kafka producer/consumer smoke test
 * create_user : cheng
 * create_date : 2019/10/29 15:49
 */
public class KafkaTest {

    /**
     * Broker address
     */
    private static final String SERVERS = "127.0.0.1:9092";
    /**
     * Topic name
     */
    private static final String TOPIC = "ahutcloud-kafka";
    /**
     * Consumer group id
     */
    private static final String CONSUMER_GROUP = "ahutcloud-consumer";

    /**
     * desc : Producer: sends a batch of messages and prints each record's offset.
     * create_user : cheng
     * create_date : 2019/10/29 18:13
     */
    @Test
    public void testProducer() throws Exception {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);
        for (int i = 0; i < 100; i++) {
            String msg = "hello kafka " + i;
            ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, msg);

            // send() is asynchronous; block on the returned Future to obtain the record metadata
            Future<RecordMetadata> future = kafkaProducer.send(record);
            RecordMetadata recordMetadata = future.get(1, TimeUnit.SECONDS);
            System.out.println(recordMetadata.offset());
        }

        kafkaProducer.close();
    }
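
    /**
     * Sketch (not in the original post): blocking on the Future after every
     * send() makes the sends sequential. This variant passes a Callback to
     * send() instead; Kafka invokes it from the producer's I/O thread once
     * the broker acknowledges (or rejects) the record.
     */
    @Test
    public void testProducerWithCallback() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // try-with-resources: close() flushes buffered records and waits for outstanding sends
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 100; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, "hello kafka " + i);
                kafkaProducer.send(record, (metadata, exception) -> {
                    if (exception != null) {
                        exception.printStackTrace();
                    } else {
                        System.out.println(metadata.offset());
                    }
                });
            }
        }
    }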

    /**
     * desc : Consumer; start this test first, then run the producer.
     * create_user : cheng
     * create_date : 2019/10/29 18:13
     */
    @Test
    public void testConsumer() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, CONSUMER_GROUP);

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Collections.singletonList(TOPIC));
        while (true) {
            // poll(long) is deprecated in kafka-clients 2.x; use the Duration overload
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
        }
    }
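
    /**
     * Sketch (not in the original post): testConsumer relies on the default
     * auto-commit. With ENABLE_AUTO_COMMIT_CONFIG set to false, offsets are
     * committed explicitly after each batch is processed, so a record is only
     * marked consumed once the loop has actually handled it.
     */
    @Test
    public void testConsumerManualCommit() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, CONSUMER_GROUP);
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Collections.singletonList(TOPIC));
        while (true) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
            }
            // synchronously commit the offsets returned by the last poll()
            kafkaConsumer.commitSync();
        }
    }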

}
