java使用kafka入门

  1. 创建一个maven项目, 名称: kafkaDemo
  2. 添加依赖
    在pom.xml文件中:
    
  	
  	<dependencies>
  		<dependency>
  			<groupId>org.apache.kafka</groupId>
  			<artifactId>kafka_2.11</artifactId>
  			<version>2.2.0</version>
  		</dependency>
  		<dependency>
  			<groupId>org.apache.kafka</groupId>
  			<artifactId>kafka-clients</artifactId>
  			<version>2.2.0</version>
  		</dependency>
  		<dependency>
  			<groupId>org.slf4j</groupId>
  			<artifactId>slf4j-nop</artifactId>
  			<version>1.7.2</version>
  		</dependency>
  	</dependencies>
  
  

在src/main/java下定义包kafkaDemoMvn
定义文件后目录结构,如示意图:
java使用kafka入门_第1张图片

添加生产者Producer

package kafkaDemoMvn;
  
import java.util.Properties;
import java.util.Random;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

/**
 * Minimal Kafka producer demo: publishes a random "Hello,N" message to the
 * {@code mtest} topic every 500 ms until the process is killed.
 */
public class Producer {
    /** Topic all records are sent to (shared with the Consumer demo). */
    public static String topic = "mtest";

    public static void main(String[] args) throws InterruptedException {
        Properties p = new Properties();
        // Kafka broker address; separate multiple brokers with commas.
        p.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.137.51:9092");
        p.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        p.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // Parameterize the generics so send() is type-checked instead of raw.
        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(p);
        // One RNG for the whole run; the original created a new Random per message.
        Random random = new Random();

        try {
            while (true) {
                String msg = "Hello," + random.nextInt(100);
                ProducerRecord<String, String> record = new ProducerRecord<>(topic, msg);
                // Fire-and-forget send; delivery result is not checked in this demo.
                kafkaProducer.send(record);
                System.out.println("消息发送成功:" + msg);
                Thread.sleep(500);
            }
        } finally {
            // Flushes buffered records and releases network resources.
            kafkaProducer.close();
        }

    }
}
 

消费者Consumer

package kafkaDemoMvn;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

/**
 * Minimal Kafka consumer demo: subscribes to the {@code mtest} topic as group
 * {@code group01}, prints every record's offset and value, and manually
 * commits offsets after each poll (auto-commit is disabled).
 */
public class Consumer {
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Bare host:port only — Kafka speaks its own binary protocol, so an
        // "http://" scheme (as in the original) is not a valid broker address.
        properties.put("bootstrap.servers", "192.168.137.51:9092");
        properties.put("group.id", "group01");

        properties.put("session.timeout.ms", "30000");

        // Auto-commit is off, so offsets MUST be committed manually below;
        // otherwise every restart re-reads from auto.offset.reset.
        properties.put("enable.auto.commit", "false");
        properties.put("auto.commit.interval.ms", "1000");

        // earliest: when no valid offset exists, start from the beginning of the partition.
        // latest: when no valid offset exists, start from the newest records.
//        properties.put("auto.offset.reset", "earliest");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // Parameterized generics instead of raw types.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);

        try {
            kafkaConsumer.subscribe(Arrays.asList("mtest"));
            while (true) {
                // poll(Duration) replaces the deprecated poll(long) overload.
                ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    // println has no format-string overload (the original did not
                    // compile); printf is required for "%d"/"%s" formatting.
                    System.out.printf("offset = %d, value = %s;%n", record.offset(), record.value());
                }
                // Persist the offsets of the records just processed.
                kafkaConsumer.commitSync();
            }
        } finally {
            // Leaves the group cleanly and releases sockets.
            kafkaConsumer.close();
        }
    }
}



输出:

...
offset = 390, value = 7;
offset = 391, value = 8;
...

你可能感兴趣的:(kafka)