Using the Kafka shell, plus a simple Java implementation of a Kafka producer and consumer

Create a topic

sh /usr/hdp/3.1.0.0-78/kafka/bin/kafka-topics.sh --create --zookeeper cluster1.hadoop:2181 --replication-factor 1 --partitions 1 --topic KafkaTest
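
The same topic can also be created from Java through the AdminClient that ships with kafka-clients 2.0.0. The snippet below is only a sketch, not part of the original post: the class name KafkaTopicAdmin and the broker address cluster1.hadoop:6667 are assumptions for illustration, while the partition count and replication factor mirror the shell command above.

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class KafkaTopicAdmin {
   public static void main(String[] args) throws Exception {
      Properties props = new Properties();
      // Assumed broker address; adjust to your cluster
      props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "cluster1.hadoop:6667");
      AdminClient adminClient = AdminClient.create(props);
      // 1 partition, replication factor 1, matching the shell command above
      NewTopic newTopic = new NewTopic("KafkaTest", 1, (short) 1);
      // Block until the broker confirms the topic was created
      adminClient.createTopics(Collections.singleton(newTopic)).all().get();
      adminClient.close();
   }
}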

Delete a topic by name (by default this only marks the topic for deletion and the data is kept; set the following broker property for the delete to actually take effect)

delete.topic.enable=true

sh /usr/hdp/3.1.0.0-78/kafka/bin/kafka-topics.sh --delete --zookeeper sc-slave1:2181 --topic TestKafka
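
Deletion works the same way through the AdminClient, without touching ZooKeeper directly. Again a sketch rather than part of the original post (the class name and broker address are assumptions); the broker still needs delete.topic.enable=true for the data to actually be removed.

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class KafkaTopicDelete {
   public static void main(String[] args) throws Exception {
      Properties props = new Properties();
      // Assumed broker address; adjust to your cluster
      props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "cluster1.hadoop:6667");
      AdminClient adminClient = AdminClient.create(props);
      // Request deletion of the topic and block until the broker responds
      adminClient.deleteTopics(Collections.singleton("TestKafka")).all().get();
      adminClient.close();
   }
}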

Consume messages from a topic with the console consumer

sh /usr/hdp/3.1.0.0-78/kafka/bin/kafka-console-consumer.sh --bootstrap-server cluster1.hadoop:6667 --topic KafkaTest --from-beginning

Produce messages to a topic with the console producer

sh /usr/hdp/3.1.0.0-78/kafka/bin/kafka-console-producer.sh --broker-list cluster0.hadoop:6667 --topic KafkaTest

Java implementation

Add the Maven dependencies


<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.0.0</version>
</dependency>

<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-simple</artifactId>
    <version>1.7.25</version>
</dependency>

<build>
    <plugins>
        <plugin>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>2.3.2</version>
            <configuration>
                <source>1.8</source>
                <target>1.8</target>
                <encoding>UTF-8</encoding>
            </configuration>
        </plugin>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>assembly</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>

Java producer implementation

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class KafkaProducerUtil {
   // Producer instance
   public KafkaProducer<String, String> producer;

   // Pass the broker list as hostname:port entries, separated by commas
   public KafkaProducerUtil(String brokerList) {
      Properties props = new Properties();
      // Server ip:port; separate cluster nodes with commas
      props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
      // Serializer class for keys
      props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
           StringSerializer.class.getName());
      // Serializer class for values
      props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
           StringSerializer.class.getName());
      // Create the producer
      producer = new KafkaProducer<>(props);
   }

   public void close() {
      this.producer.close();
   }

   public static void main(String[] args) {
      // Initialize the broker list
      String brokerList = "cluster1.hadoop:6667,cluster0.hadoop:6667";
      String topic = "TestKafka";
      // Initialize the producer utility class
      KafkaProducerUtil kafkaProducerUtil = new KafkaProducerUtil(brokerList);
      // Send a message to the topic
      kafkaProducerUtil.producer.send(new ProducerRecord<>(
           topic, "hello,天亮教育!"));
      kafkaProducerUtil.close();

      System.out.println("done!");
   }
}
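
send() is asynchronous and only hands the record to an internal buffer, so the producer above never learns whether the broker accepted it. The variant below is a sketch rather than part of the original code: it reuses the KafkaProducerUtil class defined above and passes a Callback so success or failure is printed.

import org.apache.kafka.clients.producer.ProducerRecord;

public class KafkaProducerCallbackDemo {
   public static void main(String[] args) {
      // Reuses the utility class defined above
      KafkaProducerUtil kafkaProducerUtil =
           new KafkaProducerUtil("cluster1.hadoop:6667,cluster0.hadoop:6667");
      kafkaProducerUtil.producer.send(
           new ProducerRecord<>("TestKafka", "hello,天亮教育!"),
           (metadata, exception) -> {
              if (exception != null) {
                 // The send failed; print the cause
                 exception.printStackTrace();
              } else {
                 // Acknowledged by the broker: print where the record landed
                 System.out.printf("sent to partition %d at offset %d%n",
                      metadata.partition(), metadata.offset());
              }
           });
      // close() flushes buffered records and waits for outstanding callbacks
      kafkaProducerUtil.close();
   }
}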

Java consumer implementation

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class KafkaConsumerUtil {
   // Consumer instance
   public KafkaConsumer<String, String> kafkaConsumer;

   public KafkaConsumerUtil(String brokerList, String topic) {
      Properties props = new Properties();
      // Server ip:port; separate cluster nodes with commas
      props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
      // Consumer group id; the name is arbitrary, but within one group each
      // partition is consumed by only one consumer
      props.put(ConsumerConfig.GROUP_ID_CONFIG, "TestTL");
      // Whether to auto-commit offsets; defaults to true
      props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
      // Auto-commit interval of 1s
      props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
      // Deserializer class for keys
      props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
           StringDeserializer.class.getName());
      // Deserializer class for values; must match the producer's serializer,
      // otherwise records cannot be parsed
      props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
           StringDeserializer.class.getName());
      // Create the consumer
      kafkaConsumer = new KafkaConsumer<>(props);
      // Subscribe to the topic
      kafkaConsumer.subscribe(Arrays.asList(topic));
   }

   public void close() {
      kafkaConsumer.close();
   }

   public static void main(String[] args) {
      // Initialize the broker list
      String brokerList = "cluster0.hadoop:6667,cluster1.hadoop:6667";
      String topic = "TestKafka";
      // Initialize the consumer utility class
      KafkaConsumerUtil kafkaConsumerUtil = new KafkaConsumerUtil(brokerList, topic);

      boolean runnable = true;
      while (runnable) {
         ConsumerRecords<String, String> records =
              kafkaConsumerUtil.kafkaConsumer.poll(100);
         for (ConsumerRecord<String, String> record : records) {
            System.out.printf("key = %s, offset = %d, value = %s",
                 record.key(), record.offset(), record.value());
            System.out.println();
         }
      }

      kafkaConsumerUtil.close();
      System.out.println("done!");
   }
}
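
The while (runnable) loop above never actually exits, so close() and the final print are only reached if the process is killed. A common pattern, shown below as a sketch rather than part of the original code, is to call wakeup() from a JVM shutdown hook and catch the resulting WakeupException so the consumer can close cleanly on Ctrl+C; it reuses the KafkaConsumerUtil class defined above.

import java.util.concurrent.CountDownLatch;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.errors.WakeupException;

public class KafkaConsumerShutdownDemo {
   public static void main(String[] args) {
      KafkaConsumerUtil util =
           new KafkaConsumerUtil("cluster0.hadoop:6667,cluster1.hadoop:6667", "TestKafka");
      CountDownLatch shutdownLatch = new CountDownLatch(1);

      // On shutdown, interrupt the blocking poll() and wait for the loop to finish
      Runtime.getRuntime().addShutdownHook(new Thread(() -> {
         util.kafkaConsumer.wakeup();
         try {
            shutdownLatch.await();
         } catch (InterruptedException ignored) {
         }
      }));

      try {
         while (true) {
            ConsumerRecords<String, String> records = util.kafkaConsumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
               System.out.printf("key = %s, offset = %d, value = %s%n",
                    record.key(), record.offset(), record.value());
            }
         }
      } catch (WakeupException e) {
         // Expected on shutdown: wakeup() makes poll() throw so we can leave the loop
      } finally {
         util.close();
         shutdownLatch.countDown();
         System.out.println("done!");
      }
   }
}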
