使用Java代码实现实时消费kafka的消息

首先maven构建开发项目,配置pom.xml文件（注意：以下 pom.xml 内容在转载时丢失了 XML 标签，使用前需补全 <project>、<dependency>、<groupId>、<artifactId>、<version> 等标签）

  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  4.0.0

  om.cctsoft
  kafkaTest
  0.0.1-SNAPSHOT
  jar

 
   
      java.net
      http://repo.springsource.org/libs-milestone
   

   
      repo2
      http://repo2.maven.org/maven2/
   

   
        clojars.org
        http://clojars.org/repo
    

 

 

 
    UTF-8
 



       
          junit
          junit
          3.8.1
          test
       


       
            org.apache.hadoop
            hadoop-common
            2.2.0
           
               
                  org.slf4j
                  slf4j-log4j12
               
        
               
                  log4j
                  log4j
               

           

       

       
            org.apache.hadoop
            hadoop-hdfs
            2.2.0
       

       
            org.apache.hadoop
            hadoop-client
            2.2.0
           
               
                  org.slf4j
                  slf4j-log4j12
               
        
               
                  log4j
                  log4j
               

           

       

       
            org.apache.hbase
            hbase-client
            1.0.3
       

       
            org.apache.hbase
            hbase-server
            1.0.3
       

       
            org.apache.hadoop
            hadoop-hdfs
            2.2.0
       

       
            jdk.tools
            jdk.tools
            1.7
            system
            ${JAVA_HOME}/lib/tools.jar
       

       
            org.apache.httpcomponents
            httpclient
            4.3.6
       

       
            org.slf4j
            slf4j-log4j12
            1.7.21
       

        
            net.java.dev.jets3t
            jets3t
            0.9.4
        

        
       
            org.apache.storm
            storm-core
            0.9.6
            provided
           
               
                    org.slf4j
                    log4j-over-slf4j
               

           

       

       
            org.apache.storm
            storm-kafka
            0.9.6  
            provided
       

               
            org.apache.kafka
            kafka_2.11
            0.10.1.1
           
               
                    org.apache.zookeeper
                    zookeeper
               

               
                  org.slf4j
                  slf4j-log4j12
               

               
                    log4j
                    log4j
               

           

       

        
   





   
       
        maven-assembly-plugin  
         
            false  
             
                jar-with-dependencies  
           
 
             
                 
                    om.cctsoft.kafka.customer.storm.MyKafkaBolt  
               
 
           
 
       
 
         
             
                make-assembly  
                package  
                 
                    assembly  
               
 
           
 
       
 
     

   

 


kafka服务信息配置kafka.properties

zookeeper.connect=vm12:2181,vm13:2181,vm14:2181
group.id=liuygTest  
zookeeper.session.timeout.ms=4000  
zookeeper.sync.time.ms=200  
auto.commit.interval.ms=1000  
auto.offset.reset=smallest  
serializer.class=kafka.serializer.StringEncoder

java代码实现

package om.cctsoft.kafka.customer.java;
import java.io.File;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
public class KafkaConsume {
    private final static String TOPIC = "cctsoft1102";

    private static Properties properties;

    static {
        properties = new Properties();
        String path = "D:\\src\\eclipse-workspace20171025\\kafkaTest\\src\\main\\java\\kafka.properties";
        System.out.println(path);
        try {
            FileInputStream fis = new FileInputStream(new File(path));
            properties.load(fis);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * 获取消息
     *
     * @throws Exception
     */
    public void getMsg() throws Exception {
        ConsumerConfig config = new ConsumerConfig(properties);

        ConsumerConnector consumer = kafka.consumer.Consumer
                .createJavaConsumerConnector(config);

        Map topicCountMap = new HashMap();

        topicCountMap.put(TOPIC, new Integer(1));

        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());

        StringDecoder valueDecoder = new StringDecoder(
                new VerifiableProperties());

        Map>> consumerMap = consumer
                .createMessageStreams(topicCountMap, keyDecoder, valueDecoder);

        KafkaStream stream = consumerMap.get(TOPIC).get(0);

        ConsumerIterator it = stream.iterator();

        while (it.hasNext()) {
            String json = it.next().message();
            
            System.out.println(json);
        }
    }

    public static void main(String[] args) {
        // TODO Auto-generated method stub
        try {
            new KafkaConsume().getMsg();
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

    }

}


你可能感兴趣的:(大数据,kafka资料)