Java + Kafka Message Queue

This article focuses on producing and consuming Kafka messages from the Java side. For installing and deploying Kafka itself, please see the related articles.


My recent project uses Spring MVC: it watches a file path, reads the file contents, and sends them to the message queue, where another system consumes the messages (a minimal sketch of this file-watching side is shown below).

Of course, since the message queue acts as a message exchange, this system both produces and consumes messages; that part is not covered in detail here.
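
Independent of Spring MVC, the file-watching idea can be shown with a plain Java NIO WatchService. The sketch below is illustrative only: the directory /tmp/inbox and the topic name "test" are assumptions, and it calls the ProducerHandler defined further down.

package com.dhc.test.kafka;

import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;

public class FileWatchProducer {
    public static void main(String[] args) throws Exception {
        Path dir = Paths.get("/tmp/inbox"); // hypothetical directory to watch
        WatchService watcher = FileSystems.getDefault().newWatchService();
        dir.register(watcher, StandardWatchEventKinds.ENTRY_CREATE);
        while (true) {
            WatchKey key = watcher.take(); // block until a new file appears
            for (WatchEvent<?> event : key.pollEvents()) {
                Path file = dir.resolve((Path) event.context());
                String content = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
                // Push the file content to the "test" topic via the producer shown below
                new ProducerHandler("test", content);
            }
            key.reset();
        }
    }
}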


The producer code is relatively simple.

package com.dhc.test.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.log4j.Logger;

import java.util.Properties;

public class ProducerHandler {
    private final KafkaProducer<String, String> producer;
    private static final Logger logger = Logger.getLogger(ProducerHandler.class);

    public ProducerHandler(String topic, String message) {

        Properties props = new Properties();
        props.put("bootstrap.servers”,"127.0.0.1:9092");
        props.put("acks", "all");
        props.put("retries", "0");
        props.put("batch.size", "16384");
        props.put("linger.ms", "1");
        props.put("buffer.memory", "33554432");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producer = new KafkaProducer<String, String>(props);
        // Build the message record
        ProducerRecord<String, String> record = new ProducerRecord<String, String>(topic, message);
        // Send the message (asynchronous)
        producer.send(record);
        logger.info("【kafka】向Kafka的TOPIC【" + topic + "】中发送消息");
        logger.info("【kafka】消息内容:" + message);
        logger.info("【kafka】推送成功");
    }
}
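
Note that the constructor both creates the producer and pushes the message, then closes the producer, so each call builds a short-lived connection. In a higher-throughput setup you would normally keep a single long-lived KafkaProducer and reuse it across sends.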
Consumer code

package com.dhc.test.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.log4j.Logger;

import java.util.List;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ConsumerHandler {

    private static final Logger logger = Logger.getLogger(ConsumerHandler.class);
    private final KafkaConsumer<String, String> consumer;
    private ExecutorService executors;

    public ConsumerHandler(List<String> topics) {

        Properties props = new Properties();
        props.put("bootstrap.servers", "127.0.0.1:9092");
        props.put("group.id", "test");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(topics);
        execute(1);
    }

    public void execute(int workerNum) {
        executors = new ThreadPoolExecutor(workerNum, workerNum, 0L, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<Runnable>(1000), new ThreadPoolExecutor.CallerRunsPolicy());
        Thread t = new Thread(new Runnable() { // start a worker thread to poll Kafka for messages
            public void run() {
                while (true) {
                    ConsumerRecords<String, String> records = consumer.poll(200);
                    for (final ConsumerRecord<String, String> record : records) {
                        logger.info("[Kafka] Received a message from Kafka topic [" + record.topic() + "]");
                        logger.info("[Kafka] Message content: " + record.value());
                        executors.submit(new ConsumerWorker(record));
                    }
                }
            }});
        t.start();
    }

    public void shutdown() {
        if (consumer != null) {
            consumer.close();
        }
        if (executors != null) {
            executors.shutdown();
            try {
                if (!executors.awaitTermination(10, TimeUnit.SECONDS)) {
                    logger.info("[Kafka] Timeout waiting for workers to finish; ignore for this case");
                }
            } catch (InterruptedException ignored) {
                logger.info("[Kafka] Another thread interrupted this shutdown; ignore for this case");
                Thread.currentThread().interrupt();
            }
        }
    }
}

package com.dhc.test.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.log4j.Logger;

public class ConsumerWorker implements Runnable {

    private final ConsumerRecord<String, String> consumerRecord;

    public ConsumerWorker(ConsumerRecord<String, String> record) {
        this.consumerRecord = record;
    }

    private static final Logger logger = Logger.getLogger(ConsumerWorker.class);

    public void run() {
        // Business logic for the received message goes here; this example just prints the value
        System.out.println(consumerRecord.value());
    }
}
Starting from a main method

package com.dhc.test;

import com.dhc.test.kafka.ConsumerHandler;

import java.util.ArrayList;
import java.util.List;

public class Start {
    public static void main(String[] args) throws Exception {
        // Start the Kafka consumer listener
        List<String> topics = new ArrayList<String>();
        // Topic to subscribe to
        topics.add("test");
        new ConsumerHandler(topics);
    }
}
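
With the consumer running, you can verify the round trip by pushing a message to the same "test" topic from another process. A minimal sketch (the SendTest class name and the message text are illustrative) using the ProducerHandler above:

package com.dhc.test;

import com.dhc.test.kafka.ProducerHandler;

public class SendTest {
    public static void main(String[] args) {
        // Send one test message to the "test" topic that Start is consuming
        new ProducerHandler("test", "hello from SendTest");
    }
}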

Thanks for reading!

