1. Create a Kafka producer utility class

This utility caches Kafka producers so they are not created and destroyed over and over again.

The code is as follows:

package com.qf.ca.cadp.base.Utils;


import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;


import java.io.Serializable;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;


public class KafkaUtils implements Serializable {
    private static final Map<String, KafkaProducer<String, String>> producerCache = new ConcurrentHashMap<>();


    /**
     * Create a producer for the given broker list.
     *
     * @param brokers comma-separated bootstrap servers, e.g. "host1:9092,host2:9092"
     * @return a new KafkaProducer that serializes keys and values as Strings
     */
    private static KafkaProducer<String, String> createProducer(String brokers) {
        Properties prop = new Properties();
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        // acks=-1 is equivalent to acks=all: wait for all in-sync replicas to acknowledge
        prop.put(ProducerConfig.ACKS_CONFIG, "-1");
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(prop);
    }


    /**
     * Register a shutdown hook that closes all cached producers when the JVM exits.
     */
    static {
        Runtime.getRuntime().addShutdownHook(
                new Thread(() -> producerCache.forEach(KafkaUtils::closeProducer)));
    }


    private static KafkaProducer<String, String> getProducer(String brokers) {
        // computeIfAbsent creates the producer only on the first request for this broker list
        return producerCache.computeIfAbsent(brokers, KafkaUtils::createProducer);
    }


    /**
     * Send a message with a key.
     *
     * @param brokers comma-separated bootstrap servers
     * @param topic   target topic
     * @param key     record key, used by the partitioner
     * @param message record value
     * @return a Future that completes with the record metadata once the send is acknowledged
     */
    public static Future<RecordMetadata> send(String brokers, String topic, String key, String message) {
        KafkaProducer<String, String> producer = getProducer(brokers);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, key, message);
        return producer.send(producerRecord);
    }


    /**
     * Send a message without a key; the partition is chosen by the producer's partitioner.
     *
     * @param brokers comma-separated bootstrap servers
     * @param topic   target topic
     * @param message record value
     * @return a Future that completes with the record metadata once the send is acknowledged
     */
    public static Future<RecordMetadata> send(String brokers, String topic, String message) {
        KafkaProducer<String, String> producer = getProducer(brokers);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, message);
        return producer.send(producerRecord);
    }


    /**
     * Close a single cached producer, swallowing exceptions so shutdown can continue.
     *
     * @param brokers  the cache key (broker list)
     * @param producer the producer to close
     */
    private static void closeProducer(String brokers, KafkaProducer<String, String> producer) {
        try {
            producer.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }


    public static void main(String[] args) {
        //KafkaUtils.send("node3:9092,node4:9092,node5:9092,node6:9092,node7:9092,node8:9092","xty_ht","0","888");
    }
}
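
Below is a minimal usage sketch. The demo class name, broker address, topic, and payloads are placeholders for illustration and are not part of the original utility; repeated calls to send reuse the one cached producer for the given broker list, and the shutdown hook closes it on JVM exit.

import com.qf.ca.cadp.base.Utils.KafkaUtils;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.concurrent.Future;

public class KafkaUtilsDemo {

    public static void main(String[] args) throws Exception {
        String brokers = "localhost:9092"; // placeholder broker list
        String topic = "demo_topic";       // placeholder topic

        // Every call goes through the same cached producer for this broker list.
        for (int i = 0; i < 10; i++) {
            Future<RecordMetadata> future = KafkaUtils.send(brokers, topic, "key-" + i, "message-" + i);
            RecordMetadata metadata = future.get(); // block only to print the acknowledged position
            System.out.println("partition=" + metadata.partition() + ", offset=" + metadata.offset());
        }
        // No explicit close here: the shutdown hook in KafkaUtils closes cached producers on exit.
    }
}

Blocking on future.get() is only done here to print the result; in normal use the returned Future can be ignored or collected and checked later.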
