Notes on a KafkaClient poll() hang

Problem description:

While consuming data with the new kafka-clients package, the program threw no exception at all and simply blocked inside poll(). Online posts say poll blocks when there is no data, but the topic clearly had data. After a long investigation it turned out to be a problem with the broker address configuration.

The configuration that caused the problem:

kafka.point.host=10.2.1.55:6667
kafka.point.topic=galileo.metrics

The configuration after the fix:

kafka.point.host=v30:6667,v31:6667
kafka.point.topic=galileo.metrics
Issue 1: the new-API consumer no longer connects to ZooKeeper; it connects to the Kafka brokers directly.
Issue 2: configuring a raw IP is not the same as accessing by hostname. After mapping the hostnames (so they resolve on the consumer machine) and using them in the config, everything worked; see the hosts-file sketch below. My team lead said nginx handles IP and hostname access differently, which may be why the brokers were unreachable via the IP.
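For reference, a minimal sketch of the hosts entries that make the hostname-based bootstrap.servers resolvable on the consumer machine. The IP-to-hostname mapping shown here is an assumption (only 10.2.1.55 appears in the original config; the second IP is made up), so match it to your actual brokers:

# /etc/hosts (example mapping only)
10.2.1.55   v30
10.2.1.56   v31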

Last open question: why does an unreachable broker not raise an error, but just block? I'll fill this in once I figure it out.
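Until the root cause is clear, one way to turn a silent hang into a visible error is a watchdog thread that calls wakeup() on the consumer if poll() has not returned within a deadline (wakeup() is the one KafkaConsumer method that is safe to call from another thread). This is only a diagnostic sketch; the class name and the 30-second deadline are my own choices, not part of the project above:

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

public class PollWatchdogDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "v30:6667,v31:6667");
        props.put("group.id", "alert");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("galileo.metrics"));

        // If poll() is still stuck after 30s (e.g. brokers unreachable or unresolvable),
        // wakeup() forces it to abort with a WakeupException instead of hanging forever.
        ScheduledExecutorService watchdog = Executors.newSingleThreadScheduledExecutor();
        watchdog.schedule(consumer::wakeup, 30, TimeUnit.SECONDS);

        try {
            consumer.poll(100);
            System.out.println("poll returned normally");
        } catch (WakeupException e) {
            System.err.println("poll was stuck for 30s - check bootstrap.servers / hostname resolution");
        } finally {
            watchdog.shutdownNow();
            consumer.close();
        }
    }
}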

Also note: the new producer API likewise connects to the brokers directly, not to ZooKeeper; otherwise it fails with an error that it cannot update metadata.
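For completeness, a minimal sketch of a new-API producer wired the same way, with bootstrap.servers pointing at the brokers themselves. Broker addresses and the topic are taken from the fixed configuration above; the class name and the record key/value are just examples:

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class ProducerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // broker addresses, NOT the ZooKeeper address
        props.put("bootstrap.servers", "v30:6667,v31:6667");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        KafkaProducer<String, String> producer = new KafkaProducer<>(props);
        // pointing bootstrap.servers at ZooKeeper instead fails with a metadata update error
        producer.send(new ProducerRecord<>("galileo.metrics", "some-key", "some-value"));
        producer.flush();
        producer.close();
    }
}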


Code:

1. KafkaSource.java (the consumer)

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;


/**
 * Kafka data source; multiple sources can be configured.
 * @author Mingchenchen
 * @Date   2017-04-07
 *
 */
public class KafkaSource implements PointSource {
    /**
     * poll timeout in milliseconds
     */
    private final int pollMillis = 100;

    /**
     * Adaptor that converts raw records from this source
     */
    private final SourceAdaptor sourceAdaptor;

    /**
     * Kafka consumer client
     */
    private final KafkaConsumer<String, String> consumer;

    /**
     * Name of this data source
     */
    private final String name;

    /**
     *
     * @param commonProperties consumer settings shared by all sources
     * @param properties       settings specific to this source (override the common ones)
     * @param topics           topics to subscribe to
     * @param adaptor          adaptor that converts raw records
     * @param name             name of this data source
     */
    public KafkaSource(Properties commonProperties,
                       Properties properties,
                       List<String> topics,
                       SourceAdaptor adaptor,
                       String name){
        // copy instead of mutating the shared common properties
        Properties allProperties = new Properties();
        allProperties.putAll(commonProperties);
        allProperties.putAll(properties);
        consumer = new KafkaConsumer<>(allProperties);
        consumer.subscribe(topics);
        this.sourceAdaptor = adaptor;
        this.name          = name;
    }

    public List<String> poll(){
        try {
            ConsumerRecords<String, String> consumerRecords = consumer.poll(pollMillis);

            List<String> records = new ArrayList<>(consumerRecords.count());

            // key is ignored here; only the value is kept
            for (ConsumerRecord<String, String> record : consumerRecords) {
                records.add(record.value());
            }

            return records;
        } catch (Exception e) {
            throw new KafkaPollException(e);
        }
    }

    public List<Map<String, String>> pollWithKey(){
        try {
            ConsumerRecords<String, String> consumerRecords = consumer.poll(pollMillis);

            List<Map<String, String>> records = new ArrayList<>(consumerRecords.count());

            for (ConsumerRecord<String, String> record : consumerRecords) {
                Map<String, String> map = new HashMap<>();
                map.put("key", record.key());
                map.put("val", record.value());
                records.add(map);
            }

            return records;
        } catch (Exception e) {
            throw new KafkaPollException(e);
        }
    }

    public SourceAdaptor getAdaptor() {
        return sourceAdaptor;
    }

    public String getName() {
        return name;
    }
}
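A rough usage sketch of the class above. The property values are taken from the fixed configuration earlier in this post, but the demo class name, the null adaptor, and the group id are assumptions; in the real project everything is wired through the Spring config below:

import java.util.Collections;
import java.util.List;
import java.util.Properties;

public class KafkaSourceDemo {
    public static void main(String[] args) {
        Properties common = new Properties();
        common.put("enable.auto.commit", "true");
        common.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        common.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        Properties specific = new Properties();
        specific.put("bootstrap.servers", "v30:6667,v31:6667"); // hostnames, not raw IPs
        specific.put("group.id", "alert");

        // adaptor is passed as null just to exercise poll(); the real project wires a SourceAdaptor bean
        KafkaSource source = new KafkaSource(common, specific,
                Collections.singletonList("galileo.metrics"), null, "demoKafkaConsumer");

        while (true) {
            List<String> values = source.poll(); // returns within ~100ms once brokers are reachable
            for (String value : values) {
                System.out.println(value);
            }
        }
    }
}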

2.spring-kafka.xml


<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xmlns:mvc="http://www.springframework.org/schema/mvc"
       xmlns:util="http://www.springframework.org/schema/util"
       xsi:schemaLocation="
                http://www.springframework.org/schema/mvc http://www.springframework.org/schema/mvc/spring-mvc.xsd
                http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
                http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
                http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd">

  
  <bean id="alertKafkaProducer" class="com.core.source.kafka.KafkaSender">
    <constructor-arg index="0" ref="alertKafkaProducerConfig" />
    <constructor-arg index="1" value="${galileo.kafka.notify.topic}" />
  bean>

  <util:properties id="alertKafkaProducerConfig">
    <prop key="bootstrap.servers">${galileo.kafka.notify.host}prop>
    <prop key="retries">${metric.report.kafka.retries:3}prop>
    <prop key="batch.size">${metric.report.kafka.batch.size:1024}prop>
    <prop key="linger.ms">${metric.report.kafka.linger.ms:100}prop>
    <prop key="buffer.memory">${metric.report.kafka.buffer.memory:4096}prop>
    <prop key="key.serializer">org.apache.kafka.common.serialization.StringSerializerprop>
    <prop key="value.serializer">org.apache.kafka.common.serialization.StringSerializerprop>
  util:properties>

  
  <bean id="eventKafkaProducer" class="com.core.source.kafka.KafkaSender">
    <constructor-arg index="0" ref="eventKafkaProducerConfig" />
    <constructor-arg index="1" value="${galileo.kafka.event.topic}" />
  bean>
  <util:properties id="eventKafkaProducerConfig">
    <prop key="bootstrap.servers">${galileo.kafka.event.host}prop>
    <prop key="retries">${metric.report.kafka.retries:3}prop>
    <prop key="batch.size">${metric.report.kafka.batch.size:1024}prop>
    <prop key="linger.ms">${metric.report.kafka.linger.ms:100}prop>
    <prop key="buffer.memory">${metric.report.kafka.buffer.memory:4096}prop>
    <prop key="key.serializer">org.apache.kafka.common.serialization.StringSerializerprop>
    <prop key="value.serializer">org.apache.kafka.common.serialization.StringSerializerprop>
  util:properties>

  
  <util:properties id="commonConsumerConfig">
    <prop key="enable.auto.commit">${kafka.consumer.auto.commit.enable:true}prop>
    <prop key="request.timeout.ms">${kafka.consumer.request.timeout.ms:50000}prop>
    <prop key="auto.commit.interval.ms">${kafka.consumer.auto.commit.interval.ms:20000}prop>
    
    <prop key="auto.offset.reset">${kafka.consumer.auto.offset.reset:latest}prop>
    <prop key="heartbeat.interval.ms">${kafka.consumer.heartbeat.interval.ms:25000}prop>
    <prop key="session.timeout.ms">${kafka.consumer.session.timeout.ms:30000}prop>
    <prop key="key.deserializer">org.apache.kafka.common.serialization.StringDeserializerprop>
    <prop key="value.deserializer">org.apache.kafka.common.serialization.StringDeserializerprop>
  util:properties>

  
  <bean id="ulogKafkaConsumer" class="com.core.source.kafka.KafkaSource">
    <constructor-arg index="0" ref="commonConsumerConfig" />
    <constructor-arg index="1" ref="ulogKafkaConsumerConfig" />
    <constructor-arg index="2">
    <list>
        <value>${ulog.kafka.metrics.topic}value>
    list>
    constructor-arg>
    <constructor-arg index="3" ref="ulogSourceAdaptor" />
    <constructor-arg index="4" value="ulogKafkaConsumer" />
  bean>

  <util:properties id="ulogKafkaConsumerConfig">
    <prop key="bootstrap.servers">${ulog.kafka.metrics.host}prop>
    <prop key="group.id">${ulog.kafka.metrics.group.id:alert}prop>
  util:properties>

  
  <bean id="galileoKafkaConsumer" class="com.core.source.kafka.KafkaSource">
    <constructor-arg index="0" ref="commonConsumerConfig" />
    <constructor-arg index="1" ref="galileoKafkaConsumerConfig" />
    <constructor-arg index="2">
    <list>
        <value>${galileo.kafka.point.topic}value>
    list>
    constructor-arg>
    <constructor-arg index="3" ref="galileoSourceAdaptor" />
    <constructor-arg index="4" value="galileoKafkaConsumer" />
  bean>

  <util:properties id="galileoKafkaConsumerConfig">
    <prop key="bootstrap.servers">${galileo.kafka.point.host}prop>
    <prop key="group.id">${galileo.kafka.point.group.id:alert}prop>
  util:properties>

  
  <bean id="galileoEventKafkaConsumer" class="com.core.source.kafka.KafkaSource">
    <constructor-arg index="0" ref="commonConsumerConfig" />
    <constructor-arg index="1" ref="galileoEventKafkaConsumerConfig" />
    <constructor-arg index="2">
    <list>
        <value>${galileo.kafka.event.topic}value>
    list>
    constructor-arg>
    <constructor-arg index="3"><null/>constructor-arg>
    <constructor-arg index="4" value="galileoEventKafkaConsumer" />
  bean>

  <util:properties id="galileoEventKafkaConsumerConfig">
    <prop key="bootstrap.servers">${galileo.kafka.event.host}prop>
    <prop key="group.id">${galileo.kafka.event.group.id:alert}prop>
  util:properties>

</beans>
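The ${...} placeholders above are resolved from a properties file. A hedged sketch of what such a file might contain; apart from the point host/topic shown earlier in this post, every value below is an example, not the real environment:

# kafka.properties (example values only)
galileo.kafka.point.host=v30:6667,v31:6667
galileo.kafka.point.topic=galileo.metrics
galileo.kafka.notify.host=v30:6667,v31:6667
galileo.kafka.notify.topic=galileo.notify
galileo.kafka.event.host=v30:6667,v31:6667
galileo.kafka.event.topic=galileo.events
ulog.kafka.metrics.host=v30:6667,v31:6667
ulog.kafka.metrics.topic=ulog.metrics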
