springboot + apache kafka
[一] 建立springboot 整合
1 建立springboot启动项目
[1]在maven中修改pom.xml
[2] KafkaProducerConfig.java 生产者配置
package com.yangf.springboot002.controller;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka
public class KafkaProducerConfig {
@Value("${kafka.producer.servers}")
private String servers;
@Value("${kafka.producer.retries}")
private int retries;
@Value("${kafka.producer.batch.size}")
private int batchSize;
@Value("${kafka.producer.linger}")
private int linger;
@Value("${kafka.producer.buffer.memory}")
private int bufferMemory;
public Map
Map
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
props.put(ProducerConfig.RETRIES_CONFIG, retries);
props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
return props;
}
public ProducerFactory
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
@Bean
public KafkaTemplate
return new KafkaTemplate
}
}
[3] KafkaConsumerConfig.java 消费者配置
package com.yangf.springboot002.controller;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import java.util.HashMap;
import java.util.Map;
import com.yangf.springboot002.use.RawDataListener;
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
@Value("${kafka.consumer.servers}")
private String servers;
@Value("${kafka.consumer.enable.auto.commit}")
private boolean enableAutoCommit;
@Value("${kafka.consumer.session.timeout}")
private String sessionTimeout;
@Value("${kafka.consumer.auto.commit.interval}")
private String autoCommitInterval;
@Value("${kafka.consumer.group.id}")
private String groupId;
@Value("${kafka.consumer.auto.offset.reset}")
private String autoOffsetReset;
@Value("${kafka.consumer.concurrency}")
private int concurrency;
@Bean
public KafkaListenerContainerFactory
ConcurrentKafkaListenerContainerFactory
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(concurrency);
factory.getContainerProperties().setPollTimeout(1500);
return factory;
}
public ConsumerFactory
return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}
public Map
Map
propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
return propsMap;
}
@Bean
public RawDataListener listener() {
return new RawDataListener();
}
}
[4] ProducerController.java 生产者
package com.yangf.springboot002.use;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@RequestMapping(value = "/kafka")
@Controller
public class ProducerController {

    // Typed String/String to match the serializers configured in
    // KafkaProducerConfig (raw KafkaTemplate loses compile-time type safety).
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * GET /kafka/producer — sends 500 copies of a fixed test payload to the
     * "result" topic (same topic named by kafka.producer.topic / consumed by
     * RawDataListener). Writes nothing to the HTTP response.
     *
     * @param request  incoming request (unused)
     * @param response outgoing response (left empty)
     * @throws IOException declared for servlet compatibility
     */
    @RequestMapping(value = "/producer", method = RequestMethod.GET)
    public void consume(HttpServletRequest request, HttpServletResponse response) throws IOException {
        String value = "{\"message From Producer con...\"}";
        for (int i = 1; i <= 500; i++) {
            kafkaTemplate.send("result", value);
        }
    }
}
[5] RawDataListener.java 消费者
package com.yangf.springboot002.use;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.messaging.MessageHeaders;
import org.springframework.stereotype.Component;
import java.io.IOException;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.messaging.handler.annotation.Headers;
@Component
public class RawDataListener {

    // SLF4J logger for tracing consumed records.
    Logger LOG = LoggerFactory.getLogger(RawDataListener.class);

    /**
     * Consumes records from the topic named by ${kafka.consumer.topic} and
     * logs the payload followed by every message header.
     *
     * @param message the record value, deserialized as a String
     * @param headers all Kafka/Spring message headers for the record
     */
    @KafkaListener(topics = {"${kafka.consumer.topic}"})
    public void receive(@Payload String message,
                        @Headers MessageHeaders headers) {
        LOG.info("received message='{}'", message);
        for (String key : headers.keySet()) {
            LOG.info("{}: {}", key, headers.get(key));
        }
    }
}
[6] Springboot002ApplicationTests.java 测试启动类(JUnit 测试类,运行它即可启动 Spring 上下文进行端到端测试)
package com.yangf.springboot002;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class Springboot002ApplicationTests {
// Smoke test: booting the full Spring context here also starts the Kafka
// listener containers, which is how this tutorial runs the demo end-to-end.
@Test
public void contextLoads() {
// Intentionally empty — passing means the application context started cleanly.
}
}
[7] application.properties
#保持与kafka中的zookeeper 的ip 与clientPort 一致
kafka.consumer.zookeeper.connect=localhost:2181
#原始数据kafka读取
kafka.consumer.servers=localhost:9093
kafka.consumer.enable.auto.commit=true
kafka.consumer.session.timeout=20000
kafka.consumer.auto.commit.interval=100
kafka.consumer.auto.offset.reset=latest
kafka.consumer.topic=result
kafka.consumer.group.id=test
kafka.consumer.concurrency=10
#协议转换后存储kafka
kafka.producer.servers=localhost:9093
kafka.producer.topic=result
kafka.producer.retries=0
kafka.producer.batch.size=4096
kafka.producer.linger=1
kafka.producer.buffer.memory=40960
[二] 结合 已经安装好的kafka 进行测试
[1] 在\kafka_2.12-2.2.0\config\server.properties中添加一行,
指定kafka server 的监听地址和端口
listeners=PLAINTEXT://:9093
这里的端口(9093)需要与 application.properties 中的 kafka.consumer.servers 和 kafka.producer.servers
保持一致。
[2] 首先
启动 内建的 zookeeper
.\bin\windows\zookeeper-server-start.bat .\config\zookeeper.properties
启动apache kafka
.\bin\windows\kafka-server-start.bat .\config\server.properties
[3] 启动springboot的启动类
Springboot002ApplicationTests.java
[4] 在浏览器中 输入 http://localhost:8080/kafka/producer
[5] 查看运行结果:
可以看到 kafka server 控制台会有3 个线程,
在springboot 控制台可以看到消费者RawDataListener 自动接收了 生产者发送的数据。