在 Spring Boot 项目中对接 Kafka,并将 Kafka 封装成一个独立的微服务。废话不多说,直接上代码。
1.配置application.yml如下
spring:
  application:
    name: @project.name@
  profiles:
    active: @package.environment@
  # Jackson date/time formatting for JSON (de)serialization
  jackson:
    time-zone: GMT+8
    date-format: yyyy-MM-dd HH:mm:ss
  kafka:
    bootstrap-servers: (你kafka服务器的外网ip):9092
    producer:
      linger: 1
      retries: 0
      batch-size: 4096
      buffer-memory: 40960
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      concurrency: 10
      session-timeout: 15000
      auto-offset-reset: latest
      enable-auto-commit: false
      auto-commit-interval: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: test-group
# Set the root log level to debug to verify whether the Kafka connection succeeds
logging:
  level:
    root: debug
2.配置consumer的配置文件
package com.lenovoedu.kafka.config;
import com.lenovoedu.kafka.Listener;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
@Value("${spring.kafka.bootstrap-servers}")
private String servers;
@Value("${spring.kafka.consumer.enable-auto-commit}")
private boolean enableAutoCommit;
@Value("${spring.kafka.consumer.session-timeout}")
private String sessionTimeout;
@Value("${spring.kafka.consumer.auto-commit-interval}")
private String autoCommitInterval;
@Value("${spring.kafka.consumer.auto-offset-reset}")
private String autoOffsetReset;
@Value("${spring.kafka.consumer.group-id}")
private String groupId;
@Value("${spring.kafka.consumer.concurrency}")
private int concurrency;
@Bean
public KafkaListenerContainerFactory> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory factory = new
ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(concurrency);
factory.getContainerProperties().setPollTimeout(3000);
return factory;
}
public ConsumerFactory consumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}
public Map consumerConfigs() {
Map propsMap = new HashMap<>();
propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
StringDeserializer.class);
propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
StringDeserializer.class);
propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
return propsMap;
}
@Bean
public Listener listener() {
return new Listener();
}
}
3.配置producer的配置文件
package com.lenovoedu.kafka.config;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
@Configuration
@EnableKafka
public class KafkaProducerConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String servers;
    @Value("${spring.kafka.producer.retries}")
    private int retries;
    @Value("${spring.kafka.producer.batch-size}")
    private int batchSize;
    @Value("${spring.kafka.producer.linger}")
    private int linger;
    @Value("${spring.kafka.producer.buffer-memory}")
    private int bufferMemory;

    /** Raw Kafka producer properties assembled from application.yml values. */
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.RETRIES_CONFIG, retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    /** Producer factory producing String-keyed/String-valued producers. */
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /** Shared, thread-safe template for sending String messages to Kafka. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
4.写一个往kafka写数据的类
package com.lenovoedu.kafka.controller;
import com.alibaba.fastjson.JSON;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.lenovoedu.form.business.PaperAnswerForm;
import com.lenovoedu.form.kafka.KafkaMessageForm;
import com.lenovoedu.model.business.model.ExamResult;
import com.lenovoedu.model.exam.model.PageAnswer;
import com.lenovoedu.utils.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.*;
;import java.util.ArrayList;
import java.util.List;
/**
 * @Author: YuanGaoLong
 * @Date: 2019/5/28 10:51
 * @Version: 2.0
 * @Description: REST endpoint that publishes incoming messages to Kafka (producer side).
 */
@RestController
@RequestMapping("/kafka")
public class ProducerController {

    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Publishes the posted message to the topic carried in the request body.
     * The form supplies topic, key and the already-serialized payload string.
     */
    @RequestMapping(value = "/send", method = RequestMethod.POST)
    public void sendKafka(@RequestBody KafkaMessageForm message) throws Exception {
        logger.info("kafka的消息={}", message);
        // Fire-and-forget: send() is asynchronous, so this log line only
        // confirms the record was queued, not that the broker acknowledged it.
        kafkaTemplate.send(message.getTopic(), message.getKey(), message.getObjStr());
        logger.info("发送kafka成功.");
    }
}
5.通过 Listener 监听 Kafka,并进行业务处理。其中需要用到注解 @KafkaListener 指定要监听的主题。只要该主题中有数据,就会运行注解指定的方法。
package com.lenovoedu.kafka;
import com.alibaba.fastjson.JSON;
import com.lenovoedu.business.exam.client.feign.client.BusinessClient;
import com.lenovoedu.exam.client.feign.client.ExamClient;
import com.lenovoedu.form.business.PaperAnswerForm;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.util.Optional;
/**
* @Auther: YuanGaoLong
* @Date: 2019/5/28 10:51
* @Version: 2.0
* @Description: kafka的监听器
*/
@Component
public class Listener {
protected final Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
private BusinessClient businessClient;
@Autowired
private ExamClient examClient;
@KafkaListener(topics = {"submitExam"})
public void submitExam(ConsumerRecord, ?> record) {
Optional> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
String message = (String)kafkaMessage.get();
PaperAnswerForm paperAnswerForm=JSON.parseObject(message,PaperAnswerForm.class);
paperAnswerForm=examClient.saveToDB(paperAnswerForm);
businessClient.save(paperAnswerForm.getExamResult());
}
}
}