1、properties配置
#============== kafka ===================
# 指定kafka 代理地址,可以多个
spring.kafka.bootstrap-servers=192.168.101.10:9092
#=============== producer =======================
spring.kafka.producer.retries=0
# 每次批量发送消息的数量
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
# 指定消息key和消息体的编解码方式
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#=============== consumer =======================
# 指定默认消费者group id
spring.kafka.consumer.group-id=user-consumer-group
#auto-offset-reset
#earliest
#当各分区下有已提交的offset时,从提交的offset开始消费;无提交的offset时,从头开始消费
#latest
#当各分区下有已提交的offset时,从提交的offset开始消费;无提交的offset时,消费新产生的该分区下的数据
#none
#topic各分区都存在已提交的offset时,从offset后开始消费;只要有一个分区不存在已提交的offset,则抛出异常
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=false
# 设置手动提交offset
spring.kafka.listener.ack-mode=manual
2、定义kafka生产类
/**
* @ClassName MsgSender
* @Description
* @Auther: Mao
* @Date: 2019/2/18 15:11
* @Version 1.0
*/
@Component
@Slf4j
public class MsgSender{
Logger log= LoggerFactory.getLogger(MsgSender.class);
@Autowired
private KafkaTemplate
private Gson gson = new GsonBuilder().create();
@Value("${spring.kafka.topics}")
private String topic;
//发送消息方法
public void send() {
try {
String msg="{msg:测试信息}";
kafkaTemplate.send(topic, gson.toJson(msg));
log.info("+++++++++++++++++++++ message = {}", gson.toJson(msg));
}catch (Exception e){
e.printStackTrace();
}finally {
}
}
}
3、定义kafka消费监听类
/**
* @ClassName MsgReceiver
* @Description
* @Auther:Mao
* @Date: 2019/2/18 15:13
* @Version 1.0
*/
@Component
@Slf4j
public class MsgReceiver{
Logger log= LoggerFactory.getLogger(MsgReceiver.class);
@Autowired
SyncUserService syncUserService;
/**
* 监听消费
* @param record
* @param acknowledgment
* @throws Exception
*/
@KafkaListener(topics = {"${spring.kafka.topics}"})
@Async
public void listen(ConsumerRecord, ?> record, Acknowledgment acknowledgment){
String type="";
try {
Optional> kafkaMessage = Optional.ofNullable(record.value());
if (kafkaMessage.isPresent()) {
Object object=kafkaMessage.get();
if(object ==null){
log.info("------------------ 取到内容为空!");
}
log.info("------------------ json:"+object);
syncUserService.userInfoManager(object);
//手动提交offset,很重要,否则默认是自动提交,一旦报错被消费的消息就无法再次进行消费
acknowledgment.acknowledge();
log.info("------------------ 处理完成!");
}
}catch (Exception e){
e.printStackTrace();
}
}
}
4、发送kafka消息
/**
 * Application entry point: boots the Spring context and publishes three
 * test messages via {@link MsgSender}, pausing 3 seconds between sends.
 *
 * @author Mao
 * @since 2019/2/18
 * @version 1.0
 */
@SpringBootApplication
@EnableTransactionManagement
public class KafkaApplication {

    static Logger log = LoggerFactory.getLogger(KafkaApplication.class);

    public static void main(String[] args) {
        log.info("|*****************程序启动********************|");
        ConfigurableApplicationContext context = SpringApplication.run(KafkaApplication.class, args);
        MsgSender sender = context.getBean(MsgSender.class);
        for (int i = 0; i < 3; i++) {
            // Publish one test message per iteration.
            sender.send();
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop sending instead of
                // swallowing the interruption with printStackTrace().
                Thread.currentThread().interrupt();
                break;
            }
        }
    }
}