下载安装zookeeper参考:https://blog.csdn.net/she_lock/article/details/80435176
下载安装Kafka参考:https://blog.csdn.net/qq_29116427/article/details/79949402
本文假设你已经有一个配置好logback的spring boot项目
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<appender name="kafkaAppender"
class="com.zoe.virtual.card.manage.infrastructure.kafkalog.KafkaAppender">
</appender>
<root level="debug">
<appender-ref ref="kafkaAppender" />
</root>
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.ConsoleAppender;
import com.zoe.virtual.card.infrastructure.context.bean.SpringContextHolder;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;

import java.util.HashMap;
import java.util.Map;
/**
 * Logback appender that forwards every formatted log message to a Kafka topic.
 * The producer is built eagerly in {@link #start()} with hard-coded settings.
 */
public class KafkaAppender extends ConsoleAppender<ILoggingEvent> {

    /** Producer template created in start(); used by append() for every event. */
    private KafkaTemplate<String, String> kafkaTemplate;

    @Override
    public void start() {
        super.start();
        Map<String, Object> props = new HashMap<>();
        props.put("bootstrap.servers", "ip:端口号");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", StringSerializer.class);
        props.put("value.serializer", StringSerializer.class);
        // Fix: the original line was missing `new` before DefaultKafkaProducerFactory,
        // which does not compile (there is no static factory method of that name).
        kafkaTemplate = new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(props));
        // Connect once up front; without this warm-up send the first real log event
        // may fail with "Failed to update metadata after 60000 ms".
        kafkaTemplate.send("test", "连接到Kafka。。。。。。。");
    }

    /**
     * Publishes the formatted log message to the "test" topic.
     *
     * @param eventObject the logback event to forward
     */
    @Override
    protected void append(ILoggingEvent eventObject) {
        kafkaTemplate.send("test", eventObject.getFormattedMessage());
    }
}
Kafka整合Spring Boot错误:Failed to update metadata after 60000 ms
如果你用Spring Boot直接连接Kafka不报错,而加上logback后报错,可能是以下原因。
如果你要换成yml配置方式,那你需要注意以下几点
1、KafkaAppender 加载比spring容器启动还快,自己写个@Configuration的类去加载配置文件吧!在KafkaAppender中写是 获取不到值的。(除非你手动获取yml文件)
2、如果你换成了@Configuration文件配置,那你需要在一个@Controller注释的类中注入KafkaTemplate类。
3、在KafkaAppender类中的append()方法中不能直接去连接Kafka(kafkaTemplate.send()方法),必须要在@Configuration 中先连接一次(在执行append()方法前需要先连接一遍)
我也不知道为啥少一个就报错,连接不上,这里把问题记一下,坑!
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import java.util.HashMap;
import java.util.Map;
@Configuration
@EnableKafka // Used together with @Configuration; activates annotation-driven Kafka message consumption.
public class KafkaConfig {

    /** Kafka broker list, e.g. "host:9092", read from application.yml. */
    @Value("${spring.kafka.bootstrap-servers}")
    private String servers;

    /** Number of producer send retries. */
    @Value("${spring.kafka.producer.retries}")
    private int retries;

    /** Producer batch size in bytes. */
    @Value("${spring.kafka.producer.batch-size}")
    private int batchSize;

    /** Total producer buffer memory in bytes. */
    @Value("${spring.kafka.producer.buffer-memory}")
    private int bufferMemory;

    /**
     * Builds the raw producer property map from the injected yml values.
     *
     * @return producer configuration properties
     */
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put("bootstrap.servers", this.servers);
        props.put("retries", this.retries);
        props.put("batch.size", this.batchSize);
        props.put("buffer.memory", this.bufferMemory);
        props.put("key.serializer", StringSerializer.class);
        props.put("value.serializer", StringSerializer.class);
        return props;
    }

    /**
     * Creates the producer factory backing the template.
     *
     * @return a String/String producer factory
     */
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(this.producerConfigs());
    }

    /**
     * Exposes the KafkaTemplate bean used by the logback appender.
     * A warm-up message is sent here because the appender must not be the first
     * caller to touch the broker (see the "Failed to update metadata" note above).
     *
     * @return the shared KafkaTemplate bean
     */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(this.producerFactory());
        kafkaTemplate.send("test", "连接到Kafka。。。。。。。");
        return kafkaTemplate;
    }
}
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.ConsoleAppender;
import com.zoe.virtual.card.infrastructure.context.bean.SpringContextHolder;
import org.springframework.kafka.core.KafkaTemplate;
/**
 * Logback appender that lazily resolves the Spring-managed KafkaTemplate bean.
 * Events logged before the Spring context is ready are silently dropped,
 * matching the original best-effort behavior.
 */
public class KafkaAppender extends ConsoleAppender<ILoggingEvent> {

    // volatile: logback calls append() from many threads, and this field is
    // lazily assigned on first use once the Spring context becomes available.
    private volatile KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends the formatted message to the "test" topic once the template bean
     * can be resolved; drops the event while the Spring context is still starting.
     *
     * @param eventObject the logback event to forward
     */
    @Override
    @SuppressWarnings("unchecked")
    protected void append(ILoggingEvent eventObject) {
        KafkaTemplate<String, String> template = kafkaTemplate;
        if (template == null && SpringContextHolder.applicationContext != null) {
            // Spring context finished loading: look the bean up once and cache it.
            // (SpringContextHolder exposes the Spring context; its code is not shown here.)
            template = (KafkaTemplate<String, String>) SpringContextHolder.getBean("kafkaTemplate");
            kafkaTemplate = template;
        }
        if (template != null) {
            template.send("test", eventObject.getFormattedMessage());
        }
    }
}
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
// NOTE(review): the class name looks misspelled ("KafkaConreoller" -> "KafkaController");
// it is kept as-is here so existing references keep compiling.
@Controller
@RequestMapping(value = "kafka")
public class KafkaConreoller {
    // Intentionally unused: injecting the template here forces eager creation of the
    // kafkaTemplate bean so the logback appender can find it (see note 2 above).
    @Autowired
    private KafkaTemplate<String, String> template;
}
其实还有个思路,以后可能会完善。
使用@PostConstruct注解,启动前去连接Kafka,当这个配置加载完毕的时候,再去KafkaAppender的append()方法中发送消息到Kafka,就不要那么麻烦了