kafka英文网站:http://kafka.apache.org/documentation/
kafka中文教程:http://orchome.com/kafka/index (翻译官网)
视频学习:http://edu.csdn.net/course/detail/5823
wget -q http://apache.fayea.com/apache-mirror/kafka/0.8.1/kafka_2.8.0-0.8.1.tgz
或者:
curl -L -O http://mirrors.cnnic.cn/apache/kafka/0.9.0.0/kafka_2.10-0.9.0.0.tgz
下载解压之后
bin/zookeeper-server-start.sh config/zookeeper.properties &
&是为了让进程在后台运行,这样终端可以继续输入其他命令
bin/kafka-server-start.sh config/server.properties &
bin/kafka-server-stop.sh
bin/zookeeper-server-stop.sh
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
bin/kafka-topics.sh --delete --zookeeper localhost:2181 --topic test
zookeeper.connect=localhost:2181
bootstrap.servers=localhost:9092
zookeeper.connect=127.0.0.1:2181
clientPort=2181
netstat -tunlp|egrep "(2181|9092)"
org.apache.kafka
kafka_2.11
0.11.0.1
import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import com.alibaba.fastjson.JSON;
public class KafkaProduce {

    private static Properties properties;

    static {
        properties = new Properties();
        // Load kafka.properties from the classpath root (the resources directory).
        String path = KafkaProduce.class.getResource("/").getFile().toString() + "kafka.properties";
        // try-with-resources closes the stream even when load() throws
        // (the original leaked the FileInputStream).
        try (FileInputStream fis = new FileInputStream(new File(path))) {
            properties.load(fis);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * 发送消息 — sends a single record to the given topic and blocks until the
     * producer has flushed it (close() flushes pending sends).
     *
     * @param topic topic to send to
     * @param key   record key (used for partition assignment when non-null)
     * @param value record payload
     */
    public void sendMsg(String topic, String key, String value) {
        System.err.println("properties:" + JSON.toJSONString(properties));
        // Parameterized types instead of raw KafkaProducer/ProducerRecord;
        // try-with-resources guarantees close() even if send() throws.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            ProducerRecord<String, String> record = new ProducerRecord<>(topic, key, value);
            producer.send(record, new Callback() {
                // 回调函数 — invoked once the broker acks (or the send fails).
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    // BUG FIX: the original read metadata.offset() inside the
                    // exception branch, where metadata is null per the Kafka
                    // contract — guaranteed NPE on failure, and successful
                    // offsets were never reported.
                    if (exception != null) {
                        System.out.println("发送失败:" + exception.getMessage());
                    } else {
                        System.out.println("记录的offset在:" + metadata.offset());
                    }
                }
            });
        }
    }
}
kafka.properties 文件放在 resources 目录下:
##produce
bootstrap.servers=10.20.135.32:9092
producer.type=sync
request.required.acks=1
serializer.class=kafka.serializer.DefaultEncoder
key.serializer=org.apache.kafka.common.serialization.StringSerializer
value.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.partitioner.class=kafka.producer.DefaultPartitioner
bak.key.serializer=org.apache.kafka.common.serialization.StringSerializer
bak.value.serializer=org.apache.kafka.common.serialization.StringSerializer
##consume
zookeeper.connect=10.20.135.32:2181
group.id=test-consumer-group
zookeeper.session.timeout.ms=4000
zookeeper.sync.time.ms=200
auto.commit.interval.ms=1000
auto.offset.reset=smallest
serializer.class=kafka.serializer.StringEncoder
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.hundsun.cloudtrade.match.dto.req.ClearDataReq;
import com.hundsun.cloudtrade.match.kafka.KafkaProduce;
import com.hundsun.cloudtrade.match.service.InitSystemService;
import com.hundsun.cloudtrade.test.BaseTest;
/**
 * Kafka 发送消息的单元测试:向 test 主题发送一条固定消息.
 */
public class KafkaTest extends BaseTest {

    @Autowired
    private InitSystemService aInitSystemService;

    @Test
    public void kafkaTest() throws Exception {
        // Build the request object (its result is not otherwise used here).
        ClearDataReq domain = new ClearDataReq();
        domain.setFirm_account(0);

        // Fire one record at the "test" topic.
        KafkaProduce produce = new KafkaProduce();
        produce.sendMsg("test", "key", "lsn-20171024");
    }
}
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.alibaba.fastjson.JSON;
/**
 * 单元测试基类:加载 Spring 上下文,并提供以 JSON 形式输出对象的日志辅助方法.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath:conf/spring/*-beans.xml" })
public class BaseTest {

    // Kept public static (non-final) to preserve the original field contract.
    public static Logger LOG = LoggerFactory.getLogger(BaseTest.class);

    /**
     * Logs obj as JSON under the given class's logger.
     *
     * BUG FIX: the original reassigned the shared static LOG field here, so
     * every later log(obj) call (from any test) silently kept using the last
     * clazz's logger. Use a local logger and leave LOG untouched.
     *
     * @param obj   object serialized with fastjson
     * @param clazz logger category; falls back to the base logger when null
     */
    public void log(Object obj, @SuppressWarnings("rawtypes") Class clazz) {
        Logger logger = (clazz != null) ? LoggerFactory.getLogger(clazz) : LOG;
        logger.info(JSON.toJSONString(obj));
    }

    /**
     * Logs obj as JSON under the base logger.
     *
     * @param obj object serialized with fastjson
     */
    public void log(Object obj) {
        LOG.info(JSON.toJSONString(obj));
    }
}
运行测试;如果测试报错,可参考下面的消费者示例进行排查:
import java.io.File;
import java.io.FileInputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
public class KafkaConsume {

    private final static String TOPIC = "test";

    private static Properties properties;

    static {
        properties = new Properties();
        // Load kafka.properties from the classpath root (the resources directory).
        String path = KafkaConsume.class.getResource("/").getFile().toString() + "kafka.properties";
        System.err.println("path:" + path);
        // try-with-resources closes the stream even when load() throws
        // (the original leaked the FileInputStream).
        try (FileInputStream fis = new FileInputStream(new File(path))) {
            properties.load(fis);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * 获取消息 — consumes records from TOPIC with the old high-level consumer
     * and prints each value. NOTE: it.hasNext() blocks until a message
     * arrives, so this method never returns under normal operation.
     *
     * @throws Exception if the consumer connector cannot be created
     */
    public static void getMsg() throws Exception {
        ConsumerConfig config = new ConsumerConfig(properties);
        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
        // One stream (consumer thread) for the topic. Parameterized types
        // restored — the original's generics were lost/raw.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(TOPIC, Integer.valueOf(1));
        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(TOPIC).get(0);
        ConsumerIterator<String, String> it = stream.iterator();
        while (it.hasNext()) {
            String json = it.next().message();
            System.err.println(json);
        }
    }
}
然后单元测试调用,跑起来,在 linux 端,启动 producer 输入一个 value 值,便可以看到在程序打印出来的 value 信息。