> bin/kafka-server-start.sh config/server.properties &
> bin/kafka-topics.sh --create --zookeeper localhost:2181
--replication-factor 1 --partitions 1 --topic test
> bin/kafka-topics.sh --describe --zookeeper localhost:2181 --topic test
修改maven项目中的核心配置文件pom.xml:
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.10.1.0</version>
</dependency>
kafka生产者客户端API:http://orchome.com/303
通过阅读生产者API,得知其核心内容是创建生产者,并使用生产者发送消息到主题中。
// 通过Properties类设置生产者属性,然后创建生产者
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
props.put("acks", "all");
props.put("retries", 0);
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
Producer<String, String> producer = new KafkaProducer<>(props);
//send()发送消息
ProducerRecord<K, V> record = new ProducerRecord<>(topic, key, value);
public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback)
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
/*
* write message into topic test
*/
/*
 * Demo producer: writes ten String messages into a Kafka topic.
 * Note: KafkaProducer is thread-safe; sharing one instance across threads
 * is usually faster than creating one per thread — a future refactor could
 * make createProducer() a singleton.
 */
public class kafkaProducer extends Thread {
    // Target topic for every record this producer sends.
    private String topic;

    // Remembers which topic to publish to.
    public kafkaProducer(String topic) {
        super();
        this.topic = topic;
    }

    /**
     * Builds a String/String producer pointed at the local broker.
     * acks=all waits for the full in-sync replica set; retries=0 means
     * a failed send is not retried.
     */
    private Producer<String, String> createProducer() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "localhost:9092");
        properties.put("acks", "all");
        properties.put("retries", 0);
        properties.put("batch.size", 16384);
        properties.put("linger.ms", 1);
        properties.put("buffer.memory", 33554432);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(properties);
    }

    @Override
    public void run() {
        Producer<String, String> producer = createProducer();
        try {
            // Send the loop counter as the value, under a fixed key.
            for (int i = 0; i < 10; i++) {
                producer.send(new ProducerRecord<>(this.topic, "times", Integer.toString(i)));
            }
            TimeUnit.SECONDS.sleep(1);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        } finally {
            // The original leaked the producer; close() flushes buffered
            // records and releases network resources.
            producer.close();
        }
    }

    public static void main(String[] args) {
        // run() (not start()) executes synchronously on the main thread,
        // matching the original behavior.
        new kafkaProducer("test").run();
    }
}
偏移量(offset)
偏移量是主题分区中一条消息的唯一标识符,对于消费者而言始终指向下一个待访问消息。偏移量可以自动提交也可以消费者手动控制。
消费者组
组中包含多个消费进程,通过进程池瓜分消费和处理消息的工作。
每个消费进程通过subscribe API订阅一个主题列表,并和组内进程平衡主题分区。
消费组中的成员动态维护,不论什么原因增加减少都会重新平衡分配。
简单示例
import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
public class kafkaConsumer extends Thread{
private String topic;
// Remembers which topic this consumer thread will subscribe to in run().
public kafkaConsumer(String topic) {
this.topic = topic;
}
/**
 * Builds a String/String consumer in group "group-test" with offset
 * auto-commit enabled. Parameterized return type (was raw) matches the
 * StringDeserializer configuration below.
 */
private KafkaConsumer<String, String> createConsumer() {
    Properties properties = new Properties();
    // One or more bootstrap brokers; the remaining brokers in the
    // cluster are discovered automatically from these.
    properties.put("bootstrap.servers", "localhost:9092");
    // Consumer group id — members of the same group split partitions.
    properties.put("group.id", "group-test");
    // Commit offsets automatically...
    properties.put("enable.auto.commit", "true");
    // ...once per second.
    properties.put("auto.commit.interval.ms", "1000");
    // Deserializers turn raw bytes back into Objects; StringDeserializer
    // parses both key and value as Strings.
    properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    return new KafkaConsumer<>(properties);
}
@Override
public void run() {
    // Raw types replaced with <String, String> to match the
    // StringDeserializer configuration and avoid unchecked warnings.
    KafkaConsumer<String, String> consumer = createConsumer();
    // Subscribe to the single configured topic; the group coordinator
    // balances partitions among group members automatically.
    consumer.subscribe(Arrays.asList(this.topic));
    // NOTE(review): this demo loop never exits and never closes the
    // consumer — acceptable for a tutorial, not for production code.
    while (true) {
        // poll() blocks up to 2000 ms waiting for available records.
        ConsumerRecords<String, String> records = consumer.poll(2000);
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record.offset() + " " + record.key() + " " + record.value());
        }
    }
}
// Entry point: start() spawns a new thread whose run() executes the
// infinite poll loop against topic "test".
public static void main(String[] args) {
new kafkaConsumer("test").start();
}