1. Writing the message producer
package kafka;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
/**
 * Title: KafkaProducerTest
 * Description: Kafka producer demo
 * Version: 2.2.1
 * @author pancm
 * @date June 15, 2019
 */
public class KafkaProducerTest implements Runnable {

    private final KafkaProducer<String, String> producer;
    private final String topic;

    public KafkaProducerTest(String topicName) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "hadoop1:9092,hadoop2:9092,hadoop3:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());
        this.producer = new KafkaProducer<>(props);
        this.topic = topicName;
    }

    @Override
    public void run() {
        int messageNo = 1;
        try {
            for (;;) {
                String messageStr = "1.74.103.143\t2018-12-20 18:12:00\t \"GET /class/130.html HTTP/1.1\" \t404\thttps://search.yahoo.com/search?p=Flink实战\n";
                producer.send(new ProducerRecord<>(topic, "Message", messageStr));
                // print every 100th message sent
                if (messageNo % 100 == 0) {
                    System.out.println("sent message: " + messageStr);
                }
                // stop after sending 1000 messages
                if (messageNo % 1000 == 0) {
                    System.out.println("successfully sent " + messageNo + " messages");
                    break;
                }
                messageNo++;
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            producer.close();
        }
    }

    public static void main(String[] args) {
        KafkaProducerTest test = new KafkaProducerTest("KAFKA_TEST");
        Thread thread = new Thread(test);
        thread.start();
    }
}
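Note that send() above is fire-and-forget: it is asynchronous, and the demo never checks whether a record actually reached the brokers. As a minimal sketch (not part of the original demo), the send call inside run() could be swapped for a variant with a delivery callback; KafkaProducer.send(record, callback) invokes the callback with the RecordMetadata the broker assigned, or with the exception if the send failed:

// Hypothetical replacement for the producer.send(...) line above:
// the callback fires once the broker acknowledges (or rejects) the record.
producer.send(new ProducerRecord<>(topic, "Message", messageStr), (metadata, exception) -> {
    if (exception != null) {
        // the record was not written; log and decide whether to retry
        exception.printStackTrace();
    } else {
        System.out.println("delivered to partition " + metadata.partition()
                + " at offset " + metadata.offset());
    }
});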
2. Writing the message consumer
package kafka;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
/**
 * Title: KafkaConsumerTest
 * Description: Kafka consumer demo
 * Version: 2.2.1
 * @author pancm
 * @date June 15, 2019
 */
public class KafkaConsumerTest implements Runnable {

    private final KafkaConsumer<String, String> consumer;
    private ConsumerRecords<String, String> msgList;
    private final String topic;
    private static final String GROUPID = "groupA";

    public KafkaConsumerTest(String topicName) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "hadoop1:9092,hadoop2:9092,hadoop3:9092");
        props.put("group.id", GROUPID);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        this.consumer = new KafkaConsumer<>(props);
        this.topic = topicName;
        this.consumer.subscribe(Arrays.asList(topic));
    }

    @Override
    public void run() {
        int messageNo = 1;
        System.out.println("--------- start consuming ---------");
        try {
            outer:
            for (;;) {
                msgList = consumer.poll(Duration.ofMillis(1000));
                if (null != msgList && msgList.count() > 0) {
                    for (ConsumerRecord<String, String> record : msgList) {
                        // print every 100th message (the printed records won't necessarily follow an exact pattern)
                        if (messageNo % 100 == 0) {
                            System.out.println(messageNo + "=======receive: key = " + record.key() + ", value = " + record.value() + " offset===" + record.offset());
                        }
                        // exit after consuming 1000 messages; the label breaks out of the polling loop, not just the inner loop
                        if (messageNo % 1000 == 0) {
                            break outer;
                        }
                        messageNo++;
                    }
                } else {
                    Thread.sleep(1000);
                }
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            consumer.close();
        }
    }

    public static void main(String[] args) {
        KafkaConsumerTest test1 = new KafkaConsumerTest("KAFKA_TEST");
        Thread thread1 = new Thread(test1);
        thread1.start();
    }
}
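Because enable.auto.commit is "true", offsets are committed on a timer (auto.commit.interval.ms) regardless of whether the polled records were actually processed, so a crash between a commit and the processing of that batch can silently skip messages. A minimal sketch of the manual alternative, assuming the same consumer configured with enable.auto.commit set to "false" (this is not part of the original demo):

// Hypothetical variant of the poll loop above with manual offset commits.
// Requires props.put("enable.auto.commit", "false") in the constructor.
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
for (ConsumerRecord<String, String> record : records) {
    // process the record before its offset is committed
    System.out.println("processing offset " + record.offset());
}
// commit the offsets returned by the last poll only after processing succeeded,
// so a crash mid-batch re-reads the uncommitted records instead of losing them
consumer.commitSync();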