(十四)flume+Kafka安装配置

1. 解压apache-flume-1.6.0-bin.tar.gz 至指定目录

[root@node02 software]# tar -zvxf apache-flume-1.6.0-bin.tar.gz -C /opt/ycyz/

2. 进入conf/ 目录,直接把flume-env.sh.template 文件复制为flume-env.sh

[root@node02 conf]# cp flume-env.sh.template flume-env.sh

3. 修改flume-env.sh,将JAVA_HOME修改为自己的JAVA安装路径

[root@node02 conf]# vi flume-env.sh
export JAVA_HOME=/usr/java/jdk1.8.0_231-amd64

4. 在conf/ 创建一个文件,用于配置flume和Kafka之间的连接信息

[root@node02 conf]# vi flume_kafka.conf
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# Describe/configure the source
# Source type: avro — clients connect to Flume via the Avro RPC protocol
a1.sources.r1.type = avro
# Host/interface Flume binds its listener to
a1.sources.r1.bind = node02
a1.sources.r1.port = 41414

# Describe the sink
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
# Kafka topic to write events to (created automatically if it does not exist)
a1.sinks.k1.topic = testflume
# Kafka broker address list (host:port, comma-separated)
a1.sinks.k1.brokerList = node02:9092,node03:9092,node04:9092
# Delivery guarantee: 1 = wait for the partition leader's ack
a1.sinks.k1.requiredAcks = 1
# Number of events to batch per Kafka produce request
a1.sinks.k1.batchSize = 20

# Use a channel which buffers events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000000
a1.channels.c1.transactionCapacity = 10000

# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1

5. 配置环境变量,将Flume安装目录加入PATH,便于直接使用flume-ng命令

[root@node02 ~]# vi /etc/profile
export FLUME_HOME=/opt/ycyz/apache-flume-1.6.0-bin
export PATH=$PATH:$FLUME_HOME/bin
[root@node02 ~]# source /etc/profile

6. 启动flume读取配置文件,向Kafka推送消息

[root@node02 ~]# flume-ng agent -n a1 -c conf -f $FLUME_HOME/conf/flume_kafka.conf -Dflume.root.logger=DEBUG,console

你可能感兴趣的:((十四)flume+Kafka安装配置)