Flume official download address: https://flume.apache.org/ (the "Welcome to Apache Flume" page)
tar -zxf ./apache-flume-1.9.0-bin.tar.gz -C /opt/soft/
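The later steps use /opt/soft/flume190, so the extracted directory is renamed to match (path assumed from the commands that follow):
mv /opt/soft/apache-flume-1.9.0-bin /opt/soft/flume190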
cd /opt/soft/flume190/conf
ll
cp ./flume-env.sh.template ./flume-env.sh
vim ./flume-env.sh
------------------------------------
export JAVA_HOME=/opt/soft/jdk180
export JAVA_OPTS="-Xms2000m -Xmx2000m -Dcom.sun.management.jmxremote"
-----------------------------------
yum install -y net-tools                  install net-tools
yum install -y nc                         install netcat
yum install -y telnet-server              install the telnet server
yum install -y telnet.*                   install the telnet client
netstat -lnp | grep 7777                  check whether port 7777 is already in use
nc -lk 7777                               start a listening server on port 7777
telnet localhost 7777                     connect to the server
Note: one server can accept multiple clients; when the server is shut down, its clients are disconnected as well.
cp /opt/soft/hadoop313/share/hadoop/hdfs/lib/guava-27.0-jre.jar /opt/soft/flume190/lib/    copy Hadoop's guava into Flume's lib to avoid a guava version conflict when writing to HDFS
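Flume 1.9.0 bundles an older guava in its lib directory; having both versions side by side can clash, so the old one is usually removed first (jar name as shipped with Flume 1.9.0, check with ls before deleting):
rm /opt/soft/flume190/lib/guava-11.0.2.jar    remove Flume's bundled older guava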
vim ./netcat-logger.conf                  create the agent configuration (saved under conf/myconf2/, matching the start command below)
Note: in a Flume properties file, comments must sit on their own line; text after a value (e.g. "netcat #...") becomes part of the value and breaks the config, so the comments below are placed on separate lines.
# Configure the components of agent a1
a1.sources=r1
a1.channels=c1
a1.sinks=k1
# netcat source: listens for text lines on the given host and port
a1.sources.r1.type=netcat
# host to bind
a1.sources.r1.bind=localhost
# port to listen on
a1.sources.r1.port=7777
# channel type
a1.channels.c1.type=memory
# logger sink: writes incoming events to the agent's log
a1.sinks.k1.type=logger
a1.sources.r1.channels=c1
a1.sinks.k1.channel=c1
Start the agent
./bin/flume-ng agent --name a1 --conf ./conf/ --conf-file ./conf/myconf2/netcat-logger.conf -Dflume.root.logger=INFO,console
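With a1 running, a quick test (a minimal sketch) is to connect from another terminal and type a line; the agent's console should then log an Event whose body contains the typed text:
telnet localhost 7777
hello flume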
1、Extension: prepare a file to be monitored
mkdir /opt/soft/tmp                       create the directory
vim ./flumelog.log                        create the file to be monitored (under /opt/soft/tmp)
tail -f ./flumelog.log                    follow the monitored file
echo "aaaa" >> ./flumelog.log             append a line to confirm new content shows up in tail -f
2、The filelogger.conf configuration file
a2.sources=r1
a2.channels=c1
a2.sinks=k1
# exec source: run a command and turn each line of its output into an event
a2.sources.r1.type=exec
a2.sources.r1.command=tail -f /opt/soft/tmp/flumelog.log
a2.channels.c1.type=memory
# capacity: max events held in the channel; transactionCapacity: max events per put/take transaction
a2.channels.c1.capacity=1000
a2.channels.c1.transactionCapacity=100
a2.sinks.k1.type=logger
a2.sources.r1.channels=c1
# note: sinks use 'channel' (singular), sources use 'channels'
a2.sinks.k1.channel=c1
3、Start
./bin/flume-ng agent --name a2 --conf ./conf/ --conf-file ./conf/myconf2/filelogger.conf -Dflume.root.logger=INFO,console
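With a2 running, appending to the monitored file from another terminal should produce a logged event on the agent's console:
echo "hello exec source" >> /opt/soft/tmp/flumelog.log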
(1) Start Hadoop and leave safe mode
start-all.sh
hdfs dfsadmin -safemode leave
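If in doubt, the current safe-mode state can be checked before starting the agent (expect "Safe mode is OFF"):
hdfs dfsadmin -safemode get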
(2) Configure the file-flume-hdfs.conf file
a3.sources=r1
a3.channels=c1
a3.sinks=k1
a3.sources.r1.type=exec
a3.sources.r1.command=tail -f /opt/soft/tmp/flumelog.log
a3.channels.c1.type=memory
a3.channels.c1.capacity=1000
a3.channels.c1.transactionCapacity=100
# hdfs sink: write events into HDFS as plain-text files
a3.sinks.k1.type=hdfs
a3.sinks.k1.hdfs.fileType=DataStream
a3.sinks.k1.hdfs.filePrefix=flumetohdfs
a3.sinks.k1.hdfs.fileSuffix=.txt
a3.sinks.k1.hdfs.path=hdfs://192.168.91.11:9000/kb23flume/
a3.sources.r1.channels=c1
a3.sinks.k1.channel=c1
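To start a3 (assuming the file is saved under conf/myconf2/ like the earlier ones) and then verify the output in HDFS:
./bin/flume-ng agent --name a3 --conf ./conf/ --conf-file ./conf/myconf2/file-flume-hdfs.conf -Dflume.root.logger=INFO,console
echo "hello hdfs sink" >> /opt/soft/tmp/flumelog.log      append test data in another terminal
hdfs dfs -ls /kb23flume/                                  list the generated flumetohdfs*.txt files
hdfs dfs -cat /kb23flume/flumetohdfs*                     inspect their contents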
(3) Configure a fan-out example: one source feeding two channels, with a logger sink and an HDFS sink
a4.sources=s1
a4.channels=c1 c2
a4.sinks=k1 k2
a4.sources.s1.type=exec
a4.sources.s1.command=tail -f /opt/soft/tmp/flumelog.log
a4.channels.c1.type=memory
a4.channels.c2.type=memory
a4.sinks.k1.type=logger
a4.sinks.k2.type=hdfs
a4.sinks.k2.hdfs.fileType=DataStream
a4.sinks.k2.hdfs.filePrefix=flumetohdfs
a4.sinks.k2.hdfs.fileSuffix=.txt
a4.sinks.k2.hdfs.path=hdfs://192.168.91.11:9000/kb23flume1/
# with the default replicating channel selector, each event is copied to both channels
a4.sources.s1.channels=c1 c2
a4.sinks.k1.channel=c1
a4.sinks.k2.channel=c2
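To start a4 (saved here as conf/myconf2/file-flume-multi.conf; the file name is only an example, use whatever name the config was saved under):
./bin/flume-ng agent --name a4 --conf ./conf/ --conf-file ./conf/myconf2/file-flume-multi.conf -Dflume.root.logger=INFO,console
Lines appended to /opt/soft/tmp/flumelog.log should then show up both on the agent's console (logger sink) and under /kb23flume1/ in HDFS (hdfs sink).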