(1) Configuration files
[cevent@hadoop207 ~]$ cd /opt/module/apache-flume-1.7.0/
[cevent@hadoop207 apache-flume-1.7.0]$ ll
total 176
drwxr-xr-x.  2 cevent cevent  4096 Jun 11 13:35 bin
-rw-r--r--.  1 cevent cevent 77387 Oct 11  2016 CHANGELOG
drwxr-xr-x.  2 cevent cevent  4096 Jun 12 12:11 conf
-rw-r--r--.  1 cevent cevent  6172 Sep 26  2016 DEVNOTES
-rw-r--r--.  1 cevent cevent  2873 Sep 26  2016 doap_Flume.rdf
drwxr-xr-x. 10 cevent cevent  4096 Oct 13  2016 docs
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 16:43 files
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 16:40 job
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 09:30 lib
-rw-r--r--.  1 cevent cevent 27625 Oct 13  2016 LICENSE
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 11:48 loggers
drwxrwxr-x.  2 cevent cevent  4096 Jun 11 17:05 logs
-rw-r--r--.  1 cevent cevent   249 Sep 26  2016 NOTICE
-rw-r--r--.  1 cevent cevent  2520 Sep 26  2016 README.md
-rw-r--r--.  1 cevent cevent  1585 Oct 11  2016 RELEASE-NOTES
-rw-rw-r--.  1 cevent cevent   177 Jun 12 16:57 tail_dir.json
drwxrwxr-x.  2 cevent cevent  4096 Jun 11 13:35 tools
-rw-rw-r--.  1 cevent cevent    16 Jun 12 16:45 tutu.txt
drwxrwxr-x.  3 cevent cevent  4096 Jun 12 14:23 upload
[cevent@hadoop207 apache-flume-1.7.0]$ mkdir job/group1   (create the flume job group directory)
[cevent@hadoop207 apache-flume-1.7.0]$ cd job/group1/
[cevent@hadoop207 group1]$ ll
总用量 0
[cevent@hadoop207 group1]$ vim flume-file-sinksavro.conf   (create flume1: avro sinks, the senders)
## 1. Special case: 2 sinks and 2 channels
# Name the components on this agent
a1.sources = r1
a1.sinks = k1 k2
a1.channels = c1 c2
# Replicate the data stream to all channels
a1.sources.r1.selector.type = replicating
# Describe/configure the source
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /opt/module/hive-1.2.1/logs/hive.log
a1.sources.r1.shell = /bin/bash -c
# Describe the sink
# On the sink side, avro is a data sender
a1.sinks.k1.type = avro
a1.sinks.k1.hostname = hadoop207.cevent.com
a1.sinks.k1.port = 4141
a1.sinks.k2.type = avro
a1.sinks.k2.hostname = hadoop207.cevent.com
a1.sinks.k2.port = 4142
# Describe the channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
a1.channels.c2.type = memory
a1.channels.c2.capacity = 1000
a1.channels.c2.transactionCapacity = 100
# Bind the source and sink to the channel
a1.sources.r1.channels = c1 c2
a1.sinks.k1.channel = c1
a1.sinks.k2.channel = c2
~
"flume-file-sinksavro.conf" [新] 36L, 977C 已写入
[cevent@hadoop207 group1]$ vim flume-sinksavro-hdfs.conf   (create flume2: avro to HDFS upload)
## 2. avro to HDFS
# Name the components on this agent
a2.sources = r1
a2.sinks = k1
a2.channels = c1
# Describe/configure the source
# On the source side, avro is a data-receiving service
a2.sources.r1.type = avro
a2.sources.r1.bind = hadoop207.cevent.com
a2.sources.r1.port = 4141
# Describe the sink
a2.sinks.k1.type = hdfs
a2.sinks.k1.hdfs.path = hdfs://hadoop207.cevent.com:8020/flume2/%Y%m%d/%H
# /flume2 is created automatically under the HDFS root
# prefix for uploaded files
a2.sinks.k1.hdfs.filePrefix = flume2-
# whether to bucket directories by rounded-down time
a2.sinks.k1.hdfs.round = true
# how many time units per new directory
a2.sinks.k1.hdfs.roundValue = 1
# the time unit used for rounding
a2.sinks.k1.hdfs.roundUnit = hour
# use the local timestamp instead of an event-header timestamp
a2.sinks.k1.hdfs.useLocalTimeStamp = true
# number of events to accumulate before one flush to HDFS
a2.sinks.k1.hdfs.batchSize = 100
# file type; compressed formats are also supported
a2.sinks.k1.hdfs.fileType = DataStream
# seconds before rolling a new file
a2.sinks.k1.hdfs.rollInterval = 600
# roll each file at roughly 128 MB
a2.sinks.k1.hdfs.rollSize = 134217700
# never roll based on event count
a2.sinks.k1.hdfs.rollCount = 0
# Describe the channel
a2.channels.c1.type = memory
a2.channels.c1.capacity = 1000
a2.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a2.sources.r1.channels = c1
a2.sinks.k1.channel = c1
~
"flume-sinksavro-hdfs.conf" [新] 44L, 1324C 已写入
[cevent@hadoop207 group1]$ vim flume-sinksavro-files.conf   (create flume3: avro to a local directory)
# Name the components on this agent
a3.sources = r1
a3.sinks = k1
a3.channels = c2
# Describe/configure the source
a3.sources.r1.type = avro
a3.sources.r1.bind = hadoop207.cevent.com
a3.sources.r1.port = 4142
# Describe the sink (the flumes directory must be created beforehand)
a3.sinks.k1.type = file_roll
a3.sinks.k1.sink.directory = /opt/module/datas/flumes
# Describe the channel
a3.channels.c2.type = memory
a3.channels.c2.capacity = 1000
a3.channels.c2.transactionCapacity = 100
# Bind the source and sink to the channel
a3.sources.r1.channels = c2
a3.sinks.k1.channel = c2
~
"flume-sinksavro-files.conf" [新] 22L, 566C 已写入
[cevent@hadoop207 group1]$ cd ../..   (start flume3, the local-directory sink, first)
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a3 -c conf/ -f
job/group1/flume-sinksavro-files.conf
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java
-Xmx20m -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a3 -f
job/group1/flume-sinksavro-files.conf
SLF4J: Class path contains multiple SLF4J
bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See
http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
(2) Create the local flumes output directory
[cevent@hadoop207 ~]$ cd /opt/module/datas/
[cevent@hadoop207 datas]$ ll
total 383044
-rw-rw-r--. 1 cevent cevent       147 May 10 13:46 510test.txt
-rw-rw-r--. 1 cevent cevent 120734753 Jun  8 13:31 bigtable
-rw-rw-r--. 1 cevent cevent       266 May 17 13:52 business.txt
-rw-rw-r--. 1 cevent cevent       129 May 17 13:52 constellation.txt
-rw-rw-r--. 1 cevent cevent        71 May 17 13:52 dept.txt
-rw-rw-r--. 1 cevent cevent        78 May 17 13:52 emp_sex.txt
drwxrwxr-x. 3 cevent cevent      4096 Jun  5 14:17 emp.txt
drwxrwxr-x. 4 cevent cevent      4096 May 22 13:32 export
-rw-rw-r--. 1 cevent cevent      2794 Jun  4 22:32 hadoop_hive_userdefinedfunc_plugin-1.0-SNAPSHOT.jar
-rw-rw-r--. 1 cevent cevent        37 May 17 13:52 location.txt
-rw-rw-r--. 1 cevent cevent  19014993 May 17 13:52 log.data
-rw-rw-r--. 1 cevent cevent       136 May 17 13:52 movie.txt
-rw-rw-r--. 1 cevent cevent 118645854 Jun  9 13:20 nullid
-rw-rw-r--. 1 cevent cevent 121734744 Jun  9 13:16 ori
-rw-rw-r--. 1 cevent cevent       213 May 17 13:52 score.txt
-rw-rw-r--. 1 cevent cevent  12018355 Jun  8 13:31 smalltable
drwxrwxr-x. 3 cevent cevent      4096 Jun  5 14:18 snappy-distribute-result
-rw-rw-r--. 1 cevent cevent       165 May 17 13:52 student.txt
drwxrwxr-x. 2 cevent cevent      4096 Jun  9 21:27 user
drwxrwxr-x. 2 cevent cevent      4096 Jun  9 21:27 video
-rw-rw-r--. 1 cevent cevent      4874 Jun 10 13:51 video_etl200609-1.0.jar
-rw-rw-r--. 1 cevent cevent       301 May 17 13:52 数据说明.txt
[cevent@hadoop207 datas]$ mkdir -p /opt/module/datas/flumes   (create the directory, with parents)
[cevent@hadoop207 datas]$ ll
total 383048
-rw-rw-r--. 1 cevent cevent       147 May 10 13:46 510test.txt
-rw-rw-r--. 1 cevent cevent 120734753 Jun  8 13:31 bigtable
-rw-rw-r--. 1 cevent cevent       266 May 17 13:52 business.txt
-rw-rw-r--. 1 cevent cevent       129 May 17 13:52 constellation.txt
-rw-rw-r--. 1 cevent cevent        71 May 17 13:52 dept.txt
-rw-rw-r--. 1 cevent cevent        78 May 17 13:52 emp_sex.txt
drwxrwxr-x. 3 cevent cevent      4096 Jun  5 14:17 emp.txt
drwxrwxr-x. 4 cevent cevent      4096 May 22 13:32 export
drwxrwxr-x. 2 cevent cevent      4096 Jun 13 13:25 flumes
-rw-rw-r--. 1 cevent cevent      2794 Jun  4 22:32 hadoop_hive_userdefinedfunc_plugin-1.0-SNAPSHOT.jar
-rw-rw-r--. 1 cevent cevent        37 May 17 13:52 location.txt
-rw-rw-r--. 1 cevent cevent  19014993 May 17 13:52 log.data
-rw-rw-r--. 1 cevent cevent       136 May 17 13:52 movie.txt
-rw-rw-r--. 1 cevent cevent 118645854 Jun  9 13:20 nullid
-rw-rw-r--. 1 cevent cevent 121734744 Jun  9 13:16 ori
-rw-rw-r--. 1 cevent cevent       213 May 17 13:52 score.txt
-rw-rw-r--. 1 cevent cevent  12018355 Jun  8 13:31 smalltable
drwxrwxr-x. 3 cevent cevent      4096 Jun  5 14:18 snappy-distribute-result
-rw-rw-r--. 1 cevent cevent       165 May 17 13:52 student.txt
drwxrwxr-x. 2 cevent cevent      4096 Jun  9 21:27 user
drwxrwxr-x. 2 cevent cevent      4096 Jun  9 21:27 video
-rw-rw-r--. 1 cevent cevent      4874 Jun 10 13:51 video_etl200609-1.0.jar
-rw-rw-r--. 1 cevent cevent       301 May 17 13:52 数据说明.txt
[cevent@hadoop207 datas]$ cd flumes/   (check the rolled output)
[cevent@hadoop207 flumes]$ ll
total 4
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:31 1592026308309-1
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:32 1592026308309-2
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:32 1592026308309-3
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:33 1592026308309-4
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:33 1592026308309-5
-rw-rw-r--. 1 cevent cevent 1623 Jun 13 13:34 1592026308309-6
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:34 1592026308309-7
-rw-rw-r--. 1 cevent cevent    0 Jun 13 13:35 1592026308309-8
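Most of these files are zero bytes: as noted above, file_roll opens a new file every 30 seconds whether or not data arrived, and only 1592026308309-6 happened to catch events from hive.log.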
(3) Start flume2 (HDFS upload)
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a2 -c conf/ -f
job/group1/flume-sinksavro-hdfs.conf
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java
-Xmx20m -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a2 -f
job/group1/flume-sinksavro-hdfs.conf
SLF4J: Class path contains multiple SLF4J
bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See
http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
(4) Start flume1 (the exec source)
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a1 -c conf/ -f
job/group1/flume-file-sinksavro.conf
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java
-Xmx20m -cp '/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a1 -f
job/group1/flume-file-sinksavro.conf
SLF4J: Class path contains multiple SLF4J
bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See
http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
(5) Full configuration recap (replicating to two sinks)
# One source fans out to two channels and two sinks
## 1. Special case: 2 sinks and 2 channels
# Name the components on this agent
a1.sources = r1
a1.sinks = k1 k2
a1.channels = c1 c2
# Replicate the data stream to all channels
a1.sources.r1.selector.type = replicating
# Describe/configure the source
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /opt/module/hive-1.2.1/logs/hive.log
a1.sources.r1.shell = /bin/bash -c
# Describe the sink
# On the sink side, avro is a data sender
a1.sinks.k1.type = avro
a1.sinks.k1.hostname = hadoop207.cevent.com
a1.sinks.k1.port = 4141
a1.sinks.k2.type = avro
a1.sinks.k2.hostname = hadoop207.cevent.com
a1.sinks.k2.port = 4142
# Describe the channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
a1.channels.c2.type = memory
a1.channels.c2.capacity = 1000
a1.channels.c2.transactionCapacity = 100
# Bind the source and sink to the channel
a1.sources.r1.channels = c1 c2
a1.sinks.k1.channel = c1
a1.sinks.k2.channel = c2
## 2. avro to HDFS
# Name the components on this agent
a2.sources = r1
a2.sinks = k1
a2.channels = c1
# Describe/configure the source
# On the source side, avro is a data-receiving service
a2.sources.r1.type = avro
a2.sources.r1.bind = hadoop207.cevent.com
a2.sources.r1.port = 4141
# Describe the sink
a2.sinks.k1.type = hdfs
a2.sinks.k1.hdfs.path = hdfs://hadoop207.cevent.com:8020/flume2/%Y%m%d/%H
# prefix for uploaded files
a2.sinks.k1.hdfs.filePrefix = flume2-
# whether to bucket directories by rounded-down time
a2.sinks.k1.hdfs.round = true
# how many time units per new directory
a2.sinks.k1.hdfs.roundValue = 1
# the time unit used for rounding
a2.sinks.k1.hdfs.roundUnit = hour
# use the local timestamp instead of an event-header timestamp
a2.sinks.k1.hdfs.useLocalTimeStamp = true
# number of events to accumulate before one flush to HDFS
a2.sinks.k1.hdfs.batchSize = 100
# file type; compressed formats are also supported
a2.sinks.k1.hdfs.fileType = DataStream
# seconds before rolling a new file
a2.sinks.k1.hdfs.rollInterval = 600
# roll each file at roughly 128 MB
a2.sinks.k1.hdfs.rollSize = 134217700
# never roll based on event count
a2.sinks.k1.hdfs.rollCount = 0
# Describe the channel
a2.channels.c1.type = memory
a2.channels.c1.capacity = 1000
a2.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a2.sources.r1.channels = c1
a2.sinks.k1.channel = c1
## 3. avro to local files
# Name the components on this agent
a3.sources = r1
a3.sinks = k1
a3.channels = c2
# Describe/configure the source
a3.sources.r1.type = avro
a3.sources.r1.bind = hadoop207.cevent.com
a3.sources.r1.port = 4142
# Describe the sink (the flumes directory must be created beforehand)
a3.sinks.k1.type = file_roll
a3.sinks.k1.sink.directory = /opt/module/datas/flumes
# Describe the channel
a3.channels.c2.type = memory
a3.channels.c2.capacity = 1000
a3.channels.c2.transactionCapacity = 100
# Bind the source and sink to the channel
a3.sources.r1.channels = c2
a3.sinks.k1.channel = c2
## 4. Start flume2 and flume3 first, then flume1 (see the sketch below)
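The avro sinks of a1 start connecting as soon as the agent comes up, so the downstream avro sources (a2 and a3) should already be listening on 4141/4142. A sketch of the start order, assuming the same paths as above (one terminal per agent works just as well):

bin/flume-ng agent -n a3 -c conf/ -f job/group1/flume-sinksavro-files.conf &
bin/flume-ng agent -n a2 -c conf/ -f job/group1/flume-sinksavro-hdfs.conf &
sleep 5   # give the avro sources time to bind 4141/4142
bin/flume-ng agent -n a1 -c conf/ -f job/group1/flume-file-sinksavro.conf &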
(6) Result
Link: http://hadoop207.cevent.com:50070/explorer.html#/flume2/20200613/13
(1) Load-balancing sink processor; Flume user guide: http://flume.apache.org/releases/content/1.9.0/FlumeUserGuide.html
a1.sinkgroups = g1
a1.sinkgroups.g1.sinks = k1 k2
a1.sinkgroups.g1.processor.type = load_balance
a1.sinkgroups.g1.processor.backoff = true
a1.sinkgroups.g1.processor.selector = random
(2) Configure the source agent and the console1/console2 agents
[cevent@hadoop207 apache-flume-1.7.0]$ mkdir job/group2   (create the group2 directory)
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group2/flume-netcat-source.conf   (configure source agent a1)
## Flume1: flume-netcat-source.conf - 1 source, 1 channel, 2 sinks (together forming one sink group)
# Name the components on this agent
a1.sources = r1
a1.channels = c1
a1.sinkgroups = g1
a1.sinks = k1 k2
# Describe/configure the source
a1.sources.r1.type = netcat
a1.sources.r1.bind = localhost
a1.sources.r1.port = 44444
# The two sinks form a sink group; maxpenalty is the maximum backoff for a failed sink (ms)
a1.sinkgroups.g1.sinks = k1 k2
a1.sinkgroups.g1.processor.type = failover
a1.sinkgroups.g1.processor.priority.k1 = 5
a1.sinkgroups.g1.processor.priority.k2 = 10
a1.sinkgroups.g1.processor.maxpenalty = 10000
# Describe the sink
a1.sinks.k1.type = avro
a1.sinks.k1.hostname = hadoop207.cevent.com
a1.sinks.k1.port = 4141
a1.sinks.k2.type = avro
a1.sinks.k2.hostname = hadoop207.cevent.com
a1.sinks.k2.port = 4142
# Describe the channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
a1.sinks.k2.channel = c1
~
"job/group2/flume-netcat-source.conf"
[新] 37L, 1061C 已写入
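To recap what this failover group does: all events go to the live sink with the highest priority, so with the values above k2 (port 4142, read by a3) receives everything until it fails, at which point k1 (port 4141, read by a2) takes over; a failed sink is retried with a backoff that grows up to maxpenalty milliseconds. The same four lines, annotated:

a1.sinkgroups.g1.processor.type = failover
a1.sinkgroups.g1.processor.priority.k2 = 10   # highest priority: k2 -> 4142 (a3) gets all events
a1.sinkgroups.g1.processor.priority.k1 = 5    # standby: k1 -> 4141 (a2) takes over on failure
a1.sinkgroups.g1.processor.maxpenalty = 10000 # cap on a failed sink's backoff, in ms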
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group2/flume-netcat-console1.conf   (configure a2)
## Flume2: flume-netcat-console1.conf - avro to console
# Name the components on this agent
a2.sources = r1
a2.sinks = k1
a2.channels = c1
# Describe/configure the source
a2.sources.r1.type = avro
a2.sources.r1.bind = hadoop207.cevent.com
a2.sources.r1.port = 4141
# Describe the sink
a2.sinks.k1.type = logger
# Describe the channel
a2.channels.c1.type = memory
a2.channels.c1.capacity = 1000
a2.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a2.sources.r1.channels = c1
a2.sinks.k1.channel = c1
~
"job/group2/flume-netcat-console1.conf"
[新] 22L, 534C 已写入
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group2/flume-netcat-console2.conf   (configure a3)
## Flume3: flume-netcat-console2.conf - avro to console
# Name the components on this agent
a3.sources = r1
a3.sinks = k1
a3.channels = c2
# Describe/configure the source
a3.sources.r1.type = avro
a3.sources.r1.bind = hadoop207.cevent.com
a3.sources.r1.port = 4142
# Describe the sink
a3.sinks.k1.type = logger
# Describe the channel
a3.channels.c2.type = memory
a3.channels.c2.capacity = 1000
a3.channels.c2.transactionCapacity = 100
# Bind the source and sink to the channel
a3.sources.r1.channels = c2
a3.sinks.k1.channel = c2
~
"job/group2/flume-netcat-console2.conf"
[新] 22L, 516C 已写入
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a3 -c conf/ -f job/group2/flume-netcat-console2.conf -Dflume.root.logger=INFO,console   (start a3, console2)
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via (/opt/module/hive-1.2.1) for
Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m
-Dflume.root.logger=INFO,console -cp '/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a3 -f
job/group2/flume-netcat-console2.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
2020-06-13 14:17:03,253 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider.start(PollingPropertiesFileConfigurationProvider.java:62)]
Configuration provider starting
2020-06-13 14:17:03,262 (conf-file-poller-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:134)]
Reloading configuration file:job/group2/flume-netcat-console2.conf
2020-06-13 14:17:03,268 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:17:03,269 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:17:03,269 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:930)]
Added sinks: k1 Agent: a3
2020-06-13 14:17:03,282 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration.validateConfiguration(FlumeConfiguration.java:140)]
Post-validation flume configuration contains configuration for agents: [a3]
2020-06-13 14:17:03,282 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:147)]
Creating channels
2020-06-13 14:17:03,294 (conf-file-poller-0) [INFO - org.apache.flume.channel.DefaultChannelFactory.create(DefaultChannelFactory.java:42)]
Creating instance of channel c2 type memory
2020-06-13 14:17:03,298 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:201)]
Created channel c2
2020-06-13 14:17:03,299 (conf-file-poller-0) [INFO -
org.apache.flume.source.DefaultSourceFactory.create(DefaultSourceFactory.java:41)]
Creating instance of source r1, type avro
2020-06-13 14:17:03,318 (conf-file-poller-0) [INFO -
org.apache.flume.sink.DefaultSinkFactory.create(DefaultSinkFactory.java:42)]
Creating instance of sink: k1, type: logger
2020-06-13 14:17:03,321 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.getConfiguration(AbstractConfigurationProvider.java:116)]
Channel c2 connected to [r1, k1]
2020-06-13 14:17:03,327 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:137)]
Starting new configuration:{ sourceRunners:{r1=EventDrivenSourceRunner: {
source:Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4142 } }}
sinkRunners:{k1=SinkRunner: {
policy:org.apache.flume.sink.DefaultSinkProcessor@4dd1bb4d counterGroup:{
name:null counters:{} } }} channels:{c2=org.apache.flume.channel.MemoryChannel{name:
c2}} }
2020-06-13 14:17:03,341 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:144)]
Starting Channel c2
2020-06-13 14:17:03,390 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: CHANNEL, name: c2: Successfully registered
new MBean.
2020-06-13 14:17:03,390 (lifecycleSupervisor-1-0) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: CHANNEL, name: c2 started
2020-06-13 14:17:03,394 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:171)]
Starting Sink k1
2020-06-13 14:17:03,395 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:182)]
Starting Source r1
2020-06-13 14:17:03,396 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:234)] Starting Avro
source r1: { bindAddress: hadoop207.cevent.com, port: 4142 }...
2020-06-13 14:17:04,297 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: SOURCE, name: r1: Successfully registered
new MBean.
2020-06-13 14:17:04,297 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: SOURCE, name: r1 started
2020-06-13 14:17:04,307 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:259)] Avro source r1
started.
2020-06-13 14:20:05,941 (New I/O server boss #9) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0xefdbdae3, /192.168.1.207:44715 => /192.168.1.207:4142] OPEN
2020-06-13 14:20:05,942 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0xefdbdae3, /192.168.1.207:44715 => /192.168.1.207:4142] BOUND:
/192.168.1.207:4142
2020-06-13 14:20:05,943 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0xefdbdae3, /192.168.1.207:44715 => /192.168.1.207:4142] CONNECTED:
/192.168.1.207:44715
Data received from nc:
2020-06-13 14:22:15,027
(SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)]
Event: { headers:{} body: 70 6F 77 65 72 power }
2020-06-13 14:23:01,037
(SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO -
org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: {
headers:{} body: 63 65 76 65 6E 74 cevent }
^C2020-06-13 14:23:49,859 (agent-shutdown-hook) [INFO -
org.apache.flume.lifecycle.LifecycleSupervisor.stop(LifecycleSupervisor.java:78)]
Stopping lifecycle supervisor 10
2020-06-13 14:23:49,862 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:149)]
Component type: CHANNEL, name: c2 stopped
2020-06-13 14:23:49,862 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:155)]
Shutdown Metric for type: CHANNEL, name: c2. channel.start.time ==
1592029023390
2020-06-13 14:23:49,862 (agent-shutdown-hook) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:161)]
Shutdown Metric for type: CHANNEL, name: c2. channel.stop.time ==
1592029429862
2020-06-13 14:23:49,863 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.capacity == 1000
2020-06-13 14:23:49,865 (agent-shutdown-hook) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.current.size == 0
2020-06-13 14:23:49,865 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.put.attempt == 2
2020-06-13 14:23:49,865 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.put.success == 2
2020-06-13 14:23:49,866 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.take.attempt == 57
2020-06-13 14:23:49,866 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.take.success == 2
2020-06-13 14:23:49,866 (agent-shutdown-hook) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider.stop(PollingPropertiesFileConfigurationProvider.java:84)]
Configuration provider stopping
2020-06-13 14:23:49,867 (agent-shutdown-hook) [INFO -
org.apache.flume.source.AvroSource.stop(AvroSource.java:301)] Avro source r1
stopping: Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4142 }
2020-06-13 14:23:49,877 (New I/O worker #1) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0xefdbdae3, /192.168.1.207:44715 :> /192.168.1.207:4142] DISCONNECTED
2020-06-13 14:23:49,877 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0xefdbdae3, /192.168.1.207:44715 :> /192.168.1.207:4142] UNBOUND
2020-06-13 14:23:49,879 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0xefdbdae3, /192.168.1.207:44715 :> /192.168.1.207:4142] CLOSED
2020-06-13 14:23:49,879 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.channelClosed(NettyServer.java:209)]
Connection to /192.168.1.207:44715 disconnected.
2020-06-13 14:23:49,890 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:149)]
Component type: SOURCE, name: r1 stopped
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:155)]
Shutdown Metric for type: SOURCE, name: r1. source.start.time ==
1592029024297
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:161)]
Shutdown Metric for type: SOURCE, name: r1. source.stop.time == 1592029429890
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append-batch.accepted == 2
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append-batch.received == 2
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append.accepted == 0
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append.received == 0
2020-06-13 14:23:49,891 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.events.accepted == 2
2020-06-13 14:23:49,892 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.events.received == 2
2020-06-13 14:23:49,892 (agent-shutdown-hook) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.open-connection.count == 1
2020-06-13 14:23:49,994 (agent-shutdown-hook) [INFO -
org.apache.flume.source.AvroSource.stop(AvroSource.java:323)] Avro source r1
stopped. Metrics: SOURCE:r1{src.events.accepted=2, src.events.received=2,
src.append.accepted=0, src.append-batch.accepted=2,
src.open-connection.count=1, src.append-batch.received=2,
src.append.received=0}
(3) Start a2
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a2 -c conf/ -f job/group1/flume-sinksavro-hdfs.conf
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a2 -f
job/group1/flume-sinksavro-hdfs.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
^C[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a2 -c
conf/ -f job/group2/flume-netcat-console1.conf -Dflume.root.logger=INFO,console
Info: Sourcing environment configuration script
/opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found via
(/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via (/opt/module/hive-1.2.1) for
Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m
-Dflume.root.logger=INFO,console -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a2 -f
job/group2/flume-netcat-console1.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
2020-06-13 14:18:43,373 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider.start(PollingPropertiesFileConfigurationProvider.java:62)]
Configuration provider starting
2020-06-13 14:18:43,380 (conf-file-poller-0) [INFO - org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:134)]
Reloading configuration file:job/group2/flume-netcat-console1.conf
2020-06-13 14:18:43,393 (conf-file-poller-0) [INFO - org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:18:43,394 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:930)]
Added sinks: k1 Agent: a2
2020-06-13 14:18:43,394 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:18:43,409 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration.validateConfiguration(FlumeConfiguration.java:140)]
Post-validation flume configuration contains configuration for agents: [a2]
2020-06-13 14:18:43,410 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:147)]
Creating channels
2020-06-13 14:18:43,420 (conf-file-poller-0) [INFO -
org.apache.flume.channel.DefaultChannelFactory.create(DefaultChannelFactory.java:42)]
Creating instance of channel c1 type memory
2020-06-13 14:18:43,424 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:201)]
Created channel c1
2020-06-13 14:18:43,424 (conf-file-poller-0) [INFO - org.apache.flume.source.DefaultSourceFactory.create(DefaultSourceFactory.java:41)]
Creating instance of source r1, type avro
2020-06-13 14:18:43,446 (conf-file-poller-0) [INFO -
org.apache.flume.sink.DefaultSinkFactory.create(DefaultSinkFactory.java:42)]
Creating instance of sink: k1, type: logger
2020-06-13 14:18:43,449 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.getConfiguration(AbstractConfigurationProvider.java:116)]
Channel c1 connected to [r1, k1]
2020-06-13 14:18:43,455 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:137)]
Starting new configuration:{ sourceRunners:{r1=EventDrivenSourceRunner: {
source:Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4141 } }}
sinkRunners:{k1=SinkRunner: {
policy:org.apache.flume.sink.DefaultSinkProcessor@74e2ad16 counterGroup:{
name:null counters:{} } }}
channels:{c1=org.apache.flume.channel.MemoryChannel{name: c1}} }
2020-06-13 14:18:43,462 (conf-file-poller-0) [INFO - org.apache.flume.node.Application.startAllComponents(Application.java:144)]
Starting Channel c1
2020-06-13 14:18:43,464 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:159)]
Waiting for channel: c1 to start. Sleeping for 500 ms
2020-06-13 14:18:43,534 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: CHANNEL, name: c1: Successfully registered
new MBean.
2020-06-13 14:18:43,538 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: CHANNEL, name: c1 started
2020-06-13 14:18:43,965 (conf-file-poller-0) [INFO - org.apache.flume.node.Application.startAllComponents(Application.java:171)]
Starting Sink k1
2020-06-13 14:18:43,966 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:182)]
Starting Source r1
2020-06-13 14:18:43,966 (lifecycleSupervisor-1-2) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:234)] Starting Avro
source r1: { bindAddress: hadoop207.cevent.com, port: 4141 }...
2020-06-13 14:18:44,305 (lifecycleSupervisor-1-2) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: SOURCE, name: r1: Successfully registered
new MBean.
2020-06-13 14:18:44,306 (lifecycleSupervisor-1-2) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: SOURCE, name: r1 started
2020-06-13 14:18:44,307 (lifecycleSupervisor-1-2) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:259)] Avro source r1
started.
2020-06-13 14:20:05,685 (New I/O server boss #9) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x0d0eb060, /192.168.1.207:55062 => /192.168.1.207:4141] OPEN
2020-06-13 14:20:05,690 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x0d0eb060, /192.168.1.207:55062 => /192.168.1.207:4141] BOUND:
/192.168.1.207:4141
2020-06-13 14:20:05,691 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x0d0eb060, /192.168.1.207:55062 => /192.168.1.207:4141] CONNECTED:
/192.168.1.207:55062
a3 has failed (stopped); a2 takes over and receives the data:
2020-06-13 14:24:10,030
(SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO -
org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: {
headers:{} body: 65 63 68 6F echo }
2020-06-13 14:24:10,030 (SinkRunner-PollingRunner-DefaultSinkProcessor)
[INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event:
{ headers:{} body: 6B 65 6B 65 keke }
(4) Start a1
[cevent@hadoop207 ~]$ cd /opt/module/apache-flume-1.7.0/
[cevent@hadoop207 apache-flume-1.7.0]$ ll
total 176
drwxr-xr-x.  2 cevent cevent  4096 Jun 11 13:35 bin
-rw-r--r--.  1 cevent cevent 77387 Oct 11  2016 CHANGELOG
drwxr-xr-x.  2 cevent cevent  4096 Jun 12 12:11 conf
-rw-r--r--.  1 cevent cevent  6172 Sep 26  2016 DEVNOTES
-rw-r--r--.  1 cevent cevent  2873 Sep 26  2016 doap_Flume.rdf
drwxr-xr-x. 10 cevent cevent  4096 Oct 13  2016 docs
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 16:43 files
drwxrwxr-x.  3 cevent cevent  4096 Jun 13 13:13 job
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 09:30 lib
-rw-r--r--.  1 cevent cevent 27625 Oct 13  2016 LICENSE
drwxrwxr-x.  2 cevent cevent  4096 Jun 12 11:48 loggers
drwxrwxr-x.  2 cevent cevent  4096 Jun 11 17:05 logs
-rw-r--r--.  1 cevent cevent   249 Sep 26  2016 NOTICE
-rw-r--r--.  1 cevent cevent  2520 Sep 26  2016 README.md
-rw-r--r--.  1 cevent cevent  1585 Oct 11  2016 RELEASE-NOTES
-rw-rw-r--.  1 cevent cevent   177 Jun 12 16:57 tail_dir.json
drwxrwxr-x.  2 cevent cevent  4096 Jun 11 13:35 tools
-rw-rw-r--.  1 cevent cevent    16 Jun 12 16:45 tutu.txt
drwxrwxr-x.  3 cevent cevent  4096 Jun 12 14:23 upload
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a1 -c conf/ -f
job/group1/flume-file-sinksavro.conf   (start the source agent a1)
Info: Sourcing environment configuration script
/opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found via
(/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via (/opt/module/hive-1.2.1) for
Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native org.apache.flume.node.Application
-n a1 -f job/group1/flume-file-sinksavro.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
^C[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a1 -c
conf/ -f job/group2/flume-netcat-source.conf
Info: Sourcing environment configuration script
/opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found via
(/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via (/opt/module/hive-1.2.1) for
Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a1 -f job/group2/flume-netcat-source.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
(5) Send data with nc localhost 44444
[cevent@hadoop207 flumes]$ netstat -nltp   (check listening ports)
(Not all processes could be identified, non-owned process info
 will not be shown, you would have to be root to see it all.)
Active Internet connections (only servers)
Proto Recv-Q Send-Q Local Address               Foreign Address   State    PID/Program name
tcp        0      0 127.0.0.1:44192             0.0.0.0:*         LISTEN   5564/java
tcp        0      0 0.0.0.0:50020               0.0.0.0:*         LISTEN   5564/java
tcp        0      0 192.168.1.207:50090         0.0.0.0:*         LISTEN   5761/java
tcp        0      0 0.0.0.0:111                 0.0.0.0:*         LISTEN   -
tcp        0      0 192.168.1.207:8020          0.0.0.0:*         LISTEN   5452/java
tcp        0      0 0.0.0.0:52917               0.0.0.0:*         LISTEN   -
tcp        0      0 0.0.0.0:50070               0.0.0.0:*         LISTEN   5452/java
tcp        0      0 0.0.0.0:22                  0.0.0.0:*         LISTEN   -
tcp        0      0 127.0.0.1:631               0.0.0.0:*         LISTEN   -
tcp        0      0 0.0.0.0:50010               0.0.0.0:*         LISTEN   5564/java
tcp        0      0 0.0.0.0:50075               0.0.0.0:*         LISTEN   5564/java
tcp        0      0 ::ffff:192.168.1.207:8030   :::*              LISTEN   5943/java
tcp        0      0 ::ffff:192.168.1.207:8031   :::*              LISTEN   5943/java
tcp        0      0 ::ffff:192.168.1.207:8032   :::*              LISTEN   5943/java
tcp        0      0 :::38176                    :::*              LISTEN   -
tcp        0      0 ::ffff:192.168.1.207:8033   :::*              LISTEN   5943/java
tcp        0      0 :::8040                     :::*              LISTEN   6057/java
tcp        0      0 :::8042                     :::*              LISTEN   6057/java
tcp        0      0 :::3306                     :::*              LISTEN   -
tcp        0      0 :::58156                    :::*              LISTEN   6057/java
tcp        0      0 ::ffff:192.168.1.207:4141   :::*              LISTEN   7275/java
tcp        0      0 ::ffff:192.168.1.207:4142   :::*              LISTEN   7178/java
tcp        0      0 :::111                      :::*              LISTEN   -
tcp        0      0 :::22                       :::*              LISTEN   -
tcp        0      0 ::1:631                     :::*              LISTEN   -
tcp        0      0 ::ffff:192.168.1.207:8088   :::*              LISTEN   5943/java
tcp        0      0 :::13562                    :::*              LISTEN   6057/java
tcp        0      0 ::ffff:127.0.0.1:44444      :::*              LISTEN   7371/java
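Ports 4141 and 4142 are the avro sources of a2 and a3, and 127.0.0.1:44444 is a1's netcat source, so all three agents are up and listening.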
[cevent@hadoop207 flumes]$ nc localhost 44444
power
OK
cevent
OK
echo
OK
keke
OK
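This matches the failover priorities: power and cevent were sent while a3 (priority 10) was alive and showed up on its console, while echo and keke were sent after a3 was stopped and failed over to a2.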
(1) Load balancing: reconfigure the source agent, changing the processor type to load_balance
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group2/flume-netcat-source.conf
### a1 updated (load balancing)
## Flume1: flume-netcat-source.conf - 1 source, 1 channel, 2 sinks (together forming one sink group)
## Name the components on this agent (sink ports map to 4141 and 4142)
a1.sources = r1
a1.channels = c1
a1.sinks = k1 k2
## Describe/configure the source
a1.sources.r1.type = netcat
a1.sources.r1.bind = localhost
a1.sources.r1.port = 44444
## The two sinks form a sink group; maxpenalty is the maximum failover backoff (ms); k2 = 10 had the higher priority
## a1.sinkgroups = g1
## a1.sinkgroups.g1.sinks = k1 k2
## a1.sinkgroups.g1.processor.type = failover
## a1.sinkgroups.g1.processor.priority.k1 = 5
## a1.sinkgroups.g1.processor.priority.k2 = 10
## a1.sinkgroups.g1.processor.maxpenalty = 10000
a1.sinkgroups = g1
a1.sinkgroups.g1.sinks = k1 k2
a1.sinkgroups.g1.processor.type = load_balance
a1.sinkgroups.g1.processor.backoff = true
a1.sinkgroups.g1.processor.selector = round_robin
## Describe the sink
a1.sinks.k1.type = avro
a1.sinks.k1.hostname = hadoop207.cevent.com
a1.sinks.k1.port = 4141
a1.sinks.k2.type = avro
a1.sinks.k2.hostname = hadoop207.cevent.com
a1.sinks.k2.port = 4142
## Describe the channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
## Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
a1.sinks.k2.channel = c1
~
"job/group2/flume-netcat-source.conf"
46L, 1361C 已写入
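With processor.type = load_balance the group is no longer active/standby: events are spread across both sinks. round_robin alternates between k1 and k2, random picks a sink per batch, and backoff = true temporarily blacklists a sink that fails. The relevant lines, annotated:

a1.sinkgroups.g1.processor.type = load_balance
a1.sinkgroups.g1.processor.backoff = true          # blacklist a failing sink for a growing interval
a1.sinkgroups.g1.processor.selector = round_robin  # or: random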
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a3 -c conf/ -f job/group2/flume-netcat-console2.conf -Dflume.root.logger=INFO,console   (start a3)
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found via
(/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m
-Dflume.root.logger=INFO,console -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a3 -f
job/group2/flume-netcat-console2.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
2020-06-13 14:58:20,235 (lifecycleSupervisor-1-0) [INFO - org.apache.flume.node.PollingPropertiesFileConfigurationProvider.start(PollingPropertiesFileConfigurationProvider.java:62)]
Configuration provider starting
2020-06-13 14:58:20,245 (conf-file-poller-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:134)]
Reloading configuration file:job/group2/flume-netcat-console2.conf
2020-06-13 14:58:20,250 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:58:20,251 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:58:20,251 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:930)]
Added sinks: k1 Agent: a3
2020-06-13 14:58:20,265 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration.validateConfiguration(FlumeConfiguration.java:140)]
Post-validation flume configuration contains configuration for agents: [a3]
2020-06-13 14:58:20,265 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:147)]
Creating channels
2020-06-13 14:58:20,275 (conf-file-poller-0) [INFO -
org.apache.flume.channel.DefaultChannelFactory.create(DefaultChannelFactory.java:42)]
Creating instance of channel c2 type memory
2020-06-13 14:58:20,279 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:201)]
Created channel c2
2020-06-13 14:58:20,280 (conf-file-poller-0) [INFO -
org.apache.flume.source.DefaultSourceFactory.create(DefaultSourceFactory.java:41)]
Creating instance of source r1, type avro
2020-06-13 14:58:20,296 (conf-file-poller-0) [INFO -
org.apache.flume.sink.DefaultSinkFactory.create(DefaultSinkFactory.java:42)]
Creating instance of sink: k1, type: logger
2020-06-13 14:58:20,298 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.getConfiguration(AbstractConfigurationProvider.java:116)]
Channel c2 connected to [r1, k1]
2020-06-13 14:58:20,304 (conf-file-poller-0) [INFO - org.apache.flume.node.Application.startAllComponents(Application.java:137)]
Starting new configuration:{ sourceRunners:{r1=EventDrivenSourceRunner: {
source:Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4142 } }}
sinkRunners:{k1=SinkRunner: { policy:org.apache.flume.sink.DefaultSinkProcessor@6e4ccfdc
counterGroup:{ name:null counters:{} } }}
channels:{c2=org.apache.flume.channel.MemoryChannel{name: c2}} }
2020-06-13 14:58:20,312 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:144)]
Starting Channel c2
2020-06-13 14:58:20,357 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: CHANNEL, name: c2: Successfully registered
new MBean.
2020-06-13 14:58:20,358 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: CHANNEL, name: c2 started
2020-06-13 14:58:20,361 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:171)]
Starting Sink k1
2020-06-13 14:58:20,362 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:182)] Starting
Source r1
2020-06-13 14:58:20,362 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:234)] Starting Avro
source r1: { bindAddress: hadoop207.cevent.com, port: 4142 }...
2020-06-13 14:58:20,648 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: SOURCE, name: r1: Successfully registered new
MBean.
2020-06-13 14:58:20,648 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: SOURCE, name: r1 started
2020-06-13 14:58:20,649 (lifecycleSupervisor-1-0) [INFO - org.apache.flume.source.AvroSource.start(AvroSource.java:259)]
Avro source r1 started.
2020-06-13 14:59:41,422 (New I/O server boss #9) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x6df490f3, /192.168.1.207:44850 => /192.168.1.207:4142] OPEN
2020-06-13 14:59:41,423 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x6df490f3, /192.168.1.207:44850 => /192.168.1.207:4142] BOUND:
/192.168.1.207:4142
2020-06-13 14:59:41,424 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x6df490f3, /192.168.1.207:44850 => /192.168.1.207:4142] CONNECTED:
/192.168.1.207:44850
2020-06-13 15:00:04,370 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 31    1 }
2020-06-13 15:00:04,370 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 32    2 }
2020-06-13 15:00:04,370 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 33    3 }
^C2020-06-13 15:05:55,116 (agent-shutdown-hook) [INFO - org.apache.flume.lifecycle.LifecycleSupervisor.stop(LifecycleSupervisor.java:78)]
Stopping lifecycle supervisor 10
2020-06-13 15:05:55,117 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:149)]
Component type: CHANNEL, name: c2 stopped
2020-06-13 15:05:55,118 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:155)]
Shutdown Metric for type: CHANNEL, name: c2. channel.start.time ==
1592031500358
2020-06-13 15:05:55,118 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:161)]
Shutdown Metric for type: CHANNEL, name: c2. channel.stop.time == 1592031955117
2020-06-13 15:05:55,118 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.capacity == 1000
2020-06-13 15:05:55,118 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.current.size == 0
2020-06-13 15:05:55,118 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.put.attempt == 3
2020-06-13 15:05:55,118 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.put.success == 3
2020-06-13 15:05:55,119 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.take.attempt == 63
2020-06-13 15:05:55,119 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: CHANNEL, name: c2. channel.event.take.success == 3
2020-06-13 15:05:55,119 (agent-shutdown-hook) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider.stop(PollingPropertiesFileConfigurationProvider.java:84)]
Configuration provider stopping
2020-06-13 15:05:55,119 (agent-shutdown-hook) [INFO -
org.apache.flume.source.AvroSource.stop(AvroSource.java:301)] Avro source r1
stopping: Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4142 }
2020-06-13 15:05:55,138 (New I/O worker #1) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x6df490f3, /192.168.1.207:44850 :> /192.168.1.207:4142] DISCONNECTED
2020-06-13 15:05:55,139 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x6df490f3, /192.168.1.207:44850 :> /192.168.1.207:4142] UNBOUND
2020-06-13 15:05:55,139 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x6df490f3, /192.168.1.207:44850 :> /192.168.1.207:4142] CLOSED
2020-06-13 15:05:55,139 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.channelClosed(NettyServer.java:209)]
Connection to /192.168.1.207:44850 disconnected.
2020-06-13 15:05:55,148 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:149)]
Component type: SOURCE, name: r1 stopped
2020-06-13 15:05:55,148 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:155)]
Shutdown Metric for type: SOURCE, name: r1. source.start.time == 1592031500648
2020-06-13 15:05:55,148 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:161)]
Shutdown Metric for type: SOURCE, name: r1. source.stop.time == 1592031955148
2020-06-13 15:05:55,148 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append-batch.accepted == 1
2020-06-13 15:05:55,148 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append-batch.received == 1
2020-06-13 15:05:55,149 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append.accepted == 0
2020-06-13 15:05:55,149 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.append.received == 0
2020-06-13 15:05:55,149 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.events.accepted == 3
2020-06-13 15:05:55,149 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.events.received == 3
2020-06-13 15:05:55,149 (agent-shutdown-hook) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.stop(MonitoredCounterGroup.java:177)]
Shutdown Metric for type: SOURCE, name: r1. src.open-connection.count == 1
2020-06-13 15:05:55,250 (agent-shutdown-hook) [INFO -
org.apache.flume.source.AvroSource.stop(AvroSource.java:323)] Avro source r1
stopped. Metrics: SOURCE:r1{src.events.accepted=3, src.events.received=3,
src.append.accepted=0, src.append-batch.accepted=1,
src.open-connection.count=1, src.append-batch.received=1,
src.append.received=0}
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a2 -c conf/ -f job/group2/flume-netcat-console1.conf -Dflume.root.logger=INFO,console
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m
-Dflume.root.logger=INFO,console -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a2 -f
job/group2/flume-netcat-console1.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
2020-06-13 14:59:26,358 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider.start(PollingPropertiesFileConfigurationProvider.java:62)]
Configuration provider starting
2020-06-13 14:59:26,363 (conf-file-poller-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:134)]
Reloading configuration file:job/group2/flume-netcat-console1.conf
2020-06-13 14:59:26,372 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:59:26,373 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:930)]
Added sinks: k1 Agent: a2
2020-06-13 14:59:26,373 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 14:59:26,382 (conf-file-poller-0) [INFO - org.apache.flume.conf.FlumeConfiguration.validateConfiguration(FlumeConfiguration.java:140)]
Post-validation flume configuration contains configuration for agents: [a2]
2020-06-13 14:59:26,382 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:147)]
Creating channels
2020-06-13 14:59:26,393 (conf-file-poller-0) [INFO -
org.apache.flume.channel.DefaultChannelFactory.create(DefaultChannelFactory.java:42)]
Creating instance of channel c1 type memory
2020-06-13 14:59:26,398 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:201)]
Created channel c1
2020-06-13 14:59:26,398 (conf-file-poller-0) [INFO - org.apache.flume.source.DefaultSourceFactory.create(DefaultSourceFactory.java:41)]
Creating instance of source r1, type avro
2020-06-13 14:59:26,415 (conf-file-poller-0) [INFO -
org.apache.flume.sink.DefaultSinkFactory.create(DefaultSinkFactory.java:42)]
Creating instance of sink: k1, type: logger
2020-06-13 14:59:26,418 (conf-file-poller-0) [INFO - org.apache.flume.node.AbstractConfigurationProvider.getConfiguration(AbstractConfigurationProvider.java:116)]
Channel c1 connected to [r1, k1]
2020-06-13 14:59:26,425 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:137)]
Starting new configuration:{ sourceRunners:{r1=EventDrivenSourceRunner: {
source:Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4141 } }}
sinkRunners:{k1=SinkRunner: {
policy:org.apache.flume.sink.DefaultSinkProcessor@65fbadc2 counterGroup:{
name:null counters:{} } }}
channels:{c1=org.apache.flume.channel.MemoryChannel{name: c1}} }
2020-06-13 14:59:26,433 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:144)]
Starting Channel c1
2020-06-13 14:59:26,477 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: CHANNEL, name: c1: Successfully registered
new MBean.
2020-06-13 14:59:26,477 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: CHANNEL, name: c1 started
2020-06-13 14:59:26,481 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:171)]
Starting Sink k1
2020-06-13 14:59:26,482 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:182)]
Starting Source r1
2020-06-13 14:59:26,482 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:234)] Starting Avro
source r1: { bindAddress: hadoop207.cevent.com, port: 4141 }...
2020-06-13 14:59:26,778 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: SOURCE, name: r1: Successfully registered
new MBean.
2020-06-13 14:59:26,778 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: SOURCE, name: r1 started
2020-06-13 14:59:26,779 (lifecycleSupervisor-1-1) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:259)] Avro source r1
started.
2020-06-13 14:59:41,189 (New I/O server boss #9) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x76f536db, /192.168.1.207:55197 => /192.168.1.207:4141] OPEN
2020-06-13 14:59:41,190 (New I/O worker #1) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x76f536db, /192.168.1.207:55197 => /192.168.1.207:4141] BOUND:
/192.168.1.207:4141
2020-06-13 14:59:41,191 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x76f536db, /192.168.1.207:55197 => /192.168.1.207:4141] CONNECTED:
/192.168.1.207:55197
2020-06-13 15:00:29,433 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 35    5 }
Start the source agent (a1)
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a1 -c conf/ -f job/group2/flume-netcat-source.conf
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java
-Xmx20m -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a1 -f
job/group2/flume-netcat-source.conf
SLF4J: Class path contains multiple SLF4J
bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings
for an explanation.
[cevent@hadoop207 flumes]$ nc localhost 44444
1
OK
2
OK
3
OK
5
OK
(1) Flume1 (flume-aggregate-1) monitors the file /opt/module/datas/group.log
(2) Flume2 (flume-aggregate-2) monitors a port for a data stream
(3) Flume1 and Flume2 send their data to Flume3 (flume-aggregate-3), which prints the final data to the console (startup order sketched just below)
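Because Flume3 hosts the avro server (port 4141) that both upstream agents connect to, it must be started first. A sketch of the startup order used later in this section, one terminal per agent (commands mirror the session):
# Terminal 1: the aggregator (avro server on 4141) -- start this first
bin/flume-ng agent -n a3 -c conf/ -f job/group3/flume-aggregate-3.conf -Dflume.root.logger=INFO,console
# Terminal 2: the netcat-source agent
bin/flume-ng agent -n a2 -c conf/ -f job/group3/flume-aggregate-2.conf
# Terminal 3: the exec-source agent tailing group.log
bin/flume-ng agent -n a1 -c conf/ -f job/group3/flume-aggregate-1.conf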
Create the log file to be monitored
[cevent@hadoop207 ~]$ cd /opt/module/datas/
[cevent@hadoop207 datas]$ ll
total 383048
-rw-rw-r--. 1 cevent cevent       147 May 10 13:46 510test.txt
-rw-rw-r--. 1 cevent cevent 120734753 Jun  8 13:31 bigtable
-rw-rw-r--. 1 cevent cevent       266 May 17 13:52 business.txt
-rw-rw-r--. 1 cevent cevent       129 May 17 13:52 constellation.txt
-rw-rw-r--. 1 cevent cevent        71 May 17 13:52 dept.txt
-rw-rw-r--. 1 cevent cevent        78 May 17 13:52 emp_sex.txt
drwxrwxr-x. 3 cevent cevent      4096 Jun  5 14:17 emp.txt
drwxrwxr-x. 4 cevent cevent      4096 May 22 13:32 export
drwxrwxr-x. 2 cevent cevent      4096 Jun 13 13:38 flumes
-rw-rw-r--. 1 cevent cevent      2794 Jun  4 22:32 hadoop_hive_userdefinedfunc_plugin-1.0-SNAPSHOT.jar
-rw-rw-r--. 1 cevent cevent        37 May 17 13:52 location.txt
-rw-rw-r--. 1 cevent cevent  19014993 May 17 13:52 log.data
-rw-rw-r--. 1 cevent cevent       136 May 17 13:52 movie.txt
-rw-rw-r--. 1 cevent cevent 118645854 Jun  9 13:20 nullid
-rw-rw-r--. 1 cevent cevent 121734744 Jun  9 13:16 ori
-rw-rw-r--. 1 cevent cevent       213 May 17 13:52 score.txt
-rw-rw-r--. 1 cevent cevent  12018355 Jun  8 13:31 smalltable
drwxrwxr-x. 3 cevent cevent      4096 Jun  5 14:18 snappy-distribute-result
-rw-rw-r--. 1 cevent cevent       165 May 17 13:52 student.txt
drwxrwxr-x. 2 cevent cevent      4096 Jun  9 21:27 user
drwxrwxr-x. 2 cevent cevent      4096 Jun  9 21:27 video
-rw-rw-r--. 1 cevent cevent      4874 Jun 10 13:51 video_etl200609-1.0.jar
-rw-rw-r--. 1 cevent cevent       301 May 17 13:52 数据说明.txt
[cevent@hadoop207 datas]$ vim group.log
this is hadoop207.cevent.com send sink
file to hadoop cluster server by cevent!
[cevent@hadoop207 apache-flume-1.7.0]$ mkdir job/group3
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group3/flume-aggregate-1.conf 配置flume1 (configure flume1)
## 1. flume1 (flume-aggregate-1) monitors the file /opt/module/datas/group.log
# Name the components on this agent
a1.sources = r1
a1.sinks = k1
a1.channels = c1
# Describe/configure the source: tail the monitored file on hadoop207
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /opt/module/datas/group.log
a1.sources.r1.shell = /bin/bash -c
# Describe the sink: send to hadoop207 (in a real cluster this could be a different server)
a1.sinks.k1.type = avro
a1.sinks.k1.hostname = hadoop207.cevent.com
a1.sinks.k1.port = 4141
# Describe the channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
~
"job/group3/flume-aggregate-1.conf"
[新] 24L, 724C 已写入
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group3/flume-aggregate-2.conf 配置flume2 (configure flume2)
## 2. flume2 (flume-aggregate-2) monitors a port for incoming data
# Name the components on this agent
a2.sources = r1
a2.sinks = k1
a2.channels = c1
# Describe/configure the source: netcat source bound on this host
a2.sources.r1.type = netcat
a2.sources.r1.bind = hadoop207.cevent.com
a2.sources.r1.port = 44444
# Describe the sink: send to hadoop207 (in a cluster this could target another machine)
a2.sinks.k1.type = avro
a2.sinks.k1.hostname = hadoop207.cevent.com
a2.sinks.k1.port = 4141
# Use a channel which buffers events in memory
a2.channels.c1.type = memory
a2.channels.c1.capacity = 1000
a2.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a2.sources.r1.channels = c1
a2.sinks.k1.channel = c1
~
"job/group3/flume-aggregate-2.conf"
[新] 24L, 732C 已写入
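Once all three agents are up, each upstream leg maps to one input. The test commands used at the end of this section are, in sketch form (the second payload is illustrative):
# Leg 1 (a1, exec source): append a line to the monitored file
echo cevent619 >> /opt/module/datas/group.log
# Leg 2 (a2, netcat source): send a line to port 44444
echo hello-cevent | nc hadoop207.cevent.com 44444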
[cevent@hadoop207 apache-flume-1.7.0]$ vim job/group3/flume-aggregate-3.conf 配置flume3 (configure flume3)
## 3. Flume1 and Flume2 send their data to flume3 (flume-aggregate-3), which prints it to the console
# Name the components on this agent
a3.sources = r1
a3.sinks = k1
a3.channels = c1
# Describe/configure the source
a3.sources.r1.type = avro
a3.sources.r1.bind = hadoop207.cevent.com
a3.sources.r1.port = 4141
# Describe the sink
a3.sinks.k1.type = logger
# Describe the channel
a3.channels.c1.type = memory
a3.channels.c1.capacity = 1000
a3.channels.c1.transactionCapacity = 100
# Bind the source and sink to the channel
a3.sources.r1.channels = c1
a3.sinks.k1.channel = c1
~
"job/group3/flume-aggregate-3.conf"
[新] 23L, 596C 已写入
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a3 -c conf/ -f job/group3/flume-aggregate-3.conf -Dflume.root.logger=INFO,console 启动flume3 (start flume3)
Info: Sourcing environment configuration script
/opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found via
(/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via (/opt/module/hive-1.2.1) for
Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java -Xmx20m
-Dflume.root.logger=INFO,console -cp
'/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a3 -f job/group3/flume-aggregate-3.conf
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an
explanation.
2020-06-13 21:06:05,008 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider.start(PollingPropertiesFileConfigurationProvider.java:62)]
Configuration provider starting
2020-06-13 21:06:05,014 (conf-file-poller-0) [INFO -
org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:134)]
Reloading configuration file:job/group3/flume-aggregate-3.conf
2020-06-13 21:06:05,024 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 21:06:05,025 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:1016)]
Processing:k1
2020-06-13 21:06:05,025 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration$AgentConfiguration.addProperty(FlumeConfiguration.java:930)]
Added sinks: k1 Agent: a3
2020-06-13 21:06:05,046 (conf-file-poller-0) [INFO -
org.apache.flume.conf.FlumeConfiguration.validateConfiguration(FlumeConfiguration.java:140)]
Post-validation flume configuration contains configuration for agents: [a3]
2020-06-13 21:06:05,046 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:147)]
Creating channels
2020-06-13 21:06:05,056 (conf-file-poller-0) [INFO - org.apache.flume.channel.DefaultChannelFactory.create(DefaultChannelFactory.java:42)]
Creating instance of channel c1 type memory
2020-06-13 21:06:05,061 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.loadChannels(AbstractConfigurationProvider.java:201)]
Created channel c1
2020-06-13 21:06:05,061 (conf-file-poller-0) [INFO -
org.apache.flume.source.DefaultSourceFactory.create(DefaultSourceFactory.java:41)]
Creating instance of source r1, type avro
2020-06-13 21:06:05,081 (conf-file-poller-0) [INFO -
org.apache.flume.sink.DefaultSinkFactory.create(DefaultSinkFactory.java:42)]
Creating instance of sink: k1, type: logger
2020-06-13 21:06:05,084 (conf-file-poller-0) [INFO -
org.apache.flume.node.AbstractConfigurationProvider.getConfiguration(AbstractConfigurationProvider.java:116)]
Channel c1 connected to [r1, k1]
2020-06-13 21:06:05,093 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:137)]
Starting new configuration:{ sourceRunners:{r1=EventDrivenSourceRunner: {
source:Avro source r1: { bindAddress: hadoop207.cevent.com, port: 4141 } }}
sinkRunners:{k1=SinkRunner: {
policy:org.apache.flume.sink.DefaultSinkProcessor@74e2ad16 counterGroup:{
name:null counters:{} } }} channels:{c1=org.apache.flume.channel.MemoryChannel{name:
c1}} }
2020-06-13 21:06:05,105 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:144)]
Starting Channel c1
2020-06-13 21:06:05,193 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: CHANNEL, name: c1: Successfully registered
new MBean.
2020-06-13 21:06:05,194 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: CHANNEL, name: c1 started
2020-06-13 21:06:05,198 (conf-file-poller-0) [INFO - org.apache.flume.node.Application.startAllComponents(Application.java:171)]
Starting Sink k1
2020-06-13 21:06:05,199 (conf-file-poller-0) [INFO -
org.apache.flume.node.Application.startAllComponents(Application.java:182)]
Starting Source r1
2020-06-13 21:06:05,200 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:234)] Starting Avro
source r1: { bindAddress: hadoop207.cevent.com, port: 4141 }...
2020-06-13 21:06:05,620 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)]
Monitored counter group for type: SOURCE, name: r1: Successfully registered
new MBean.
2020-06-13 21:06:05,620 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)]
Component type: SOURCE, name: r1 started
2020-06-13 21:06:05,621 (lifecycleSupervisor-1-0) [INFO -
org.apache.flume.source.AvroSource.start(AvroSource.java:259)] Avro source r1
started.
2020-06-13 21:07:51,595 (New I/O server boss #9) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x55e560f7, /192.168.1.207:43222 => /192.168.1.207:4141] OPEN
2020-06-13 21:07:51,596 (New I/O worker #1) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x55e560f7, /192.168.1.207:43222 => /192.168.1.207:4141] BOUND:
/192.168.1.207:4141
2020-06-13 21:07:51,596 (New I/O worker #1) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x55e560f7, /192.168.1.207:43222 => /192.168.1.207:4141] CONNECTED:
/192.168.1.207:43222
2020-06-13 21:08:53,449 (New I/O server boss #9) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x088d73ec, /192.168.1.207:43223 => /192.168.1.207:4141] OPEN
2020-06-13 21:08:53,450 (New I/O worker #2) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x088d73ec, /192.168.1.207:43223 => /192.168.1.207:4141] BOUND:
/192.168.1.207:4141
2020-06-13 21:08:53,450 (New I/O worker #2) [INFO -
org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)]
[id: 0x088d73ec, /192.168.1.207:43223 => /192.168.1.207:4141] CONNECTED:
/192.168.1.207:43223
2020-06-13 21:08:59,416 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 74 68 69 73 20 69 73 20 68 61 64 6F 6F 70 32 30 this is hadoop20 }
2020-06-13 21:16:01,492 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 32 32 32 32 32 22222 }
2020-06-13 21:16:39,502 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 63 65 76 65 6E 74 36 31 39 cevent619 }
2020-06-13 21:17:17,511 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 70 61 70 61 5B 08 70 69 70 69 1B 5B 44 1B 5B 44 papa[.pipi.[D.[D }
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a2 -c conf/ -f job/group3/flume-aggregate-2.conf
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found via
(/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
+ exec /opt/module/jdk1.7.0_79/bin/java
-Xmx20m -cp '/opt/module/apache-flume-1.7.0/conf:/opt/module/apache-flume-1.7.0/lib/*:/opt/module/hadoop-2.7.2/etc/hadoop:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/common/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/hdfs/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/yarn/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/lib/*:/opt/module/hadoop-2.7.2/share/hadoop/mapreduce/*:/opt/module/hadoop-2.7.2/contrib/capacity-scheduler/*.jar:/opt/module/hive-1.2.1/lib/*'
-Djava.library.path=:/opt/module/hadoop-2.7.2/lib/native
org.apache.flume.node.Application -n a2 -f job/group3/flume-aggregate-2.conf
SLF4J: Class path contains multiple SLF4J
bindings.
SLF4J: Found binding in
[jar:file:/opt/module/apache-flume-1.7.0/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in
[jar:file:/opt/module/hadoop-2.7.2/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See
http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
[cevent@hadoop207 apache-flume-1.7.0]$ bin/flume-ng agent -n a1 -c conf/ -f job/group3/flume-aggregate-1.conf -Dflume.root.logger=INFO,console 启动flume1 (start flume1)
Info: Sourcing environment configuration
script /opt/module/apache-flume-1.7.0/conf/flume-env.sh
Info: Including Hadoop libraries found
via (/opt/module/hadoop-2.7.2/bin/hadoop) for HDFS access
Info: Including Hive libraries found via
(/opt/module/hive-1.2.1) for Hive access
Sync the flume directory across the cluster with xsync
[cevent@hadoop207 module]$ xsync apache-flume-1.7.0/
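xsync here is the custom rsync-based distribution script used throughout this environment, not a stock tool. A minimal sketch of what such a wrapper typically looks like, with hypothetical peer hosts hadoop208/hadoop209 (adjust to your own cluster):
#!/bin/bash
# Hypothetical xsync: replicate a path to the other cluster nodes via rsync
if [ $# -lt 1 ]; then echo "usage: xsync <path>"; exit 1; fi
for host in hadoop208 hadoop209; do
  echo "==== syncing $1 to $host ===="
  # Mirror the path at the same absolute location on the peer
  rsync -av "$1" "$host:$(dirname "$(readlink -f "$1")")"
done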
[cevent@hadoop207 hadoop-2.7.2]$ echo cevent619 >> /opt/module/datas/group.log
[cevent@hadoop207 hadoop-2.7.2]$ nc hadoop207.cevent.com 44444
papa[^Hpipi^[[D^[[D^[[D
OK
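The odd body logged by a3 above (70 61 70 61 5B 08 ... papa[.pipi.[D.[D) is expected: backspace and arrow keys typed inside an interactive nc session are transmitted as raw control and escape bytes (08, 1B 5B 44) rather than interpreted as editing. Piping the text in avoids these artifacts; for example:
# Send a clean line to the netcat source, with no interactive editing bytes
echo "papa pipi" | nc hadoop207.cevent.com 44444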