# Flume agent configuration: Kafka source -> memory channel -> HDFS sink

# Agent component naming: one Kafka source, one memory channel, one HDFS sink.
a1.sources = r1
a1.sinks = k1
a1.channels = c1


# Source: consumes events from Kafka.
# NOTE: zookeeperConnect/topic/groupId is the pre-1.7 Kafka source style;
# Flume >= 1.7 prefers kafka.bootstrap.servers / kafka.topics.
a1.sources.r1.type = org.apache.flume.source.kafka.KafkaSource
a1.sources.r1.zookeeperConnect = hadoop1:2181,hadoop2:2181,hadoop3:2181
a1.sources.r1.topic = fenji-data
a1.sources.r1.groupId = flume4097
# FIX: was "mem_channel", a channel that is never declared (only c1 exists),
# which would leave the source unwired; must match the declared channel.
a1.sources.r1.channels = c1
# Max events taken from Kafka per batch; must not exceed the channel's
# transactionCapacity (20000).
a1.sources.r1.batchSize = 10000


# Channel: in-memory buffer between the Kafka source and the HDFS sink.
# Events are lost if the agent dies — acceptable only for non-critical data.
a1.channels.c1.type = memory
# Max events held in the channel.
a1.channels.c1.capacity = 20000
# Max events per transaction; must be >= source/sink batch sizes (10000/5000).
a1.channels.c1.transactionCapacity = 20000
# Percent of byteCapacity reserved as header/overhead buffer.
a1.channels.c1.byteCapacityBufferPercentage = 40
# Max bytes of event bodies in the channel (~8 GB).
a1.channels.c1.byteCapacity = 8000000000

# Sink: writes events to HDFS, one directory per day (%Y%m%d),
# files prefixed with the hour (%H) and rolled once per hour.
a1.sinks.k1.type = hdfs
a1.sinks.k1.hdfs.path = hdfs://172.21.16.28:8020/flume/fenji-data/%Y%m%d
a1.sinks.k1.hdfs.filePrefix = %H
a1.sinks.k1.hdfs.fileSuffix = .log
# Roll by time only (3600 s = 1 h); size- and count-based rolling disabled.
a1.sinks.k1.hdfs.rollInterval = 3600
a1.sinks.k1.hdfs.rollSize = 0
a1.sinks.k1.hdfs.rollCount = 0
# Events flushed to HDFS per batch; must not exceed the channel's
# transactionCapacity (20000).
a1.sinks.k1.hdfs.batchSize = 5000
# Use the agent's local clock for the %Y%m%d/%H escapes, since Kafka
# events may not carry a "timestamp" header.
a1.sinks.k1.hdfs.useLocalTimeStamp = true
# FIX: key was "a1.sinks.k1.writeFormat" — missing the "hdfs." prefix, so
# Flume silently ignored it and the default (Writable) applied instead of Text.
a1.sinks.k1.hdfs.writeFormat = Text
# Plain text output stream (no SequenceFile wrapping).
a1.sinks.k1.hdfs.fileType = DataStream
# Leading dot hides in-progress files from downstream readers (Hive, Spark).
a1.sinks.k1.hdfs.inUsePrefix = .
# FIX: key was "a1.sinks.k1.threadsPoolSize" — missing the "hdfs." prefix,
# so the sink fell back to its default thread pool size (10).
a1.sinks.k1.hdfs.threadsPoolSize = 300

# Wiring: bind the source and sink to channel c1.
# Note: sources take "channels" (plural), sinks take "channel" (singular).
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1