Flume fan-in


Copy the flume directory to the other two machines:

[root@hadoop01 src]# scp -r flume/ root@hadoop02:/usr/local/src
[root@hadoop01 src]# scp -r flume/ root@hadoop03:/usr/local/src

Fan-in topology: hadoop01 and hadoop02 each accept events over HTTP and forward them through an avro sink to hadoop03, which collects both streams and prints them with a logger sink.

hadoop01


a1.sources  =  r1
a1.sinks  =  k1
a1.channels  =  c1

a1.sources.r1.type  = http
a1.sources.r1.bind = 0.0.0.0
a1.sources.r1.port = 22222 

a1.sinks.k1.type  =  avro
a1.sinks.k1.hostname = hadoop03
a1.sinks.k1.port = 22222

a1.channels.c1.type  =  memory
a1.channels.c1.capacity  =  1000
a1.channels.c1.transactionCapacity  =  100

a1.sources.r1.channels  =  c1
a1.sinks.k1.channel  =  c1
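
To run this agent (a minimal sketch; the file name fan-in.conf and the flume install directory are assumptions, adjust them to your layout), start the collector on hadoop03 first (see below), then:

[root@hadoop01 flume]# bin/flume-ng agent -n a1 -c conf -f conf/fan-in.conf -Dflume.root.logger=INFO,console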

hadoop02


a1.sources  =  r1
a1.sinks  =  k1
a1.channels  =  c1

a1.sources.r1.type  =  http
a1.sources.r1.bind = 0.0.0.0
a1.sources.r1.port = 22222 

a1.sinks.k1.type  =  avro
a1.sinks.k1.hostname = hadoop03
a1.sinks.k1.port = 22222

a1.channels.c1.type  =  memory
a1.channels.c1.capacity  =  1000
a1.channels.c1.transactionCapacity  =  100

a1.sources.r1.channels  =  c1
a1.sinks.k1.channel  =  c1
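
hadoop02 is started the same way (again assuming the hypothetical file name fan-in.conf):

[root@hadoop02 flume]# bin/flume-ng agent -n a1 -c conf -f conf/fan-in.conf -Dflume.root.logger=INFO,console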

hadoop03

a1.sources  =  r1
a1.sinks  =  k1
a1.channels  =  c1

a1.sources.r1.type  =  avro
a1.sources.r1.bind = 0.0.0.0
a1.sources.r1.port = 22222 

a1.sinks.k1.type  =  logger

a1.channels.c1.type  =  memory
a1.channels.c1.capacity  =  1000
a1.channels.c1.transactionCapacity  =  100

a1.sources.r1.channels  =  c1
a1.sinks.k1.channel  =  c1
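
hadoop03 is the fan-in collector, so start its agent before the other two (same assumed file name):

[root@hadoop03 flume]# bin/flume-ng agent -n a1 -c conf -f conf/fan-in.conf -Dflume.root.logger=INFO,console

With all three agents running, events can be posted to either HTTP source for a quick test; the http source's default JSONHandler expects a JSON array of events, and both streams should show up in hadoop03's logger output:

[root@hadoop01 ~]# curl -X POST -d '[{"headers":{"host":"hadoop01"},"body":"hello from hadoop01"}]' http://hadoop01:22222
[root@hadoop02 ~]# curl -X POST -d '[{"headers":{"host":"hadoop02"},"body":"hello from hadoop02"}]' http://hadoop02:22222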
