Flume 1.5

Writing to HDFS

# Agent "a1": an in-memory channel c1 buffers events between the source and sink.
a1.channels.c1.type = memory

# Define an Avro source called r1 on a1 and tell it
# to bind to 0.0.0.0:41414. Connect it to channel c1.
a1.sources.r1.channels = c1
a1.sources.r1.type = avro
a1.sources.r1.bind = 0.0.0.0
a1.sources.r1.port = 41414
# HDFS sink k1, fed from channel c1: writes events to the NameNode at
# 192.168.0.200:9000 under /user/hadoop/input/web.
a1.sinks.k1.type = hdfs
a1.sinks.k1.channel = c1
a1.sinks.k1.hdfs.path = hdfs://192.168.0.200:9000/user/hadoop/input/web
# Output files are named access-<year>-<month>-<day>*.log. The date escapes
# in the prefix need an event timestamp, supplied from the local host clock
# because useLocalTimeStamp is enabled below.
a1.sinks.k1.hdfs.filePrefix = access-%Y-%m-%d
a1.sinks.k1.hdfs.fileSuffix = .log
a1.sinks.k1.hdfs.useLocalTimeStamp = true
# DataStream + Text writes uncompressed plain text (not SequenceFiles).
a1.sinks.k1.hdfs.fileType = DataStream
a1.sinks.k1.hdfs.writeFormat = Text
# All roll triggers set to 0, i.e. disabled: never roll the output file
# by size, by elapsed time, or by event count.
a1.sinks.k1.hdfs.rollSize = 0
a1.sinks.k1.hdfs.rollInterval= 0
a1.sinks.k1.hdfs.rollCount = 0
# Close files that have been inactive longer than this timeout.
# NOTE(review): per the Flume HDFS sink docs, 0 disables the idle close
# as well, so with every roll trigger off the current file may stay open
# as a .tmp indefinitely -- confirm this is intended.
a1.sinks.k1.hdfs.idleTimeout = 0

#
# Finally, now that we've defined all of our components, tell
# a1 which ones we want to activate.
a1.channels = c1
a1.sources = r1
a1.sinks = k1

 

 bin/flume-ng  agent -c conf/ -f conf/server.conf -n a1 -Dflume.root.logger=DEBUG,console

 

郑重声明:本站内容如果来自互联网及其他传播媒体,其版权均属原媒体及文章作者所有。转载目的在于传递更多信息及用于网络分享,并不代表本站赞同其观点和对其真实性负责,也不构成任何其他建议。