Flume Agent Configuration: One Source, Two Channels, Two Sinks

One source replicates events into two channels and two sinks, so each sink outputs identical content: one copy goes to Kafka, the other to HDFS.

flume_kafka_hdfs.sources = r1
flume_kafka_hdfs.channels = c1 c2
flume_kafka_hdfs.sinks = k1 k2

# Source: spooling-directory source that watches a directory for new files
# and replicates every event to both channels
flume_kafka_hdfs.sources.r1.type = spooldir
flume_kafka_hdfs.sources.r1.spoolDir = /home/sivan/flume/spooldir
flume_kafka_hdfs.sources.r1.selector.type = replicating
flume_kafka_hdfs.sources.r1.channels = c1 c2
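
To test the source, drop a file into the spooled directory; the spooling-directory source ingests it line by line and renames it with a .COMPLETED suffix once it has been consumed. The file name below is just an example:

echo "hello flume" > /home/sivan/flume/spooldir/test_$(date +%s).log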

# Channel 1: in-memory channel feeding the Kafka sink
flume_kafka_hdfs.channels.c1.type = memory
flume_kafka_hdfs.channels.c1.capacity = 10000

# Channel 2: in-memory channel feeding the HDFS sink
flume_kafka_hdfs.channels.c2.type = memory
flume_kafka_hdfs.channels.c2.capacity = 10000
# transactionCapacity must be at least the HDFS sink's batchSize (1000);
# the memory channel default of 100 would make the sink's take transaction fail
flume_kafka_hdfs.channels.c2.transactionCapacity = 1000

# Alternative: a durable file channel for c2 (survives agent restarts)
#flume_kafka_hdfs.channels.c2.type = file
#flume_kafka_hdfs.channels.c2.checkpointDir = /home/sivan/flume/checkpoint
#flume_kafka_hdfs.channels.c2.dataDirs = /home/sivan/flume/data

# Sink 1: reads from channel c1 and publishes events to Kafka
flume_kafka_hdfs.sinks.k1.channel = c1
flume_kafka_hdfs.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
flume_kafka_hdfs.sinks.k1.kafka.bootstrap.servers = sivan:9092
flume_kafka_hdfs.sinks.k1.kafka.topic = test
# flumeBatchSize and kafka.producer.acks are the property names used by the
# Flume 1.7+ Kafka sink (kafka.batchSize / kafka.producer.requiredAcks are not recognized)
flume_kafka_hdfs.sinks.k1.flumeBatchSize = 20
flume_kafka_hdfs.sinks.k1.kafka.producer.acks = 1
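
Once the agent is running, the Kafka side can be verified with the console consumer that ships with Kafka (this assumes the Kafka CLI scripts are on the PATH of the broker host sivan):

kafka-console-consumer.sh --bootstrap-server sivan:9092 --topic test --from-beginning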

# Sink 2: reads from channel c2 and writes events to the HDFS cluster
flume_kafka_hdfs.sinks.k2.channel = c2
flume_kafka_hdfs.sinks.k2.type = hdfs
flume_kafka_hdfs.sinks.k2.hdfs.path = hdfs://172.19.225.156:9000/user/sivan/test
flume_kafka_hdfs.sinks.k2.hdfs.fileType = DataStream
flume_kafka_hdfs.sinks.k2.hdfs.writeFormat = Text
flume_kafka_hdfs.sinks.k2.hdfs.filePrefix = flumeHdfs
flume_kafka_hdfs.sinks.k2.hdfs.batchSize = 1000
# roll a new file every 10 KB or every second, whichever comes first; rolling by event count is disabled
flume_kafka_hdfs.sinks.k2.hdfs.rollSize = 10240
flume_kafka_hdfs.sinks.k2.hdfs.rollCount = 0
flume_kafka_hdfs.sinks.k2.hdfs.rollInterval = 1
flume_kafka_hdfs.sinks.k2.hdfs.useLocalTimeStamp = true
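
A sketch of how the agent could be started and the HDFS output checked. The --name argument must match the property prefix flume_kafka_hdfs; the config file name and conf directory below are assumptions:

flume-ng agent --conf conf --conf-file conf/flume_kafka_hdfs.conf --name flume_kafka_hdfs -Dflume.root.logger=INFO,console

hdfs dfs -ls hdfs://172.19.225.156:9000/user/sivan/test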

Reprinted from blog.csdn.net/ENUEI/article/details/85318309