# Flume agent config: tail a local log file and ship matching events to a Kafka cluster.
# Create this file in flume's conf directory:
#   [root@localhost flume]# vim conf/file-monitor.conf

# Declare the agent's components
a1.sources = r1
a1.sinks = k1
a1.channels = c1

# Source: exec-tail the application log
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /data/xx.log
a1.sources.r1.channels = c1

# Interceptor: regex filter — only events matching the regex are passed to the channel.
# NOTE: properties files do not support inline comments; a trailing "# ..." after the
# regex value would become part of the pattern, so comments must sit on their own line.
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = regex_filter
#a1.sources.r1.interceptors.i1.regex = (Parsing events)(.*)(END)
a1.sources.r1.interceptors.i1.regex = (aaaa)(.*)

# Channel: where events are staged (file-backed here; memory, JDBC, etc. also possible)
a1.channels.c1.type = file
a1.channels.c1.checkpointDir = /data/flume/chk
a1.channels.c1.dataDirs = /data/flume/data

# Sink: deliver events to Kafka
#a1.sinks.k1.type = logger
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
# NOTE(review): brokerList/topic/serializer.class are the pre-Flume-1.7 KafkaSink
# property names; Flume >= 1.7 expects kafka.bootstrap.servers / kafka.topic —
# confirm against the deployed Flume version before changing.
a1.sinks.k1.brokerList = 192.168.41.47:9092,192.168.41.127:9092,192.168.41.86:9092
a1.sinks.k1.topic = mytopic
#a1.sinks.k1.requiredAcks = 1
#a1.sinks.k1.batchSize = 20
a1.sinks.k1.serializer.class = kafka.serializer.StringEncoder
a1.sinks.k1.channel = c1

# Start the agent (runs in the background, all output captured in nohup.out):
#   [root@localhost flume]# nohup bin/flume-ng agent -n a1 -c conf/ -f conf/file-monitor.conf -Dflume.root.logger=INFO,console > nohup.out 2>&1 &