a1.sources=r1
a1.channels=c1
a1.sinks=k1
a1.sources.r1.type=spooldir
# NOTE(review): spoolDir must be a DIRECTORY that files are dropped into; this
# value looks like a single .log file path — confirm it is actually a directory.
a1.sources.r1.spoolDir=/home/training/Desktop/20141017.log
a1.sources.r1.interceptors=i1
a1.sources.r1.interceptors.i1.type=org.apache.flume.sink.solr.morphline.MorphlineInterceptor$Builder
a1.sources.r1.interceptors.i1.morphlineFile=/home/training/Desktop/fmword2
a1.sources.r1.interceptors.i1.morphlineId=morphline1
a1.channels.c1.type=org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c1.capacity=10000
a1.channels.c1.transactionCapacity=1000
a1.channels.c1.brokerList=localhost:9092
a1.channels.c1.zookeeperConnect=localhost:2181
a1.channels.c1.topic=zhangsy
a1.channels.c1.kafka.consumer.group.id=flume-consumer
a1.channels.c1.parseAsFlumeEvent=true
a1.sinks.k1.type=hdfs
a1.sinks.k1.hdfs.path=/KH/word/RAW
a1.sinks.k1.hdfs.fileType=DataStream
a1.sinks.k1.hdfs.fileSuffix=.avro
#a1.sinks.k1.serializer=org.apache.flume.sink.hdfs.AvroEventSerializer$Builder
a1.sinks.k1.serializer=org.apache.flume.sink.hdfs.AvroEventSerializer$Builder
a1.sinks.k1.serializer.compressionCodec=snappy
a1.sources.r1.channels=c1
a1.sinks.k1.channel=c1
{
id : morphline1
importCommands : ["org.kitesdk.**"]
commands : [
{
readLine {
charset : UTF-8
}
}
{
setValues{
flume.avro.schema.url : "file:/home/training/Desktop/av"
}
}
{
toAvro{
schemaFile : /home/training/Desktop/av
}
}
{
writeAvroToByteArray { format : containerlessBinary}
}
]
}
{
"type" : "record",
"name" : "log",
"fields" : [
{"name":"message","type":"string"}
]
}
|