I configured log4j in hadoop/etc/hadoop, but it only created the log file; no data is ever written to it.
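For reference, the kind of log4j.properties entries this setup would need looks roughly like the following. This is a minimal sketch: the appender name zzcfile and the file path are illustrative placeholders, and only the logger name zzc comes from the code below.

# Sketch: route the named logger "zzc" to a file at DEBUG level.
# Appender name (zzcfile) and path are illustrative, not from the original post.
log4j.logger.zzc=DEBUG,zzcfile
log4j.appender.zzcfile=org.apache.log4j.FileAppender
log4j.appender.zzcfile.File=/tmp/zzc.log
log4j.appender.zzcfile.layout=org.apache.log4j.PatternLayout
log4j.appender.zzcfile.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

Note that a log4j 1.x FileAppender opens (and thus creates) its target file as soon as it initializes, so an empty file usually means the messages are being filtered by the logger level, not that the appender is broken.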
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class WordCount extends Configured implements Tool {

    // Named logger "zzc"; its level and appenders are controlled by log4j.properties.
    public static final Log myLOG = LogFactory.getLog("zzc");

    public static void main(String[] args) throws Exception {
        String[] files = {"/input/a.txt", "/log_out"};
        System.exit(ToolRunner.run(new Configuration(), new WordCount(), files));
    }

    @Override
    public int run(String[] args) throws Exception {
        // Use the configuration injected by ToolRunner rather than creating a new
        // one, otherwise any -D options passed on the command line are ignored.
        Configuration conf = getConf();

        Job job = Job.getInstance(conf, "xz");
        job.setJarByClass(WordCount.class);

        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static class Map extends Mapper<Object, Text, Text, LongWritable> {

        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            myLOG.debug("Map...");

            // Split each input line on commas and emit (token, 1).
            String line = value.toString();
            String[] values = line.split(",");
            for (String str : values) {
                context.write(new Text(str), new LongWritable(1));
            }
        }
    }

    public static class Reduce extends Reducer<Text, LongWritable, Text, LongWritable> {

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context)
                throws IOException, InterruptedException {
            myLOG.debug("Reduce...");

            // Sum the counts emitted for each token.
            long sum = 0;
            for (LongWritable val : values) {
                sum += val.get();
            }
            context.write(key, new LongWritable(sum));
        }
    }
}
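One more thing worth checking: when the job runs on YARN, the map and reduce tasks execute in separate container JVMs on the cluster nodes, so the myLOG.debug(...) calls in Map and Reduce never reach a file configured on the client machine; their output goes to each container's syslog (viewable with yarn logs -applicationId <appId>). On top of that, the task-side log level defaults to INFO, which filters out debug() calls entirely. A hedged sketch of raising it via the standard job properties, placed at the top of run() before Job.getInstance:

// Inside run(), before creating the Job (sketch, not the original code):
Configuration conf = getConf();
// The task-side log level defaults to INFO, which drops DEBUG messages;
// these are the standard Hadoop 2.x property names for raising it.
conf.set("mapreduce.map.log.level", "DEBUG");
conf.set("mapreduce.reduce.log.level", "DEBUG");

With that in place, "Map..." and "Reduce..." should show up in the container syslog files under the node managers' userlogs directory, even if the client-side log file stays empty.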