各位好,我在eclipse中执行如下程序的时候报错。看起来是没有获取到文件输入相关的参数。麻烦大家帮忙看看是什么问题,谢谢!
看起来是这样没有起到作用
FileInputFormat.addInputPath(job, new Path("/user/hadoop/tmp/hello.txt"));// 文件输入
下面两句的输出分别如下:
System.out.println(FileInputFormat.getInputPaths(job));
System.out.println(FileOutputFormat.getOutputPath(job));
[Lorg.apache.hadoop.fs.Path;@5594a1b5
/user/hadoop/tmp
[mw_shl_code=java,true]package remote.file.manage;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import hadoop.*;
public class WordCount {
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
conf.set("fs.default.name", "hdfs://10.16.59.185:9000");
Job job = new Job(conf, "word count");
job.setJarByClass(WordCount.class);
job.setMapperClass(WordMapper.class);
job.setCombinerClass(WordReducer.class);
job.setReducerClass(WordReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path("/user/hadoop/tmp/hello.txt"));// 文件输入
FileOutputFormat.setOutputPath(job, new Path("/user/hadoop/tmp/"));// 文件输出
System.out.println(FileInputFormat.getInputPaths(job));
System.out.println(FileOutputFormat.getOutputPath(job));
System.exit(job.waitForCompletion(true) ? 0 : 1);// 等待完成退出
}
}
[/mw_shl_code]
16/05/18 14:58:24 INFO jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
[Lorg.apache.hadoop.fs.Path;@5594a1b5
/user/hadoop/tmp
16/05/18 14:50:47 WARN mapred.JobClient: Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
Exception in thread "main" org.apache.hadoop.security.AccessControlException: org.apache.hadoop.security.AccessControlException: Permission denied: user=corp\huangdezhi, access=WRITE, inode="":hadoop:supergroup:rwxr-xr-x
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown Source)
at java.lang.reflect.Constructor.newInstance(Unknown Source)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:96)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:58)
at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:910)
at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:262)
at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1115)
at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:259)
at org.apache.hadoop.mapred.JobClient.configureCommandLineOptions(JobClient.java:573)
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:761)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:432)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:447)
at remote.file.manage.WordCount.main(WordCount.java:31)
|
|