[hadoop@namenode SeqFileText]$ java SeqMain
17/01/09 16:06:29 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
java.io.FileNotFoundException: /samba/anonymous/hadoop-test/SeqFileText/.user.sequence.crc (Permission denied)
at java.io.FileOutputStream.open0(Native Method)
at java.io.FileOutputStream.open(FileOutputStream.java:270)
at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:211)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:207)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStream(RawLocalFileSystem.java:276)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:270)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:257)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:889)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:390)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:449)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:428)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:908)
at org.apache.hadoop.io.SequenceFile$Writer.<init>(SequenceFile.java:1072)
at org.apache.hadoop.io.SequenceFile$RecordCompressWriter.<init>(SequenceFile.java:1372)
at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:273)
at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:295)
at SeqMain.main(SeqMain.java:38)
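The FileNotFoundException is misleading: creating user.sequence itself is not what fails. The local ChecksumFileSystem also tries to create a hidden sidecar file .user.sequence.crc next to the output, and the process has no write permission for that file on the Samba mount. The proper fix is to grant the hadoop user write access to /samba/anonymous/hadoop-test/SeqFileText (or write to a local directory). If the local CRC files are expendable, a minimal workaround sketch, assuming the Hadoop 2.x FileSystem.setWriteChecksum() method is available, is to disable checksum output before creating the writer:

FileSystem fs = FileSystem.get(conf);
// Stop ChecksumFileSystem from creating the hidden .crc sidecar file.
// Trade-off: local reads lose checksum verification.
fs.setWriteChecksum(false);
writer = SequenceFile.createWriter(fs, conf, path, LongWritable.class, TextWritable.class);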
Below is the code:
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;

public class SeqMain {
    public static void main(String[] args) {
        byte[] byte_buff = new byte[2000];
        Configuration conf = new Configuration();
        conf.set("mapreduce.jobtracker.address", "local");
        conf.set("fs.defaultFS", "file:///");

        DataInputStream in = null;
        SequenceFile.Writer writer = null;
        try {
            // Input file layout: each record is a 4-byte length field
            // (counting the field itself) followed by length-4 payload bytes.
            File readfile = new File("./url");
            in = new DataInputStream(new FileInputStream(readfile));

            FileSystem fs = FileSystem.get(conf);
            Path path = new Path("./user.sequence");
            writer = SequenceFile.createWriter(fs, conf, path,
                    LongWritable.class, TextWritable.class);

            LongWritable idKey = new LongWritable(0);
            long pos = 0;
            int i = 0;
            long end = readfile.length();
            while (pos < end - 1) {
                int data_length = in.readInt();
                // readFully() blocks until the whole payload is read;
                // plain read() may return fewer bytes than requested.
                in.readFully(byte_buff, 0, data_length - 4);
                pos += data_length;
                idKey.set(++i);
                writer.append(idKey, new TextWritable(data_length, byte_buff));
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close streams even when append() throws.
            IOUtils.closeStream(writer);
            IOUtils.closeStream(in);
        }
    }
}
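For reference, reading the records back would look roughly like the sketch below (reusing the fs and conf objects from SeqMain). SequenceFile.Reader instantiates the key and value classes reflectively via ReflectionUtils, which is why TextWritable needs the no-arg constructor and working readFields() shown in the corrected class that follows:

SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path("./user.sequence"), conf);
LongWritable key = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
TextWritable value = (TextWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
while (reader.next(key, value)) {
    System.out.println(key.get() + " -> " + value.length + " bytes");
}
reader.close();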
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class TextWritable implements Writable {
    byte[] data_buff;
    int length;

    // A no-arg constructor is mandatory: SequenceFile.Reader creates the
    // value object via ReflectionUtils.newInstance() before calling readFields().
    public TextWritable() {
    }

    public TextWritable(int total_leng, byte[] buff) {
        // Strip the 4-byte length header; only the payload is stored.
        this.length = total_leng - 4;
        this.data_buff = new byte[this.length];
        System.arraycopy(buff, 0, this.data_buff, 0, this.length);
    }

    public void write(DataOutput out) throws IOException {
        // Write the payload length first so readFields() knows how many
        // bytes to read back; without it the original empty readFields()
        // could never reconstruct the object.
        out.writeInt(this.length);
        out.write(this.data_buff, 0, this.length);
    }

    public void readFields(DataInput in) throws IOException {
        this.length = in.readInt();
        this.data_buff = new byte[this.length];
        in.readFully(this.data_buff, 0, this.length);
    }
}
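To compile and run against the Hadoop jars, something along these lines should work (assuming the hadoop command is on the PATH; hadoop classpath prints the jar list):

javac -cp $(hadoop classpath) TextWritable.java SeqMain.java
java -cp $(hadoop classpath):. SeqMain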