Reading and Writing SequenceFiles

The example below connects to HDFS, writes 100 Text key/value pairs into a SequenceFile, lists the output directory to confirm the write, and then reads the records back:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class WriteReadSequenceFileTest {

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://172.168.8.54:9000");
        conf.set("hadoop.job.ugi", "hadoop");

        try {
            FileSystem fs = FileSystem.get(conf);

            // Write the file
            // ===========================================================
            Text key = new Text();
            Text value = new Text();
            // Three compression types are available:
            // CompressionType.NONE   - no compression
            // CompressionType.RECORD - the default; only values are compressed
            // CompressionType.BLOCK  - keys and values are compressed in blocks

            // conf.set("io.seqfile.compression.type", "BLOCK");

            Path path = new Path("/tmp/2/sequenceFile01.dat");
            if (fs.exists(path)) {
                fs.delete(path, true);
            }

            SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path, Text.class, Text.class);
            for (int i = 0; i < 100; i++) {
                key.set("key" + i);
                value.set("value" + i);
                writer.append(key, value);
            }
            writer.close();

            // List the output directory to confirm the file was written
            FileStatus[] files = fs.listStatus(new Path("/tmp/2/"));
            for (FileStatus file : files) {
                System.out.println(file.getPath().toUri().getPath());
            }

            // Read the file back
            // ============================================================
            SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
            while (reader.next(key, value)) {
                System.out.println(key + ":" + value);
            }
            reader.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
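
If you want block compression without relying on the (commented-out) io.seqfile.compression.type setting above, the compression type can also be passed directly to createWriter. The following is a minimal sketch under the same assumptions as the example (same namenode address; the output path sequenceFile02.dat is made up for illustration), using the createWriter overload that takes a CompressionType and a codec:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class WriteBlockCompressedSequenceFileTest {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://172.168.8.54:9000"); // same cluster as above
        FileSystem fs = FileSystem.get(conf);

        // Hypothetical output path, chosen just for this sketch
        Path path = new Path("/tmp/2/sequenceFile02.dat");

        // DefaultCodec (deflate) needs a Configuration before use
        DefaultCodec codec = new DefaultCodec();
        codec.setConf(conf);

        // Request block compression explicitly instead of setting
        // io.seqfile.compression.type in the Configuration
        SequenceFile.Writer writer = SequenceFile.createWriter(
                fs, conf, path, Text.class, Text.class,
                CompressionType.BLOCK, codec);

        Text key = new Text();
        Text value = new Text();
        for (int i = 0; i < 100; i++) {
            key.set("key" + i);
            value.set("value" + i);
            writer.append(key, value);
        }
        writer.close();
    }
}

Nothing changes on the read side: SequenceFile.Reader picks up the compression type and codec from the file header and decompresses records transparently.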
