The HDFS Java API
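The class below walks through the most common operations of the HDFS FileSystem API: deleting, uploading and downloading files, checking whether a path exists, copying between streams, and listing a directory. Each block is left commented out so the operations can be enabled one at a time. Note that FileSystem.get(conf) resolves the target filesystem from fs.defaultFS in the Configuration, so the class should be run with the cluster's core-site.xml on the classpath, for example via hadoop jar your-app.jar HdfsApiTest <args> (the jar name here is illustrative); otherwise it falls back to the local filesystem.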

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.IOUtils;

public class HdfsApiTest {
	public static void main(String[] args) throws IOException {
		//Get the connection object (a handle to the filesystem)
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		
		//Delete a file or directory
		//fs.delete(new Path(args[0]), true);   recursive: a directory is removed even if it is not empty
		//fs.delete(new Path(args[0]), false);  non-recursive: deleting a non-empty directory fails with an IOException
		
		//Upload a local file to HDFS
		//fs.copyFromLocalFile(new Path(args[0]), new Path(args[1]));  args[0] is the local source, args[1] the HDFS destination
		
		//Download an HDFS file to the local filesystem
		//fs.copyToLocalFile(new Path(args[0]), new Path(args[1]));  args[0] is the HDFS source, args[1] the local destination
		
		//Determine whether the path exists, and whether it is a file or a directory
		/*if(fs.exists(new Path(args[0]))){
			System.out.println("Path exists");
		}
		if(fs.isFile(new Path(args[0]))){
			System.out.println("File");
		}
		if(fs.isDirectory(new Path(args[0]))){
			System.out.println("Directory");
		}*/
		
		//Read one HDFS file and append its contents to another
		/*FSDataInputStream in = fs.open(new Path(args[0]));
		FSDataOutputStream out = fs.append(new Path(args[1]));  //append requires that the target file already exists
		byte[] buff = new byte[128];
		int length = 0;
		while((length = in.read(buff)) != -1){
			out.write(buff, 0, length);
		}
		//The copy loop can also be replaced with IOUtils.copyBytes(in, out, 4096, false);
		out.close();
		in.close();*/
		
		//List the files and directories directly under a directory
		//(a full tree is usually printed recursively; see the sketch after the class)
		/*FileStatus[] statuses = fs.listStatus(new Path(args[0]));
		for(FileStatus status : statuses){
			System.out.println(status.getOwner()+"<->"+status.getLen()+"<->"+status.getPath());
			System.out.println(status.getBlockSize()+"<->"+status.getPermission());
			System.out.println(status.isFile()+"<->"+status.isDirectory());
			System.out.println("------------------------------");
		}*/
		fs.close();
	}
}
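As noted in the listing block above, listStatus only returns the direct children of a path, so a directory tree is usually printed with a small recursive helper. A minimal sketch; the class and method names are illustrative, not part of the original:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsRecursiveList {
	//Print every file and directory under the given path, indented by depth
	public static void listRecursively(FileSystem fs, Path path, int depth) throws IOException {
		for (FileStatus status : fs.listStatus(path)) {
			StringBuilder indent = new StringBuilder();
			for (int i = 0; i < depth; i++) {
				indent.append("  ");
			}
			System.out.println(indent.toString() + status.getPath().getName()
					+ (status.isDirectory() ? "/" : ""));
			if (status.isDirectory()) {
				listRecursively(fs, status.getPath(), depth + 1);  //descend into subdirectories
			}
		}
	}

	public static void main(String[] args) throws IOException {
		FileSystem fs = FileSystem.get(new Configuration());
		listRecursively(fs, new Path(args[0]), 0);
		fs.close();
	}
}

FileSystem also offers listFiles(path, true), which iterates every file (though not the directories) under a path without explicit recursion.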

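The original class also imports PathFilter without using it. The listStatus(Path, PathFilter) overload applies such a filter so that only accepted paths are returned. A minimal sketch; the .txt filter and the class name are illustrative:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public class HdfsFilteredList {
	public static void main(String[] args) throws IOException {
		FileSystem fs = FileSystem.get(new Configuration());
		//listStatus returns only the entries accepted by the filter
		PathFilter txtOnly = new PathFilter() {
			@Override
			public boolean accept(Path path) {
				return path.getName().endsWith(".txt");  //illustrative filter: keep .txt entries only
			}
		};
		for (FileStatus status : fs.listStatus(new Path(args[0]), txtOnly)) {
			System.out.println(status.getPath());
		}
		fs.close();
	}
}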