HDFS上传下载小例子

普通版（手动打开输入/输出流，用 IOUtils 逐字节拷贝）

package club.drguo.hadoop.hdfs;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;

/**
 * Stream-level HDFS upload/download example: opens raw input/output
 * streams and copies bytes with commons-io {@code IOUtils.copy}.
 */
public class HdfsClient {
	private FileSystem fs = null;

	@Before
	public void getFS() throws IOException {
		// Build the client configuration.
		Configuration conf = new Configuration();
		// Point the client at the HDFS NameNode. "fs.defaultFS" is the
		// current key; the old "fs.default.name" has been deprecated.
		conf.set("fs.defaultFS", "hdfs://localhost:9000");
		// Replication factor for files created through this client.
		conf.set("dfs.replication", "3");
		// Obtain the FileSystem instance for the configured scheme.
		fs = FileSystem.get(conf);
	}

	// Upload: stream a local file into HDFS.
	@Test
	public void upload() throws IOException {
		// Destination path in HDFS.
		Path destFile = new Path("hdfs://localhost:9000/up.py");
		// try-with-resources guarantees both streams are closed (and the
		// HDFS output stream flushed/committed) even if the copy fails.
		// IOUtils.copy itself does NOT close its arguments.
		try (FileInputStream fileInputStream = new FileInputStream("/home/guo/test.py");
				FSDataOutputStream dataOutputStream = fs.create(destFile)) {
			// Read from the local file and write into HDFS.
			IOUtils.copy(fileInputStream, dataOutputStream);
		}
	}

	// Download: stream an HDFS file to the local filesystem.
	@Test
	public void download() throws IllegalArgumentException, IOException {
		// Source in HDFS, destination on the local disk; both streams are
		// closed automatically by try-with-resources.
		try (FSDataInputStream dataInputStream = fs.open(new Path("hdfs://localhost:9000/up.py"));
				FileOutputStream fileOutputStream = new FileOutputStream("/home/guo/down.py")) {
			// Read from HDFS and write out locally.
			IOUtils.copy(dataInputStream, fileOutputStream);
		}
	}

}


正常版（直接使用 FileSystem 封装好的 copyFromLocalFile / copyToLocalFile 等 API）

package club.drguo.hadoop.hdfs;

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Before;
import org.junit.Test;

/**
 * High-level HDFS example: uses the convenience methods on
 * {@link FileSystem} (copyFromLocalFile, copyToLocalFile, mkdirs,
 * delete, rename, listFiles/listStatus) instead of raw streams.
 */
public class HdfsClientNormal {
	private FileSystem fs = null;

	@Before
	public void getFS() throws IOException {
		// Build the client configuration.
		Configuration conf = new Configuration();
		// Point the client at the HDFS NameNode. "fs.defaultFS" is the
		// current key; the old "fs.default.name" has been deprecated.
		conf.set("fs.defaultFS", "hdfs://localhost:9000");
		// Replication factor for files created through this client.
		conf.set("dfs.replication", "3");
		// Obtain the FileSystem instance for the configured scheme.
		fs = FileSystem.get(conf);
	}

	// Upload a local file to HDFS via the high-level copy API.
	@Test
	public void upload() throws IOException {
		fs.copyFromLocalFile(new Path("/home/guo/test.py"), new Path("/up.py"));
	}

	// Download /up.py from HDFS to the local filesystem.
	@Test
	public void download() throws IllegalArgumentException, IOException {
		// Keep the .py extension so the local copy matches the uploaded
		// file (was "down.java", which mislabeled the Python file).
		fs.copyToLocalFile(new Path("/up.py"), new Path("/home/guo/down.py"));
	}

	// Create a directory in HDFS.
	@Test
	public void mkDir() throws IllegalArgumentException, IOException {
		fs.mkdirs(new Path("/test"));
	}

	// Delete a path; the second argument enables recursive deletion.
	@Test
	public void removeFile() throws IllegalArgumentException, IOException {
		fs.delete(new Path("/test"), true);
	}

	// Rename a file. HDFS paths are case-sensitive: upload() creates
	// "/up.py", so renaming "/Up.py" would silently fail (rename just
	// returns false for a missing source) — use the correct casing.
	@Test
	public void rename() throws IllegalArgumentException, IOException {
		fs.rename(new Path("/up.py"), new Path("/up.java"));
	}

	// Query: list files recursively, then the direct children of "/".
	@Test
	public void queryList() throws FileNotFoundException, IllegalArgumentException, IOException {
		// listFiles returns files only; the boolean enables recursion.
		RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
		while (listFiles.hasNext()) {
			LocatedFileStatus fileStatus = listFiles.next();
			System.out.println(fileStatus.getPath().getName());
		}
		System.out.println("--------------------------------------------------------------");
		// listStatus returns direct children of "/", files and directories.
		FileStatus[] listStatus = fs.listStatus(new Path("/"));
		for (FileStatus fileStatus : listStatus) {
			System.out.println(fileStatus.getPath().getName() + "----" + (fileStatus.isDirectory() ? "文件夹" : "文件"));
		}
	}
}


你可能感兴趣的:(java,hadoop,hdfs)