Hadoop HDFS Java API: Read, Write, and Create

package com.henu.first;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class TestFirst {
	// Provides access to configuration parameters.
	Configuration conf = null;
	// An abstract base class for a fairly generic filesystem.
	FileSystem fs = null;
	
	@Before
	public void init() throws IOException{
		conf = new Configuration();
		// Returns the configured filesystem implementation.
		fs = FileSystem.get(conf);
	}
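
	// Note: FileSystem.get(conf) resolves the target cluster from fs.defaultFS in
	// the core-site.xml on the classpath. A hypothetical alternative (the URI and
	// user below are assumptions, not from this post) pins the cluster and acting
	// user explicitly; this overload also throws InterruptedException:
	//   fs = FileSystem.get(URI.create("hdfs://henu1:9000"), conf, "root");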

	@Test
	public void mkdir() throws IOException{
		// Name a file or directory in the filesystem.
		Path path = new Path("/mytemp");
		if (fs.exists(path)) {
			// Delete the existing directory recursively.
			fs.delete(path, true);
		}
		// Create the directory.
		fs.mkdirs(path);
	}
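
	// Not in the original post: a quick sketch to verify the directory
	// operations by listing the children of the root path.
	@Test
	public void listRoot() throws IOException{
		for (FileStatus status : fs.listStatus(new Path("/"))) {
			System.out.println(status.getPath() + (status.isDirectory() ? " (dir)" : ""));
		}
	}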
	
	@Test
	public void upload() throws IOException{
		// Destination path in HDFS.
		Path path = new Path("/mytemp/henu.txt");
		FSDataOutputStream out = fs.create(path);

		// Open the local source file on disk.
		InputStream in = new BufferedInputStream(
				new FileInputStream("D:/test.txt"));
		// Copy with Hadoop's utility; the final true closes both streams,
		// so no explicit close() is needed afterwards.
		IOUtils.copyBytes(in, out, conf, true);
	}
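
	// The inverse of upload, sketched under the same assumptions: copy the HDFS
	// file back to disk. The local destination path below is hypothetical.
	@Test
	public void download() throws IOException{
		FSDataInputStream in = fs.open(new Path("/mytemp/henu.txt"));
		java.io.OutputStream out = new java.io.FileOutputStream("D:/henu-copy.txt");
		// The final true closes both streams when the copy finishes.
		IOUtils.copyBytes(in, out, conf, true);
	}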
	
	@Test
	public void readFile() throws IOException{

		Path path = new Path("/user/root/test.txt");
		FileStatus file = fs.getFileStatus(path);

		BlockLocation[] blks = fs.getFileBlockLocations(file, 0, file.getLen());

		for (BlockLocation blockLocation : blks) {
			System.out.println(blockLocation);
		}
		/* Sample output (offset, length, hosts) for a two-block file:
		 * 	0,1048576,henu3,henu2,henu4
		 * 	1048576,640319,henu3,henu2,henu4
		 */

		// Open the file for reading.
		FSDataInputStream open = fs.open(path);

		// Seek to byte 1048576, the first byte of the second block,
		// and print the next five bytes.
		open.seek(1048576);
		for (int i = 0; i < 5; i++) {
			System.out.println((char) open.readByte());
		}
		open.close();
	}
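
	// For comparison, a minimal sketch (not in the original post) that streams the
	// whole file to stdout instead of single bytes. The 4096 buffer size is an
	// arbitrary choice; passing false keeps System.out open.
	@Test
	public void readWholeFile() throws IOException{
		FSDataInputStream in = fs.open(new Path("/user/root/test.txt"));
		IOUtils.copyBytes(in, System.out, 4096, false);
		in.close();
	}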
	
	@After
	public void destroy() throws IOException{
		if (fs != null) {
			fs.close();
		}
	}
	
}
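
To run these tests, the client has to know where the NameNode lives. Usually that means putting the cluster's core-site.xml and hdfs-site.xml on the classpath; otherwise the address can be set in code. A minimal sketch, assuming a hypothetical NameNode at henu1:9000:

Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://henu1:9000");
FileSystem fs = FileSystem.get(conf);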
