The first piece of code I wrote after finishing the big data (Hadoop) setup. It uses the HDFS Java client API to create a directory, upload a local file, and print a file's block locations before reading across a block boundary.

package com.sxt.hadoop.hdfs;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class TestHDFS {

    Configuration conf;
    FileSystem fs;

    @Before // set up the client connection to HDFS
    public void conn() throws IOException {
        // true = load the default resources (core-default.xml, core-site.xml)
        // from the classpath; with false they would be skipped
        conf = new Configuration(true);
        // FileSystem is the abstract parent class; get() returns the concrete
        // implementation selected by fs.defaultFS (here the HDFS client)
        fs = FileSystem.get(conf);
        // everything above is client-side setup
    }

    @After
    public void close() throws IOException {
        fs.close();
    }

    @Test // create /data, deleting it first if it already exists
    public void mkdir() throws IOException {
        Path dirs = new Path("/data");
        if (fs.exists(dirs)) {
            fs.delete(dirs, true); // true = delete recursively
        }
        fs.mkdirs(dirs);
    }

    @Test // upload a local file to HDFS
    public void upload() throws IOException {
        Path outFile = new Path("/data/sxt.txt");
        FSDataOutputStream out = fs.create(outFile);
        InputStream input = new BufferedInputStream(
                new FileInputStream(new File("d:\\nginx")));
        // the final true closes both streams when the copy finishes
        IOUtils.copyBytes(input, out, conf, true);
    }
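
    // A counterpart to upload() for the other direction: copy the file just
    // written back to the local disk. This is a sketch; "d:\\sxt-copy.txt" is
    // a placeholder destination, not a path from the original post.
    @Test
    public void download() throws IOException {
        Path inFile = new Path("/data/sxt.txt");
        FSDataInputStream in = fs.open(inFile);
        OutputStream output = new BufferedOutputStream(
                new FileOutputStream(new File("d:\\sxt-copy.txt")));
        // as in upload(), the final true closes both streams after the copy
        IOUtils.copyBytes(in, output, conf, true);
    }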

    @Test // print the file's block locations, then read across a block boundary
    public void blks() throws IOException {
        Path f = new Path("/user/root/test.txt");
        FileStatus file = fs.getFileStatus(f);
        BlockLocation[] blks = fs.getFileBlockLocations(file, 0, file.getLen());

        for (BlockLocation b : blks) {
            System.out.println(b); // offset, length, and hosts of each block
        }

        FSDataInputStream in = fs.open(f);
        // read the first four bytes of the first block
        System.out.println((char) in.read());
        System.out.println((char) in.read());
        System.out.println((char) in.read());
        System.out.println((char) in.read());
        // seek to byte offset 1048576 (1 MB); if the file was written with a
        // 1 MB block size, the following reads come from the second block
        in.seek(1048576);
        System.out.println((char) in.read());
        System.out.println((char) in.read());
        System.out.println((char) in.read());
    }
}
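
One thing the listing leaves implicit: FileSystem.get(conf) chooses the concrete filesystem from the fs.defaultFS property, which new Configuration(true) loads from the core-site.xml found on the classpath. If no such file is on the classpath, the address can be set in code instead; a minimal sketch, where hdfs://node01:9000 is an assumed NameNode address, not part of the original setup:

Configuration conf = new Configuration(true);
conf.set("fs.defaultFS", "hdfs://node01:9000"); // assumed NameNode address; match your cluster
FileSystem fs = FileSystem.get(conf);

Without a reachable HDFS cluster configured one way or the other, every test in the class fails at the connection step in conn().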

