pom.xml:
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.2</version>
    </dependency>
</dependencies>
log4j.properties
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
hdfs-site.xml (parameter priority, lowest to highest: values configured on the Linux cluster act as defaults < the configuration file on the client classpath < values set in code; a short sketch follows the snippet):
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
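As a quick illustration of that precedence, the sketch below uploads a file with dfs.replication forced to 2 in code. It reuses the hdfs://testnote01:9000 address, the root user, and the D:/LICENSE.txt source file from the tests further down; the /priority-check.txt target path and the ReplicationPriorityCheck class name are made up for this example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class ReplicationPriorityCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();  // picks up the classpath hdfs-site.xml (dfs.replication = 1)
        conf.set("dfs.replication", "2");          // code-level value, highest priority
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), conf, "root");
        // /priority-check.txt is a hypothetical target path for this sketch
        fs.copyFromLocalFile(new Path("D:/LICENSE.txt"), new Path("/priority-check.txt"));
        fs.close();
        // The uploaded file ends up with replication factor 2 (visible in the NameNode web UI
        // or via `hdfs dfs -stat %r /priority-check.txt`): the value set in code wins over the
        // classpath hdfs-site.xml, which in turn wins over whatever is configured on the cluster.
    }
}

Commenting out the conf.set line drops the replication back to 1, taken from the classpath hdfs-site.xml; removing that file as well falls back to the cluster-side setting.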
Code:
package com.zyd;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Unit tests for the HDFS client API.
 */
public class AppTest {

    /**
     * Obtain an HDFS file system handle.
     */
    @Test
    public void initHDFS() throws Exception {
        // 1 Create the configuration object
        Configuration configuration = new Configuration();
        // 2 Get the file system
        FileSystem fs = FileSystem.get(configuration);
        // 3 Print the file system
        System.out.println(fs.toString());
    }
    // Sample output: org.apache.hadoop.fs.LocalFileSystem@1677d1
    // (no fs.defaultFS is configured here, so the local file system is returned)
    /**
     * Upload a file to HDFS (and test the parameter priority).
     */
    @Test
    public void testCopyFromLocalFile() throws URISyntaxException, IOException, InterruptedException {
        // 1 Get the file system
        Configuration configuration = new Configuration();
        // configuration.set("dfs.replication", "2");
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Upload the file
        fs.copyFromLocalFile(new Path("D:/LICENSE.txt"), new Path("/idea.txt"));
        // 3 Release the resources
        fs.close();
        System.out.println("over");
    }
    /**
     * Download a file from HDFS.
     */
    @Test
    public void testCopyToLocalFile() throws URISyntaxException, IOException, InterruptedException {
        // 1 Get the file system
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Perform the download
        // boolean delSrc                 whether to delete the source file
        // Path src                       the HDFS path of the file to download
        // Path dst                       the local path to download the file to
        // boolean useRawLocalFileSystem  whether to use the raw local file system,
        //                                i.e. skip writing the local .crc checksum file
        fs.copyToLocalFile(false, new Path("/hello.txt"), new Path("e:/hadoop.txt"), true);
        // 3 Release the resources
        fs.close();
        System.out.println("over");
    }
    /**
     * Create a directory in HDFS.
     */
    @Test
    public void testMkdir() throws URISyntaxException, IOException, InterruptedException {
        // 1 Get the file system
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Create the directory
        fs.mkdirs(new Path("/0906/zhangsan"));
        // 3 Release the resources
        fs.close();
    }
    /**
     * Delete a file from HDFS.
     * @throws IOException
     */
    @Test
    public void testDelete() throws IOException, URISyntaxException, InterruptedException {
        // 1 Get the file system
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Perform the delete; the second argument enables recursive deletion
        fs.delete(new Path("/ok.txt"), true);
        // 3 Release the resources
        fs.close();
    }
    /**
     * Rename a file in HDFS.
     * @throws URISyntaxException
     * @throws IOException
     * @throws InterruptedException
     */
    @Test
    public void testRename() throws URISyntaxException, IOException, InterruptedException {
        // 1 Get the file system
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Rename the file
        fs.rename(new Path("/hello11.txt"), new Path("/Im.txt"));
        // 3 Release the resources
        fs.close();
    }
    /**
     * List the files in HDFS along with their block locations.
     */
    @Test
    public void testListFiles() throws URISyntaxException, IOException, InterruptedException {
        // 1 Get the file system
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Get the file details
        // Why does listFiles return an iterator rather than a container such as a List?
        // A list would pull every entry into memory at once; the iterator fetches
        // one entry at a time, which scales to directories with many files.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        while (listFiles.hasNext()) {
            LocatedFileStatus status = listFiles.next();
            // Print the details
            // File name
            System.out.println("File name: " + status.getPath().getName());
            // Length
            System.out.println("File size: " + status.getLen());
            // Group
            System.out.println("Group: " + status.getGroup());
            // Permissions
            System.out.println("Permissions: " + status.getPermission());
            // Block locations
            BlockLocation[] blockLocations = status.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                // Hosts that store this block
                String[] hosts = blockLocation.getHosts();
                for (String host : hosts) {
                    System.out.println("Host storing this block: " + host);
                }
            }
            System.out.println("-------------------------");
        }
        fs.close();
    }
    // Sample output:
    // File name: ok.txt
    // File size: 3778
    // Group: supergroup
    // Permissions: rw-r--r--
    // Host storing this block: testnote01
    // Host storing this block: testnote02
    // Host storing this block: testnote03
    // -------------------------
    // File name: a.txt
    // File size: 6
    // Group: supergroup
    // Permissions: rw-rw-rw-
    // Host storing this block: testnote03
    // Host storing this block: testnote02
    // -------------------------
    /**
     * Distinguish files from directories in HDFS.
     */
    @Test
    public void testListStatus() throws URISyntaxException, IOException, InterruptedException {
        // 1 Get the configuration and the file system
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://testnote01:9000"), configuration, "root");
        // 2 Check whether each entry is a file or a directory
        FileStatus[] listStatus = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            if (fileStatus.isFile()) {
                System.out.println("File: " + fileStatus.getPath().getName());
            } else {
                System.out.println("Directory: " + fileStatus.getPath().getName());
            }
        }
        // Release the resources
        fs.close();
    }
}
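Each test above opens and closes its own Configuration and FileSystem. As an optional design note (not part of the original tests), that shared setup can be hoisted into JUnit 4 lifecycle methods; a minimal sketch, assuming the same hdfs://testnote01:9000 address and root user, with HdfsClientTest and testRootExists as made-up names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.net.URI;

public class HdfsClientTest {

    private FileSystem fs;

    @Before
    public void setUp() throws Exception {
        // One client connection per test; URI and user match the tests above.
        fs = FileSystem.get(new URI("hdfs://testnote01:9000"), new Configuration(), "root");
    }

    @Test
    public void testRootExists() throws Exception {
        // Any of the operations above can be rewritten against the shared fs field.
        System.out.println(fs.exists(new Path("/")));
    }

    @After
    public void tearDown() throws Exception {
        fs.close();
    }
}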