Configuration conf = new Configuration();
// Build the Hadoop configuration object for this operation
conf.set("fs.defaultFS", "hdfs://localhost:9000");
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000"), conf, user);
// Obtain the FileSystem handle, connecting to the namenode as `user`
/**
 * Lists every file under the given HDFS directory, recursively.
 *
 * @param where HDFS directory to list, e.g. "/"
 * @throws IOException          if the FileSystem cannot be created
 * @throws InterruptedException if connecting as {@code user} is interrupted
 */
public void listfiles(String where) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // try-with-resources: FileSystem is Closeable and the original leaked it
    try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000"), conf, user)) {
        // second argument true = recurse into subdirectories; false lists
        // only files directly under `where`
        RemoteIterator<LocatedFileStatus> iter = fsSource.listFiles(new Path(where), true);
        while (iter.hasNext()) {
            LocatedFileStatus file = iter.next();
            // Print only the path portion of the URI (drops "hdfs://localhost:9000"),
            // replacing the brittle substring(21) on the full URI string
            System.out.println(user + "$:" + file.getPath().toUri().getPath());
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
注意 listFiles 的第二个参数:为 true 时表示递归遍历所有子目录下的文件;
为 false 时只列出该目录下的直接文件。
为 true 时我的运行结果如下;
为 false 时(根目录下没有直接文件)什么都没有遍历到。
/*
*
* 创建目录
*
*/
/**
 * Creates a directory (and any missing parents) on HDFS.
 *
 * @param name HDFS path of the directory to create
 * @throws IOException          if the FileSystem cannot be created
 * @throws InterruptedException if connecting as {@code user} is interrupted
 */
public void mkdir(String name) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // try-with-resources closes the FileSystem; the original leaked it
    try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
        fsSource.mkdirs(new Path(name));
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/*
*
* 删除文件
*/
/**
 * Deletes a file or directory on HDFS.
 *
 * @param name      HDFS path to delete
 * @param recursive true to delete a non-empty directory and its contents
 * @throws IOException          if the FileSystem cannot be created
 * @throws InterruptedException if connecting as {@code user} is interrupted
 */
public void delete(String name, boolean recursive) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // try-with-resources closes the FileSystem; the original leaked it
    try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
        fsSource.delete(new Path(name), recursive);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/*
*
* 下载文件
*/
/**
 * Downloads a file from HDFS to the local file system.
 *
 * @param src HDFS source path
 * @param dst local destination path
 * @throws IOException          if the FileSystem cannot be created
 * @throws InterruptedException if connecting as {@code user} is interrupted
 */
public void download(String src, String dst) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // try-with-resources closes the FileSystem; the original leaked it
    try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
        fsSource.copyToLocalFile(new Path(src), new Path(dst));
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/*
*
* 上传文件
*/
/**
 * Uploads a local file to HDFS.
 *
 * @param src local source path
 * @param dst HDFS destination path
 * @throws IOException          if the FileSystem cannot be created
 * @throws InterruptedException if connecting as {@code user} is interrupted
 */
public void upload(String src, String dst) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // try-with-resources closes the FileSystem; the original leaked it
    try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
        fsSource.copyFromLocalFile(new Path(src), new Path(dst));
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/*
*
* 重命名
*/
/**
 * Renames (moves) a file or directory on HDFS.
 *
 * @param source existing HDFS path
 * @param dst    new HDFS path
 * @throws IOException          if the FileSystem cannot be created
 * @throws InterruptedException if connecting as {@code user} is interrupted
 */
public void rename(String source, String dst) throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // try-with-resources closes the FileSystem; the original leaked it
    try (FileSystem fsSource = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf, user)) {
        fsSource.rename(new Path(source), new Path(dst));
    } catch (IOException e) {
        e.printStackTrace();
    }
}
注意:文件开头的 package 声明要改成你自己的包名。
package oprate_hadoop;
import java.io.IOException;
import java.net.URI;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
//import java.util.concurrent.TransferQueue;
//import javax.naming.NamingException;
//import org.apache.hadoop.fs.FileStatus;
//import org.apache.commons.collections.bag.SynchronizedSortedBag;
/**
 * Small interactive console tool for common HDFS operations
 * (list / mkdir / delete / download / upload / rename) against a
 * single-node cluster at hdfs://localhost:9000.
 */
public class Oprate_hadoop {

    /** Namenode address shared by every operation. */
    private static final String HDFS_URI = "hdfs://localhost:9000";

    // User that performs the HDFS operations; here it is the Linux login user.
    // Passed to FileSystem.get(URI, conf, user) by every method below.
    String user = "hadoop";

    /**
     * Builds a fresh, configured FileSystem handle for {@link #user}.
     * Every call site closes it via try-with-resources; the original code
     * duplicated this setup in all six methods and never closed the handle.
     */
    private FileSystem getFileSystem() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        return FileSystem.get(URI.create(HDFS_URI), conf, user);
    }

    /**
     * Lists every file under the given HDFS directory, recursively.
     *
     * @param where HDFS directory to list, e.g. "/"
     */
    public void listfiles(String where) throws IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            // second argument true = recurse into subdirectories
            RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path(where), true);
            while (iter.hasNext()) {
                LocatedFileStatus file = iter.next();
                // Print only the path portion of the URI (drops the scheme and
                // authority), replacing the brittle substring(21) on the URI string
                System.out.println(user + "$:" + file.getPath().toUri().getPath());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a directory (and any missing parents) on HDFS.
     *
     * @param name HDFS path of the directory to create
     */
    public void mkdir(String name) throws IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            fs.mkdirs(new Path(name));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Deletes a file or directory on HDFS.
     *
     * @param name      HDFS path to delete
     * @param recursive true to delete a non-empty directory and its contents
     */
    public void delete(String name, boolean recursive) throws IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            fs.delete(new Path(name), recursive);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Downloads a file from HDFS to the local file system.
     *
     * @param src HDFS source path
     * @param dst local destination path
     */
    public void download(String src, String dst) throws IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            fs.copyToLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param src local source path
     * @param dst HDFS destination path
     */
    public void upload(String src, String dst) throws IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            fs.copyFromLocalFile(new Path(src), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Renames (moves) a file or directory on HDFS.
     *
     * @param source existing HDFS path
     * @param dst    new HDFS path
     */
    public void rename(String source, String dst) throws IOException, InterruptedException {
        try (FileSystem fs = getFileSystem()) {
            fs.rename(new Path(source), new Path(dst));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Interactive menu loop; reads a choice then the required paths from stdin. */
    public static void main(String[] args) throws IOException, InterruptedException {
        Oprate_hadoop hadoop = new Oprate_hadoop();
        // One Scanner for the whole session: the original created a new
        // Scanner(System.in) per prompt, and each Scanner buffers ahead on the
        // shared stream, which can silently swallow input.
        Scanner input = new Scanner(System.in);
        while (true) {
            System.out.println("==============================================");
            System.out.println("\n " +
                "||\t\t1.查看文件\t\t|| \n " +
                "||\t\t2.创建目录\t\t|| \n " +
                "||\t\t3.删除文件\t\t|| \n " +
                "||\t\t4.下载文件\t\t|| \n " +
                "||\t\t5.上传文件\t\t|| \n " +
                "||\t\t6.重命名文件\t\t|| \n");
            System.out.println("==============================================");
            int number = input.nextInt();
            input.nextLine(); // consume the rest of the menu line before reading paths
            switch (number) {
                case 1:
                    hadoop.listfiles("/");
                    break;
                case 2: {
                    System.out.println("请输入目录地址");
                    System.out.print("hdfs$:");
                    String file = input.nextLine();
                    hadoop.mkdir(file);
                    break;
                }
                case 3: {
                    System.out.println("请输入要删除的文件名称");
                    String file = input.nextLine();
                    hadoop.delete(file, true);
                    break;
                }
                case 4: {
                    System.out.println("请输入要下载的文件目录");
                    String file = input.nextLine();
                    System.out.println("请输入要存放在本地的目录");
                    String dst = input.nextLine();
                    hadoop.download(file, dst);
                    break;
                }
                case 5: {
                    System.out.println("请输入上传文件的位置");
                    String file = input.nextLine();
                    System.out.println("请输入上传的目标位置");
                    String dst = input.nextLine();
                    hadoop.upload(file, dst);
                    break;
                }
                case 6: {
                    System.out.println("请输入需要改名的文件");
                    String file = input.nextLine();
                    System.out.println("请输入新名字");
                    String dst = input.nextLine();
                    hadoop.rename(file, dst);
                    break;
                }
                default:
                    // unknown choice: redisplay the menu, matching the original
                    // if-chain which simply fell through
                    break;
            }
        }
    }
}