1.Hadoop连接HDFS进行API操作
2.hadoop集群已启动完全分布式
3.具备基本的Java编程能力即可完成
1.创建文件:
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Creates a file on HDFS and writes text content into it.
 */
public class demo1 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		// Initialize the client configuration.
		Configuration conf = new Configuration();
		// Use the full HDFS URI; "root" is the user to act as.
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String filepath = "/test1/demo6"; // target file path on HDFS
		String text = "sdawfaw";          // content to write into the file
		// BUG FIX: the original passed `text` (the content) to create(), which
		// produced a file literally named "sdawfaw" and never wrote anything.
		FSDataOutputStream fd = fs.create(new Path(filepath));
		fd.write(text.getBytes());
		System.out.println(filepath);
		// BUG FIX: close the stream BEFORE the FileSystem; the original closed
		// fs first, invalidating the still-open output stream.
		fd.close();
		fs.close();
	}
}
2.创建文件夹
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Creates a directory on HDFS and reports whether the creation succeeded.
 */
public class demo2 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String dirpath = "/test1/demo2"; // directory to create
		boolean created = fs.mkdirs(new Path(dirpath));
		// BUG FIX: String.format(dirpath, b) had no %s/%b specifier, so the
		// boolean result was silently discarded.
		System.out.println(dirpath + " created: " + created);
		fs.close();
	}
}
3.文件更名
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Renames (moves) a file on HDFS and reports the result.
 */
public class demo5 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String filepath = "/test1/demo2/a.txt"; // path before the rename
		String newName = "/test1/demo2/b.txt";  // path after the rename
		boolean res = fs.rename(new Path(filepath), new Path(newName));
		// BUG FIX: String.format(filepath, newName, res) had no format
		// specifiers, so both the new name and the result were dropped.
		System.out.println(filepath + " -> " + newName + " : " + res);
		// BUG FIX: the FileSystem was never closed in the original.
		fs.close();
	}
}
4.查看文件是否存在
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Checks whether a directory, a file, and a non-existent path exist on HDFS.
 */
public class demo4 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String dirpath = "/test1/demo2w/";       // directory to probe
		String filepath = "/test1/demo22/a.txt"; // file to probe
		String none = "/none";                   // path expected not to exist
		boolean dirEx = fs.exists(new Path(dirpath));
		boolean fileEx = fs.exists(new Path(filepath));
		boolean noEx = fs.exists(new Path(none));
		// BUG FIX: all three String.format(path, flag) calls had no format
		// specifier, so the exists() results were never actually printed.
		System.out.println(dirpath + " exists: " + dirEx);
		System.out.println(filepath + " exists: " + fileEx);
		System.out.println(none + " exists: " + noEx);
		fs.close();
	}
}
5.附加文件
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Appends text to an existing HDFS file, printing the file content
 * before and after the append.
 */
public class demo3 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		// Uncomment on small clusters where append() fails while trying to
		// replace a datanode in the write pipeline.
		// BUG FIX: the original keys contained a comma typo ("block,write");
		// the correct property names use dots throughout.
		// conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
		// conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String filepath = "/test1/demo2/a.txt"; // file to append to (must already exist)
		System.out.println("旧文本:");
		printFile(conf, fs, filepath); // show content before the append
		FSDataOutputStream w = fs.append(new Path(filepath)); // open in append mode
		// Replaced the original offensive placeholder text with neutral content.
		String context = "appended line\n" + "\n"; // content to append
		w.write(context.getBytes());
		w.close();
		System.out.println("新文本:");
		printFile(conf, fs, filepath); // show content after the append
		// BUG FIX: the FileSystem was never closed in the original.
		fs.close();
	}

	/**
	 * Reads the given HDFS file and streams its raw bytes to stdout.
	 */
	private static void printFile(Configuration conf, FileSystem fs, String filepath) throws IllegalArgumentException, IOException {
		FSDataInputStream read = fs.open(new Path(filepath));
		byte[] datas = new byte[128]; // fixed-size read buffer
		int res;
		while ((res = read.read(datas)) != -1) {
			System.out.write(datas, 0, res);
		}
		// Release the input stream.
		read.close();
	}
}
6.读文件
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Reads an HDFS file and prints its raw bytes to stdout.
 */
public class demo6 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String filepath = "/test1/demo2/b.txt"; // file to read
		FSDataInputStream reader = fs.open(new Path(filepath));
		byte[] datas = new byte[128]; // fixed-size read buffer
		int res;
		while ((res = reader.read(datas)) != -1) {
			System.out.write(datas, 0, res);
		}
		reader.close();
		// BUG FIX: the FileSystem was never closed in the original.
		fs.close();
	}
}
7.上传文件
package test1_javaApi;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Uploads a local file to HDFS via copyFromLocalFile.
 */
public class demo7 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		// Connect to the cluster as user "root".
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		// Source on the local Windows filesystem, destination on HDFS.
		Path localSrc = new Path("G:\\douban.txt");
		Path hdfsDst = new Path("/douban.txt");
		fs.copyFromLocalFile(localSrc, hdfsDst);
		System.out.println("上传成功");
		fs.close();
	}
}
8.判断是文件还是目录
package test1_javaApi;
/*
* 判断是文件还是目录
*
* **/
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Lists the status of a path and reports whether each entry is a file
 * or a directory. (listStatus on a file path returns that file's status.)
 */
public class demo8 {
	public static void main(String[] args) throws IOException, InterruptedException, URISyntaxException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(new URI("hdfs://master:9000"), conf, "root");
		String basepath = "/test1/demo2/b.txt"; // path to inspect
		FileStatus[] filestatus = fs.listStatus(new Path(basepath)); // query status
		for (FileStatus file : filestatus) {
			if (file.isDirectory()) {
				System.out.println("目录");
			} else {
				System.out.println("文件");
			}
		}
		// BUG FIX: the FileSystem was never closed in the original.
		fs.close();
	}
}
这些都是简单的操作。需要注意导入正确的包(例如 org.apache.hadoop.fs.Path,而不是同名的其他类),否则会出现难以排查的编译或运行时错误。