Hadoop 版本：0.19.1（hadoop-0.19.1-core.jar）
1、创建 HDFS 的 FileSystem 对象
// Obtain a handle to the HDFS instance running at 127.0.0.1:9000.
// The "fs.default.name" property tells FileSystem.get() which filesystem
// implementation and namenode address to use.
Configuration conf = new Configuration();
conf.set("fs.default.name", "hdfs://127.0.0.1:9000/");
FileSystem dfs = FileSystem.get(conf);
// Create the directory "TestDirectory" under the current HDFS working directory.
// mkdirs() creates any missing parent directories as well.
String dirName = "TestDirectory";
Path dirPath = new Path(dfs.getWorkingDirectory() + "/" + dirName);
dfs.mkdirs(dirPath);
// Create a nested directory "subDirectory" inside "TestDirectory".
String subDirName = "subDirectory";
Path subDirPath = new Path(dfs.getWorkingDirectory() + "/TestDirectory/" + subDirName);
dfs.mkdirs(subDirPath);
// Recursively delete "TestDirectory" from the HDFS working directory.
// FIX: the original called Dfs.delete(src) — 'Dfs' (capital D) is an undefined
// identifier; the FileSystem handle is 'dfs'. Also pass recursive=true: the
// one-argument delete(Path) is deprecated, and this directory contains
// children (subDirectory), so a non-recursive delete would fail.
String dirName = "TestDirectory";
Path target = new Path(dfs.getWorkingDirectory() + "/" + dirName);
dfs.delete(target, true);
// Upload a file from the local filesystem into HDFS.
// The source is a local Windows path; the destination is an HDFS directory,
// so the file keeps its original name (file1.txt) under subDirectory.
Path localSrc = new Path("E://HDFS/file1.txt");
Path hdfsDst = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/");
dfs.copyFromLocalFile(localSrc, hdfsDst);
// Download a file from HDFS back to the local filesystem.
Path hdfsSrc = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/file1.txt");
Path localDst = new Path("E://HDFS/");
dfs.copyToLocalFile(hdfsSrc, localDst);
// Create an empty file in HDFS (analogous to java.io.File.createNewFile()).
Path newFile = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/file2.txt");
dfs.createNewFile(newFile);
// Copy the contents of a local file into an HDFS file.
// FIXES over the original:
//  - It used fis.available() as the file length; available() only reports
//    bytes readable without blocking, not total size.
//  - A single read(btr) call may return fewer bytes than requested, leaving
//    the rest of the buffer as zeros.
//  - Neither the FileInputStream nor the FSDataOutputStream was closed,
//    leaking file handles and possibly losing buffered output.
// Copy with a fixed-size buffer loop and close both streams in finally
// (Hadoop 0.19 targets pre-Java-7, so no try-with-resources).
Path dst = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/file2.txt");
FileInputStream fis = new FileInputStream("E://HDFS/file1.txt");
FSDataOutputStream out = dfs.create(dst);
try {
    byte[] buf = new byte[4096];
    int n;
    while ((n = fis.read(buf)) > 0) {
        out.write(buf, 0, n);
    }
} finally {
    fis.close();
    out.close();
}
// Print an HDFS text file to stdout, line by line.
// FIX: FSDataInputStream has no readline() method — the original does not
// compile. The inherited DataInputStream.readLine() is deprecated (broken
// charset handling), so wrap the stream in a BufferedReader instead.
// Also close the reader in finally so the underlying HDFS stream is released.
Path src = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/file1.txt");
FSDataInputStream in = dfs.open(src);
BufferedReader reader = new BufferedReader(new InputStreamReader(in));
try {
    String line;
    while ((line = reader.readLine()) != null) {
        System.out.println(line);
    }
} finally {
    reader.close(); // closes the wrapped FSDataInputStream too
}
// Check whether the uploaded file exists in HDFS.
// FIX: the original checked ".../TestDirectory/HDFS/file1.txt", but file1.txt
// was copied into ".../TestDirectory/subDirectory/" earlier in this tutorial,
// so exists() would always print false. Point at the actual upload location.
Path src = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/file1.txt");
System.out.println(dfs.exists(src));
System.out.println(dfs.getDefaultBlockSize());
System.out.println(dfs.getDefaultReplication());
// Report whether the given HDFS path is a directory and whether it is a
// regular file (prints one boolean per line).
Path target = new Path(dfs.getWorkingDirectory() + "/TestDirectory/subDirectory/file1.txt");
boolean isDir = dfs.isDirectory(target);
boolean isFile = dfs.isFile(target);
System.out.println(isDir);
System.out.println(isFile);