Hadoop命令学习-官网链接:
http://hadoop.apache.org/docs/r2.6.4/hadoop-project-dist/hadoop-common/FileSystemShell.html
--------------------------------------------------------------------------------------------------------------------------
一: 创建HDFS文件
public class Test4CreateFile { /** * 创建HDFS文件: * */ public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); byte[] buff = "Hello Hadoop HDFS".getBytes(); FileSystem fs = FileSystem.get(uri, conf); Path dfs = new Path("hdfs://192.168.226.129:9000/studyhadoop"); FSDataOutputStream outputStream = fs.create(dfs); outputStream.write(buff,0,buff.length); FileStatus files[] = fs.listStatus( dfs ); for( FileStatus file:files){ System.out.println( "file: " + file.getPath() ); } } catch (Exception e) { e.printStackTrace(); } } }
二:删除HDFS文件
public class Test5DeleteFile { /** * 删除HDFS文件: * */ public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); FileSystem fs = FileSystem.get(uri, conf); Path delef = new Path("hdfs://192.168.226.129:9000/testhadoop1"); boolean isDeleted = fs.delete(delef, false); System.out.println( "isDelete: " + isDeleted ); } catch (Exception e) { e.printStackTrace(); } } }
三:创建HDFS目录
public class Test9Mkdir { /** * HDFS下 创建目录文件 * */ public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); FileSystem fs = FileSystem.get(uri, conf); Path dfs = new Path("hdfs://192.168.226.129:9000/testhadoop"); boolean isMkdirs = fs.mkdirs(dfs); if( isMkdirs ){ System.out.println( " Make Dir Successful ! "); }else{ System.out.println( " Make Dir Failure ! "); } fs.close(); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (URISyntaxException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } }
四:重命名HDFS文件
public class Test2Rename { /** * 重命名HDFS文件: * */ public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); FileSystem fs = FileSystem.get(uri,conf); Path oldpath = new Path("hdfs://192.168.226.129:9000/testhadoop"); Path newpath = new Path("hdfs://192.168.226.129:9000/testhadoop1"); //判断该文件是否存在 boolean isExists = fs.exists(oldpath); System.out.println( "isExists: " +isExists ); //重命名文件 fs.rename(oldpath, newpath); isExists = fs.exists(newpath); System.out.println( "newpathisExists: " +isExists ); } catch (Exception e) { e.printStackTrace(); } } }
五:上传本地文件至HDFS
public class Test3CopyFile { /** * 上传本地文件到HDFS * */ public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); FileSystem fs = FileSystem.get(uri, conf); Path src = new Path("F:\\04-HadoopStudy\\mapreduce.txt"); Path dst = new Path("hdfs://192.168.226.129:9000/rootdir"); fs.copyFromLocalFile(src, dst); System.out.println("Upload " + conf.get("fs.default.name")); FileStatus files[] = fs.listStatus( dst ); for( FileStatus file:files){ System.out.println( file.getPath() ); } } catch (Exception e) { e.printStackTrace(); } } }
六: 从HDFS下载文件至本地
public class Test10CopyToFile { /** * 从HDFS下载文件至本地 * */ public static void main(String[] args) { try { Configuration conf = new Configuration(); URI uri = new URI("hdfs://192.168.226.129:9000"); FileSystem fs = FileSystem.get(uri, conf); Path src = new Path("F:\\"); Path dst = new Path("hdfs://192.168.226.129:9000/studyhadoop"); fs.copyToLocalFile(dst, src); System.out.println("DownLoad " + conf.get("fs.default.name")); FileStatus files[] = fs.listStatus( dst ); for( FileStatus file:files){ System.out.println( file.getPath() ); } } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (URISyntaxException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } }