HDFS Programming Basics

1) Upload any text file to HDFS. If the specified file already exists in HDFS, let the user decide whether to append to the end of the existing file or to overwrite it: -put -f overwrites, -appendToFile appends.
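A minimal sketch of the two choices, assuming the local file is local.txt and the HDFS target is text.txt (both placeholder names):

hadoop fs -put -f local.txt text.txt              # user chooses to overwrite
hadoop fs -appendToFile local.txt text.txt        # user chooses to append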

2) Download a specified file from HDFS. If a local file with the same name already exists, automatically rename the downloaded file;

# if the local text.txt already exists, download it as text2.txt; otherwise keep the original name
if $(hadoop fs -test -e file:///home/fenghao/text.txt);
then $(hadoop fs -copyToLocal text.txt ./text2.txt);
else $(hadoop fs -copyToLocal text.txt ./text.txt);
fi

3) Print the contents of a specified HDFS file to the terminal: cat
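For example, assuming the file /user/hadoop/text.txt exists in HDFS:

hadoop fs -cat /user/hadoop/text.txt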

4) Show the read/write permissions, size, creation time, path and other information of a specified HDFS file: ls
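For example (the path is a placeholder; -h prints the size in a human-readable form):

hadoop fs -ls -h /user/hadoop/text.txt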

5) Given a directory in HDFS, recursively list the read/write permissions, size, creation time, path and other information of every file under it: ls -R (the old lsr form is deprecated)
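For example, assuming the directory /user/hadoop exists:

hadoop fs -ls -R -h /user/hadoop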

6) Given the path of a file inside HDFS, create and delete that file. If the directory containing the file does not exist, create the directory automatically: mkdir -p

if $(hadoop fs -test -d "dirpath"); then $(hadoop fs -touchz "filepath"); else $(hadoop fs -mkdir -p "dirpath" && hadoop fs -touchz "filepath"); fi
hadoop fs -rm "filepath"          # delete the file

7) Given the path of a directory in HDFS, create and delete that directory. When creating, if the parent directory does not exist, create it automatically; when deleting, remove the directory only if it is empty, otherwise leave it alone: mkdir -p

hadoop fs -mkdir -p "dirpath"     # create, including any missing parent directories
hadoop fs -rmdir "dirpath"        # delete; -rmdir refuses to remove a non-empty directory

8) Append content to a specified file in HDFS; the user decides whether the content goes at the beginning or at the end of the file. Appending to the end: appendToFile <localsrc> <dst>

Appending to the beginning takes three steps: 1. copyToLocal the HDFS file; 2. cat the downloaded copy onto the end of the local file that holds the new content, so the new content comes first; 3. copyFromLocal -f the merged file back to overwrite the HDFS file, as sketched below.
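A minimal sketch of both cases, assuming the new content is in the local file local.txt and the HDFS target is text.txt (placeholder names; note that the prepend case modifies local.txt on the local disk):

hadoop fs -appendToFile local.txt text.txt        # append to the end
hadoop fs -copyToLocal text.txt ./text.hdfs.txt   # append to the beginning: fetch the original
cat ./text.hdfs.txt >> local.txt                  # new content first, original content after it
hadoop fs -copyFromLocal -f local.txt text.txt    # overwrite the HDFS file with the merged result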

9) Delete a specified file from HDFS: rm
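For example, assuming /user/hadoop/text.txt exists:

hadoop fs -rm /user/hadoop/text.txt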

10) Move a file in HDFS from a source path to a destination path: mv
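For example (both paths are placeholders):

hadoop fs -mv /user/hadoop/text.txt /user/hadoop/input/text.txt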

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class HDFS {
 public static FileSystem fs;
  public static void init(){
   Configuration conf = new Configuration();
   conf.set("fs.defaultFS", "hdfs://localhost:9000");
   //conf.set("dfs.replication","1");
   try {
     fs = FileSystem.get(conf);
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }
  }
  public static void uploads(String localFile,String remoteFile) throws IOException{
         //assume /user/hadoop/input already exists; if not, run: hadoop fs -mkdir -p /user/hadoop/input
         init();
         //String localFileName = "/home/hadoop/myfile.txt";
         //String remoteFileName = "/user/hadoop/input/myfile.txt";
         Path localFilePath = new Path(localFile);
         Path remoteFilePath = new Path(remoteFile);
         if(!fs.exists(remoteFilePath)) {   //the remote file does not exist
         System.out.println("remote don't have this file");
           fs.copyFromLocalFile(localFilePath, remoteFilePath);
           System.out.println("success upload");
         }else {   //the remote file already exists
           System.out.println("already exists:1 cover ;2  append ");
           int choice = 0;
           Scanner input  = new Scanner(System.in);
           choice = input.nextInt();
           switch(choice) {
           case 1:fs.copyFromLocalFile(false, true, localFilePath,remoteFilePath);
              System.out.println("successful cover");break;
           case 2:FSDataOutputStream out = fs.append(remoteFilePath);
               FileInputStream in = new FileInputStream(localFile); //byte stream
             byte buff[] = new byte[1024];
             int read = -1;
             while((read =in.read(buff))>0) {  //in.read() returns the number of bytes read
              out.write(buff, 0, read);
             }
             out.close();
             in.close();
             System.out.println("successful append");break;
            default:System.out.println("??what r u doing");break;
            }
           }
           
        }
        public static void downloads(String localFile,String remoteFile) throws IllegalArgumentException, IOException{
         init();
         Path remoteFilePath = new Path(remoteFile);
         Path localFilePath=new Path(localFile);
         File fi=new File(localFile);
         if(!fi.exists()){
          System.out.println("don't exists");
          //fs.copyToLocalFile(remoteFilePath,localFilePath);
          fs.copyToLocalFile(false, remoteFilePath, localFilePath, true);
          System.out.println("success download");
         }
         else{
         System.out.println("exists change name");
          int num=0;
          String tmpfile=localFile;
          while(true){
           //try to get anew name
           tmpfile=localFile+""+num;
           fi=new File(tmpfile);
           if(fi.exists()){
            num++;
            }
           else{
            fs.copyToLocalFile(new Path(remoteFile), new Path(tmpfile));
            System.out.println("download success");
            break;
           }
          }
         }
        }
        public static void HDFS2Terimal(String remoteFile) throws IOException {
          init();
          Path remotePath=new Path(remoteFile);
                FSDataInputStream inputStream = fs.open(remotePath);
                BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    System.out.println(line);
                    }
                }
                public static void outchown(String remoteFile) throws  IOException{
         init();
         Path remotePath=new Path(remoteFile);
         FileStatus[] fsta=fs.listStatus(remotePath);
         for(FileStatus s:fsta){
          System.out.println("file size "+s.getBlockSize());
          System.out.println("file permission "+s.getPermission());
          System.out.println("file path "+s.getPath());
          System.out.println("file time stamp "+s.getModificationTime());
         }
        }
        public static void diguioutchown(String remoteDir) throws IOException{
         //no need to use chown method
         init();
         Path remoteDirPath=new Path(remoteDir);
         RemoteIterator<LocatedFileStatus> it =fs.listFiles(remoteDirPath, true);
         while(it.hasNext()){
         FileStatus s=it.next();
          System.out.println("file size "+s.getBlockSize());
          System.out.println("file permission "+s.getPermission());
          System.out.println("file path "+s.getPath());
          System.out.println("file time stamp "+s.getModificationTime());
         }
        }
        public static void createfile(String remoteFile) throws IOException{
         init();
         int index=remoteFile.lastIndexOf("/");
         String remoteDir=remoteFile.substring(0,index);
         Path remoteDirPath=new Path(remoteDir);
         Path remotePath=new Path(remoteFile);
         if(!fs.exists(remoteDirPath)){
         System.out.println("auto mkdir");
          fs.mkdirs(remoteDirPath);
         }
         else{
          System.out.println("already have dir");
         }
         //createfile
         FSDataOutputStream outt=fs.create(remotePath);
         outt.close();
         System.out.println("success");
        }
        public static void creatdir(String remoteDir) throws IOException{
         init();
         Path remoteDirPath=new Path(remoteDir);
         if(!fs.exists(remoteDirPath)){
          System.out.println("auto mkdir dir");
          fs.mkdirs(remoteDirPath); //create the directory and any missing parents
         }
         else{
          FileStatus []fsta=fs.listStatus(remoteDirPath);
          if(fsta.length==0){
           //empty directory: delete it
           fs.delete(remoteDirPath,false);
           System.out.println("empty dir deleted");
          }
          else{
           //not empty: leave it alone
           System.out.println("dir not empty, do nothing");
          }
         }
        }
        public static void addmore(String remoteFile1,String remoteFile2) throws IOException{
         init();
         Path remotePath1=new Path(remoteFile1);
         Path remotePath2=new Path(remoteFile2);
         if(fs.exists(remotePath1)&&fs.exists(remotePath2)){
         System.out.println("input '1' for file1+file2 and '2' for file2+file1");
          Scanner sc=new Scanner(System.in);
          int choice=sc.nextInt();
          switch(choice){
          case 1:{
           //move to local first ...
           downloads("/usr/local/justtest/ooooooohhhhhhh.txt", remoteFile2);
           FileInputStream in=new FileInputStream("/usr/local/justtest/ooooooohhhhhhh.txt");
           File f=new File("/usr/local/justtest/ooooooohhhhhhh.txt");
           FSDataOutputStream out=fs.append(remotePath1);
           byte Btmp[]=new byte[1024];
           int len=0;
           while((len=in.read(Btmp))>0){
            out.write(Btmp,0,len);
            }
           out.close();
           in.close();
           f.delete();
          }
          break;
          case 2:{
           //move to local first ...
           downloads("/usr/local/justtest/ooooooohhhhhhh.txt", remoteFile1);
           FileInputStream in=new FileInputStream("/usr/local/justtest/ooooooohhhhhhh.txt");
           File f=new File("/usr/local/justtest/ooooooohhhhhhh.txt");
           FSDataOutputStream out=fs.append(remotePath2);
           byte Btmp[]=new byte[1024];
           int len=0;
           while((len=in.read(Btmp))>0){
            out.write(Btmp,0,len);
           }
           out.close();
           in.close();
           f.delete();
          }
          break;
          default:{
           System.out.println("what r u doing");
          }
          }
         }
        }
        public static void deletefile(String remoteFile) throws IOException{
         init();
         Path remotePath=new Path(remoteFile);
         if(!fs.exists(remotePath)){
          System.out.println("don't exists");
         }
         else{
          fs.delete(remotePath,false); //non-recursive delete of a single file
          System.out.println("success delete");
         }
        }
        public static void movefile(String srcFile,String aimFile) throws IOException{
            init();
            Path srcPath=new Path(srcFile);
            Path aimPath=new Path(aimFile);
            if(fs.exists(srcPath)){
             if(fs.rename(srcPath, aimPath)){
              System.out.println("success");
             }
             else{
             System.out.println("fail maybe a file have the same name");
             }
            }
            else{
             System.out.println("src don't exist");
            }
        }
         public static void main(String[]args){
         String localFile1="/usr/local/justtest/hello.txt";
         String localFile2="/usr/local/justtest/world.txt";
         String remoteFile1="/user/hadoop/hello.txt";
         String remoteFile2="/remojusttest/world.txt";
         String remotePath="/remojusttest";
         try{
          uploads(localFile1,remoteFile1);
          downloads(localFile1,remoteFile1);
          HDFS2Terimal(remoteFile1);
          outchown(remoteFile1);
          diguioutchown(remotePath);
          createfile(remoteFile2);
          creatdir(remotePath);
          addmore(remoteFile1,remoteFile2);
          deletefile(remoteFile1);
          movefile(remoteFile2,remoteFile1);
          }catch(Exception e){
          e.printStackTrace();
         }
        }
}
