51cto赵强HADOOP学习(二)

hdfs常用命令

（图：HDFS 常用命令示意图）
#cd training/data
#start-dfs.sh
#hdfs dfs -help
#hdfs dfs -help ls
#hdfs dfs -ls /
#hdfs dfs -lsr /
#hdfs dfs -du /
#hdfs dfs -dus /
#hdfs dfs -count /input
#hdfs dfs -mv /input/data.txt /output/data.txt
#hdfs dfs -lsr /
#hdfs dfs -cp /output/data.txt /input/data.txt
#hdfs dfs -lsr /
#hdfs dfs -rmr /output
#hdfs dfs -lsr /
# hdfs dfs -put apache-tomcat-7.0.65.tar.gz /input/tomcat.tar.gz
#hdfs dfs -lsr /
# hdfs dfs -get /input/tomcat.tar.gz a.tar.gz
#ll
#hdfs dfs -cat /input/data.txt
#hdfs dfs -text /input/data.txt
#hdfs dfs -mkdir /logs
#hdfs dfs -ls /
#hdfs dfs -touchz /logs/a.log
#hdfs dfs -lsr /

java编程案例

@Test
    public void testMakeDir() throws Exception {
        Configuration conf = new Configuration();

        // Obtain a FileSystem handle for the remote HDFS cluster.
        // try-with-resources guarantees the connection is released even if
        // mkdirs throws (the original never closed the FileSystem).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.56.102:9000"), conf)) {
            // Create the target directory. mkdirs() returns false when the
            // directory could not be created; surface that instead of ignoring it.
            if (!fs.mkdirs(new Path("/demo"))) {
                throw new IllegalStateException("Failed to create HDFS directory /demo");
            }
        }
    }

设置权限

#pwd
/root/training/hadoop-2.4.1/etc/hadoop
#vi hdfs-site.xml

<property>
    <name>dfs.permissions</name>
    <value>false</value>
</property>
#start-dfs.sh
#jps
@Test
    public void testUpload() throws Exception {
        Configuration conf = new Configuration();

        // Upload local file d:\temp\a.avi to HDFS path /demo/a.avi.
        // try-with-resources closes out, in, and fs in reverse order even when
        // the copy fails mid-way (the original leaked both streams on any
        // I/O error, because close() was only reached on the happy path).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.56.102:9000"), conf);
             InputStream in = new FileInputStream("d:\\temp\\a.avi");
             OutputStream out = fs.create(new Path("/demo/a.avi"))) {

            // Manual buffered copy, 1 KiB at a time.
            byte[] buffer = new byte[1024];
            int len;
            while ((len = in.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
            out.flush();
        }
    }
#hdfs dfs -lsr /
@Test
    public void testUpload() throws Exception {
        Configuration conf = new Configuration();

        // Upload local file d:\temp\a.avi to HDFS path /demo/b.avi using the
        // IOUtils helper for the byte-copy loop.
        //
        // NOTE: IOUtils.copy does NOT close its arguments, so the streams must
        // be closed explicitly — the original left all close() calls commented
        // out and therefore leaked the local stream, the HDFS stream, and the
        // FileSystem connection on every run. try-with-resources fixes that
        // and also flushes/finalizes the HDFS file on close.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.56.102:9000"), conf);
             InputStream in = new FileInputStream("d:\\temp\\a.avi");
             OutputStream out = fs.create(new Path("/demo/b.avi"))) {

            IOUtils.copy(in, out);
        }
    }
#hdfs dfs -lsr /demo
@Test
    public void testDownload() throws Exception {
        Configuration conf = new Configuration();

        // Download HDFS file /demo/a.avi to local path d:\temp\abc.avi.
        // try-with-resources closes out, in, and fs in reverse order even when
        // the copy fails mid-way (the original only closed the streams on the
        // happy path and leaked them on any I/O error).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.56.102:9000"), conf);
             InputStream in = fs.open(new Path("/demo/a.avi"));
             OutputStream out = new FileOutputStream("d:\\temp\\abc.avi")) {

            // Manual buffered copy, 1 KiB at a time.
            byte[] buffer = new byte[1024];
            int len;
            while ((len = in.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
            out.flush();
        }
    }

@Test
    public void testDownload() throws Exception {
        Configuration conf = new Configuration();

        // Download HDFS file /demo/a.avi to local path d:\temp\xyz.avi using
        // the IOUtils helper for the byte-copy loop.
        //
        // NOTE: IOUtils.copy does NOT close its arguments, so the streams must
        // be closed explicitly — the original left all close() calls commented
        // out and therefore leaked the HDFS stream, the local file handle, and
        // the FileSystem connection on every run. try-with-resources fixes that.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.56.102:9000"), conf);
             InputStream in = fs.open(new Path("/demo/a.avi"));
             OutputStream out = new FileOutputStream("d:\\temp\\xyz.avi")) {

            IOUtils.copy(in, out);
        }
    }

你可能感兴趣的:(51cto赵强HADOOP学习(二))