Hadoop之Java客户端操作HDFS

public class HdfsUtil {

    /** NameNode endpoint shared by all tests (was duplicated inline five times). */
    private static final String HDFS_URI = "hdfs://192.168.2.100:9000";

    /** HDFS client handle; created in init(), released in cleanup(). */
    private FileSystem fs = null;

    @Before
    public void init() throws Exception {
        // Configuration picks up core-site.xml from the classpath automatically.
        Configuration conf = new Configuration();
        // Explicitly point the client at the target NameNode.
        conf.set("fs.defaultFS", HDFS_URI);
        // Obtain a concrete client instance, connecting as user "hadoop".
        fs = FileSystem.get(new URI(HDFS_URI), conf, "hadoop");
    }

    @After
    public void cleanup() throws IOException {
        // Close the client after each test so RPC connections are not leaked.
        if (fs != null) {
            fs.close();
        }
    }

    @Test
    // Upload a file by copying between raw streams.
    public void uploadByStream() throws IOException {
        Path path = new Path(HDFS_URI + "/user/hadoop/mark.txt");
        // try-with-resources guarantees both streams are closed (and the HDFS
        // output stream flushed to the NameNode) even if the copy throws.
        // The original leaked both streams.
        try (FSDataOutputStream dataOut = fs.create(path);
             FileInputStream dataIn = new FileInputStream("c://mark.txt")) {
            IOUtils.copy(dataIn, dataOut);
        }
    }

    @Test
    // Upload a file via the convenience API.
    public void uploadByMethods() throws IOException {
        fs.copyFromLocalFile(new Path("c://mark.txt"),
                new Path(HDFS_URI + "/user/hadoop/mark.txt"));
    }

    @Test
    // Download a file from HDFS to the local file system.
    public void download() throws IOException {
        fs.copyToLocalFile(new Path(HDFS_URI + "/user/hadoop/mark.txt"),
                new Path("d://JAVA"));
    }

    @Test
    // List files: first every file under "/" recursively, then the direct
    // children (files AND directories) of "/".
    public void listFiles() throws IOException {
        // The iterator must be parameterized: with the raw RemoteIterator the
        // original did not compile, because raw next() returns Object and
        // cannot be assigned to LocatedFileStatus without a cast.
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/"), true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            Path path = file.getPath();
            System.out.println(path.getName());
        }
        System.out.println("------------------");
        // listStatus is non-recursive and also returns directories.
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            String name = fileStatus.getPath().getName();
            System.out.println(name);
        }
    }

    @Test
    // Create a directory (parent directories are created as needed).
    public void mkdir() throws IOException {
        fs.mkdirs(new Path("/user/mark"));
    }

    @Test
    // Delete a file or directory; "true" enables recursive deletion of
    // non-empty directories.
    public void rm() throws IOException {
        fs.delete(new Path("/user/mark"), true);
    }

}

 

你可能感兴趣的:(Hadoop)