Operating HDFS with the Java API

Repository configuration in pom.xml


    
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>

Dependency configuration in pom.xml


    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.0</version>
    </dependency>

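For reference, a minimal pom.xml skeleton that the two snippets above slot into might look like the following; the project coordinates (com.fengqing:hdfs-demo:1.0-SNAPSHOT) are placeholder assumptions, not part of the original post:

<project xmlns="http://maven.apache.org/POM/4.0.0">
    <modelVersion>4.0.0</modelVersion>
    <!-- placeholder coordinates for this sample project -->
    <groupId>com.fengqing</groupId>
    <artifactId>hdfs-demo</artifactId>
    <version>1.0-SNAPSHOT</version>

    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.6.0</version>
        </dependency>
    </dependencies>
</project>
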
HdfsUtil.java

package com.fengqing;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;

public class HdfsUtil {

    private FileSystem fileSystem;

    //constructor
    public HdfsUtil () {
        init();
    }

    //initialization: obtain a FileSystem handle for the NameNode as user "root"
    private void init () {
        try {
            Configuration configuration = new Configuration();
            //configuration.set("fs.defaultFS", "hdfs://192.168.126.130:8082");
            fileSystem = FileSystem.get(new URI("hdfs://192.168.126.130:8082"), configuration, "root");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    //create a directory on HDFS
    public void mkdirs () throws Exception{
        fileSystem.mkdirs(new Path("/fengqing3"));
        //fileSystem.close();
    }

    //create a file on HDFS and write a string to it
    public void createFile () throws Exception{
        FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/fengqing/hello.txt"));
        fsDataOutputStream.write("hello fengqing".getBytes());
        fsDataOutputStream.flush();
        fsDataOutputStream.close();
    }

    //print the contents of a file on HDFS
    public void cat () throws Exception{
        FSDataInputStream fsDataInputStream = fileSystem.open(new Path("/fengqing/hello.txt"));
        IOUtils.copyBytes(fsDataInputStream, System.out, 1024);
    }

    //rename a file on HDFS
    public void rename () throws Exception{
        Path srcPath = new Path("/fengqing/hello.txt");
        Path dstPath = new Path("/fengqing/world.txt");
        fileSystem.rename(srcPath, dstPath);
    }

    //upload a local file to HDFS
    public void copyFromLocalFile () throws Exception {
        Path srcPath = new Path("G:/mm.txt");
        Path dstPath = new Path("/fengqing/mm.txt");
        fileSystem.copyFromLocalFile(srcPath, dstPath);
    }

    //upload a local file to HDFS (with progress reporting)
    public void copyFromLocalFileWithProgress () throws Exception {
        InputStream inputStream = new BufferedInputStream(new FileInputStream("G:/mm.txt"));
        FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/fengqing/mm.txt"),
                new Progressable() {//anonymous inner class, invoked periodically as data is written
            @Override
            public void progress() {
                System.out.print(".");
            }
        });
        IOUtils.copyBytes(inputStream, fsDataOutputStream, 1024, true);//close both streams once the copy completes
    }

    //download a file from HDFS
    public void copyToLocalFile () throws Exception{
        Path srcPath = new Path("/fengqing/mm.txt");
        Path dstPath = new Path("G:/mm.txt");
        fileSystem.copyToLocalFile(false, srcPath, dstPath, true);//don't delete the source; use the raw local file system so no .crc checksum file is written
    }

    //list the files and directories under a given path
    public void ls () throws Exception{
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            String type = fileStatus.isDirectory() ? "directory" : "file";
            FsPermission permission = fileStatus.getPermission();
            String owner = fileStatus.getOwner();
            Path path = fileStatus.getPath();
            long len = fileStatus.getLen();
            System.out.println("type:" + type);
            System.out.println("permission:" + permission);
            System.out.println("owner:" + owner);
            System.out.println("path:" + path);
            System.out.println("len:" + len);
        }
    }

    //delete a file or directory on HDFS
    public void delete () throws Exception {
        fileSystem.delete(new Path("/fengqing"), true);//true enables recursive deletion
    }
}
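
A minimal sketch of how HdfsUtil might be driven end to end; the driver class name HdfsUtilDemo and the call order are assumptions added here for illustration, not part of the original post:

package com.fengqing;

//hypothetical driver class exercising HdfsUtil; adjust paths and calls to your environment
public class HdfsUtilDemo {

    public static void main(String[] args) throws Exception {
        HdfsUtil hdfsUtil = new HdfsUtil();

        hdfsUtil.mkdirs();                          //create /fengqing3
        hdfsUtil.createFile();                      //write /fengqing/hello.txt
        hdfsUtil.cat();                             //print its contents
        hdfsUtil.rename();                          //rename it to /fengqing/world.txt
        hdfsUtil.copyFromLocalFile();               //upload G:/mm.txt
        hdfsUtil.copyFromLocalFileWithProgress();   //upload again, printing dots as progress
        hdfsUtil.copyToLocalFile();                 //download it back to G:/mm.txt
        hdfsUtil.ls();                              //list the root directory
        hdfsUtil.delete();                          //remove /fengqing recursively
    }
}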

 
