HDFS Java API

pom.xml

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>3.1.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>3.1.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.1.0</version>
    </dependency>
</dependencies>

HdfsOperate.java

package com.shaoyan.hdfs;

import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class HdfsOperate {

    private FileSystem fs;

    public HdfsOperate(FileSystem fs){
        this.fs = fs;
    }

    public void closeFS() throws IOException{
        fs.close();
    }

    // Create a directory
    public void makeDir(String dirName) throws IOException {
        Path path = new Path(dirName);
        FsPermission fsPermission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.READ_EXECUTE);
        fs.mkdirs(path, fsPermission);
    }
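
    // Note: the three FsAction arguments map to user/group/other, so the
    // directory above is created with mode 775 (rwxrwxr-x). An equivalent
    // sketch using the octal-string constructor (not used in the original):
    //   fs.mkdirs(path, new FsPermission("775"));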

    // Delete a directory
    public void delDir(String dirName) throws IOException {
        Path path = new Path(dirName);
        fs.delete(path, true); // true deletes recursively
    }

    // Write a file (fs.create overwrites an existing file by default)
    public void writeFile(String fileName, String content) throws IOException {
        Path path = new Path(fileName);
        FSDataOutputStream out = fs.create(path);
        out.write(content.getBytes(StandardCharsets.UTF_8)); // writeUTF would prepend a 2-byte length header
        out.close(); // close so the data is flushed to HDFS
    }

    // Read a file and print its contents
    public void readFile(String fileName) throws IOException {
        Path path = new Path(fileName);

        if (fs.exists(path)) {
            FSDataInputStream is = fs.open(path);
            FileStatus status = fs.getFileStatus(path);
            byte[] buffer = new byte[(int) status.getLen()];
            is.readFully(0, buffer);
            is.close();
            System.out.println(new String(buffer, StandardCharsets.UTF_8)); // buffer.toString() would only print the array reference
        }
    }
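
    // Alternative (a sketch, not in the original class): stream the file to
    // stdout with Hadoop's IOUtils instead of buffering it all in memory,
    // which is safer for large files.
    public void readFileStreaming(String fileName) throws IOException {
        try (FSDataInputStream in = fs.open(new Path(fileName))) {
            org.apache.hadoop.io.IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }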

    // Upload a local file to an HDFS directory
    public void uploadFile(String fileName, String targetDir) throws IOException {
        Path src = new Path(fileName);
        Path dst = new Path(targetDir);
        fs.copyFromLocalFile(src, dst);
    }
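
    // Counterpart (a sketch, not in the original class): download an HDFS
    // file to the local filesystem with FileSystem#copyToLocalFile.
    public void downloadFile(String fileName, String localDir) throws IOException {
        fs.copyToLocalFile(new Path(fileName), new Path(localDir));
    }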

    // Delete a file
    public void delFile(String fileName) throws IOException {
        Path path = new Path(fileName);
        fs.delete(path, true);
    }

    // Recursively list all files under a directory
    public void listAllFiles(String dirName) throws IOException {
        Path path = new Path(dirName);
        getFile(path,fs);
    }

    // Walk the tree rooted at path: print files, recurse into directories
    public void getFile(Path path, FileSystem fs) throws IOException {

        FileStatus[] fileStatus = fs.listStatus(path);
        for (int i = 0; i < fileStatus.length; i++) {
            if (fileStatus[i].isDirectory()) {
                getFile(fileStatus[i].getPath(), fs); // recurse
            } else {
                System.out.println(fileStatus[i].getPath().toString());
            }
        }
    }
}
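
Hadoop can also do the recursive walk itself: FileSystem#listFiles returns a RemoteIterator over every file below a path. A minimal sketch of a method that could be added to HdfsOperate (the method name is mine, not from the original):

    // Built-in recursive listing via FileSystem#listFiles (sketch)
    public void listAllFilesIterator(String dirName) throws IOException {
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(dirName), true);
        while (it.hasNext()) {
            System.out.println(it.next().getPath().toString());
        }
    }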

HdfsTest.java

package com.shaoyan.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsTest {

    public static void main(String[] args) throws URISyntaxException, IOException {
        Configuration configuration = new Configuration();
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem"); // required here, otherwise a "No FileSystem for scheme: hdfs" error is thrown
        URI uri = new URI("hdfs://hadoop-master:9000");
        FileSystem fs = FileSystem.get(uri, configuration);
        HdfsOperate hdfsOperate = new HdfsOperate(fs);

        //start test...
        hdfsOperate.makeDir("/lucy/java_test1");

        hdfsOperate.writeFile("/lucy/java_test1/hello.txt", "hello, hdfs...");

        hdfsOperate.readFile("/lucy/java_test1/hello.txt");

        hdfsOperate.uploadFile("/home/alice/alice.txt", "/lucy/java_test1");

        hdfsOperate.listAllFiles("/lucy");

        hdfsOperate.closeFS();
    }
}
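
Note that if any call above throws, closeFS() is skipped and the connection leaks. A safer variant (a sketch using the same classes) wraps the operations in try/finally:

        FileSystem fs = FileSystem.get(uri, configuration);
        HdfsOperate hdfsOperate = new HdfsOperate(fs);
        try {
            hdfsOperate.makeDir("/lucy/java_test1");
            // ... remaining operations ...
        } finally {
            hdfsOperate.closeFS(); // always release the FileSystem
        }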
