Reading HDFS Files with the Java API

package com.company;

/**
 * @Author zhaoxin
 * @Email [email protected]
 * @Description // TODO: watch out for HDFS permission issues
 * @Date 2018/10/11
 **/

import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;

/**
 * HDFS operations via the Java API
 */
public class MyHadoop {
    public static void main(String args[]) {
    }

    @Test
    public void readFile() throws Exception {
        // Install the stream handler factory so java.net.URL understands the
        // hdfs:// scheme; note that this factory can only be set once per JVM
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        URL url = new URL("hdfs://192.168.136.128:9000/input/data.txt");
        URLConnection connection = url.openConnection();
        InputStream inputStream = connection.getInputStream();
        // available() only returns an *estimate* of the remaining bytes, so
        // copy the stream chunk by chunk instead of sizing one buffer from it
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] b = new byte[1024];
        int len;
        while ((len = inputStream.read(b)) != -1) {
            out.write(b, 0, len);
        }
        inputStream.close();
        System.out.println(new String(out.toByteArray()));
    }
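
    /***
     * The same URL-based read in the compact form common in Hadoop examples:
     * IOUtils.copyBytes streams straight to stdout and closes the input for
     * us. A sketch, not from the original post; it assumes the stream handler
     * factory above has already been installed (it can only be set once per
     * JVM, which is why this helper is not a separate @Test).
     */
    public void readFileWithIOUtils() throws Exception {
        InputStream in = new URL("hdfs://192.168.136.128:9000/input/data.txt").openStream();
        // 4096-byte buffer; the final true closes the input stream when done
        IOUtils.copyBytes(in, System.out, 4096, true);
    }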

    /***
     * Read a file via the Hadoop FileSystem API
     */
    @Test
    public void readFileByAPI() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        Path path = new Path("/input/data.txt");

        FSDataInputStream fsDataInputStream = fileSystem.open(path);
        byte[] bytes = new byte[1024];
        int len = -1;
        ByteArrayOutputStream stream = new ByteArrayOutputStream();

        while ((len = fsDataInputStream.read(bytes)) != -1) {
            stream.write(bytes, 0, len);
        }
        fsDataInputStream.close();
        stream.close();
        System.out.println(new String(stream.toByteArray()));

    }

    /***
     * Read a file via the Hadoop API, copying the stream with IOUtils
     */
    @Test
    public void readFileByAPI2() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        Path path = new Path("/input/data.txt");
        FSDataInputStream fsDataInputStream = fileSystem.open(path);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        // copyBytes drains the whole input stream with a 1024-byte buffer
        IOUtils.copyBytes(fsDataInputStream, stream, 1024);
        fsDataInputStream.close();
        System.out.println(new String(stream.toByteArray()));

    }
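
    /***
     * A minimal variant of the read above using try-with-resources, so both
     * streams are closed even when an exception is thrown. This is a sketch,
     * not from the original post; it assumes the same cluster address.
     */
    @Test
    public void readFileByAPI3() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        try (FSDataInputStream in = fileSystem.open(new Path("/input/data.txt"));
             ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            IOUtils.copyBytes(in, out, 1024);
            System.out.println(new String(out.toByteArray()));
        }
    }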

    /***
     * Create a directory
     */
    @Test
    public void mkdir() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        fileSystem.mkdirs(new Path("/input/input2/"));
    }
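
    /***
     * mkdirs is idempotent, but FileSystem#exists lets you check for the
     * directory first. A minimal sketch, not from the original post; it
     * assumes the same cluster address as the other tests.
     */
    @Test
    public void mkdirIfMissing() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        Path dir = new Path("/input/input2/");
        // only create the directory when it is not already there
        if (!fileSystem.exists(dir)) {
            fileSystem.mkdirs(dir);
        }
    }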

    /***
     * Create a file on HDFS and write to it (put)
     */
    @Test
    public void put() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/input/input2/a.txt"));
        fsDataOutputStream.write("helloworld".getBytes());
        fsDataOutputStream.close();
    }
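
    /***
     * Uploading an existing local file instead of writing bytes by hand,
     * using FileSystem#copyFromLocalFile. A sketch, not from the original
     * post; the local path /tmp/local.txt is a hypothetical example.
     */
    @Test
    public void putLocalFile() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        // copy /tmp/local.txt from the local disk into /input/input2/ on HDFS
        fileSystem.copyFromLocalFile(new Path("/tmp/local.txt"), new Path("/input/input2/"));
    }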

    /***
     * Delete a directory recursively
     */
    @Test
    public void del() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        fileSystem.delete(new Path("/input/input2/"), true);
    }
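
    /***
     * FileSystem#delete returns false when the path does not exist, so
     * checking the return value makes failures visible. A sketch, not from
     * the original post; same cluster address assumed.
     */
    @Test
    public void delChecked() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        boolean deleted = fileSystem.delete(new Path("/input/input2/"), true);
        System.out.println("deleted: " + deleted);
    }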

    /***
     * Recursively list a directory
     */
    @Test
    public void listRecursively() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        // listFiles recurses on its own when the second argument is true;
        // parameterize the iterator so no cast is needed on next()
        RemoteIterator<LocatedFileStatus> iterator = fileSystem.listFiles(new Path("/input/"), true);
        while (iterator.hasNext()) {
            LocatedFileStatus status = iterator.next();
            Path filePath = status.getPath();
            String fileName = filePath.getName();
            System.out.println(fileName);
        }
        System.out.println("---------------------------------");

        // listStatus returns both files and directories, but does not recurse by itself
        FileStatus[] listStatus = fileSystem.listStatus(new Path("/input"));
        recursionFile(fileSystem, listStatus);
    }
    public void recursionFile(FileSystem fileSystem, FileStatus[] listStatus) throws Exception {
        if (listStatus != null) {
            for (FileStatus status : listStatus) {
                if (status.isDirectory()) {
                    // descend using the FileSystem handle that was passed in,
                    // rather than re-creating a Configuration on every call
                    FileStatus[] children = fileSystem.listStatus(status.getPath());
                    recursionFile(fileSystem, children);
                } else {
                    System.out.println(status.getPath());
                    System.out.println("fileName: " + status.getPath().getName());
                }
            }
        }
    }
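
    /***
     * FileStatus also carries metadata beyond the path. A short sketch that
     * prints length, replication and modification time for each entry under
     * /input; the field choice is illustrative, not from the original post.
     */
    @Test
    public void listFileDetails() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.136.128:9000/");
        FileSystem fileSystem = FileSystem.get(conf);
        for (FileStatus status : fileSystem.listStatus(new Path("/input"))) {
            System.out.println(status.getPath()
                    + " len=" + status.getLen()
                    + " replication=" + status.getReplication()
                    + " mtime=" + status.getModificationTime());
        }
    }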
}
