Big Data: The HDFS API
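This post collects basic usage of the HDFS Java client API: uploading, downloading, creating directories, deleting, and reading files. The first example runs as a standalone main method; the rest are JUnit tests that share a FileSystem instance created in an @Before hook.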

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
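import org.apache.hadoop.fs.FSDataOutputStream; // for the write sketch added below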
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.BasicConfigurator;
import org.junit.Before;
import org.junit.Test;

public class HdfsApi {
    public static void main(String[] args) throws Exception {
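        // minimal log4j setup so the Hadoop client's log output is visible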
        BasicConfigurator.configure();

        // Upload a file to HDFS
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "1");
        conf.set("dfs.blocksize", "64m");
        
        FileSystem fs = FileSystem.get(new URI("hdfs://10.112.1.71:9000/"), conf, "root");

        fs.copyFromLocalFile(new Path("D:/Tmp/test/1.txt"), new Path("/"));
        fs.close();
    }
    
    FileSystem fs = null;
    @Before   // runs before each unit test
    public void init() throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "1");
        conf.set("dfs.blocksize", "64m");
        fs = FileSystem.get(new URI("hdfs://10.112.1.71:9000/"), conf, "root");
    }
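
    // Note: the tests below each call fs.close(), which is safe because
    // @Before recreates the fs field before every test method runs.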
    
    // Create a directory in HDFS
    @Test
    public void testMkdir() throws Exception {
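        // mkdirs also creates any missing parent directories, like mkdir -p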
        fs.mkdirs(new Path("/home/xie"));
        fs.close();
    }

    // Download a file from HDFS
    @Test     // a unit test
    public void testGet() throws Exception {
        fs.copyToLocalFile(new Path("/1.txt"), new Path("d:/Tmp/down.txt"));
        fs.close();
    }

    // Delete a file from HDFS; the second argument enables recursive
    // deletion, which is required for non-empty directories
    @Test
    public void testDelete() throws Exception {
        boolean del = fs.delete(new Path("/1.txt"), true);
        System.out.println("deleted: " + del);
        fs.close();
    }

    // Read a file from HDFS and stream its contents to stdout
    @Test
    public void testCat() throws Exception {
        FSDataInputStream in = fs.open(new Path("/1.txt"));
        IOUtils.copyBytes(in, System.out, 1024); // 1024-byte copy buffer
        in.close();
        fs.close();
    }
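
    // An additional sketch, not in the original post: writing a file to HDFS
    // through the stream API. fs.create returns an FSDataOutputStream; the
    // path /hello.txt is just an illustrative example.
    @Test
    public void testWrite() throws Exception {
        FSDataOutputStream out = fs.create(new Path("/hello.txt"), true); // true = overwrite
        out.write("hello hdfs".getBytes("UTF-8"));
        out.close();
        fs.close();
    }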

}
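
For completeness, here is a minimal standalone sketch (not from the original post) that lists the files under a directory with FileSystem.listFiles; the NameNode address and user are assumed to be the same as in the examples above.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class HdfsList {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://10.112.1.71:9000/"), conf, "root");
        // listFiles(path, recursive) iterates over files (not directories)
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/"), true);
        while (it.hasNext()) {
            LocatedFileStatus status = it.next();
            System.out.println(status.getPath() + "  " + status.getLen() + " bytes");
        }
        fs.close();
    }
}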
