HDFS Single Node Java API

Accessing a single-node HDFS cluster through the Hadoop Java API: first the Maven pom.xml, then a set of JUnit tests covering common file-system operations.

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.lihaoze</groupId>
    <artifactId>hadoop</artifactId>
    <version>1.0.0</version>
    <packaging>jar</packaging>

    <name>hadoop</name>
    <url>http://maven.apache.org</url>

    <properties>
        <jdk.version>1.8</jdk.version>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <maven.test.failure.ignore>true</maven.test.failure.ignore>
        <maven.test.skip>true</maven.test.skip>
    </properties>
    <dependencies>
        <!-- JUnit 5 API -->
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>5.10.0</version>
            <scope>test</scope>
        </dependency>
        <!-- JUnit 5 engine -->
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>5.10.0</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.28</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>2.20.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.3.5</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>31.1-jre</version>
        </dependency>
        <!-- Object pooling -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-pool2</artifactId>
            <version>2.11.1</version>
        </dependency>
        <dependency>
            <groupId>com.janeluo</groupId>
            <artifactId>ikanalyzer</artifactId>
            <version>2012_u6</version>
        </dependency>
        <dependency>
            <groupId>com.github.binarywang</groupId>
            <artifactId>java-testdata-generator</artifactId>
            <version>1.1.2</version>
        </dependency>

    </dependencies>
    <build>
        <finalName>${project.artifactId}</finalName>

        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.11.0</version>
                <configuration>
                    <!-- Source file encoding -->
                    <encoding>UTF-8</encoding>
                    <!-- JDK source/target level -->
                    <source>${jdk.version}</source>
                    <target>${jdk.version}</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-clean-plugin</artifactId>
                <version>3.2.0</version>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-resources-plugin</artifactId>
                <version>3.3.1</version>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
                <version>3.3.2</version>
            </plugin>

            <!-- Skip unit tests during the Maven build; run them from the IDE instead -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.22.2</version>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

HdfsTest.java

package com.lihaozhe.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsTest {

    @Test
    public void test01() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // Check whether the directory /lihaozhe exists under the HDFS root
            String result = fs.exists(new Path("/lihaozhe")) ? "directory exists" : "directory does not exist";
            // Print the result
            System.out.println(result);
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void test02() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // List all entries directly under the HDFS root
            FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
            // Print each path
            for (FileStatus fileStatus : fileStatuses) {
                System.out.println(fileStatus.getPath());
            }
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }
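
    // A hedged variant of test02 (not in the original post): recursively list
    // regular files under the root with listFiles(path, true), which returns a
    // RemoteIterator of LocatedFileStatus. Fully qualified names are used here
    // so the example needs no extra imports.
    @Test
    public void test02Recursive() {
        try {
            URI uri = new URI("hdfs://hadoop:9000/");
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(uri, conf, "lhz");
            // 'true' recurses into subdirectories; only files are returned
            org.apache.hadoop.fs.RemoteIterator<org.apache.hadoop.fs.LocatedFileStatus> it =
                    fs.listFiles(new Path("/"), true);
            while (it.hasNext()) {
                org.apache.hadoop.fs.LocatedFileStatus status = it.next();
                System.out.println(status.getPath() + " (" + status.getLen() + " bytes)");
            }
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }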

    @Test
    public void test03() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // Directory to create
            Path path = new Path("/xiaoshuo");
            // Create the directory if it does not exist yet
            if (!fs.exists(path)) {
                System.out.println(fs.mkdirs(path) ? "created successfully" : "creation failed");
            } else {
                System.out.println(path.getName() + " already exists; no need to create it again");
            }
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void test04() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // Directory to rename
            Path path = new Path("/xiaoshuo");
            // Rename the directory if it exists
            if (fs.exists(path)) {
                System.out.println(fs.rename(path, new Path("/小说")) ? "rename succeeded" : "rename failed");
            } else {
                System.out.println(path.getName() + " does not exist");
            }
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void test05() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // Directory to delete
            Path path = new Path("/小说");
            // Delete the directory if it exists; 'true' enables recursive deletion
            if (fs.exists(path)) {
                System.out.println(fs.delete(path, true) ? "delete succeeded" : "delete failed");
            } else {
                System.out.println(path.getName() + " does not exist");
            }
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void test06() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // Local file to upload (path is relative to the working directory)
            Path src = new Path("三国演义.txt");
            // Destination path in HDFS
            Path dst = new Path("/小说/三国演义.txt");
            fs.copyFromLocalFile(src, dst);
            System.out.println(fs.exists(dst) ? "upload succeeded" : "upload failed");
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }
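
    // A hedged companion to test06 (not in the original post): write a file
    // directly into HDFS as a stream with fs.create(), no local file needed.
    // The path /小说/hello.txt is an assumption for illustration.
    @Test
    public void test06Stream() {
        try {
            URI uri = new URI("hdfs://hadoop:9000/");
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(uri, conf, "lhz");
            // create() returns an FSDataOutputStream; try-with-resources closes it
            try (org.apache.hadoop.fs.FSDataOutputStream out = fs.create(new Path("/小说/hello.txt"))) {
                out.write("hello hdfs".getBytes(java.nio.charset.StandardCharsets.UTF_8));
            }
            System.out.println(fs.exists(new Path("/小说/hello.txt")) ? "write succeeded" : "write failed");
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }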


    @Test
    public void test07() {
        try {
            // HDFS address
            URI uri = new URI("hdfs://hadoop:9000/");
            // Configuration object
            Configuration conf = new Configuration();
            // Identity the Java client connects as
            String user = "lhz";
            // Obtain an HDFS connection
            FileSystem fs = FileSystem.get(uri, conf, user);
            // Source path in HDFS
            Path src = new Path("/小说/三国演义.txt");
            // Local destination path for the download
            Path dst = new Path("三国.txt");
            fs.copyToLocalFile(src, dst);
            File file = new File(dst.getName());
            System.out.println(file.exists() ? "download succeeded" : "download failed");
            // Close the HDFS connection
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }
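
    // A hedged extra example (not in the original post): read the uploaded
    // file back as a stream with fs.open() and copy it to stdout using
    // Hadoop's IOUtils (fully qualified to avoid extra imports). Assumes
    // test06 has already uploaded /小说/三国演义.txt.
    @Test
    public void test08() {
        try {
            URI uri = new URI("hdfs://hadoop:9000/");
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(uri, conf, "lhz");
            // open() returns an FSDataInputStream positioned at the file start
            org.apache.hadoop.fs.FSDataInputStream in = fs.open(new Path("/小说/三国演义.txt"));
            // Copy to stdout; 'false' leaves the streams open so we close them ourselves
            org.apache.hadoop.io.IOUtils.copyBytes(in, System.out, 4096, false);
            in.close();
            fs.close();
        } catch (URISyntaxException | IOException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }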
}
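
Every test above repeats the same connect/close boilerplate. As a closing note, here is a minimal sketch (assuming the same host, port, and user as in the examples above) that moves the FileSystem lifecycle into JUnit 5 setup and teardown methods, so each test body contains only the operation under test:

package com.lihaozhe.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.net.URI;

public class HdfsLifecycleTest {

    private FileSystem fs;

    @BeforeEach
    public void setUp() throws Exception {
        // One connection per test; URI and user match the examples above
        fs = FileSystem.get(new URI("hdfs://hadoop:9000/"), new Configuration(), "lhz");
    }

    @AfterEach
    public void tearDown() throws IOException {
        // Always release the connection, even when a test fails
        fs.close();
    }

    @Test
    public void rootExists() throws IOException {
        System.out.println(fs.exists(new Path("/")) ? "directory exists" : "directory does not exist");
    }
}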
