Inserting Data into Hive and Querying Hive Data with the Java API

Contents

    • 1. POM file
    • 2. Test class
    • 3. Notes

1. POM file

The minimum required dependencies are as follows:

        <!--hadoop-->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.7.3</version>
        </dependency>
        <!--hive-->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>1.2.1</version>
        </dependency>

2. Test class

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * @author Java和算法学习
 */
public class HiveTest {

    public static void main(String[] args) {
        List<List<String>> argList = new ArrayList<>();
        List<String> arg = new ArrayList<>();
        arg.add("101");
        arg.add("qwqwq");
        argList.add(arg);
        arg = new ArrayList<>();
        arg.add("102");
        arg.add("weqwe");
        argList.add(arg);
        String dst = "/apps/hive/warehouse/pokes";
        createFile(dst, argList);
        loadData2Hive(dst);

        select();
    }

    public static void select() {
        String sql = "SELECT * FROM pokes";
        // Use try-with-resources so the connection, statement and result set are closed after the query
        try (Connection connection = getConnection();
             PreparedStatement preparedStatement = connection.prepareStatement(sql);
             ResultSet rs = preparedStatement.executeQuery()) {
            while (rs.next()) {
                System.out.println(rs.getInt(1) + "\t" + rs.getString(2));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Upload the data to HDFS so it can later be loaded into the Hive table; the field delimiter is ","
     */
    public static void createFile(String dst, List<List<String>> argList) {
        try (FileSystem fileSystem = FileSystem.get(new URI("hdfs://192.168.x.xx:8020"), new Configuration(), "hdfs");
             FSDataOutputStream outputStream = fileSystem.create(new Path(dst))) {
            StringBuilder sb = new StringBuilder();
            for (List<String> arg : argList) {
                for (String value : arg) {
                    // The delimiter must match the one specified when the table was created;
                    // otherwise the data loads, but subsequent queries return null values
                    sb.append(value).append(",");
                }
                sb.deleteCharAt(sb.length() - 1);
                sb.append("\n");
            }
            sb.deleteCharAt(sb.length() - 1);
            byte[] contents = sb.toString().getBytes();
            outputStream.write(contents);
            System.out.println("File created successfully!");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Load the HDFS file into the Hive table
     */
    public static void loadData2Hive(String dst) {
        String sql = " load data inpath '" + dst + "' into table pokes";
        try (Connection connection = getConnection(); PreparedStatement pstmt = connection.prepareStatement(sql)) {
            pstmt.execute();
            System.out.println("loadData到Hive表成功!");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    public static Connection getConnection() {
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        String connectionUrl = "jdbc:hive2://192.168.x.xx:10000/default";
        // Queries work without a username and password,
        // but inserting data requires them
        String username = "hdfs";
        String password = "hdfs";
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
        Connection connection = null;
        try {
            connection = DriverManager.getConnection(connectionUrl, username, password);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return connection;
    }

}
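
For reference, a run against a freshly created (empty) pokes table should print output roughly like the following; the two data lines come from the argList built in main, and the result will differ if the table already contains rows:

    File created successfully!
    Data loaded into the Hive table successfully!
    101	qwqwq
    102	weqwe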

3. Notes

1. The local code does not need configuration files such as core-site.xml, hdfs-site.xml, or hive-site.xml.

2. When obtaining the database connection:

Queries return data even when no username and password are supplied.
Inserting data requires a username and password.

Otherwise, the following exception is thrown (a minimal connection sketch follows the stack trace):

FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask
Loading data to table default.pokes
Failed with exception org.apache.hadoop.security.AccessControlException: Permission denied. user=anonymous is not the owner of inode=pokes
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkOwner(FSPermissionChecker.java:250)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:227)
	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1955)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1939)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkOwner(FSDirectory.java:1908)
	at org.apache.hadoop.hdfs.server.namenode.FSDirAttrOp.setPermission(FSDirAttrOp.java:63)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.setPermission(FSNamesystem.java:1824)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.setPermission(NameNodeRpcServer.java:821)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.setPermission(ClientNamenodeProtocolServerSideTranslatorPB.java:464)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)
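
The getConnection() method above always passes the hdfs user. As a minimal sketch of the difference (assuming, as in this setup, that HiveServer2 runs without authentication), a read-only connection can be opened with empty credentials, while the connection used for LOAD DATA should be a user with write access to the table's warehouse directory:

    // Sketch only: same HiveServer2 address as above, no authentication assumed.
    // Queries work even with empty credentials...
    Connection readOnly = DriverManager.getConnection(
            "jdbc:hive2://192.168.x.xx:10000/default", "", "");
    // ...but LOAD DATA needs a user that can write to the warehouse directory (here "hdfs").
    Connection writable = DriverManager.getConnection(
            "jdbc:hive2://192.168.x.xx:10000/default", "hdfs", "hdfs");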

3. When uploading the data file to HDFS, the delimiter must match the one set when the table was created; otherwise the queried values come back as null.

Command to set the delimiter when creating the table:
CREATE TABLE pokes (foo INT, bar STRING) row format delimited fields terminated by ',';
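
To double-check which delimiter an existing table actually uses, the table DDL can be read back over the same JDBC connection. The helper below is a hypothetical addition to the HiveTest class above (it reuses getConnection() and the existing imports):

    /**
     * Hypothetical helper: prints the table DDL so the "fields terminated by" delimiter can be verified.
     */
    public static void showCreateTable(String tableName) {
        String sql = "SHOW CREATE TABLE " + tableName;
        try (Connection connection = getConnection();
             PreparedStatement pstmt = connection.prepareStatement(sql);
             ResultSet rs = pstmt.executeQuery()) {
            while (rs.next()) {
                // Each row of the result set is one line of the CREATE TABLE statement
                System.out.println(rs.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }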
