Accessing a Hive Data Warehouse from Java

1. Add the HDFS configuration: put the following properties in core-site.xml


<property>
    <name>hadoop.proxyuser.hadoop.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.hadoop.groups</name>
    <value>*</value>
</property>
The second "hadoop" in each property name is the username used to log in to the host (the user that HiveServer2 proxies as).
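If HDFS is already running when you add these properties, they only take effect after a NameNode restart; alternatively (assuming a standard Hadoop installation) the proxy-user settings can be reloaded in place with:
hdfs dfsadmin -refreshSuperUserGroupsConfiguration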
2. Start the Hive services, namely the metastore service and HiveServer2, by running the following two commands:
hive --service metastore &
hive --service hiveserver2 &
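Optionally, before touching the Java code, you can check that HiveServer2 is accepting connections with Beeline (the host, port, and user below are this article's example values):
beeline -u jdbc:hive2://192.168.19.10:10000 -n hadoop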
3. Java code
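
The code below assumes the Hive JDBC driver is on the classpath. With Maven that is roughly the following snippet; the version shown is an assumption and should match your Hive installation:

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <!-- assumption: pick the version that matches your cluster -->
    <version>2.3.9</version>
</dependency>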

package com.hiveconn;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveConnect {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // Legacy HiveServer1 driver, kept for reference:
    // private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://192.168.19.10:10000/";
    private static String user = "hadoop";
    private static String password = "hive123456";

    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;

    // Load the JDBC driver and open the connection
    public void init() throws Exception {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }

        System.out.println(driverName);

        // Let a failed connection propagate instead of swallowing it;
        // with the exception caught and only printed, conn stayed null and
        // the following createStatement() call threw a NullPointerException.
        conn = DriverManager.getConnection(url, user, password);
        stmt = conn.createStatement();
    }

    // Create a database
    public void createDatabase() throws Exception {
        String sql = "create database hive_jdbc_test";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    // List all databases
    public void showDatabases() throws Exception {
        String sql = "show databases";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }
    // Count query (triggers a MapReduce job)
    public void countData() throws Exception {
        String sql = "select count(1) from hivestu.stua";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getInt(1) );
        }
    }

    // Release resources
    public void destroy() throws Exception {
        if ( rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}
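
A minimal sketch of how the class above might be driven; the HiveConnectDemo class and the call order below are illustrative additions, not part of the original code:

package com.hiveconn;

public class HiveConnectDemo {
    public static void main(String[] args) throws Exception {
        HiveConnect hive = new HiveConnect();
        hive.init();               // load driver, open connection and statement
        hive.showDatabases();      // list databases
        // hive.createDatabase();  // first run only; fails if hive_jdbc_test already exists
        // hive.countData();       // assumes table hivestu.stua exists
        hive.destroy();            // close ResultSet, Statement, Connection
    }
}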

4. Problems encountered

Error: java.sql.SQLException: Could not establish connection to jdbc:hive2://192.168.59.132:10001/: Required field 'serverProtocolVersion' is unset! Struct:TOpenSessionResp(status:TStatus(statusCode:ERROR_STATUS, infoMessages:[*org.apache.hive.service.cli.HiveSQLException:Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: hadoop is not allowed to impersonate hadoop:14:13, org.apache.hive.service.cli.session.SessionManager:createSession:SessionManager.java:419, org.apache.hive.service.cli.session.SessionManager:openSession:SessionManager.java:362, org.apache.hive.service.cli.CLIService:openSessionWithImpersonation:CLIService.java:193, org.apache.hive.service.cli.thrift.ThriftCLIService:getSessionHandle:ThriftCLIService.java:440, org.apache.hive.service.cli.thrift.ThriftCLIService:OpenSession:ThriftCLIService.java:322, org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession:getResult:TCLIService.java:1377, org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession:getResult:TCLIService.java:1362, org.apache.thrift.ProcessFunction:process:ProcessFunction.java:39, org.apache.thrift.TBaseProcessor:process:TBaseProcessor.java:39, org.apache.hive.service.auth.TSetIpAddressProcessor:process:TSetIpAddressProcessor.java:56, org.apache.thrift.server.TThreadPoolServer$WorkerProcess:run:TThreadPoolServer.java:286, java.util.concurrent.ThreadPoolExecutor:runWorker:ThreadPoolExecutor.java:1149, java.util.concurrent.ThreadPoolExecutor$Worker:run:ThreadPoolExecutor.java:624, java.lang.Thread:run:Thread.java:748, *java.lang.RuntimeException:java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: hadoop is not allowed to impersonate hadoop:22:8, org.apache.hive.service.cli.session.HiveSessionProxy:invoke:HiveSessionProxy.java:89, org.apache.hive.service.cli.session.HiveSessionProxy:access$000:HiveSessionProxy.java:36, org.apache.hive.service.cli.session.HiveSessionProxy$1:run:HiveSessionProxy.java:63, java.security.AccessController:doPrivileged:AccessController.java:-2, javax.security.auth.Subject:doAs:Subject.java:422, org.apache.hadoop.security.UserGroupInformation:doAs:UserGroupInformation.java:1762, org.apache.hive.service.cli.session.HiveSessionProxy:invoke:HiveSessionProxy.java:59, com.sun.proxy.$Proxy37:open::-1, org.apache.hive.service.cli.session.SessionManager:createSession:SessionManager.java:410, *java.lang.RuntimeException:org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: hadoop is not allowed to impersonate hadoop:29:7, org.apache.hadoop.hive.ql.session.SessionState:start:SessionState.java:610, org.apache.hadoop.hive.ql.session.SessionState:start:SessionState.java:548, org.apache.hive.service.cli.session.HiveSessionImpl:open:HiveSessionImpl.java:164, sun.reflect.NativeMethodAccessorImpl:invoke0:NativeMethodAccessorImpl.java:-2, sun.reflect.NativeMethodAccessorImpl:invoke:NativeMethodAccessorImpl.java:62, sun.reflect.DelegatingMethodAccessorImpl:invoke:DelegatingMethodAccessorImpl.java:43, java.lang.reflect.Method:invoke:Method.java:498, org.apache.hive.service.cli.session.HiveSessionProxy:invoke:HiveSessionProxy.java:78, *org.apache.hadoop.ipc.RemoteException:User: hadoop is not allowed to impersonate hadoop:49:20, org.apache.hadoop.ipc.Client:call:Client.java:1476, org.apache.hadoop.ipc.Client:call:Client.java:1413, 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker:invoke:ProtobufRpcEngine.java:229, com.sun.proxy.$Proxy30:getFileInfo::-1, org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB:getFileInfo:ClientNamenodeProtocolTranslatorPB.java:776, sun.reflect.NativeMethodAccessorImpl:invoke0:NativeMethodAccessorImpl.java:-2, sun.reflect.NativeMethodAccessorImpl:invoke:NativeMethodAccessorImpl.java:62, sun.reflect.DelegatingMethodAccessorImpl:invoke:DelegatingMethodAccessorImpl.java:43, java.lang.reflect.Method:invoke:Method.java:498, org.apache.hadoop.io.retry.RetryInvocationHandler:invokeMethod:RetryInvocationHandler.java:191, org.apache.hadoop.io.retry.RetryInvocationHandler:invoke:RetryInvocationHandler.java:102, com.sun.proxy.$Proxy31:getFileInfo::-1, org.apache.hadoop.hdfs.DFSClient:getFileInfo:DFSClient.java:2117, org.apache.hadoop.hdfs.DistributedFileSystem$22:doCall:DistributedFileSystem.java:1305, org.apache.hadoop.hdfs.DistributedFileSystem$22:doCall:DistributedFileSystem.java:1301, org.apache.hadoop.fs.FileSystemLinkResolver:resolve:FileSystemLinkResolver.java:81, org.apache.hadoop.hdfs.DistributedFileSystem:getFileStatus:DistributedFileSystem.java:1301, org.apache.hadoop.fs.FileSystem:exists:FileSystem.java:1425, org.apache.hadoop.hive.ql.session.SessionState:createRootHDFSDir:SessionState.java:708, org.apache.hadoop.hive.ql.session.SessionState:createSessionDirs:SessionState.java:654, org.apache.hadoop.hive.ql.session.SessionState:start:SessionState.java:586], errorCode:0, errorMessage:Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: hadoop is not allowed to impersonate hadoop), serverProtocolVersion:null)

Solution: the root cause is the AuthorizationException "User: hadoop is not allowed to impersonate hadoop". Adding the proxy-user configuration from step 1 to core-site.xml fixes it (restart HDFS, or refresh the proxy-user settings as noted above, so that the change takes effect).
