Using Hive via JDBC

As part of learning Hive, this post shows how to query data from Java through Hive's JDBC driver.
Note: everything here was written and tested against Hive 1.2.1.

Prerequisite: Hive is already deployed and working in your environment.

Setting up the Java project:
1. Required dependencies (this is a Maven project):

<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>${hive.version}</version>
</dependency>

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop.version}</version>
</dependency>

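The ${hive.version} and ${hadoop.version} placeholders must be defined in the POM's <properties> section. A minimal sketch with assumed version numbers (adjust them to match your cluster):

<properties>
    <!-- assumed versions; match these to your cluster -->
    <hive.version>1.2.1</hive.version>
    <hadoop.version>2.7.3</hadoop.version>
</properties>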

2. Start the Hive server on the cluster. In Hive 1.2.1 the server must be started as hiveserver2 (see the official documentation for details; you can also confirm that a hiveserver2 script exists under ${HIVE_HOME}/bin).
     Start it with:
     hive --service hiveserver2 --hiveconf hive.server2.thrift.port=11111   (sets the port explicitly; the default is 10000)
     You can also run hiveserver2 directly, in which case it listens on the default port 10000.
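     Before writing any Java code, you can verify the server is reachable with the beeline client that ships with Hive (the host, port, and user below are the assumed values used throughout this post):

     beeline -u jdbc:hive2://192.168.10.100:11111/default -n hive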

3. Writing the Java code.
     First, a service class that wraps the Hive JDBC connection (essentially a JDBC utility class):
     
package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveService {

    static Logger logger = Logger.getLogger(HiveService.class);

    // Hive JDBC driver class
    public static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // Connection URL; Hive 1.2.1 requires jdbc:hive2, not jdbc:hive
    public static String url = "jdbc:hive2://192.168.10.100:11111/default";
    // Linux login user; typically one with broad permissions, otherwise write operations will fail
    public static String user = "hive";
    // Linux login password
    public static String pass = "hive";

    /**
     * Create a connection.
     */
    public static Connection getConn() {
        Connection conn = null;
        try {
            Class.forName(driverName);
            conn = DriverManager.getConnection(url, user, pass);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return conn;
    }

    /**
     * Create a statement.
     */
    public static Statement getStmt(Connection conn) throws SQLException {
        logger.debug(conn);
        if (conn == null) {
            logger.debug("this conn is null");
        }
        return conn.createStatement();
    }

    /**
     * Close the connection.
     */
    public static void closeConn(Connection conn) {
        try {
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Close the statement.
     */
    public static void closeStmt(Statement stmt) {
        try {
            stmt.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
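Since java.sql.Connection and java.sql.Statement are both AutoCloseable, callers can also skip the manual closeConn/closeStmt calls by using try-with-resources. A minimal sketch using the class above (HiveServiceDemo is just a hypothetical wrapper name):

package hive;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveServiceDemo {
    public static void main(String[] args) {
        // Both resources are closed automatically, even if a statement throws
        try (Connection conn = HiveService.getConn();
             Statement stmt = conn.createStatement()) {
            stmt.execute("show tables");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}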

   Then write a test class:
  
package hive;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveTestOne {

    static Logger logger = Logger.getLogger(HiveTestOne.class);

    public static void main(String[] args) {

        Connection conn = HiveService.getConn();
        Statement stmt = null;
        try {
            stmt = HiveService.getStmt(conn);
        } catch (SQLException e) {
            logger.debug("failed to create statement", e);
        }

        String sql = "select * from test";

        ResultSet res = null;
        try {
            res = stmt.executeQuery(sql);

            // Print the column names, then every row of the result set
            ResultSetMetaData meta = res.getMetaData();
            for (int i = 1; i <= meta.getColumnCount(); i++) {
                System.out.print(meta.getColumnName(i) + "    ");
            }
            System.out.println();
            while (res.next()) {
                System.out.print(res.getString(1) + "    ");
                System.out.print(res.getString(2) + "    ");
                System.out.println();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }

        try {
            // Requires a user with HDFS read/write permissions
            stmt.execute("insert into test1(id, name) values(222,'yang')");
            logger.debug("insert succeeded");
        } catch (SQLException e) {
            e.printStackTrace();
        }

        HiveService.closeStmt(stmt);
        HiveService.closeConn(conn);
    }
}
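The Hive JDBC driver also supports PreparedStatement, which is safer than splicing values into the SQL string. A sketch against the same tables as above, assuming the test table has an id column (HiveTestTwo is a hypothetical class name):

package hive;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class HiveTestTwo {
    public static void main(String[] args) throws SQLException {
        Connection conn = HiveService.getConn();
        // Bind the id as a parameter instead of concatenating it into the string
        try (PreparedStatement ps = conn.prepareStatement("select * from test where id = ?")) {
            ps.setInt(1, 222);
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getString(1) + "    " + rs.getString(2));
                }
            }
        }
        HiveService.closeConn(conn);
    }
}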


Problems encountered:
1. java.lang.ClassNotFoundException: org.apache.hadoop.hive.jdbc.HiveDriver
     I had written an extra "hadoop" into the driver class name; the correct class is org.apache.hive.jdbc.HiveDriver.

2. java.sql.SQLException: No suitable driver found for jdbc:hive://192.168.10.100:11111/default
     I am on Hive 1.2.1, and for this version the URL must be written as jdbc:hive2://192.168.10.100:11111/default.

3. java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://192.168.10.100:11111/default: java.net.ConnectException: Connection refused
      This happens when hiveserver2 has not been started on the remote Hive cluster.
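      A quick way to confirm this on the server: check whether anything is listening on the thrift port (11111 here, matching the startup command above), e.g.

      netstat -nltp | grep 11111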

4. java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
   Checking the Hive server logs shows the root cause:
   Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=hive, access=EXECUTE, inode="/tmp":root:supergroup:drwxrwx---
     This is a permission problem, so try switching to a different user. In my code I had:
// Hive JDBC driver class
public static String driverName = "org.apache.hive.jdbc.HiveDriver";
// Connection URL; Hive 1.2.1 requires jdbc:hive2, not jdbc:hive
public static String url = "jdbc:hive2://192.168.10.100:10000/default";
// Linux login user; typically one with broad permissions, otherwise write operations will fail
public static String user = "hive";
// Linux login password
public static String pass = "hive";

     I was connecting as the hive user, but on my cluster other users only have read access to the HDFS files, not write access, so I changed the credentials to:
// Linux login user; typically one with broad permissions, otherwise write operations will fail
public static String user = "root";
// Linux login password
public static String pass = "123456";

      With my admin user root, the create and insert statements execute successfully.
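      An alternative to switching users is to grant the hive user access to the HDFS directory named in the log (/tmp in the trace above). A sketch, run as the HDFS superuser; note that 777 opens the directory to everyone:

      hdfs dfs -chmod -R 777 /tmp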