Hive Java API

There are several ways to connect to Hive from Java; I have worked with two of them:

First, start Hive's remote service interface (HiveServer1):

hive --service hiveserver -p 50000 &

1. Connecting to Hive through the JDBC driver

There are other ways to connect as well, such as ODBC, but the JDBC route is the most commonly used. It is not particularly stable, though, and is frequently brought down by large data volumes, so it is not recommended for heavy workloads.
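Both clients below read their settings from a hiveCli.properties file placed next to the classes on the classpath (see the static initializers). A minimal sketch of that file follows, with placeholder host and credentials; the driver class and the jdbc:hive:// URL prefix are the HiveServer1 forms matching Hive 0.13 (on later releases the service is hiveserver2 and the prefix becomes jdbc:hive2://):

driverName=org.apache.hadoop.hive.jdbc.HiveDriver
url=jdbc:hive://localhost:50000/default
user=hive
password=
ip=localhost
port=50000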

package cn.ac.iscas.hiveclient;

import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

public class HiveJDBCClient {

    private static String driverName;
    private static String url;
    private static String user;
    private static String password;

    private Connection conn = null;
    private Statement stmt = null;

    static {
        // Load the connection settings from hiveCli.properties,
        // which must sit next to this class on the classpath
        Properties prop = new Properties();
        InputStream in = HiveJDBCClient.class.getResourceAsStream("hiveCli.properties");
        try {
            prop.load(in);
            driverName = prop.getProperty("driverName");
            url = prop.getProperty("url");
            user = prop.getProperty("user");
            password = prop.getProperty("password");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public boolean execute(String sql) {
        boolean rs = false;
        try {
            conn = getConn();
            stmt = conn.createStatement();
            rs = stmt.execute(sql);
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
        } finally {
            close();
        }
        return rs;
    }

    public ResultSet executeQuery(String sql) {
        // Deliberately leaves conn/stmt open: closing them here would
        // invalidate the ResultSet before the caller can read it.
        // The caller must invoke close() when done.
        ResultSet rs = null;
        try {
            conn = getConn();
            stmt = conn.createStatement();
            rs = stmt.executeQuery(sql);
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
        }
        return rs;
    }

    public void close() {
        // Close the Statement before the Connection
        try {
            if (null != stmt) {
                stmt.close();
                stmt = null;
            }
            if (null != conn) {
                conn.close();
                conn = null;
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    private static Connection getConn() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        return DriverManager.getConnection(url, user, password);
    }

    public static void main(String[] args) {
        HiveJDBCClient hc = new HiveJDBCClient();
        ResultSet rs = hc.executeQuery("desc carsrecord");
        try {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            hc.close();
        }
    }
}
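As a side note, the manual close-ordering above is easy to get wrong; with Java 7+ the same DESC query can be written with try-with-resources, which closes the ResultSet, Statement, and Connection automatically in reverse order. A minimal sketch (the class name and the placeholder URL/credentials are illustrative, not part of the original code):

package cn.ac.iscas.hiveclient;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveJDBCTryWithResources {
    public static void main(String[] args) throws Exception {
        // Placeholder values; in practice load them from hiveCli.properties as above
        String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
        String url = "jdbc:hive://localhost:50000/default";
        Class.forName(driverName);
        // Resources declared in the try header are closed automatically,
        // last-opened first: ResultSet, then Statement, then Connection
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("desc carsrecord")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}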

2. Connecting through the Hive Thrift interface
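This approach skips JDBC and talks to the same hiveserver port directly through the Thrift-generated HiveClient class from hive-service: open a TSocket, wrap it in a TBinaryProtocol, send the statement, then pull result rows back in batches with fetchN.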

package cn.ac.iscas.hiveclient;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hive.service.HiveClient;
import org.apache.hadoop.hive.service.HiveServerException;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;

public class HiveThreatClient {
    static TTransport transport;
    static TProtocol protocol;
    static HiveClient client;
    static String ip;
    static int port;

    static {
        // Load the server address from the same hiveCli.properties file
        Properties prop = new Properties();
        InputStream in = HiveThreatClient.class.getResourceAsStream("hiveCli.properties");
        try {
            prop.load(in);
            ip = prop.getProperty("ip");
            port = Integer.valueOf(prop.getProperty("port"));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static List<String> execute(String query) throws HiveServerException,
            TException, TTransportException {
        List<String> result = new ArrayList<String>();
        transport = new TSocket(ip, port);
        protocol = new TBinaryProtocol(transport);
        client = new HiveClient(protocol);
        transport.open();
        // send_execute + recv_execute is what client.execute(query) does internally
        client.send_execute(query);
        client.recv_execute();
        // Pull result rows ten at a time until an empty batch comes back
        List<String> list = client.fetchN(10);
        while (null != list && list.size() > 0) {
            for (String r : list) {
                System.out.println(r);
                result.add(r);
            }
            list = client.fetchN(10);
        }
        client.shutdown();
        transport.close();
        return result;
    }

    public static void main(String[] args) {
        try {
            //HiveThreatClient.execute("desc carsrecord");
            //HiveThreatClient.execute("select distinct addressname from carsrecord where collectiontime='2015-02-02'");
            //load data inpath '/2015/02/2015-02-01.dat' overwrite into table carsrecord partition(collectiontime='2015-02-01')
            // Load one data file per day into its date partition
            for (int i = 10; i < 29; i++) {
                String day = i > 9 ? "" + i : "0" + i;
                String stat = "load data inpath '/2015/02/2015-02-" + day
                        + ".dat' overwrite into table carsrecord partition(collectiontime='2015-02-" + day + "')";
                System.out.println(stat);
                HiveThreatClient.execute(stat);
            }
            //HiveThreatClient.execute("select * from carsrecord where collectiondate>='2014-01-01' and collectiondate<'2014-01-03'");
        } catch (HiveServerException e) {
            e.printStackTrace();
        } catch (TTransportException e) {
            e.printStackTrace();
        } catch (TException e) {
            e.printStackTrace();
        }
    }
}
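Note that fetchN(10) fetches result rows ten at a time until an empty batch comes back; for the LOAD DATA statements issued in main there are no rows to fetch, so the loop exits immediately and only the statement's side effect matters.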

Required JARs (as Eclipse .classpath entries):

<classpathentry kind="lib" path="hive-service-0.13.1.jar"/>
<classpathentry kind="lib" path="hive-exec-0.13.1.jar"/>
<classpathentry kind="lib" path="slf4j-api-1.6.6.jar"/>
<classpathentry kind="lib" path="hive-metastore-0.13.1.jar"/>
<classpathentry kind="lib" path="libfb303-0.9.0.jar"/>
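If the project is built with Maven instead of raw Eclipse .classpath entries, roughly equivalent coordinates would be the following (versions taken from the jar names above; the JDBC client additionally needs the hive-jdbc artifact and a matching Hadoop client on the classpath):

<dependency>
  <groupId>org.apache.hive</groupId>
  <artifactId>hive-service</artifactId>
  <version>0.13.1</version>
</dependency>
<dependency>
  <groupId>org.apache.hive</groupId>
  <artifactId>hive-exec</artifactId>
  <version>0.13.1</version>
</dependency>
<dependency>
  <groupId>org.apache.hive</groupId>
  <artifactId>hive-metastore</artifactId>
  <version>0.13.1</version>
</dependency>
<dependency>
  <groupId>org.apache.thrift</groupId>
  <artifactId>libfb303</artifactId>
  <version>0.9.0</version>
</dependency>
<dependency>
  <groupId>org.slf4j</groupId>
  <artifactId>slf4j-api</artifactId>
  <version>1.6.6</version>
</dependency>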
