Example: Accessing Hive via HiveServer2 from Java

Hive ships with an optional component called HiveServer (also known as HiveThrift) that exposes Hive on a configurable port. Thrift is an efficient RPC framework, so Hive can be accessed programmatically from remote clients.
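HiveServer2 is typically started on the Hive host with "hive --service hiveserver2" (or the hiveserver2 launcher script) and listens on the default Thrift port 10000; the exact startup command may vary with your distribution.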

Hive exposes two client interfaces: Thrift and JDBC. The JDBC interface is a wrapper built on top of Thrift; every JDBC method is synchronous, whereas the HiveServer2 Thrift interface also supports asynchronous calls. JDBC is synchronous only because the implementation adds a synchronization lock.

The main differences between HiveServer1 and HiveServer2 are listed below:

Version     | JDBC driver class                      | JDBC connection URL        | Cross-language support | Concurrent writes | Table locks           | Default Thrift port
HiveServer1 | org.apache.hadoop.hive.jdbc.HiveDriver | jdbc:hive://<host>:<port>  | No                     | No                | No                    | 10000
HiveServer2 | org.apache.hive.jdbc.HiveDriver        | jdbc:hive2://<host>:<port> | Yes                    | Yes               | Yes (ZooKeeper-based) | 10000
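
For reference, plain JDBC access to HiveServer2 is just the standard Connection/Statement/ResultSet flow. A minimal sketch (the host, port, database, credentials and the "show tables" query are placeholders to adapt to your own cluster):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class SimpleHiveQuery {
    public static void main(String[] args) throws Exception {
        // register the HiveServer2 JDBC driver
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // placeholder URL and credentials; replace with your own cluster settings
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://host:10000/default", "username", "password");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}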

The rest of this post walks through a simple example of accessing Hive data from Java through HiveServer2, this time with a small hand-rolled connection pool:

Add the dependencies to pom.xml:


<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.41</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.2.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.7.2</version>
</dependency>

Define the DAO interface, HiveDao.java:

import com.alibaba.fastjson.JSONObject;

public interface HiveDao {
    /**
     * General query.
     *
     * @param connection pooled Hive connection
     * @param sql        query to execute
     * @return query result as a JSON object (heads, types, result)
     */
    JSONObject querySql(HadoopConnection connection, String sql);

}

HadoopConnection.java

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class HadoopConnection {
    private int id;
    private String url;
    private long keepTime;
    /**
     * Connection status: 0 = idle, 1 = in use
     */
    private int status;
    private String username;
    private String password;
    private Connection connection;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public long getKeepTime() {
        return keepTime;
    }

    public void setKeepTime(long keepTime) {
        this.keepTime = keepTime;
    }

    public int getStatus() {
        return status;
    }

    public void setStatus(int status) {
        this.status = status;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public Connection getConnection() {
        return connection;
    }

    public void setConnection(Connection connection) {
        this.connection = connection;
    }

    public HadoopConnection(int id, String url, int status, String user, String pwd) throws SQLException {
        super();
        this.id = id;
        this.url = url;
        this.status = status;
        this.username = user;
        this.password = pwd;
        this.keepTime = System.currentTimeMillis();
        this.connection = DriverManager.getConnection(url, username, password);
    }
    @Override
    public String toString() {
        return "HadoopConnection [id=" + id + ", status=" + status + "]";
    }
}

HiveDaoImpl.java:

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;

import org.springframework.stereotype.Component;

import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.List;


@Component
public class HiveDaoImpl implements HiveDao {

    public static String HIVE_URL = "jdbc:hive2://ip:port/database?mapreduce.job.queuename=xxx;mapred.input.dir.recursive=true;hive.mapred.supports.subdirectories=true;hive.support.quoted.identifiers=none;mapreduce.job.reduces=1";

    private static String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    public static String USERNAME = "username";

    public static String PWD = "password";

    private static List<HadoopConnection> connections = Lists.newArrayList();

    /**
     * Initialize the connection pool
     */
    public static void  init(Integer numpool) {
        try {
            // load and register the driver class; Class.forName is the usual way to load a JDBC driver
            Class.forName(DRIVER_NAME);
            createConnectionPool(numpool, HIVE_URL, USERNAME, PWD);
        } catch (Exception e) {
            System.out.println("hadoop connection pool 初始化失败 : " + e);
        }
    }

    /**
     * Execute SQL that returns no result set.
     * @param connection pooled Hive connection
     * @param sql        statement to execute
     * @throws SQLException if execution fails
     */
    public void executeSql(HadoopConnection connection, String sql) throws SQLException {
        if (connection == null || sql == null) {
            return;
        }
        connection.getConnection().createStatement().execute(sql);
        resetConnection(connection);
    }

    /**
     * General query.
     * @param connection pooled Hive connection
     * @param sql        query to execute
     * @return query result as a JSON object (heads, types, result)
     */
    public  JSONObject querySql(HadoopConnection connection, String sql) {
        JSONObject result = new JSONObject();
        if (connection == null || sql == null) {
            return result;
        }
        result.put("request", sql);
        try {
            // execute the SQL statement and obtain the result set
            ResultSet resultSet = connection.getConnection().createStatement().executeQuery(sql);
            // metadata of the result set
            ResultSetMetaData metaData = resultSet.getMetaData();
            // number of columns
            int columnCount = metaData.getColumnCount();
            JSONArray heads = new JSONArray();
            JSONArray types = new JSONArray();
            for (int index = 1; index <= columnCount; index++) {
                heads.add(metaData.getColumnName(index));
                types.add(metaData.getColumnTypeName(index));
            }

            JSONArray rowDatas = new JSONArray();
            while (resultSet.next()) {
                JSONArray rowData = new JSONArray();
                for (int index = 1; index <= columnCount; index++) {
                    rowData.add(resultSet.getString(index));
                }
                rowDatas.add(rowData);
            }
            result.put("heads", heads);
            result.put("types", types);
            result.put("result", rowDatas);
        } catch (Exception e) {
            result.put("error", e);
        }
        resetConnection(connection);
        return result;
    }

    /**
     * Keep idle connections alive
     */
    public static  void keepAliveCollection() {
        HadoopConnection keepTemp = null;
        try {
            for (HadoopConnection connection : connections) {
                keepTemp = connection;
                if (connection.getStatus() == 0) {
                    if (connection.getConnection().isClosed()) {
                        resetConnection(connection);
                    }
                    long nowTime = System.currentTimeMillis();
                    if (nowTime - connection.getKeepTime() > 10000) {
                        connection.getConnection().createStatement().execute("select 0");
                        connection.setKeepTime(nowTime);
                    }
                }
            }
        } catch (Exception e) {
            resetConnection(keepTemp);
        }
    }

    /**
     * Get an idle connection and mark it as in use.
     * @return an available connection, or null if none is idle
     */
    public static synchronized  HadoopConnection getConnection() {
        HadoopConnection connection = null;
        for (HadoopConnection hadoop : connections) {
            if (hadoop.getStatus() == 0) {
                hadoop.setStatus(1);
                connection = hadoop;
                break;
            }
        }
        return connection;
    }

    public static synchronized  void closeConnections() {
        if (getConnectionSize() == 0) {
            return;
        }
        try {
            for (HadoopConnection hadoop : connections) {
                if (hadoop.getConnection() != null) {
                    hadoop.getConnection().close();
                }
            }
        } catch (Exception e) {
            System.out.println("close hadoop connection fail, msg: " + e.getMessage());
        } finally {
            connections.clear();
        }
    }

    public static synchronized  void resetConnection(HadoopConnection connection) {
        try {
            connection.getConnection().clearWarnings();
            if (connection.getConnection().isClosed()) {
                System.out.println(connection + " , is reseted!");
                connection.setConnection(DriverManager.getConnection(connection.getUrl()));
            }
            connection.setStatus(0);
        } catch (Exception e) {
            System.out.println("reset connection exception : " + e);
        }
    }

    public static synchronized void createConnectionPool(int num, String url, String user, String pwd) throws SQLException {
        if (!connections.isEmpty()) {
            return;
        }
        for (int i = 0; i < num; i++) {
            connections.add(new HadoopConnection(i, url, 0, user, pwd));
            System.out.println("hadoop connection pool success : url->["+url+"] user->["+user+"]" );
        }
    }

    public static int getConnectionSize() {
        return connections.size();
    }

}
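
Note that nothing above ever calls keepAliveCollection(). A minimal sketch of one way to drive it, assuming a single ScheduledExecutorService started after init() (the class name and the 10-second period here are arbitrary), might look like this:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class HivePoolKeepAlive {
    public static void main(String[] args) {
        // create the pool first, then ping idle connections periodically
        HiveDaoImpl.init(6);
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleAtFixedRate(
                HiveDaoImpl::keepAliveCollection, 10, 10, TimeUnit.SECONDS);
        // in a real service, remember to shut the scheduler down and call
        // HiveDaoImpl.closeConnections() on exit
    }
}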

Test:

BaseTest.java:

import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:testContext.xml"})
public class BaseTest {
}
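
testContext.xml is not shown in the original post; it is assumed to define (or component-scan the package containing) the HiveDaoImpl bean so that the hiveDao field below can be injected via @Resource.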

HiveTest.java:

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.didichuxing.risk.hive.HadoopConnection;
import com.didichuxing.risk.hive.HiveDao;
import com.didichuxing.risk.hive.HiveDaoImpl;
import org.junit.Test;

import javax.annotation.Resource;

public class HiveTest extends BaseTest{

    @Resource
    private HiveDao hiveDao;

    @Test
    public void testHiveClient() {
        try {
            HiveDaoImpl.init(6);
            HadoopConnection connection = HiveDaoImpl.getConnection();
            // fetch data from a Hive table
            String sql = "select * from database.table";
            if (connection != null) {
                JSONObject jsonObject = hiveDao.querySql(connection, sql);
                if (null == jsonObject) {
                    System.out.println("json object is null.");
                    return;
                }

                JSONArray rows = (JSONArray) jsonObject.get("result");
                JSONArray heads = (JSONArray) jsonObject.get("heads");
                JSONArray types = (JSONArray) jsonObject.get("types");
                JSONArray row = null;

                // print column names
                for (int i = 0; i < heads.size(); i++) {
                    System.out.println(heads.get(i));
                }

                // print column types
                for (int i = 0; i < types.size(); i++) {
                    System.out.println(types.get(i));
                }


                if (rows == null || rows.size() == 0) {
                    System.out.println("row is null.");
                    return ;
                }

                for (int i = 0; i < rows.size(); i++) {
                    row = (JSONArray) rows.get(i);
                    System.out.println(row);
                }
            } else {
                System.out.println("get hive data : " + "connection is null");
            }
        } catch (Exception e) {
            System.out.println("get hive data exception : " + e.getMessage());
        } finally {
            HiveDaoImpl.closeConnections();
        }

    }
}

That's all.

Author:忆之独秀

Email:[email protected]

Source: https://blog.csdn.net/lavorange/article/details/80412288
