java操作hive

my.properties配置文件

# hive
hive_driverClassName=org.apache.hive.jdbc.HiveDriver
hive_url=jdbc:hive2://192.168.10.154:10000
hive_user=root
hive_password=123456
# hive中数据库名称
hive_jichuku=jichuku

配置文件读取工具类:

package utils;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * Read-only access to the key/value pairs in {@code my.properties} on the
 * classpath, loaded once at class-initialization time.
 */
public class ProperUtil {
    // Shared store; populated once in the static initializer below.
    private static final Properties properties = new Properties();

    static {
        // Load via the classloader stream instead of getResource().getPath():
        // the path form breaks when the resource sits inside a jar or the
        // path contains URL-encoded characters, and NPEs when it is missing.
        try (InputStream in = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream("my.properties")) {
            if (in != null) {
                properties.load(in);
            } else {
                // Missing config is reported instead of silently producing
                // an empty property set (original swallowed everything).
                System.err.println("my.properties not found on classpath");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the value for {@code key}, or {@code null} when absent.
     */
    public static String getProperty(String key) {
        return properties.getProperty(key);
    }

    /**
     * Overrides or adds a property in the in-memory store (not persisted).
     */
    public static void setProperty(String key, String value) {
        properties.setProperty(key, value);
    }
}

hive操作工具类及测试:

package utils;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HiveUtil {

    public static void main(String args[]) {
        String sql = "select * from back_xf_user where del_flg=?";
        List> list = HiveUtil.hiveSelect(sql, "1");
        System.out.println(list);
    }

    public static List> hiveSelect(String sql, String... params){
        List> list = new ArrayList<>();
        try {
            Class.forName(ProperUtil.getProperty("hive_driverClassName")).newInstance();

            Connection conn = DriverManager.getConnection(ProperUtil.getProperty("hive_url") + "/" + ProperUtil.getProperty("hive_jichuku"),
                    ProperUtil.getProperty("hive_user"), ProperUtil.getProperty("hive_password"));
            java.sql.PreparedStatement pstsm = conn.prepareStatement(sql);
            int pindex = 1;
            if (params != null) for (String p: params) {
                pstsm.setString(pindex++, p);
            }

            ResultSet resultSet = pstsm.executeQuery();
            ResultSetMetaData metaData = resultSet.getMetaData();
            int columnCount = metaData.getColumnCount();
            while(resultSet.next()){
                Map map = new HashMap<>();
                for (int i=1; i<= columnCount; i++) {
                    String field = metaData.getColumnName(i);
                    map.put(field, resultSet.getString(field));
                }
                list.add(map);
            }
        } catch (Exception e) {
            e.printStackTrace();
            System.out.println(e);
        }
        return list;
    }

}

pom.xml文件相关依赖,请注意自己环境对应的版本:

    <properties>
        <hive.version>2.1.0</hive.version>
        <hadoop.version>2.7.6</hadoop.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${hive.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
    </dependencies>

 

你可能感兴趣的:(java,大数据)