1. hadoop的配置文件core-site.xml中
fs.defaultFS
hdfs://hadoop1:9000
hadoop.tmp.dir
/usr/local/softinstall/hadoop/workspace/hdfs/temp
io.file.buffer.size
4096
hadoop.proxyuser.root.hosts
*
hadoop.proxyuser.root.groups
*
不然会有报错情况
Error: Failed to open new session: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException
具体细节参考:http://blog.csdn.net/swing2008/article/details/53230145
2. 启动hadoop集群,启动hive的服务端 :
hive --service hiveserver2 >/dev/null &
如果不启动hive服务端,客户端连接时会报错(解决方法参考下方相关链接)。
3.maven项目中添加依赖
org.apache.hive
hive-jdbc
2.1.0
org.apache.hive
hive-metastore
2.1.0
org.apache.hive
hive-service
2.1.0
package com.eastcom.first.spark.data.hive;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Minimal Hive JDBC client demo: connects to HiveServer2, runs a
 * {@code select *} and a {@code count(1)} query against a fixed table,
 * and prints the results to stdout.
 *
 * <p>Helper methods for drop/create/show/describe/load are included but
 * commented out of the main flow; uncomment the calls as needed.
 */
public class HiveJdbcCli {
    private static final Logger logger = LoggerFactory.getLogger(HiveJdbcCli.class);

    // Older tutorials use "org.apache.hadoop.hive.jdbc.HiveDriver"; that class
    // was removed in newer Hive versions — use the hive2 driver below.
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // Scheme must be "hive2" (HiveServer2); plain "hive" fails on newer versions.
    private static String url = "jdbc:hive2://192.168.31.130:10000/default";
    private static String user = "root";
    private static String password = "root";

    // Shared JDBC handles: created in init(), released in destroy().
    private static Connection conn;
    private static Statement stmt;

    public static void main(String[] args) throws ClassNotFoundException {
        try {
            init();
            // Step 1: drop the table if it already exists
            // String tableName = dropTable(stmt);
            String tableName = "testhive.tbdate";
            // Step 2: create the table if it does not exist
            // createTable(stmt, tableName);
            // Step 3: list the created table
            // showTables(stmt, tableName);
            // describe table
            // describeTables(stmt, tableName);
            // load data into table
            // loadData(stmt, tableName);
            // select * query
            selectData(stmt, tableName);
            // regular hive query (aggregation)
            countData(stmt, tableName);
        } catch (SQLException e) {
            logger.error("Connection error!", e);
            System.exit(1);
        } finally {
            // Always release the connection/statement, even when a query throws
            // (the original only cleaned up on the success path and leaked here).
            try {
                destroy();
            } catch (SQLException e) {
                logger.error("Failed to close JDBC resources", e);
            }
        }
    }

    /** Runs {@code select count(1)} against {@code tableName} and prints the count. */
    private static void countData(Statement stmt, String tableName) throws SQLException {
        String sql = "select count(1) from " + tableName;
        System.out.println("Running:" + sql);
        // try-with-resources closes the ResultSet even if iteration throws.
        try (ResultSet res = stmt.executeQuery(sql)) {
            System.out.println("执行“regular hive query”运行结果:");
            while (res.next()) {
                System.out.println("count ------>" + res.getString(1));
            }
        }
    }

    /** Prints the first 10 rows (4 columns) of {@code tableName}. */
    private static void selectData(Statement stmt, String tableName) throws SQLException {
        String sql = "select * from " + tableName + " limit 10 ";
        System.out.println("Running:" + sql);
        try (ResultSet res = stmt.executeQuery(sql)) {
            System.out.println("执行 select * query 运行结果:");
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2) + res.getString(3) + "\t" + res.getString(4));
            }
        }
    }

    /**
     * Loads a local file into the table. The path is resolved on the
     * HiveServer2 host (here: a file in the server's home directory).
     */
    private static void loadData(Statement stmt, String tableName) throws SQLException {
        String filepath = "user.txt";
        String sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running:" + sql);
        stmt.execute(sql);
    }

    /** Runs {@code describe tableName} and prints each column name and type. */
    private static void describeTables(Statement stmt, String tableName) throws SQLException {
        String sql = "describe " + tableName;
        System.out.println("Running:" + sql);
        try (ResultSet res = stmt.executeQuery(sql)) {
            System.out.println("执行 describe table 运行结果:");
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
        }
    }

    /** Runs {@code show tables 'tableName'} and prints the first match, if any. */
    private static void showTables(Statement stmt, String tableName) throws SQLException {
        String sql = "show tables '" + tableName + "'";
        System.out.println("Running:" + sql);
        try (ResultSet res = stmt.executeQuery(sql)) {
            System.out.println("执行 show tables 运行结果:");
            if (res.next()) {
                System.out.println(res.getString(1));
            }
        }
    }

    /** Creates a two-column, tab-delimited table named {@code tableName}. */
    private static void createTable(Statement stmt, String tableName) throws SQLException {
        String sql = "create table " + tableName + " (key int, value string) row format delimited fields terminated by '\t'";
        stmt.execute(sql);
    }

    /**
     * Drops the demo table.
     *
     * @return the name of the dropped table, for reuse by the caller
     */
    private static String dropTable(Statement stmt) throws SQLException {
        String tableName = "testHive";
        String sql = "drop table " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    /** Loads the HiveServer2 driver and opens a connection. */
    private static Connection getConn() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        return DriverManager.getConnection(url, user, password);
    }

    /** Opens the shared connection and statement used by the demo steps. */
    private static void init() throws ClassNotFoundException, SQLException {
        conn = getConn();
        stmt = conn.createStatement();
    }

    /** Closes the shared statement and connection, tolerating either being null. */
    private static void destroy() throws SQLException {
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}
SLF4J: Found binding in [jar:file:/C:/Users/Administrator/.m2/repository/org/apache/activemq/activemq-all/5.15.0/activemq-all-5.15.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/Users/Administrator/.m2/repository/org/slf4j/slf4j-log4j12/1.7.21/slf4j-log4j12-1.7.21.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/Users/Administrator/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
INFO | Supplied authorities: 192.168.31.130:10000
INFO | Resolved authority: 192.168.31.130:10000
Running:select * from testhive.tbdate limit 10
执行 select * query 运行结果:
2003-1-1 2003012003 1
2003-1-2 2003012003 1
2003-1-3 2003012003 1
2003-1-4 2003012003 1
2003-1-5 2003012003 1
2003-1-6 2003012003 1
2003-1-7 2003012003 1
2003-1-8 2003012003 1
2003-1-9 2003012003 1
2003-1-10 2003012003 1
Running:select count(1) from testhive.tbdate
执行“regular hive query”运行结果:
count ------>4383
http://www.cnblogs.com/netbloomy/p/6688670.html
http://blog.csdn.net/tanggao1314/article/details/51510844
http://blog.csdn.net/fz1989/article/details/51489498