Connecting to Hive via JDBC

1. Import the stu table into Hive with Sqoop

The source stu table lives in the test database on hadoop102.

bin/sqoop import \
--connect jdbc:mysql://hadoop102:3306/test \
--username root \
--password 000000 \
--table stu \
--num-mappers 1 \
--hive-import \
--fields-terminated-by "\t" \
--hive-overwrite \
--hive-table stu_hive

After the import, run bin/hive from the Hive installation directory to open the Hive CLI, then list all tables to confirm stu_hive exists:

show tables;

2. Connect to and query Hive via JDBC

1. Verify on hadoop102 that JDBC can connect to Hive

First check whether the Hive service is up; 10000 is HiveServer2's default port:

sudo netstat -anp|grep 10000

If a matching process appears, the Hive service is already running; otherwise start it:

[xikuang@hadoop102 hive]$ bin/hive --service hiveserver2   

Then start the Beeline client (allow some extra time for HiveServer2 to finish starting):

[xikuang@hadoop102 hive]$ bin/beeline -u jdbc:hive2://hadoop102:10000 -n xikuang

If the connection succeeds, JDBC can reach the Hive database.

2. Add the hive-jdbc dependency to the Maven web project (its version should match the Hive server, 2.3.0 here):

    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>2.3.0</version>
    </dependency>
    <!-- junit, for the unit tests in step 3 -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.9</version>
      <scope>test</scope>
    </dependency>

3. Unit-test the corresponding API under src/test/java

TestHiveJDBC.java

@Before marks a method that runs before each test (here it loads the driver and opens the connection)

@After marks a method that runs after each test (here it closes the resources)

@Test marks a unit test method

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.sql.*;

/**
 * JDBC operations against Hive
 */
public class TestHiveJDBC {
    
    private static String driverName="org.apache.hive.jdbc.HiveDriver";
    private static String url="jdbc:hive2://hadoop102:10000";
    private static String user="xikuang";
    private static String password="000000";


    private static Connection conn=null;
    private static Statement stmt=null;
    private static ResultSet rs=null;

    //load the driver and create the connection
    @Before
    public void init() throws ClassNotFoundException, SQLException {

        Class.forName(driverName);
        conn= DriverManager.getConnection(url,user,password);
        stmt=conn.createStatement();
        System.out.println(conn);
    }
    //create a database
    @Test
    public void createDataBase() throws SQLException {
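        // with default settings, Hive creates the database directory under
        // hive.metastore.warehouse.dir, e.g. /user/hive/warehouse/hive_jdbc_test.db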
        String sql="create database hive_jdbc_test";
        System.out.println("running:"+sql);
        stmt.execute(sql);
    }

    //list all databases
    @Test
    public void showDataBases() throws SQLException {
        String sql="show databases";
        System.out.println("running: "+sql);
        rs=stmt.executeQuery(sql);
        while(rs.next()){
            String name = rs.getString(1);
            System.out.println(name);
        }
    }
    //drop a database
    @Test
    public void dropDatabase() throws SQLException {
        stmt.execute("drop database if exists hive_jdbc_test CASCADE ");
    }
    //list all tables
    @Test
    public void showTables() throws SQLException {
        rs=stmt.executeQuery("show tables");
        while (rs.next()){
            String tableName = rs.getString(1);
            System.out.println(tableName);
        }
    }
    //query the rows in the table
    @Test
    public void queryAll() throws SQLException {
        rs=stmt.executeQuery("select * from stu_hive");
        while (rs.next()){
            int id = rs.getInt(1);
            String name = rs.getString(2);
            String sex = rs.getString(3);
            System.out.println(id+" "+name+" "+sex);
        }
    }



    //count query
    @Test
    public void getCount() throws SQLException {
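        // unlike the metadata queries above, this aggregation launches a
        // MapReduce/Tez job, so it can take noticeably longer to return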
        rs=stmt.executeQuery("select count(1) from staff");
        while(rs.next()){
            int count = rs.getInt(1);
            System.out.println("count: "+count);

        }
    }
    @Test
    public void createExTab() throws SQLException {
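        // an external table only registers metadata: dropping it later removes
        // the table definition but leaves the files at the location in HDFS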

        stmt.execute("create external table if not exists student_ext ( " +
                "name string , " +
                "age int , " +
                "agent string  ," +
                "adress struct) " +
                "row format delimited " +
                "fields terminated by ',' " +
                "collection items terminated by ':'" +
                "lines terminated by '\n' " +
                "stored as textfile " +
                "location '/testData/hive/student1' ");
    }
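
    // Hypothetical follow-up sketch: query the external table created above.
    // The struct fields street/city are illustrative assumptions, so adjust
    // the column access to match what your data files actually contain.
    @Test
    public void queryExTab() throws SQLException {
        rs=stmt.executeQuery("select name, address.city from student_ext");
        while (rs.next()){
            System.out.println(rs.getString(1)+" "+rs.getString(2));
        }
    }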


    @After
    public void close() throws SQLException {
        if(rs!=null){
            rs.close();
        }
        if(stmt!=null){
            stmt.close();
        }
        if(conn!=null){
            conn.close();
        }
    }
}
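
Outside of JUnit, the same flow can be written with try-with-resources so that the ResultSet, Statement, and Connection are closed automatically even when a query fails. A minimal standalone sketch, assuming the same hadoop102 setup and the stu_hive table imported in step 1 (the class name HiveQuickQuery is just an example):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveQuickQuery {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver"); // same driver as above
        String url = "jdbc:hive2://hadoop102:10000";
        // resources declared in the try header are closed in reverse order on exit
        try (Connection conn = DriverManager.getConnection(url, "xikuang", "000000");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select * from stu_hive")) {
            while (rs.next()) {
                System.out.println(rs.getInt(1) + " " + rs.getString(2) + " " + rs.getString(3));
            }
        }
    }
}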
