https://dev.mysql.com/downloads/connector/j/
例子:下载mysql-connector-java-5.1.41.tar.gz
[root@localhost Software]# tar xzf mysql-connector-java-5.1.41.tar.gz
[root@localhost Software]# cd mysql-connector-java-5.1.41/
[root@localhost mysql-connector-java-5.1.41]# ll
[root@localhost Software]# cp mysql-connector-java-5.1.41/mysql-connector-java-5.1.41-bin.jar /usr/hive/lib/mysql-connector-java-5.1.41-bin.jar
[root@localhost hive]# mysql -u root -ppassword
mysql> create database hive_db;
hive.metastore.warehouse.dir
/usr/hive/warehouse
location of default database for the warehouse
hive.metastore.local
true
Use false if a production metastore server is used
hive.exec.scratchdir
/tmp/hive
HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. For each connecting user, an HDFS scratch dir: ${hive.exec.scratchdir}/ is created, with ${hive.scratch.dir.permission}.
javax.jdo.option.ConnectionURL
jdbc:mysql://localhost:3306/hive_db?createDatabaseIfNotExist=true
Roy
JDBC connect string for a JDBC metastore.
To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
For example, jdbc:postgresql://myhost/db?ssl=true for postgres database.
javax.jdo.option.ConnectionDriverName
com.mysql.jdbc.Driver
User-Defined(Roy) Driver class name for a JDBC metastore
javax.jdo.option.ConnectionUserName
root
User-defined(Roy)Username to use against metastore database
javax.jdo.option.ConnectionPassword
password
User-defined(Roy)password to use against metastore database
[root@localhost hive]# schematool -dbType mysql -initSchema
--显示成功
schemaTool completed
[root@localhost hive]# hive --service metastore &
--屏幕提示信息不显示时,按ctrl+c退出
[root@localhost hive]# jps
--显示进程信息多了(RunJar)
51280 Jps
5985 SecondaryNameNode
6226 ResourceManager
45766 DataNode
5753 NameNode
51194 RunJar
6348 NodeManager
5.3、有需要时,可启动hive 远程服务 (端口号10000)
[root@localhost hive]# hive --service hiveserver2 &
6.1、准备导入文本文件/root/桌面/Test/wc-in/a.txt
格式:
1,h
2,i
3,v
4,e
[root@localhost hadoop]# hive
hive> create table a(id int,name string)
> row format delimited fields terminated by ',';
--显示信息
OK
Time taken: 0.288 seconds
hive> load data local inpath '/root/桌面/Test/wc-in/a.txt' into table a;
--显示信息
Loading data to table default.a
OK
Time taken: 0.763 seconds
hive> select * from a;
OK
1 h
2 i
3 v
4 e
Time taken: 0.309 seconds, Fetched: 4 row(s)
hive> dfs -ls /usr/hive/warehouse/a;
--显示信息
Found 1 items
-rw-r--r-- 1 root supergroup 16 2017-03-08 17:46 /usr/hive/warehouse/a/a.txt
hive> dfs -cat /usr/hive/warehouse/a/*;
1,h
2,i
3,v
4,e
[root@localhost conf]# mysql -u root -ppassword
mysql> use hive_db;
mysql> select TBL_ID, CREATE_TIME,DB_ID, OWNER, TBL_NAME,TBL_TYPE from TBLS;
--显示信息
+--------+-------------+-------+-------+----------+---------------+
| TBL_ID | CREATE_TIME | DB_ID | OWNER |TBL_NAME | TBL_TYPE |
+--------+-------------+-------+-------+----------+---------------+
| 37 | 1488966386 | 1 | root | a | MANAGED_TABLE |
+--------+-------------+-------+-------+----------+---------------+
1 row in set (0.03 sec)
[root@localhost hadoop]# hdfs dfs -ls /usr/hive/warehouse/a
--显示信息
Found 1 items
-rw-r--r-- 1 root supergroup 16 2017-03-08 17:46 /usr/hive/warehouse/a/a.txt
[root@localhost hadoop]# hdfs dfs -cat /usr/hive/warehouse/a/*
1,h
2,i
3,v
4,e
常见问题处理:
1、启动hive时报错
[root@localhost hive]# hive
--显示报错信息
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException): Cannot create directory /tmp/hive/root/24f1d91f-f32b-47e1-824d-ba26b02bd13e. Name node is in safe mode.
原因:hadoop为安全模式
--解决方法:
关闭安全模式
[root@localhost hadoop]# hadoop dfsadmin -safemode leave
--显示信息
DEPRECATED: Use of this script to execute hdfs command is deprecated.
Instead use the hdfs command for it.
Safe mode is OFF
2、在导入数据时出错信息
hive> load data local inpath '/root/桌面/Test/wc-in/a.txt' into table a;
--显示报错信息
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /usr/hive/warehouse/a/a_copy_2.txt could only be replicated to 0 nodes instead of minReplication (=1). There are 0 datanode(s) running and no node(s) are excluded in this operation.
原因:hadoop没有启动datanode
解决方法:
[root@localhost hive]# start-dfs.sh
[root@localhost hive]# jps
--显示信息
51152 Jps
5985 SecondaryNameNode
6226 ResourceManager
45766 DataNode
5753 NameNode
6348 NodeManager
应网友要求测个例子:
--调用HiveServer2客户端和beeline命令用法
--启用服务,信息不动时Ctrl+C退出
[root@localhost bin]# hiveserver2
[root@localhost bin]# beeline
which: no hbase in (/usr/lib64/qt-3.3/bin:/root/perl5/bin:/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin:/usr/hadoop/bin:/usr/hadoop/bin:/usr/hadoop/sbin:/usr/hive/bin:/usr/java/jdk1.8.0_111/bin:/root/bin:/usr/hadoop/bin:/usr/hadoop/sbin:/usr/hive/bin:/usr/java/jdk1.8.0_111/bin)
Beeline version 2.1.1 by Apache Hive
beeline>
连接和登陆账号密码输入:
Connecting to jdbc:mysql://localhost:3306/hive_db
Enter username for jdbc:mysql://localhost:3306/hive_db: root
Enter password for jdbc:mysql://localhost:3306/hive_db: ********
--测试创建表:
0: jdbc:mysql://localhost:3306/hive_db> create table Test_beeline(id int);
显示信息:
No rows affected (0.044 seconds)
--查看创建表
0: jdbc:mysql://localhost:3306/hive_db> show tables;