Hive只在一个节点上安装即可
1.上传tar包
2.解压
# Extract the Hive tarball into /cloud/ (-z gzip, -x extract, -v verbose, -f file, -C target dir).
tar -zxvf hive-0.9.0.tar.gz -C /cloud/
3.配置mysql metastore(切换到root用户)
配置HIVE_HOME环境变量
查询以前安装的mysql相关包
# List any previously installed MySQL-related packages.
rpm -qa | grep mysql
暴力删除这个包
# Force-remove the conflicting package; --nodeps skips dependency checks.
rpm -e mysql-libs-5.1.66-2.el6_3.i686 --nodeps
mysql安装
修改mysql的密码
# Interactive script to set the MySQL root password and tighten defaults
# (remove anonymous users, allow/deny remote root login, etc.).
/usr/bin/mysql_secure_installation
(注意:删除匿名用户,允许用户远程连接)
登陆mysql
# Log in to MySQL as root (-p prompts for the password).
mysql -u root -p
4.配置hive
新增hive-site.xml
添加如下内容:
5.安装hive和mysql完成后,将mysql的连接jar包拷贝到$HIVE_HOME/lib目录下
如果出现没有权限的问题,在mysql授权(在安装mysql的机器上执行)
# Connect to the local MySQL server as root (password given inline with -p).
./bin/mysql -h127.0.0.1 -uroot -proot
# Grant all privileges: *.* = every table in every database; '%' = the client may connect from any host/IP.
grant all privileges on *.* to root@'%' identified by 'root';
flush privileges;
6.建表(默认是内部表)
-- Internal (managed) table: dropping it removes both the metadata and the underlying data files.
-- Rows are tab-delimited text.
-- NOTE(review): 'time' is a keyword in some Hive versions — may need backticks; confirm on the target Hive.
create table trade_detail(id bigint, account string, income double, expenses double, time string) row format delimited fields terminated by '\t';
建分区表
-- Partitioned table: 'logdate' is a partition column stored as a directory level, not inside the data files.
create table td_part(id bigint, account string, income double, expenses double, time string) partitioned by (logdate string) row format delimited fields terminated by '\t';
建外部表
-- External table: the data lives at HDFS path '/td_ext'; dropping the table keeps the files.
create external table td_ext(id bigint, account string, income double, expenses double, time string) row format delimited fields terminated by '\t' location '/td_ext';
7.创建分区表
普通表和分区表区别:有大量数据增加的需要建分区表
-- Example partitioned table keyed by publication date.
create table book (id bigint, name string) partitioned by (pubdate string) row format delimited fields terminated by '\t';
分区表加载数据
-- 'local' reads from the local filesystem; 'overwrite' replaces any existing data in the target partition.
load data local inpath './book.txt' overwrite into table book partition (pubdate='2010-08-22');
-- Append (no 'overwrite') a local file into the nation="USA" partition.
-- Fixed: the table name was inconsistent ('beauty' in the LOAD vs 'beauties'
-- in the query below); unified to 'beauties'.
load data local inpath '/root/data.am' into table beauties partition (nation="USA");
-- Average size per nation, ordered by the aliased aggregate — older Hive
-- versions reject raw expressions like avg(size) in ORDER BY.
select nation, avg(size) as avg_size from beauties group by nation order by avg_size;
启动hive ./bin/hive
常用命令
-- List tables in the current database.
show tables;
-- Drop a table; for an internal (managed) table this also deletes its data.
drop table table_name;
-- Quick inspection; prefer an explicit column list in real queries.
select * from table_name;
-- Load from the LOCAL filesystem, replacing the table's existing data.
-- Fixed: the trailing annotation ("import local data") was glued directly
-- after the semicolon and would be a syntax error if pasted into the Hive CLI.
load data local inpath '/home/hadoop/hive-0.12.0/test2.txt' overwrite into table t_order;
-- Load from HDFS (no 'local'); Hive MOVES the source file into the table's
-- warehouse directory. Fixed: the trailing annotation ("import HDFS data")
-- was glued after the semicolon and would break the statement.
load data inpath '/home/hadoop/hive-0.12.0/test2.txt' overwrite into table t_order;
1.创建hive数据库
-- Orders table: one tab-delimited record per line.
-- NOTE(review): 'rongling' looks like a romanization typo for 'rongliang'
-- (capacity, e.g. 64G) — confirm the intended column name before renaming.
create table t_order(
    id int,
    name string,
    rongling string,
    price double
)
row format delimited
fields terminated by '\t';
2.创建测试数据
0000101 iphone6plus 64G 6888
0000102 xiaomi 64G 2388
0000103 iphone5s 64G 6888
0000104 mi3 64G 2388
0000105 huawei 64G 6888
注意数据间不是空格是tab
3.导入本地数据
-- Load the tab-delimited test file from the local filesystem; 'overwrite'
-- replaces any existing rows in t_order.
load data local inpath '/home/hadoop/hive-0.12.0/test2.txt' overwrite into table t_order;
4.查询数据
-- Fetch all rows (ad-hoc check of the loaded data).
select * from t_order;
-- Total row count; COUNT(*) counts every row, including rows with NULLs.
select count(*) from t_order;