hadoop hive notes

1. Create tables and partitions for squid_log
create table squid_tmp(visittime string, clientip string, visitdom string, visiturl string, visitstat string) partitioned by (pt_ip string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
 
create table squid_tmpp(visittime string, clientip string, visitdom string, visiturl string, visitstat string) partitioned by (pt_ip string, pt_dt int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
 
create table squid_log (visittime string, clientip string, visitdom string, visiturl string, visitstat string) partitioned by (pt_ip string, pt_dt int) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
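
A quick way to confirm that the columns and partition keys came out as intended is describe formatted (a standard Hive command; squid_log below is the table just defined):

describe formatted squid_log;
show create table squid_log;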
 
2. Load a text file into Hive
load data local inpath '/data1/squid_log/117.34.80.249/tmp.log' overwrite into table squid.squid_tmp partition(pt_ip = '117.34.80.249');
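
Note that overwrite replaces any data already in that partition; leaving the keyword out appends instead. A quick sanity check after the load might look like this (same table and partition value as above):

select count(*) from squid.squid_tmp where pt_ip = '117.34.80.249';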
 
3. Insert from squid_tmp into squid_log
insert into table squid_log partition(pt_ip, pt_dt) select visittime, clientip, visitdom, visiturl, visitstat, pt_ip, substr(visittime,1,8) as pt_dt from squid_tmp;
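
Because both pt_ip and pt_dt are filled in dynamically here, the dynamic partition columns must be the last ones in the select list, in the same order as the partition clause, and dynamic partitioning normally has to be enabled for the session first (standard Hive settings):

set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;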
 
4. Drop a Hive partition
alter table squid_log drop partition(pt_ip = '117.34.80.249', pt_dt=20130224);
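
If the partition may already be gone, if exists keeps the statement from failing (standard Hive syntax, same partition values as above):

alter table squid_log drop if exists partition(pt_ip = '117.34.80.249', pt_dt = 20130224);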
 
5. View partition information
show partitions squid_log;
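
When a table has many partitions, the listing can be narrowed to one IP with a partial partition spec (supported by show partitions in recent Hive versions):

show partitions squid_log partition(pt_ip = '117.34.80.249');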
 
 
