Native types
Native types include TINYINT, SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY (available from Hive 0.8.0) and TIMESTAMP (available from Hive 0.8.0). Loading this kind of data is easy: pick a field delimiter and write the columns to a file separated by that delimiter.
Suppose we have a user login table:
CREATE TABLE login (
  uid BIGINT,
  ip STRING
) PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED
  FIELDS TERMINATED BY ','
STORED AS TEXTFILE;
This says that the uid and ip fields of the login table are separated by the delimiter ','.
Generate the data file for the Hive table:
# printf "%s,%s\n" 3105007001 192.168.1.1 >> login.txt
# printf "%s,%s\n" 3105007002 192.168.1.2 >> login.txt
Contents of login.txt:
# cat login.txt
3105007001,192.168.1.1
3105007002,192.168.1.2
Load the data into the Hive table:
LOAD DATA LOCAL INPATH '/home/hadoop/login.txt' OVERWRITE INTO TABLE login PARTITION (dt='20130101');
Query the data:
select uid,ip from login where dt='20130101';
3105007001  192.168.1.1
3105007002  192.168.1.2
Array
Suppose the login table is:
CREATE TABLE login_array (
  ip STRING,
  uid array<BIGINT>
) PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED
  FIELDS TERMINATED BY ','
  COLLECTION ITEMS TERMINATED BY '|'
STORED AS TEXTFILE;
This models multiple users logging in from each ip: the ip and uid fields are separated by ',', and the elements within the uid array are separated by '|'.
Generate the data file for the Hive table:
# printf "%s,%s|%s|%s\n" 192.168.1.1 3105007010 3105007011 3105007012 >> login_array.txt
# printf "%s,%s|%s|%s\n" 192.168.1.2 3105007020 3105007021 3105007022 >> login_array.txt
Contents of login_array.txt:
# cat login_array.txt
192.168.1.1,3105007010|3105007011|3105007012
192.168.1.2,3105007020|3105007021|3105007022
Load the data into the Hive table:
LOAD DATA LOCAL INPATH '/home/hadoop/login_array.txt' OVERWRITE INTO TABLE login_array PARTITION (dt='20130101');
Query the data:
select ip,uid from login_array where dt='20130101';
192.168.1.1  [3105007010,3105007011,3105007012]
192.168.1.2  [3105007020,3105007021,3105007022]
Using arrays
select ip,uid[0] from login_array where dt='20130101'; -- access an array element by index
select ip,size(uid) from login_array where dt='20130101'; -- get the array length
select ip from login_array where dt='20130101' and array_contains(uid,3105007011); -- search the array
For more operations, see https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF#LanguageManualUDF-CollectionFunctions
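Beyond indexing and searching, a common need is to flatten the array so that each element becomes its own row. A minimal sketch using LATERAL VIEW with explode (the aliases t and single_uid are just illustrative):
select ip, t.single_uid
from login_array
lateral view explode(uid) t as single_uid
where dt='20130101'; -- one output row per element of uid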
Map
Suppose the login table is:
CREATE TABLE login_map (
  ip STRING,
  uid STRING,
  gameinfo map<string,bigint>
) PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED
  FIELDS TERMINATED BY ','
  COLLECTION ITEMS TERMINATED BY '|'
  MAP KEYS TERMINATED BY ':'
STORED AS TEXTFILE;
This models each user carrying game information. A user can have several games; the key is the game name and the value is the score for that game. Within the map, a key and its value are separated by ':', and map entries are separated by '|'.
Generate the data file for the Hive table:
# printf "%s,%s,%s:%s|%s:%s|%s:%s\n" 192.168.1.1 3105007010 wow 10 cf 1 qqgame 2 >> login_map.txt
# printf "%s,%s,%s:%s|%s:%s|%s:%s\n" 192.168.1.2 3105007012 wow 20 cf 21 qqgame 22 >> login_map.txt
Contents of login_map.txt:
# cat login_map.txt
192.168.1.1,3105007010,wow:10|cf:1|qqgame:2
192.168.1.2,3105007012,wow:20|cf:21|qqgame:22
Load the data into the Hive table:
LOAD DATA LOCAL INPATH '/home/hadoop/login_map.txt' OVERWRITE INTO TABLE login_map PARTITION (dt='20130101');
Query the data:
select ip,uid,gameinfo from login_map where dt='20130101';
192.168.1.1  3105007010  {"wow":10,"cf":1,"qqgame":2}
192.168.1.2  3105007012  {"wow":20,"cf":21,"qqgame":22}
Using maps
select ip,uid,gameinfo['wow'] from login_map where dt='20130101'; -- access a map value by key
select ip,uid,size(gameinfo) from login_map where dt='20130101'; -- get the map size
select ip,uid from login_map where dt='20130101' and array_contains(map_keys(gameinfo),'wow'); -- search the map keys: find the records of users who played wow
For more operations, see https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF#LanguageManualUDF-CollectionFunctions
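A map can be flattened the same way. A minimal sketch, assuming a Hive version where explode accepts a map argument (0.8.0 and later; the aliases g, game and score are just illustrative):
select ip, uid, g.game, g.score
from login_map
lateral view explode(gameinfo) g as game, score
where dt='20130101'; -- one output row per game a user has played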
Struct
Suppose the login table is:
CREATE TABLE login_struct (
  ip STRING,
  user struct<uid:bigint,name:string>
) PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED
  FIELDS TERMINATED BY ','
  COLLECTION ITEMS TERMINATED BY '|'
  MAP KEYS TERMINATED BY ':'
STORED AS TEXTFILE;
Here user is a struct containing the user's uid and name.
Generate the data file for the Hive table:
# printf "%s,%s|%s\n" 192.168.1.1 3105007010 blue >> login_struct.txt
# printf "%s,%s|%s\n" 192.168.1.2 3105007012 ggjucheng >> login_struct.txt
Contents of login_struct.txt:
# cat login_struct.txt
192.168.1.1,3105007010|blue
192.168.1.2,3105007012|ggjucheng
Load the data into the Hive table:
LOAD DATA LOCAL INPATH '/home/hadoop/login_struct.txt' OVERWRITE INTO TABLE login_struct PARTITION (dt='20130101');
Query the data:
select ip,user from login_struct where dt='20130101';
192.168.1.1  {"uid":3105007010,"name":"blue"}
192.168.1.2  {"uid":3105007012,"name":"ggjucheng"}
Using structs
select ip,user.uid,user.name from login_struct where dt='20130101'; -- access struct fields with dot notation
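Struct fields can also appear in predicates, not only in the select list. A small illustrative sketch:
select ip, user.name from login_struct where dt='20130101' and user.uid=3105007010; -- filter on a struct field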
Union
Union types are rarely used, so they are not covered here.
Nested complex types
In the array, map, and struct types covered above, the elements were all native types. If an element is itself a complex type, how do we load the data?
Suppose the login table is:
CREATE TABLE login_game_complex (
  ip STRING,
  uid STRING,
  gameinfo map<bigint,struct<name:string,score:bigint,level:string>>
) PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED
STORED AS TEXTFILE;
This models each user carrying game information. A user can have several games; the key is the game id and the value is a struct containing the game's name, score, and level.
Producing input files for such complex types is troublesome, and when the nesting gets deep, generating the correct format is error-prone. Briefly: as the nesting depth of the complex types increases, each level uses the next delimiter in a fixed sequence, which by default runs through the control characters \001, \002, \003, and so on.
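For illustration only, a hedged sketch of what a single raw row of login_game_complex might look like under that default delimiter assignment (octal escapes stand in for the actual control characters): \001 separates the top-level fields, \002 separates map entries, \003 separates a key from its value, and \004 separates the struct fields inside each value:
192.168.1.0\0013105007010\0011\003wow\004100\004v1\0022\003cf\004100\004v2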
Generating such a file from the shell and loading it with LOAD DATA is not covered here; interested readers can look at the serialize method of org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe in the Hive source code.
Instead, here is another way to get the data in: insert. First load the data into a simple flat table, then insert from it into the table with the nested complex type.
Create the simple table:
CREATE TABLE login_game_simple (
  ip STRING,
  uid STRING,
  gameid bigint,
  gamename string,
  gamescore bigint,
  gamelevel string
) PARTITIONED BY (dt STRING)
ROW FORMAT DELIMITED
  FIELDS TERMINATED BY ','
STORED AS TEXTFILE;
Generate login_game_simple.txt with the following contents:
192.168.1.0,3105007010,1,wow,100,v1
192.168.1.0,3105007010,2,cf,100,v2
192.168.1.0,3105007010,3,qqgame,100,v3
192.168.1.2,3105007011,1,wow,101,v1
192.168.1.2,3105007011,3,qqgame,101,v3
192.168.1.2,3105007012,1,wow,102,v1
192.168.1.2,3105007012,2,cf,102,v2
192.168.1.2,3105007012,3,qqgame,102,v3
After loading this data into Hive, build the nested gameinfo map and insert it into login_game_complex:
INSERT OVERWRITE TABLE login_game_complex PARTITION (dt='20130101')
select ip, uid, map(gameid, named_struct('name',gamename,'score',gamescore,'level',gamelevel))
FROM login_game_simple
where dt='20130101';
Query the data:
select ip,uid,gameinfo from login_game_complex where dt='20130101';
192.168.1.0  3105007010  {1:{"name":"wow","score":100,"level":"v1"}}
192.168.1.0  3105007010  {2:{"name":"cf","score":100,"level":"v2"}}
192.168.1.0  3105007010  {3:{"name":"qqgame","score":100,"level":"v3"}}
192.168.1.2  3105007011  {1:{"name":"wow","score":101,"level":"v1"}}
192.168.1.2  3105007011  {3:{"name":"qqgame","score":101,"level":"v3"}}
192.168.1.2  3105007012  {1:{"name":"wow","score":102,"level":"v1"}}
192.168.1.2  3105007012  {2:{"name":"cf","score":102,"level":"v2"}}
192.168.1.2  3105007012  {3:{"name":"qqgame","score":102,"level":"v3"}}
This only demonstrates how to populate a nested complex type, so it is merely an example. To load the data properly, you would still need a custom aggregate function that groups by ip and uid and merges the gameinfo maps. Hive has no such built-in function, and for the sake of brevity this article does not go into writing a custom one.
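As a rough sketch of what that merge step could look like with later, built-in machinery: Hive's collect_list aggregate (which accepts struct arguments from roughly Hive 1.2.0 onward) can at least gather the per-game structs into one array per (ip, uid). Note this yields array<struct<...>> rather than the map<bigint,struct<...>> used above, so it is a related technique rather than a drop-in replacement:
select ip, uid,
       collect_list(named_struct('name',gamename,'score',gamescore,'level',gamelevel)) as games -- all of this user's games in one array
from login_game_simple
where dt='20130101'
group by ip, uid;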
Reposted from http://www.cnblogs.com/ggjucheng/archive/2013/01/31/2868941.html