Save results locally
Method 1: Redirect the query output to a file
hive -e "SELECT id, concat_ws(',', collect_set(concat(k1,'-',k2,'-',k3))) FROM test.users GROUP BY id;" > users.data
Check the result:
[root@node00 ~]# cat users.data
1 a1-a2-a3,a11-a22-a33,a111-a222-a333
2 b1-b2-b3,b11-b22-b33
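The redirect captures only stdout, so Hive's progress logs (written to stderr) stay out of the file. Adding the -S (silent) flag also suppresses them on the terminal; a minimal variant of the same query:

hive -S -e "SELECT id, concat_ws(',', collect_set(concat(k1,'-',k2,'-',k3))) FROM test.users GROUP BY id;" > users.data

Columns in the redirected output are tab-separated, matching what the CLI prints.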
Method 2: Use INSERT OVERWRITE LOCAL DIRECTORY to save the results locally
hive -e "
INSERT OVERWRITE LOCAL DIRECTORY '/home/charles/users.data'
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
SELECT id, concat_ws(',', collect_set(concat(k1,'-',k2,'-',k3))) FROM test.users GROUP BY id;
"
Check the result (note that users.data is a directory containing the part files, not a single file):
[root@node00 charles]# ll -a users.data
total 16
-rw-r--r-- 1 root root 61 May 9 17:24 000000_0
-rw-r--r-- 1 root root 12 May 9 17:24 .000000_0.crc
[root@node00 charles]# cat users.data/000000_0
1 a1-a2-a3,a11-a22-a33,a111-a222-a333
2 b1-b2-b3,b11-b22-b33
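If the query runs with more than one reducer, the directory will contain several part files (000000_0, 000001_0, ...); they can be concatenated into a single file afterwards:

cat /home/charles/users.data/0* > /home/charles/users.flat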
Method 3: Use a Hadoop command to save the results locally
[root@cdh01 ~]# hdfs dfs -get /user/hive/warehouse/test /root/test;
Check the result:
[root@cdh01 ~]# cat /root/test/data.txt
{"one":true,"three":["red","yellow","orange"],"two":19.5,"four":"poop"}
{"one":false,"three":["red","yellow","black"],"two":129.5,"four":"stars"}
{"one":false,"three":["pink","gold"],"two":222.56,"four":"fiat"}
Method 4: Use Sqoop to export the Hive table
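Sqoop exports the table's rows into a relational database rather than onto the local filesystem. A minimal sketch, assuming a MySQL database testdb with a pre-created users table whose columns match the Hive table, and tab-delimited data files under the warehouse path; the host, credentials, and table names here are placeholders:

sqoop export \
  --connect jdbc:mysql://node00:3306/testdb \
  --username root \
  --password 123456 \
  --table users \
  --export-dir /user/hive/warehouse/test.db/users \
  --input-fields-terminated-by '\t'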
Save results to HDFS
Method 1: Use INSERT OVERWRITE DIRECTORY to save the results to HDFS
hive -e "
INSERT OVERWRITE DIRECTORY '/users.data'
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
SELECT id, concat_ws(',', collect_set(concat(k1,'-',k2,'-',k3))) FROM test.users GROUP BY id;
"
Check the result:
[root@node00 ~]# hdfs dfs -ls /users.data
Found 1 items
-rwxrwxrwx 3 root supergroup 61 2019-05-09 17:28 /users.data/000000_0
[root@node00 ~]# hdfs dfs -cat /users.data/000000_0
1 a1-a2-a3,a11-a22-a33,a111-a222-a333
2 b1-b2-b3,b11-b22-b33
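Recent Hive versions also accept a STORED AS clause on directory inserts, so the result can be written in a columnar format such as Parquet instead of delimited text; a sketch (exact version support varies):

hive -e "
INSERT OVERWRITE DIRECTORY '/users.parquet'
STORED AS PARQUET
SELECT id, concat_ws(',', collect_set(concat(k1,'-',k2,'-',k3))) FROM test.users GROUP BY id;
"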
Method 2: Use the EXPORT command to save the table to HDFS
hive> export table default.test to '/root/test/';
Copying data from file:/tmp/hdfs/849dfd20-a5df-4994-a5fa-2aa44c28f5dc/hive_2019-07-19_23-13-00_527_6322373220241253473-1/-local-10000/_metadata
Copying file: file:/tmp/hdfs/849dfd20-a5df-4994-a5fa-2aa44c28f5dc/hive_2019-07-19_23-13-00_527_6322373220241253473-1/-local-10000/_metadata
Copying data from hdfs://cdh01.cnyimi.cn:8020/user/hive/warehouse/test
Copying file: hdfs://cdh01.cnyimi.cn:8020/user/hive/warehouse/test/data.txt
OK
Check the result (EXPORT writes a _metadata file plus the table data under a data/ subdirectory):
[root@cdh01 ~]# hdfs dfs -cat /root/test/_metadata
{"version":"0.1","table":"{\"1\":{\"str\":\"test\"},\"2\":{\"str\":\"default\"},\"3\":{\"str\":\"root\"},\"4\":{\"i32\":1561560428},\"5\":{\"i32\":0},\"6\":{\"i32\":0},\"7\":{\"rec\":{\"1\":{\"lst\":[\"rec\",4,{\"1\":{\"str\":\"one\"},\"2\":{\"str\":\"boolean\"}},{\"1\":{\"str\":\"three\"},\"2\":{\"str\":\"array\"}},{\"1\":{\"str\":\"two\"},\"2\":{\"str\":\"double\"}},{\"1\":{\"str\":\"four\"},\"2\":{\"str\":\"string\"}}]},\"2\":{\"str\":\"hdfs://cdh01.cnyimi.cn:8020/user/hive/warehouse/test\"},\"3\":{\"str\":\"org.apache.hadoop.mapred.TextInputFormat\"},\"4\":{\"str\":\"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat\"},\"5\":{\"tf\":0},\"6\":{\"i32\":-1},\"7\":{\"rec\":{\"2\":{\"str\":\"org.openx.data.jsonserde.JsonSerDe\"},\"3\":{\"map\":[\"str\",\"str\",1,{\"serialization.format\":\"1\"}]}}},\"8\":{\"lst\":[\"str\",0]},\"9\":{\"lst\":[\"rec\",0]},\"10\":{\"map\":[\"str\",\"str\",0,{}]},\"11\":{\"rec\":{\"1\":{\"lst\":[\"str\",0]},\"2\":{\"lst\":[\"lst\",0]},\"3\":{\"map\":[\"lst\",\"str\",0,{}]}}},\"12\":{\"tf\":0}}},\"8\":{\"lst\":[\"rec\",0]},\"9\":{\"map\":[\"str\",\"str\",6,{\"totalSize\":\"211\",\"numRows\":\"0\",\"rawDataSize\":\"0\",\"numFiles\":\"1\",\"transient_lastDdlTime\":\"1561560623\",\"numFilesErasureCoded\":\"0\"}]},\"12\":{\"str\":\"MANAGED_TABLE\"},\"15\":{\"i32\":1}}" ,"partitions":[]}
[root@cdh01 ~]# hdfs dfs -cat /root/test/data/data.txt
{"one":true,"three":["red","yellow","orange"],"two":19.5,"four":"poop"}
{"one":false,"three":["red","yellow","black"],"two":129.5,"four":"stars"}
{"one":false,"three":["pink","gold"],"two":222.56,"four":"fiat"}