Using Sqoop 1 to export Hive data to MySQL

#!/bin/sh
# MySQL connection string (expanded on the sqoop command line as --connect ...)
srcConnect="connect jdbc:mysql://10.2.1.1:3306/test"
# HDFS directory used to stage the temporary export files
tempTabPath=/user/test

sql="select NVL(rowkey,'')     as rowkey,
            NVL(projid,'')     as projid,
            NVL(devid,'')      as devid,
            NVL(barcode,'')    as barcode,
            NVL(devaddr,'')    as devaddr,
            NVL(runmode_mb,'') as runmode_mb,
            NVL(starttime,   CAST('1971-01-01 00:30:00' AS TIMESTAMP)) as starttime,
            NVL(endttime,    CAST('1971-01-01 00:30:00' AS TIMESTAMP)) as endttime,
            NVL(receivetime, CAST('1971-01-01 00:30:00' AS TIMESTAMP)) as receivetime
from test"


echo "++++++++++++++++++++ Start exporting data ++++++++++++++++++++"


# Export the Hive query result to HDFS as tab-delimited text
hive -e "
        use default;
        insert overwrite directory '${tempTabPath}' row format delimited fields terminated by '\t' ${sql};
"
# Use sqoop to export the staged HDFS data into the MySQL cluster
sqoop export \
  --${srcConnect} \
  --username root \
  --password 1234qwer \
  --table speed_test \
  --export-dir ${tempTabPath} \
  --input-fields-terminated-by '\t' \
  -m 5
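# Optional guard, not in the original script: stop here and keep the staging
# directory for debugging if the sqoop export failed
if [ $? -ne 0 ]; then
    echo "sqoop export failed, leaving ${tempTabPath} in place"
    exit 1
fi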
echo "++++++++++++++++++++ Finished exporting data ++++++++++++++++++++"
  
# Clean up the temporary staging directory on HDFS
hadoop fs -rm -r ${tempTabPath}
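Once the script has finished, a quick way to confirm the rows actually landed in MySQL is to count them on the target side (a sketch only; adjust the host and credentials to your environment):

mysql -h 10.2.1.1 -u root -p -e "select count(*) from test.speed_test;"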

 

Before running the script, create a table in MySQL whose structure matches the Hive table (speed_test in the example above); otherwise the export will fail with an error.
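For reference, a minimal sketch of what that target table could look like. The column names come from the SELECT above; the VARCHAR lengths and DATETIME types are assumptions and should be adjusted to the real data:

CREATE TABLE speed_test (
    rowkey      VARCHAR(64),
    projid      VARCHAR(64),
    devid       VARCHAR(64),
    barcode     VARCHAR(64),
    devaddr     VARCHAR(64),
    runmode_mb  VARCHAR(64),
    starttime   DATETIME,
    endttime    DATETIME,
    receivetime DATETIME
);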

 
