Import a Hive table into a PostgreSQL table.
The table holds roughly 80 million rows.
The Hive table is stored in ORC format with Snappy compression. The DataX job configuration (hdfsreader to postgresqlwriter) is shown below.
{
    "job": {
        "setting": {
            "speed": {
                "byte": 1048576,
                "channel": 1
            }
        },
        "content": [
            {
                "reader": {
                    "name": "hdfsreader",
                    "parameter": {
                        "path": "/user/hive/warehouse/sjjsq_db.db/dwa_yczb_fp_graph_month/part-004*",
                        "defaultFS": "hdfs://192.168.201.83:8020",
                        "column": [
                            { "index": 0,  "type": "string" },
                            { "index": 1,  "type": "string" },
                            { "index": 2,  "type": "string" },
                            { "index": 3,  "type": "string" },
                            { "index": 4,  "type": "string" },
                            { "index": 5,  "type": "string" },
                            { "index": 6,  "type": "string" },
                            { "index": 7,  "type": "double" },
                            { "index": 8,  "type": "double" },
                            { "index": 9,  "type": "double" },
                            { "index": 10, "type": "string" },
                            { "index": 11, "type": "string" },
                            { "index": 12, "type": "string" },
                            { "index": 13, "type": "string" },
                            { "index": 14, "type": "string" },
                            { "index": 15, "type": "string" },
                            { "index": 16, "type": "date" },
                            { "index": 17, "type": "date" },
                            { "index": 18, "type": "string" }
                        ],
                        "fileType": "orc",
                        "sliceRecordCount": 1000000
                    }
                },
                "writer": {
                    "name": "postgresqlwriter",
                    "parameter": {
                        "print": true,
                        "encoding": "GBK",
                        "username": "postgres",
                        "password": "password",
                        "column": [
                            "id", "xfmc", "xfsbh", "xfmcbzh", "gfmc", "gfsbh", "gfmcbzh",
                            "yjje", "yjse", "yjcs", "kpnf", "fplx", "fpztdm", "sbytdm",
                            "kjlxdm", "ydfpbz", "created_ts", "updated_ts", "kpny"
                        ],
                        "connection": [
                            {
                                "jdbcUrl": "jdbc:postgresql://192.168.210.16:5432/postgres",
                                "table": ["yczb_fp_graph_month"]
                            }
                        ]
                    }
                }
            }
        ]
    }
}
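The postgresqlwriter only inserts rows, so the target table yczb_fp_graph_month must already exist in PostgreSQL before the job runs. Below is a minimal sketch, assuming DataX is installed under /opt/datax and the job JSON above is saved as hive2pg.json (both paths are assumptions, not from the original config), that launches the job with DataX's standard datax.py entry point and then checks the row count in the target table with psycopg2. Host, database, and credentials are taken from the config above; adjust them to your environment.

import subprocess

import psycopg2  # PostgreSQL client library, used here only for a post-load row-count check

# Assumed locations: DATAX_HOME and the job file name are illustrative, not from the original post.
DATAX_HOME = "/opt/datax"
JOB_FILE = "hive2pg.json"

# Launch the DataX job; datax.py is DataX's standard launcher script.
# Depending on your DataX release this may need to be invoked with python2.
subprocess.run(["python", f"{DATAX_HOME}/bin/datax.py", JOB_FILE], check=True)

# Verify that the ~80 million rows arrived in the target table.
conn = psycopg2.connect(
    host="192.168.210.16",
    port=5432,
    dbname="postgres",
    user="postgres",
    password="password",
)
with conn, conn.cursor() as cur:
    cur.execute("SELECT count(*) FROM yczb_fp_graph_month")
    print("rows loaded:", cur.fetchone()[0])
conn.close()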