DataX hdfswriter documentation:
https://github.com/alibaba/DataX/blob/master/hdfswriter/doc/hdfswriter.md
Note that the field delimiter hdfswriter uses when writing must match the field delimiter of the target Hive table; otherwise the data cannot be queried from the Hive table.
Also, although Hive 3 supports the decimal type, DataX does not yet, so the DataX job should declare such columns as string instead of decimal.
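Because money will be stored in the ORC files as a string while the Hive DDL below declares it as decimal(28,10), Hive relies on ORC schema conversion to turn the value back into a decimal on read. A minimal sanity check after the first load (a sketch; assumes the table defined below is already populated):

-- If this Hive version cannot convert the string-encoded ORC column
-- back to decimal(28,10) on read, the error will surface here.
select money from ods.studentpay_chain limit 10;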
Hive table DDL
create external table ods.studentpay_chain (
    id         string         COMMENT 'id',
    name       string         COMMENT 'name',
    age        int            COMMENT 'age',
    money      decimal(28,10) COMMENT 'amount',
    updateTime timestamp      COMMENT 'last update time',
    startDate  timestamp      COMMENT 'effective date',
    endDate    timestamp      COMMENT 'expiration date'
)
COMMENT 'student payment table'
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LINES TERMINATED BY '\n'
STORED AS ORC
LOCATION '/user/hive/warehouse/ods.db/ods.studentpay_chain';
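Before running the job, it is worth confirming that the table metadata actually matches the writer settings below (delimiter, ORC storage format, and the LOCATION path DataX writes into). A standard Hive check:

-- Shows the field delimiter, storage format, and location of the table;
-- these should match fieldDelimiter, fileType, and path in the writer config.
DESCRIBE FORMATTED ods.studentpay_chain;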
DataX job configuration. The writer's column list must line up, in order, with the columns the reader selects (here select * via column ["*"]), and the column names should match the Hive DDL above.
{
    "job": {
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "column": ["*"],
                        "connection": [
                            {
                                "jdbcUrl": ["jdbc:mysql://xxx:3306/xxx"],
                                "table": ["$table"]
                            }
                        ],
                        "password": "xxx",
                        "username": "xxx",
                        "where": "updateTime > '$from' and updateTime < '$to'"
                    }
                },
                "writer": {
                    "name": "hdfswriter",
                    "parameter": {
                        "column": [
                            {"name": "id",         "type": "string"},
                            {"name": "name",       "type": "string"},
                            {"name": "age",        "type": "int"},
                            {"name": "money",      "type": "string"},
                            {"name": "updateTime", "type": "timestamp"},
                            {"name": "startDate",  "type": "timestamp"},
                            {"name": "endDate",    "type": "timestamp"}
                        ],
                        "compress": "SNAPPY",
                        "defaultFS": "hdfs://xxx:9000",
                        "fieldDelimiter": "\t",
                        "fileName": "$table",
                        "fileType": "orc",
                        "path": "/user/hive/warehouse/ods.db/ods.studentpay_chain",
                        "writeMode": "append"
                    }
                }
            }
        ],
        "setting": {
            "speed": {
                "channel": "2"
            }
        }
    }
}
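After a run completes, a quick sanity check from the Hive side is to count the rows in the same window that was passed to the reader (a sketch; substitute the actual $from/$to values the job ran with):

-- Row count over the synced window; should match the source MySQL table.
select count(*) from ods.studentpay_chain
where updateTime > '$from' and updateTime < '$to';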