DataX in practice: syncing data from MySQL to Hive
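The job below uses mysqlreader to pull the name and sku columns from the MySQL table table_test, splitting the read by the primary key id (splitPk), and hands the rows to hdfswriter, which writes them as tab-separated, gzip-compressed text files under the Hive warehouse path of the target table. The hadoopConfig block carries the HA NameNode settings the writer needs when defaultFS points at a nameservice, and errorLimit.record = 0 makes the job fail on the first dirty record.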

{
    "job": {
        "setting": {
            "speed": {
                 "channel": 1
            },
            "errorLimit": {
                "record": 0,
                "percentage": 0.02
            }
        },
        "content": [
            {
                "reader": {
                    "name": "mysqlreader",
                    "parameter": {
                        "username": "*****",
                        "password": "*****",
                        "column": [
                            "name",
                            "sku"
                        ],
                        "splitPk": "id",
                        "connection": [
                            {
                                "table": [
                                    "table_test"
                                ],
                                "jdbcUrl": [
                                "jdbc:mysql://*****:3306/databasesName?useUnicode=true&characterEncoding=UTF-8",
                                ]
                            }
                        ]
                    }
                },

                "writer": {
                    "name": "hdfswriter",
                    "parameter": {
                        "defaultFS": "hdfs://*****/",
                        "fileType": "text",
                        "path": "/usr/hive/warehouse/tmp.db/*****/",
                        "fileName": "test",
                        "hadoopConfig":{
                                                         "dfs.nameservices": "HDFS*****",
                                                         "dfs.ha.namenodes.HDFS*****": "nn1,nn2",
                                                         "dfs.namenode.rpc-address.HDFS*****.nn1": "*****:4007",
                                                         "dfs.namenode.rpc-address.HDFS*****.nn2": "*****:4007",
                                                         "dfs.client.failover.proxy.provider.HDFS*****": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
                                                        } ,
                        "column": [
                            {
                                "name": "name",
                                "type": "STRING"
                            },
                            {
                                "name": "sku",
                                "type": "STRING"
                            }
                        ],
                        "writeMode": "append",
                        "fieldDelimiter": "\t",
                        "compress":"gzip"
                    }
                }
            }
        ]
    }
}
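hdfswriter only writes files into an existing directory; it does not create the Hive table itself, so the target table has to exist and match the writer settings: tab-delimited plain text (Hive decompresses the gzip files transparently). A minimal sketch of creating the table and launching the job; the Hive table name table_test, the job file name mysql2hive.json, and the DATAX_HOME variable are assumptions, while the tmp database comes from the tmp.db path above:

# Create a tab-delimited text table to match fileType=text / fieldDelimiter=\t.
# The table name is a hypothetical stand-in for the masked path segment above.
hive -e "CREATE TABLE IF NOT EXISTS tmp.table_test (
    name STRING,
    sku  STRING
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;"

# Launch the job with DataX's standard entry script
# (DATAX_HOME is assumed to point at the DataX installation).
python ${DATAX_HOME}/bin/datax.py mysql2hive.json

Because writeMode is append, re-running the job adds new files next to the existing ones rather than overwriting them; the alternative nonConflict mode instead makes the job fail when files with the same fileName prefix already exist.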
