HDFS下载文件报错!

public static void main(String[] args) throws Exception
    {
        System.setProperty("javax.xml.parsers.DocumentBuilderFactory",
            "com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl");
        // 初始化HDFS文件系统;
        Configuration cfg = new Configuration();
        cfg.set("hadoop.job.ugi", "hadoop,supergroup"); // "hadoop,supergroup"
        cfg.set("fs.default.name", "hdfs://master:9000"); // "hdfs://master:9000"
        cfg.set("mapred.job.tracker", "hdfs://master:9001"); // "hdfs://master:9001"
        cfg.set("dfs.http.address", "master:50070"); // "master:50070"
        
        FileSystem fs = FileSystem.get(cfg);
        String localPath = "d:\\temp";
        String hdfsPath = "/tmp/query_ret/7/attempt_201007151545_0024_r_000000_0";
        fs.copyToLocalFile(new Path(hdfsPath), new Path(localPath));
        System.out.print(123);
    }

 

 

Running: INSERT OVERWRITE DIRECTORY '/tmp/query_ret/4' select userid from chatagret where  1 = 1  order by userid
[2010-07-15 20:36:43,578][ERROR][ULThread_0][ULTJob.java86]
java.io.IOException: Cannot run program "chmod": CreateProcess error=2, ?????????
at java.lang.ProcessBuilder.start(ProcessBuilder.java:459)
at org.apache.hadoop.util.Shell.runCommand(Shell.java:149)
at org.apache.hadoop.util.Shell.run(Shell.java:134)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:286)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:354)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:337)
at org.apache.hadoop.fs.RawLocalFileSystem.execCommand(RawLocalFileSystem.java:481)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:473)
at org.apache.hadoop.fs.FilterFileSystem.setPermission(FilterFileSystem.java:280)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:372)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:479)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:460)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:367)
at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:208)
at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:199)
at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:142)
at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:1211)
at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:1192)
at com.huawei.wad.ups.platform.extinterface.entity.task.ULTJobService.generateFileHadoop(ULTJobService.java:707)
at com.huawei.wad.ups.platform.extinterface.entity.task.ULTJobService.jobProcessing(ULTJobService.java:165)
at com.huawei.wad.ups.platform.extinterface.entity.task.ULTJob.execute(ULTJob.java:79)
at com.huawei.wad.ups.platform.service.ultaskmgt.ULTaskHandler.run(ULTaskHandler.java:93)
at java.lang.Thread.run(Thread.java:619)
Caused by: java.io.IOException: CreateProcess error=2, ?????????
at java.lang.ProcessImpl.create(Native Method)
at java.lang.ProcessImpl.(ProcessImpl.java:81)
at java.lang.ProcessImpl.start(ProcessImpl.java:30)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:452)
... 22 more

 

原因还没找到,谢谢!

你可能感兴趣的:(java,apache,eclipse,linux,hadoop)