Exceptions observed in HDFS (Hadoop DataNode) logs

The log excerpts below, captured on 2009-08-26, record several distinct failure modes: interrupted block reception, block-length mismatches during updateBlock, failed block transfers (connection reset), attempts to write to already-valid blocks, block invalidation errors, reads of invalid blocks, and `du` failures on missing block files. They are preserved verbatim here for later analysis.

 

 

2009-08-26 01:17:37,798 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: Exception in receiveBlock for block blk_5223350282761282817_281131 java.nio.channels.ClosedByInterruptException
2009-08-26 01:17:37,799 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: writeBlock blk_5223350282761282817_281131 received exception java.io.IOException: Interrupted receiveBlock
2009-08-26 01:17:37,799 INFO org.apache.hadoop.hdfs.server.datanode.DataNode: PacketResponder blk_5223350282761282817_281131 1 Exception java.net.SocketException: Socket closed
        at java.net.SocketInputStream.socketRead0(Native Method)
        at java.net.SocketInputStream.read(SocketInputStream.java:129)
        at java.io.DataInputStream.readFully(DataInputStream.java:178)
        at java.io.DataInputStream.readLong(DataInputStream.java:399)
        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver$PacketResponder.run(BlockReceiver.java:853)
        at java.lang.Thread.run(Thread.java:619)

 

2009-08-26 01:17:37,827 ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(10.0.0.9:50010, storageID=DS-951226019-10.0.0.9-50010-1251209172987, infoPort=50075, ipcPort=50020):DataXceiver
java.io.IOException: Interrupted receiveBlock
        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.receiveBlock(BlockReceiver.java:569)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:357)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:103)
        at java.lang.Thread.run(Thread.java:619)

 

2009-08-26 01:17:37,840 INFO org.apache.hadoop.ipc.Server: IPC Server handler 2 on 50020, call updateBlock(blk_5223350282761282817_281131, blk_5223350282761282817_281136, false) from 10.0.0.16:54613: error: java.io.IOException: Block blk_5223350282761282817_281136 length is 1105408 does not match block file length 1560576
java.io.IOException: Block blk_5223350282761282817_281136 length is 1105408 does not match block file length 1560576
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.validateBlockMetadata(FSDataset.java:1259)
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.tryUpdateBlock(FSDataset.java:898)
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.updateBlock(FSDataset.java:810)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.updateBlock(DataNode.java:1384)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:481)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:894)

2009-08-26 01:10:48,314 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(10.0.0.8:50010, storageID=DS-1793106907-10.0.0.8-50010-1251209173521, infoPort=50075, ipcPort=50020):Failed to transfer blk_-3457816871186697703_281034 to 10.0.0.16:50010 got java.net.SocketException: Connection reset
        at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:96)
        at java.net.SocketOutputStream.write(SocketOutputStream.java:136)
        at java.io.BufferedOutputStream.write(BufferedOutputStream.java:105)
        at java.io.DataOutputStream.write(DataOutputStream.java:90)
        at org.apache.hadoop.hdfs.server.datanode.BlockSender.sendChunks(BlockSender.java:336)
        at org.apache.hadoop.hdfs.server.datanode.BlockSender.sendBlock(BlockSender.java:421)
        at org.apache.hadoop.hdfs.server.datanode.DataNode$DataTransfer.run(DataNode.java:1111)
        at java.lang.Thread.run(Thread.java:619)

 

2009-08-26 00:41:04,430 ERROR org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(10.0.0.14:50010, storageID=DS-1239116510-10.0.0.14-50010-1251209186514, infoPort=50075, ipcPort=50020):DataXceiver
org.apache.hadoop.hdfs.server.datanode.BlockAlreadyExistsException: Block blk_-8656937491228549459_162680 is valid, and cannot be written to.
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.writeToBlock(FSDataset.java:975)
        at org.apache.hadoop.hdfs.server.datanode.BlockReceiver.<init>(BlockReceiver.java:97)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.writeBlock(DataXceiver.java:259)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:103)
        at java.lang.Thread.run(Thread.java:619)

2009-08-26 00:55:10,250 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: Error processing datanode Command
java.io.IOException: Error in deleting blocks.
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.invalidate(FSDataset.java:1353)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.processCommand(DataNode.java:849)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.processCommand(DataNode.java:811)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.offerService(DataNode.java:691)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.run(DataNode.java:1144)
        at java.lang.Thread.run(Thread.java:619)

 

 

2009-08-26 01:27:44,783 WARN org.apache.hadoop.hdfs.server.datanode.DataNode: DatanodeRegistration(10.0.0.14:50010, storageID=DS-1239116510-10.0.0.14-50010-1251209186514, infoPort=50075, ipcPort=50020):Got exception while serving blk_-2856096768554983549_281092 to /10.0.0.15:
java.io.IOException: Block blk_-2856096768554983549_281092 is not valid.
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.getBlockFile(FSDataset.java:726)
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.getLength(FSDataset.java:714)
        at org.apache.hadoop.hdfs.server.datanode.BlockSender.<init>(BlockSender.java:100)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.readBlock(DataXceiver.java:172)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiver.run(DataXceiver.java:95)
        at java.lang.Thread.run(Thread.java:619)

 

2009-08-26 01:50:50,095 WARN org.apache.hadoop.util.Shell: Could not get disk usage information
org.apache.hadoop.util.Shell$ExitCodeException: du: cannot access `/mnt/DP_disk4/tao/hadoop-tao/dfs/data/current/subdir61/blk_1441044640010723064_32156.meta': No such file or directory
du: cannot access `/mnt/DP_disk4/tao/hadoop-tao/dfs/data/current/subdir61/blk_1441044640010723064': No such file or directory

        at org.apache.hadoop.util.Shell.runCommand(Shell.java:195)
        at org.apache.hadoop.util.Shell.run(Shell.java:134)
        at org.apache.hadoop.fs.DU.access$200(DU.java:29)
        at org.apache.hadoop.fs.DU$DURefreshThread.run(DU.java:84)
        at java.lang.Thread.run(Thread.java:619)

 

 

761282817_281136, datanode=10.0.0.9:50010)
org.apache.hadoop.ipc.RemoteException: java.io.IOException: Block blk_5223350282761282817_281136 length is 1105408 does not match block file length 1560576
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.validateBlockMetadata(FSDataset.java:1259)
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.tryUpdateBlock(FSDataset.java:898)
        at org.apache.hadoop.hdfs.server.datanode.FSDataset.updateBlock(FSDataset.java:810)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.updateBlock(DataNode.java:1384)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:481)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:894)

        at org.apache.hadoop.ipc.Client.call(Client.java:697)
        at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:216)
        at $Proxy5.updateBlock(Unknown Source)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.syncBlock(DataNode.java:1513)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.recoverBlock(DataNode.java:1482)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.recoverBlock(DataNode.java:1548)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:481)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:894)

 

You may also be interested in: (hadoop, hdfs)