Hadoop CDH4 HA Exception

Environment:

While the client is writing data, the Active NameNode is killed.

Analysis:

The connection to the Active NameNode is broken and the Active never returns a response. The client has to catch and handle this exception itself; adding a sleep before retrying gives the Standby NameNode time to transition to Active.
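As the RetryInvocationHandler warning in the log below shows, the framework will not retry create() on its own because the call is not idempotent, so the retry has to live in application code. Below is a minimal sketch of that handling, assuming the write goes through FileSystem.create() as in the stack trace; the helper name, retry count, and sleep interval are illustrative and not taken from the original FS.java.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateWithRetry {

    // Hypothetical helper: retry FileSystem.create() a few times, sleeping
    // between attempts so the Standby NameNode has time to become Active.
    // maxAttempts and sleepMillis are assumed values, not from the original code.
    public static FSDataOutputStream createWithRetry(Configuration conf, Path path)
            throws IOException, InterruptedException {
        final int maxAttempts = 5;
        final long sleepMillis = 10000L;
        IOException last = null;
        for (int attempt = 1; attempt <= maxAttempts; attempt++) {
            try {
                FileSystem fs = FileSystem.get(conf);
                return fs.create(path);
            } catch (IOException e) {
                // "Failed on local exception ... Error reading responses" ends up here
                last = e;
                Thread.sleep(sleepMillis); // give the failover time to complete
            }
        }
        throw last;
    }
}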

Log:

2012-08-02 10:50:28,961 WARN ipc.Client (Client.java:run(787)) - Unexpected error reading responses on connection Thread[IPC Client (591210723) connection to bigdata-4/172.16.206.209:9000 from peter,5,main]
java.lang.NullPointerException
    at org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:852)
    at org.apache.hadoop.ipc.Client$Connection.run(Client.java:781)
Exception in thread "main" java.io.IOException: Failed on local exception: java.io.IOException: Error reading responses; Host Details : local host is: "master/192.168.1.9"; destination host is: "bigdata-4":9000;
2012-08-02 10:50:28,965 WARN retry.RetryInvocationHandler (RetryInvocationHandler.java:invoke(95)) - Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked
java.io.IOException: Failed on local exception: java.io.IOException: Error reading responses; Host Details : local host is: "master/192.168.1.9"; destination host is: "bigdata-4":9000;
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:765)
    at org.apache.hadoop.ipc.Client.call(Client.java:1165)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
    at $Proxy9.create(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:187)
    at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
    at $Proxy10.create(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.<init>(DFSOutputStream.java:1250)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1269)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1063)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1021)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:232)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:75)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:806)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:787)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:686)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:675)
    at com.tcloud.hadoop2.hdfs.FS.create(FS.java:40)
    at com.tcloud.hadoop2.hdfs.FS.run(FS.java:53)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at com.tcloud.hadoop2.hdfs.FS.main(FS.java:58)
Caused by: java.io.IOException: Error reading responses
    at org.apache.hadoop.ipc.Client$Connection.run(Client.java:788)
Caused by: java.lang.NullPointerException
    at org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:852)
    at org.apache.hadoop.ipc.Client$Connection.run(Client.java:781)
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:765)
    at org.apache.hadoop.ipc.Client.call(Client.java:1165)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
    at $Proxy9.create(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:187)
    at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
    at $Proxy10.create(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.<init>(DFSOutputStream.java:1250)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1269)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1063)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1021)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:232)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:75)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:806)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:787)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:686)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:675)
    at com.tcloud.hadoop2.hdfs.FS.create(FS.java:40)
    at com.tcloud.hadoop2.hdfs.FS.run(FS.java:53)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at com.tcloud.hadoop2.hdfs.FS.main(FS.java:58)
Caused by: java.io.IOException: Error reading responses
    at org.apache.hadoop.ipc.Client$Connection.run(Client.java:788)
Caused by: java.lang.NullPointerException
    at org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:852)
    at org.apache.hadoop.ipc.Client$Connection.run(Client.java:781)
