com.google.protobuf.InvalidProtocolBufferException: Message missing required fields: callId, status

The error below is caused by a mismatch between the version of the Hadoop Java API (client jars) used by the application and the version of Hadoop running on the cluster. Hadoop serializes its RPC messages with Protocol Buffers, and different Hadoop versions use different message definitions, so the client fails while deserializing the ResourceManager's response. The fix is to make the client-side Hadoop dependencies match the version deployed on the cluster.
1620784 [DefaultQuartzScheduler_Worker-0] INFO org.apache.hadoop.yarn.service.AbstractService - Service:org.apache.hadoop.yarn.client.YarnClientImpl is started.
java.lang.reflect.UndeclaredThrowableException
	at org.apache.hadoop.yarn.exceptions.impl.pb.YarnRemoteExceptionPBImpl.unwrapAndThrowException(YarnRemoteExceptionPBImpl.java:135)
	at org.apache.hadoop.yarn.api.impl.pb.client.ClientRMProtocolPBClientImpl.getClusterMetrics(ClientRMProtocolPBClientImpl.java:141)
	at org.apache.hadoop.yarn.client.YarnClientImpl.getYarnClusterMetrics(YarnClientImpl.java:163)
	at com.bigdata.bi.unicorn.core.common.yarn.GetYarnResourceTools.getAllResource(GetYarnResourceTools.java:43)
	at com.bigdata.bi.unicorn.core.common.yarn.GetYarnResourceTools.getResourceUsedRatio(GetYarnResourceTools.java:57)
	at com.bigdata.bi.unicorn.core.schedule.executor.WorkFlowExector.hasResourceByRatio(WorkFlowExector.java:80)
	at com.bigdata.bi.unicorn.core.schedule.executor.WorkFlowExector.hasResource(WorkFlowExector.java:64)
	at com.bigdata.bi.unicorn.core.schedule.executor.WorkFlowExector.executeWorkFlow(WorkFlowExector.java:40)
	at com.bigdata.bi.unicorn.core.quartz.schedule.WorkFlowScheduleJob.execute(WorkFlowScheduleJob.java:39)
	at org.quartz.core.JobRunShell.run(JobRunShell.java:203)
	at org.quartz.simpl.SimpleThreadPool$WorkerThread.run(SimpleThreadPool.java:520)
Caused by: com.google.protobuf.ServiceException: java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Message missing required fields: callId, status; Host Details : local host is: "yangy-PC/10.0.4.190"; destination host is: "node01":8032; 
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:212)
	at com.sun.proxy.$Proxy18.getClusterMetrics(Unknown Source)
	at org.apache.hadoop.yarn.api.impl.pb.client.ClientRMProtocolPBClientImpl.getClusterMetrics(ClientRMProtocolPBClientImpl.java:138)
	... 9 more
Caused by: java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Message missing required fields: callId, status; Host Details : local host is: "yangy-PC/10.0.4.190"; destination host is: "node01":8032; 
	at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)
	at org.apache.hadoop.ipc.Client.call(Client.java:1229)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:202)
	... 11 more
Caused by: com.google.protobuf.InvalidProtocolBufferException: Message missing required fields: callId, status
	at com.google.protobuf.UninitializedMessageException.asInvalidProtocolBufferException(UninitializedMessageException.java:81)
	at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto$Builder.buildParsed(RpcPayloadHeaderProtos.java:1094)
	at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto$Builder.access$1300(RpcPayloadHeaderProtos.java:1028)
	at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto.parseDelimitedFrom(RpcPayloadHeaderProtos.java:986)
	at org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:938)
	at org.apache.hadoop.ipc.Client$Connection.run(Client.java:836)
1671698 [DefaultQuartzScheduler_Worker-0] ERROR com.bigdata.bi.unicorn.core.schedule.executor.WorkFlowExector - -------cluster is error , can't get resource info ---
	... (the same stack trace as above is printed again) ...
