报错:io.netty.buffer.PooledByteBufAllocator.metric()Lio/netty/buffer/PooledByteBufAllocatorMetric;

运行异常:

Exception in thread "main" java.lang.NoSuchMethodError: io.netty.buffer.PooledByteBufAllocator.metric()Lio/netty/buffer/PooledByteBufAllocatorMetric;
at org.apache.spark.network.util.NettyMemoryMetrics.registerMetrics(NettyMemoryMetrics.java:80)
at org.apache.spark.network.util.NettyMemoryMetrics.(NettyMemoryMetrics.java:76)
at org.apache.spark.network.client.TransportClientFactory.(TransportClientFactory.java:109)
at org.apache.spark.network.TransportContext.createClientFactory(TransportContext.java:99)
at org.apache.spark.rpc.netty.NettyRpcEnv.(NettyRpcEnv.scala:71)
at org.apache.spark.rpc.netty.NettyRpcEnvFactory.create(NettyRpcEnv.scala:461)
at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:57)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:249)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:175)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:256)
at org.apache.spark.SparkContext.&lt;init&gt;(SparkContext.scala:423)
at com.cyp.business.TopN10$.delayedEndpoint$com$cyp$business$TopN10$1(TopN10.scala:10)
at com.cyp.business.TopN10$delayedInit$body.apply(TopN10.scala:6)
at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:383)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at com.cyp.business.TopN10$.main(TopN10.scala:6)
at com.cyp.business.TopN10.main(TopN10.scala)

环境:Spark 2.3.0,HBase 1.2.0

Spark2.3.0升级Netty to 4.1.17

[SPARK-19810] Remove support for Scala 2.10
[SPARK-22324] Upgrade Arrow to 0.8.0 and Netty to 4.1.17
Spark2.3.0依赖的netty-all-4.1.17.Final.jar 与 hbase1.2.0依赖的netty-all-4.0.23.Final.jar 冲突

解决:

pom.xml 中统一 netty-all 版本:

&lt;dependency&gt;
    &lt;groupId&gt;io.netty&lt;/groupId&gt;
    &lt;artifactId&gt;netty-all&lt;/artifactId&gt;
    &lt;version&gt;4.1.17.Final&lt;/version&gt;
&lt;/dependency&gt;

你可能感兴趣的:(Spark,HBase)