java.lang.NoSuchMethodError: org.apache.hadoop.mapred.TaskID.&lt;init&gt;

java.lang.NoSuchMethodError: org.apache.hadoop.mapred.TaskID.<init>(Lorg/apache/hadoop/mapreduce/JobID;Lorg/apache/hadoop/mapreduce/TaskType;I)V
    at org.apache.spark.rdd.HadoopRDD$.addLocalConfiguration(HadoopRDD.scala:384)
    at org.apache.spark.rdd.HadoopRDD$$anon$1.<init>(HadoopRDD.scala:246)
    at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:211)
    at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:102)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:957)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:948)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:888)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:948)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:694)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:957)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:948)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:888)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:948)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:694)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:285)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)

19/01/24 10:09:24 WARN scheduler.TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.NoSuchMethodError: org.apache.hadoop.mapred.TaskID.<init>(Lorg/apache/hadoop/mapreduce/JobID;Lorg/apache/hadoop/mapreduce/TaskType;I)V
    at org.apache.spark.rdd.HadoopRDD$.addLocalConfiguration(HadoopRDD.scala:384)
    at org.apache.spark.rdd.HadoopRDD$$anon$1.<init>(HadoopRDD.scala:246)
    ...
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336)
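Before touching dependencies, it helps to confirm which jar the conflicting class is actually loaded from. Below is a minimal diagnostic sketch (not from the original post; the object name WhichJar is arbitrary) that can be pasted into spark-shell or run with the same classpath as the failing job:

// Diagnostic sketch: print which jar provides org.apache.hadoop.mapred.TaskID
// and list the constructors it actually exposes.
object WhichJar {
  def main(args: Array[String]): Unit = {
    val cls = Class.forName("org.apache.hadoop.mapred.TaskID")
    // getCodeSource can be null for bootstrap classes, so guard with Option
    val location = Option(cls.getProtectionDomain.getCodeSource).map(_.getLocation)
    println(s"TaskID loaded from: ${location.getOrElse("unknown")}")
    // If the (JobID, TaskType, int) constructor is missing from this list,
    // an older Hadoop jar is winning on the classpath.
    cls.getConstructors.foreach(println)
  }
}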
Solution:
The root cause is a Hadoop jar conflict: an outdated Hadoop jar on the classpath (typically an old hadoop-core 1.x artifact pulled in transitively) shadows the Hadoop 2.x classes Spark was built against, and that older org.apache.hadoop.mapred.TaskID has no TaskID(JobID, TaskType, int) constructor. Align every Hadoop dependency to the single version that matches your Spark build and cluster, and remove or exclude the stale jar.
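One way to enforce this in an sbt project is to pin all Hadoop artifacts to one version and exclude legacy transitive jars. The snippet below is an illustrative sketch, not the post's original build file; the version numbers (2.1.1 / 2.7.3) are assumptions and should be replaced with the versions your cluster actually runs.

// build.sbt -- illustrative sketch; versions are placeholders
val sparkVersion  = "2.1.1"   // assumed; match your Spark distribution
val hadoopVersion = "2.7.3"   // assumed; match your cluster's Hadoop

libraryDependencies ++= Seq(
  // Mark Spark as provided so the cluster's own jars are used at runtime.
  "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
  // Pin any direct Hadoop dependency to the cluster version instead of
  // letting an old hadoop jar be resolved transitively.
  "org.apache.hadoop" % "hadoop-client" % hadoopVersion % "provided"
)

// If another dependency drags in the legacy hadoop-core artifact, exclude it globally.
excludeDependencies += ExclusionRule("org.apache.hadoop", "hadoop-core")

The same idea applies to Maven projects: inspect the resolved tree with mvn dependency:tree, add an exclusion for the module that drags in the stale Hadoop jar, or manage the Hadoop version centrally in dependencyManagement.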
