For more code, see: https://github.com/xubo245/SparkLearning
Error output:
hadoop@Master:~/cloud/test/sh_spark_xubo/SparkPi$ scalac org/apache/spark/examples/SparkPi.scala
error: scala.tools.nsc.MissingRequirementError: object scala.reflect.Manifest not found.
at scala.tools.nsc.symtab.Definitions$definitions$.getModuleOrClass(Definitions.scala:655)
at scala.tools.nsc.symtab.Definitions$definitions$.getModule(Definitions.scala:605)
at scala.tools.nsc.symtab.Definitions$definitions$.FullManifestModule(Definitions.scala:347)
at scala.tools.nsc.typechecker.Implicits$ImplicitSearch.findSingletonManifest$1(Implicits.scala:1071)
at scala.tools.nsc.typechecker.Implicits$ImplicitSearch.mot$1(Implicits.scala:1091)
at scala.tools.nsc.typechecker.Implicits$ImplicitSearch.manifestOfType(Implicits.scala:1137)
at scala.tools.nsc.typechecker.Implicits$ImplicitSearch.implicitManifestOrOfExpectedType(Implicits.scala:1147)
at scala.tools.nsc.typechecker.Implicits$ImplicitSearch.bestImplicit(Implicits.scala:1181)
at scala.tools.nsc.typechecker.Implicits$class.inferImplicit(Implicits.scala:67)
at scala.tools.nsc.Global$analyzer$.inferImplicit(Global.scala:351)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$applyImplicitArgs$1.apply(Typers.scala:121)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$applyImplicitArgs$1.apply(Typers.scala:116)
at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:59)
at scala.collection.immutable.List.foreach(List.scala:76)
at scala.tools.nsc.typechecker.Typers$Typer.applyImplicitArgs(Typers.scala:116)
at scala.tools.nsc.typechecker.Typers$Typer.adapt(Typers.scala:761)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4282)
at scala.tools.nsc.typechecker.Typers$Typer.typedQualifier(Typers.scala:4352)
at scala.tools.nsc.typechecker.Typers$Typer.typedQualifier(Typers.scala:4358)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:4146)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$typedApply$1$1.apply(Typers.scala:3355)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$typedApply$1$1.apply(Typers.scala:3355)
at scala.tools.nsc.typechecker.Typers$Typer.silent(Typers.scala:624)
at scala.tools.nsc.typechecker.Typers$Typer.typedApply$1(Typers.scala:3355)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:4106)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.typedQualifier(Typers.scala:4352)
at scala.tools.nsc.typechecker.Typers$Typer.typedQualifier(Typers.scala:4358)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:4146)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$typedApply$1$1.apply(Typers.scala:3355)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$typedApply$1$1.apply(Typers.scala:3355)
at scala.tools.nsc.typechecker.Typers$Typer.silent(Typers.scala:624)
at scala.tools.nsc.typechecker.Typers$Typer.typedApply$1(Typers.scala:3355)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:4106)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4346)
at scala.tools.nsc.typechecker.Typers$Typer.computeType(Typers.scala:4423)
at scala.tools.nsc.typechecker.Namers$Namer.typeSig(Namers.scala:1207)
at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$typeCompleter$1.apply(Namers.scala:551)
at scala.tools.nsc.typechecker.Namers$Namer$$anonfun$typeCompleter$1.apply(Namers.scala:549)
at scala.tools.nsc.typechecker.Namers$$anon$1.complete(Namers.scala:1385)
at scala.tools.nsc.typechecker.Namers$$anon$1.complete(Namers.scala:1381)
at scala.tools.nsc.symtab.Symbols$Symbol.info(Symbols.scala:730)
at scala.tools.nsc.symtab.Symbols$Symbol.initialize(Symbols.scala:842)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:3899)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.typedStat$1(Typers.scala:2100)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$24.apply(Typers.scala:2184)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$24.apply(Typers.scala:2184)
at scala.collection.immutable.List.loop$1(List.scala:148)
at scala.collection.immutable.List.mapConserve(List.scala:164)
at scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:2184)
at scala.tools.nsc.typechecker.Typers$Typer.typedBlock(Typers.scala:1919)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:3953)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.transformedOrTyped(Typers.scala:4430)
at scala.tools.nsc.typechecker.Typers$Typer.typedDefDef(Typers.scala:1760)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:3921)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.typedStat$1(Typers.scala:2100)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$24.apply(Typers.scala:2184)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$24.apply(Typers.scala:2184)
at scala.collection.immutable.List.loop$1(List.scala:148)
at scala.collection.immutable.List.mapConserve(List.scala:164)
at scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:2184)
at scala.tools.nsc.typechecker.Typers$Typer.typedTemplate(Typers.scala:1512)
at scala.tools.nsc.typechecker.Typers$Typer.typedModuleDef(Typers.scala:1320)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:3915)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.typedStat$1(Typers.scala:2100)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$24.apply(Typers.scala:2184)
at scala.tools.nsc.typechecker.Typers$Typer$$anonfun$24.apply(Typers.scala:2184)
at scala.collection.immutable.List.loop$1(List.scala:148)
at scala.collection.immutable.List.mapConserve(List.scala:164)
at scala.tools.nsc.typechecker.Typers$Typer.typedStats(Typers.scala:2184)
at scala.tools.nsc.typechecker.Typers$Typer.typed1(Typers.scala:3908)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4273)
at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:4333)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.apply(Analyzer.scala:94)
at scala.tools.nsc.Global$GlobalPhase.applyPhase(Global.scala:329)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:86)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3$$anonfun$run$1.apply(Analyzer.scala:86)
at scala.collection.Iterator$class.foreach(Iterator.scala:772)
at scala.collection.mutable.ListBuffer$$anon$1.foreach(ListBuffer.scala:318)
at scala.tools.nsc.typechecker.Analyzer$typerFactory$$anon$3.run(Analyzer.scala:86)
at scala.tools.nsc.Global$Run.compileSources(Global.scala:953)
at scala.tools.nsc.Global$Run.compile(Global.scala:1041)
at scala.tools.nsc.Main$.process(Main.scala:106)
at scala.tools.nsc.Main$.main(Main.scala:123)
at scala.tools.nsc.Main.main(Main.scala)
error: fatal error: object scala.reflect.Manifest not found.
CLASSPATH in use at the time:
export CLASSPATH=$CLASSPATH:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$HIVE_HOME/lib:$CLOUD_PREFIX/mahout/lib:$SPARK_HOME/assembly/target/scala-2.10:$REDIS_JEDIS_CLASS:$JAVA_HOME/jre/lib/ext:$SPARK_HOME/assembly/target/scala-2.10/spark-assembly-1.5.2-hadoop2.6.0.jar
Location: /home/hadoop/cloud/test/sh_spark_xubo/SparkPi/test
Solution (the error comes from a Scala version mismatch: the spark-assembly-1.5.2 jar on the CLASSPATH is built for Scala 2.10, while the scalac installed from the Ubuntu repository is 2.9.3):
1. Remove the scala 2.9.3 package that was installed from the Ubuntu 14.04 repositories, then download and install Scala 2.10.5 from the official website (there are many tutorials online); a sketch of the commands is shown below.
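A hedged sketch of the commands for step 1, assuming a tarball install under ~/cloud (the download URL, install directory, and PATH setup are assumptions; adjust to your environment):
sudo apt-get remove scala                                        # remove the 2.9.3 package from the Ubuntu repository
wget http://www.scala-lang.org/files/archive/scala-2.10.5.tgz    # assumed download location
tar -zxvf scala-2.10.5.tgz -C ~/cloud/
export SCALA_HOME=~/cloud/scala-2.10.5                           # e.g. add these two lines to ~/.bashrc
export PATH=$SCALA_HOME/bin:$PATH
scala -version                                                   # should now report 2.10.5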
2. Modify SparkPi.scala (the package declaration is commented out so the class compiles at the top level and the resulting .class files can be packaged directly):
hadoop@Master:~/cloud/test/sh_spark_xubo/SparkPi/test$ vi SparkPi.scala
// scalastyle:off println
//package org.apache.spark.examples

import scala.math.random
import org.apache.spark._

/** Computes an approximation to pi */
object SparkPi {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("Spark Pi")
    val spark = new SparkContext(conf)
    val slices = if (args.length > 0) args(0).toInt else 2
    val n = math.min(100000L * slices, Int.MaxValue).toInt // avoid overflow
    val count = spark.parallelize(1 until n, slices).map { i =>
      val x = random * 2 - 1
      val y = random * 2 - 1
      if (x*x + y*y < 1) 1 else 0
    }.reduce(_ + _)
    println("Pi is roughly " + 4.0 * count / n)
    spark.stop()
  }
}
// scalastyle:on println
Then compile it:
scalac SparkPi.scala
hadoop@Master:~/cloud/test/sh_spark_xubo/SparkPi/test$ ll
total 44
drwxrwxr-x 3 hadoop hadoop 4096 1月 27 22:53 ./
drwxrwxr-x 4 hadoop hadoop 4096 1月 27 22:41 ../
drwxrwxr-x 3 hadoop hadoop 4096 1月 27 22:41 org/
-rw-rw-r-- 1 hadoop hadoop 1194 1月 27 22:48 SparkPi$$anonfun$1.class
-rw-rw-r-- 1 hadoop hadoop 1050 1月 27 22:48 SparkPi$$anonfun$2.class
-rw-rw-r-- 1 hadoop hadoop 654 1月 27 22:48 SparkPi.class
-rw-rw-r-- 1 hadoop hadoop 2430 1月 27 22:48 SparkPi$.class
-rw-rw-r-- 1 hadoop hadoop 4924 1月 27 22:49 SparkPi.jar
-rw-r--r-- 1 hadoop hadoop 1509 1月 27 22:48 SparkPi.scala
-rwxrwxr-x 1 hadoop hadoop 181 1月 27 22:48 submitJob.sh*
hadoop@Master:~/cloud/test/sh_spark_xubo/SparkPi/test$ jar -cvf SparkPi.jar SparkPi*
added manifest
adding: SparkPi$$anonfun$1.class(in = 1194) (out= 723)(deflated 39%)
adding: SparkPi$$anonfun$2.class(in = 1050) (out= 601)(deflated 42%)
adding: SparkPi.class(in = 654) (out= 515)(deflated 21%)
adding: SparkPi$.class(in = 2430) (out= 1284)(deflated 47%)
adding: SparkPi.scala(in = 1509) (out= 824)(deflated 45%)
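submitJob.sh itself is not listed above; a minimal sketch of what such a script might contain, assuming a standalone master at spark://Master:7077 (the master URL is an assumption; with no argument, slices defaults to 2):
#!/usr/bin/env bash
# Hypothetical submitJob.sh: submit the locally built SparkPi.jar with spark-submit.
$SPARK_HOME/bin/spark-submit \
  --class SparkPi \
  --master spark://Master:7077 \
  SparkPi.jar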
Run result:
hadoop@Master:~/cloud/test/sh_spark_xubo/SparkPi/test$ ./submitJob.sh
Pi is roughly 3.1374