Running the spark-submit script fails with the following error:

java.lang.IllegalArgumentException: System memory 101384192 must be at least 4.718592E8. Please use a larger heap size.
	at org.apache.spark.memory.UnifiedMemoryManager$.getMaxMemory(UnifiedMemoryManager.scala:193)
	at org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:175)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:354)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:457)
	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:59)
	at cn.spark.study.core.HDFSFile.main(HDFSFile.java:15)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Solution:

The message means the JVM running the driver has only about 96 MB of heap (101384192 bytes), while Spark's UnifiedMemoryManager requires at least 471859200 bytes (about 450 MB). The spark.testing.memory property overrides the detected system memory, so add .set("spark.testing.memory", "2147480000") to the SparkConf in the Java project:

	SparkConf conf = new SparkConf()
		.setAppName("HDFSFile")
		.set("spark.testing.memory","2147480000");
