cmd中输入spark-shell
Microsoft Windows [Version 10.0.17134.885]
© 2018 Microsoft Corporation. All rights reserved.
C:\WINDOWS\system32>spark-shell
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
19/08/01 17:03:01 ERROR SparkContext: Error initializing SparkContext.
org.apache.spark.SparkException: Invalid Spark URL: spark://[email protected]:52455
at org.apache.spark.rpc.RpcEndpointAddress$.apply(RpcEndpointAddress.scala:66)
at org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:134)
at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:101)
at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:109)
at org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:32)
at org.apache.spark.executor.Executor.<init>(Executor.scala:184)
at org.apache.spark.scheduler.local.LocalEndpoint.<init>(LocalSchedulerBackend.scala:59)
at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:127)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:183)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:935)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:43)
at $line3.$read.<init>(<console>:45)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/08/01 17:03:01 ERROR Utils: Uncaught exception in thread main
java.lang.NullPointerException
at org.apache.spark.scheduler.local.LocalSchedulerBackend.org$apache$spark$scheduler$local$LocalSchedulerBackend$$stop(LocalSchedulerBackend.scala:162)
at org.apache.spark.scheduler.local.LocalSchedulerBackend.stop(LocalSchedulerBackend.scala:138)
at org.apache.spark.scheduler.TaskSchedulerImpl.stop(TaskSchedulerImpl.scala:653)
at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:2042)
at org.apache.spark.SparkContext$$anonfun$stop$6.apply$mcV$sp(SparkContext.scala:1949)
at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1340)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1948)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:585)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:935)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:43)
at $line3.$read.<init>(<console>:45)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/08/01 17:03:01 WARN MetricsSystem: Stopping a MetricsSystem that is not running
19/08/01 17:03:01 ERROR Main: Failed to initialize Spark session.
org.apache.spark.SparkException: Invalid Spark URL: spark://[email protected]:52455
at org.apache.spark.rpc.RpcEndpointAddress$.apply(RpcEndpointAddress.scala:66)
at org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:134)
at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:101)
at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:109)
at org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:32)
at org.apache.spark.executor.Executor.<init>(Executor.scala:184)
at org.apache.spark.scheduler.local.LocalEndpoint.<init>(LocalSchedulerBackend.scala:59)
at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:127)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:183)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:935)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:43)
at $line3.$read.<init>(<console>:45)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)