root@sdb-docker01:~# spark-shell
Traceback (most recent call last):
  File "/usr/bin/hdp-select", line 374, in <module>
    listPackages(getPackages("all"))
  File "/usr/bin/hdp-select", line 217, in listPackages
    os.path.basename(os.path.dirname(os.readlink(linkname))))
OSError: [Errno 22] Invalid argument: '/usr/hdp/current/falcon-client'
ls: cannot access /usr/hdp//hadoop/lib: No such file or directory
hdp.version is not set while running Spark under HDP, please set through HDP_VERSION in spark-env.sh or add a java-opts file in conf with -Dhdp.version=xxx
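
Note: the hdp-select traceback above comes from a dangling /usr/hdp/current/falcon-client symlink, and the empty version segment in /usr/hdp//hadoop/lib follows from hdp.version being unset. A minimal sketch of the two suggested fixes, assuming the version 2.4.2.0-258 that the spark-assembly jar paths below report and the standard HDP layout (the symlink target and conf paths are assumptions; verify with ls -l /usr/hdp/current/):

# Repair the dangling symlink that trips hdp-select (target is an assumption):
ln -sfn /usr/hdp/2.4.2.0-258/falcon /usr/hdp/current/falcon-client

# Set the HDP version for Spark, as the error message suggests:
echo 'export HDP_VERSION=2.4.2.0-258' >> /usr/hdp/current/spark-client/conf/spark-env.sh
# ...or equivalently via a java-opts file in the Spark conf directory:
echo '-Dhdp.version=2.4.2.0-258' > /usr/hdp/current/spark-client/conf/java-opts
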
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-examples-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
log4j:ERROR Could not instantiate class [com.microsoft.log4jappender.EtwAppender].
java.lang.ClassNotFoundException: com.microsoft.log4jappender.EtwAppender
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:195)
    at org.apache.log4j.helpers.Loader.loadClass(Loader.java:198)
    at org.apache.log4j.helpers.OptionConverter.instantiateByClassName(OptionConverter.java:327)
    at org.apache.log4j.helpers.OptionConverter.instantiateByKey(OptionConverter.java:124)
    at org.apache.log4j.PropertyConfigurator.parseAppender(PropertyConfigurator.java:785)
    at org.apache.log4j.PropertyConfigurator.parseCategory(PropertyConfigurator.java:768)
    at org.apache.log4j.PropertyConfigurator.configureRootCategory(PropertyConfigurator.java:648)
    at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:514)
    at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:580)
    at org.apache.log4j.helpers.OptionConverter.selectAndConfigure(OptionConverter.java:526)
    at org.apache.log4j.LogManager.<clinit>(LogManager.java:127)
    at org.slf4j.impl.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:64)
    at org.slf4j.LoggerFactory.getLogger(LoggerFactory.java:285)
    at org.apache.commons.logging.impl.SLF4JLogFactory.getInstance(SLF4JLogFactory.java:155)
    at org.apache.commons.logging.impl.SLF4JLogFactory.getInstance(SLF4JLogFactory.java:132)
    at org.apache.commons.logging.LogFactory.getLog(LogFactory.java:275)
    at org.apache.hadoop.util.ShutdownHookManager.<clinit>(ShutdownHookManager.java:44)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:278)
    at org.apache.spark.util.Utils$.classForName(Utils.scala:174)
    at org.apache.spark.util.SparkShutdownHookManager$$anonfun$4.apply(ShutdownHookManager.scala:220)
    at org.apache.spark.util.SparkShutdownHookManager$$anonfun$4.apply(ShutdownHookManager.scala:220)
    at scala.util.Try$.apply(Try.scala:161)
    at org.apache.spark.util.SparkShutdownHookManager.install(ShutdownHookManager.scala:220)
    at org.apache.spark.util.ShutdownHookManager$.shutdownHooks$lzycompute(ShutdownHookManager.scala:50)
    at org.apache.spark.util.ShutdownHookManager$.shutdownHooks(ShutdownHookManager.scala:48)
    at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:191)
    at org.apache.spark.util.ShutdownHookManager$.<init>(ShutdownHookManager.scala:58)
    at org.apache.spark.util.ShutdownHookManager$.<clinit>(ShutdownHookManager.scala)
    at org.apache.spark.util.Utils$.createTempDir(Utils.scala:239)
    at org.apache.spark.repl.SparkIMain.outputDir$lzycompute(SparkIMain.scala:102)
    at org.apache.spark.repl.SparkIMain.outputDir(SparkIMain.scala:99)
    at org.apache.spark.repl.SparkIMain.<init>(SparkIMain.scala:115)
    at org.apache.spark.repl.SparkILoop$SparkILoopInterpreter.<init>(SparkILoop.scala:187)
    at org.apache.spark.repl.SparkILoop.createInterpreter(SparkILoop.scala:217)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:949)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
log4j:ERROR Could not instantiate appender named "ETW".
log4j:ERROR Could not instantiate class [com.microsoft.log4jappender.AnonymizeLogAppender].
java.lang.ClassNotFoundException: com.microsoft.log4jappender.AnonymizeLogAppender
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:195)
    at org.apache.log4j.helpers.Loader.loadClass(Loader.java:198)
    at org.apache.log4j.helpers.OptionConverter.instantiateByClassName(OptionConverter.java:327)
    at org.apache.log4j.helpers.OptionConverter.instantiateByKey(OptionConverter.java:124)
    at org.apache.log4j.PropertyConfigurator.parseAppender(PropertyConfigurator.java:785)
    at org.apache.log4j.PropertyConfigurator.parseCategory(PropertyConfigurator.java:768)
    at org.apache.log4j.PropertyConfigurator.configureRootCategory(PropertyConfigurator.java:648)
    at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:514)
    at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:580)
    at org.apache.log4j.helpers.OptionConverter.selectAndConfigure(OptionConverter.java:526)
    at org.apache.log4j.LogManager.<clinit>(LogManager.java:127)
    at org.slf4j.impl.Log4jLoggerFactory.getLogger(Log4jLoggerFactory.java:64)
    at org.slf4j.LoggerFactory.getLogger(LoggerFactory.java:285)
    at org.apache.commons.logging.impl.SLF4JLogFactory.getInstance(SLF4JLogFactory.java:155)
    at org.apache.commons.logging.impl.SLF4JLogFactory.getInstance(SLF4JLogFactory.java:132)
    at org.apache.commons.logging.LogFactory.getLog(LogFactory.java:275)
    at org.apache.hadoop.util.ShutdownHookManager.<clinit>(ShutdownHookManager.java:44)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:278)
    at org.apache.spark.util.Utils$.classForName(Utils.scala:174)
    at org.apache.spark.util.SparkShutdownHookManager$$anonfun$4.apply(ShutdownHookManager.scala:220)
    at org.apache.spark.util.SparkShutdownHookManager$$anonfun$4.apply(ShutdownHookManager.scala:220)
    at scala.util.Try$.apply(Try.scala:161)
    at org.apache.spark.util.SparkShutdownHookManager.install(ShutdownHookManager.scala:220)
    at org.apache.spark.util.ShutdownHookManager$.shutdownHooks$lzycompute(ShutdownHookManager.scala:50)
    at org.apache.spark.util.ShutdownHookManager$.shutdownHooks(ShutdownHookManager.scala:48)
    at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:191)
    at org.apache.spark.util.ShutdownHookManager$.<init>(ShutdownHookManager.scala:58)
    at org.apache.spark.util.ShutdownHookManager$.<clinit>(ShutdownHookManager.scala)
    at org.apache.spark.util.Utils$.createTempDir(Utils.scala:239)
    at org.apache.spark.repl.SparkIMain.outputDir$lzycompute(SparkIMain.scala:102)
    at org.apache.spark.repl.SparkIMain.outputDir(SparkIMain.scala:99)
    at org.apache.spark.repl.SparkIMain.<init>(SparkIMain.scala:115)
    at org.apache.spark.repl.SparkILoop$SparkILoopInterpreter.<init>(SparkILoop.scala:187)
    at org.apache.spark.repl.SparkILoop.createInterpreter(SparkILoop.scala:217)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:949)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
log4j:ERROR Could not instantiate appender named "Anonymizer".
16/07/07 20:19:21 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/07/07 20:19:22 INFO SecurityManager: Changing view acls to: root
16/07/07 20:19:22 INFO SecurityManager: Changing modify acls to: root
16/07/07 20:19:22 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); users with modify permissions: Set(root)
16/07/07 20:19:23 INFO HttpServer: Starting HTTP Server
16/07/07 20:19:23 INFO Server: jetty-8.y.z-SNAPSHOT
16/07/07 20:19:23 INFO AbstractConnector: Started SocketConnector@0.0.0.0:39539
16/07/07 20:19:23 INFO Utils: Successfully started service 'HTTP class server' on port 39539.
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.6.1
      /_/

Using Scala version 2.10.5 (OpenJDK 64-Bit Server VM, Java 1.7.0_101)
Type in expressions to have them evaluated.
Type :help for more information.
16/07/07 20:19:34 INFO SparkContext: Running Spark version 1.6.1
16/07/07 20:19:34 INFO SecurityManager: Changing view acls to: root
16/07/07 20:19:34 INFO SecurityManager: Changing modify acls to: root
16/07/07 20:19:34 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); users with modify permissions: Set(root)
16/07/07 20:19:34 INFO Utils: Successfully started service 'sparkDriver' on port 50340.
16/07/07 20:19:35 INFO Slf4jLogger: Slf4jLogger started
16/07/07 20:19:35 INFO Remoting: Starting remoting
16/07/07 20:19:36 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@10.8.17.9:33041]
16/07/07 20:19:36 INFO Utils: Successfully started service 'sparkDriverActorSystem' on port 33041.
16/07/07 20:19:36 INFO SparkEnv: Registering MapOutputTracker
16/07/07 20:19:36 INFO SparkEnv: Registering BlockManagerMaster
16/07/07 20:19:36 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-d65f9892-5008-461e-9e21-04349082ff33
16/07/07 20:19:36 INFO MemoryStore: MemoryStore started with capacity 511.5 MB
16/07/07 20:19:36 INFO SparkEnv: Registering OutputCommitCoordinator
16/07/07 20:19:37 INFO Server: jetty-8.y.z-SNAPSHOT
16/07/07 20:19:37 INFO AbstractConnector: Started SelectChannelConnector@0.0.0.0:4040
16/07/07 20:19:37 INFO Utils: Successfully started service 'SparkUI' on port 4040.
16/07/07 20:19:37 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://10.8.17.9:4040
spark.yarn.driver.memoryOverhead is set but does not apply in client mode.
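
Note: the warning above fires because spark.yarn.driver.memoryOverhead only applies in yarn-cluster mode; in yarn-client mode the corresponding setting is spark.yarn.am.memoryOverhead. A minimal sketch (the 512 MB value is an illustrative assumption):

spark-shell --conf spark.yarn.am.memoryOverhead=512
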
16/07/07 20:19:38 INFO TimelineClientImpl: Timeline service address: http://hn0-haspar.pbed5jwkixfebdxr1by2u30lzf.cx.internal.cloudapp.net:8188/ws/v1/timeline/
16/07/07 20:19:39 INFO AbstractService: Service org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl failed in state STARTED; cause: java.io.IOException: No FileSystem for scheme: wasb
java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:355)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceStart(TimelineClientImpl.java:378)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceStart(YarnClientImpl.java:194)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:127)
    at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
    at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:530)
    at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
    at $line3.$read$$iwC$$iwC.<init>(<console>:15)
    at $line3.$read$$iwC.<init>(<console>:24)
    at $line3.$read.<init>(<console>:26)
    at $line3.$read$.<init>(<console>:30)
    at $line3.$read$.<clinit>(<console>)
    at $line3.$eval$.<init>(<console>:7)
    at $line3.$eval$.<clinit>(<console>)
    at $line3.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
    at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
    at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
    at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/07/07 20:19:39 INFO AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl failed in state STARTED; cause: org.apache.hadoop.service.ServiceStateException: java.io.IOException: No FileSystem for scheme: wasb
org.apache.hadoop.service.ServiceStateException: java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.service.ServiceStateException.convert(ServiceStateException.java:59)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:204)
    at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceStart(YarnClientImpl.java:194)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:127)
    at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
    at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:530)
    at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
    at $line3.$read$$iwC$$iwC.<init>(<console>:15)
    at $line3.$read$$iwC.<init>(<console>:24)
    at $line3.$read.<init>(<console>:26)
    at $line3.$read$.<init>(<console>:30)
    at $line3.$read$.<clinit>(<console>)
    at $line3.$eval$.<init>(<console>:7)
    at $line3.$eval$.<clinit>(<console>)
    at $line3.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
    at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
    at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
    at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:355)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceStart(TimelineClientImpl.java:378)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    ... 54 more
16/07/07 20:19:39 ERROR SparkContext: Error initializing SparkContext.
org.apache.hadoop.service.ServiceStateException: java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.service.ServiceStateException.convert(ServiceStateException.java:59)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:204)
    at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceStart(YarnClientImpl.java:194)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:127)
    at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
    at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:530)
    at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
    at $line3.$read$$iwC$$iwC.<init>(<console>:15)
    at $line3.$read$$iwC.<init>(<console>:24)
    at $line3.$read.<init>(<console>:26)
    at $line3.$read$.<init>(<console>:30)
    at $line3.$read$.<clinit>(<console>)
    at $line3.$eval$.<init>(<console>:7)
    at $line3.$eval$.<clinit>(<console>)
    at $line3.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
    at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
    at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
    at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:355)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceStart(TimelineClientImpl.java:378)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    ... 54 more
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
16/07/07 20:19:39 INFO ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
16/07/07 20:19:39 INFO SparkUI: Stopped Spark web UI at http://10.8.17.9:4040
16/07/07 20:19:39 WARN YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
16/07/07 20:19:39 INFO YarnClientSchedulerBackend: Stopped
16/07/07 20:19:39 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
16/07/07 20:19:39 INFO MemoryStore: MemoryStore cleared
16/07/07 20:19:39 INFO BlockManager: BlockManager stopped
16/07/07 20:19:39 INFO BlockManagerMaster: BlockManagerMaster stopped
16/07/07 20:19:39 WARN MetricsSystem: Stopping a MetricsSystem that is not running
16/07/07 20:19:39 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
16/07/07 20:19:39 INFO SparkContext: Successfully stopped SparkContext
16/07/07 20:19:39 INFO RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
16/07/07 20:19:39 INFO RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
16/07/07 20:19:39 INFO RemoteActorRefProvider$RemotingTerminator: Remoting shut down.
org.apache.hadoop.service.ServiceStateException: java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.service.ServiceStateException.convert(ServiceStateException.java:59)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:204)
    at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceStart(YarnClientImpl.java:194)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:127)
    at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
    at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:530)
    at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
    at $iwC$$iwC.<init>(<console>:15)
    at $iwC.<init>(<console>:24)
    at <init>(<console>:26)
    at .<init>(<console>:30)
    at .<clinit>(<console>)
    at .<init>(<console>:7)
    at .<clinit>(<console>)
    at $print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
    at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
    at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
    at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.IOException: No FileSystem for scheme: wasb
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:170)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:355)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceStart(TimelineClientImpl.java:378)
    at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
    ... 54 more
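
Note: this is the root failure. The cluster configuration evidently points the YARN timeline/filesystem paths at a wasb:// (Azure Blob Storage) URI, but no FileSystem implementation for the wasb scheme is on Spark's classpath, so YarnClientImpl, and with it the SparkContext, fails to start. One way to address it is to put the hadoop-azure and azure-storage jars on the driver classpath and map the scheme explicitly; a minimal sketch, assuming the jar locations below (the paths and the azure-storage version are assumptions; locate them with find /usr/hdp -name 'hadoop-azure*.jar'):

# Azure filesystem jars shipped with the distribution (paths are assumptions):
AZURE_JARS=/usr/hdp/2.4.2.0-258/hadoop/hadoop-azure.jar:/usr/hdp/2.4.2.0-258/hadoop/lib/azure-storage-2.2.0.jar

# spark.hadoop.* settings are copied into the Hadoop Configuration, so this maps
# the wasb scheme to its implementation without editing core-site.xml:
spark-shell \
  --driver-class-path "$AZURE_JARS" \
  --conf spark.hadoop.fs.wasb.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem
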
java.lang.NullPointerException
    at org.apache.spark.sql.SQLContext$.createListenerAndUI(SQLContext.scala:1367)
    at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
    at $iwC$$iwC.<init>(<console>:15)
    at $iwC.<init>(<console>:24)
    at <init>(<console>:26)
    at .<init>(<console>:30)
    at .<clinit>(<console>)
    at .<init>(<console>:7)
    at .<clinit>(<console>)
    at $print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
    at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
    at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
    at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
    at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
    at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
    at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
<console>:16: error: not found: value sqlContext
         import sqlContext.implicits._
                ^
<console>:16: error: not found: value sqlContext
         import sqlContext.sql
                ^

scala>
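
Note: everything from the NullPointerException onward is fallout from the wasb failure. The SparkContext was stopped before the shell finished initializing, so HiveContext construction evidently dereferenced a null context, sqlContext was never bound, and the two "not found: value sqlContext" import errors follow. After applying the fixes sketched earlier, a hypothetical smoke test that the shell comes up with sc and sqlContext defined:

# Pipe a couple of sanity checks into a fresh shell (illustrative only):
spark-shell <<'EOF'
sc.parallelize(1 to 10).sum()
sqlContext.range(5).count()
EOF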