@HyukjinKwon
Last active September 5, 2016 13:52

[00:42:08] Failed -------------------------------------------------------------------------
[00:42:08] 1. Error: spark.glm save/load (@test_mllib.R#168) ------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:168
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
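Note: failures 1 through 9 (and 12 below) all die at the same point, RWrappers.load passing a bare Windows path to SparkContext.textFile. A drive-letter path such as C:/projects/... parses as a URI whose scheme is the drive letter, so Hadoop's FileSystem.getFileSystemClass looks for a filesystem registered under the scheme "C" and throws the "No FileSystem for scheme: C" seen above. Below is a minimal Java sketch of that parse, using plain java.net.URI rather than Spark's actual code path, with a hypothetical model directory for illustration:

    import java.net.URI;

    public class DriveLetterScheme {
        public static void main(String[] args) {
            // A bare Windows path: the drive letter is read as the URI scheme.
            URI bare = URI.create("C:/projects/spark/model");
            System.out.println(bare.getScheme());      // "C" -> "No FileSystem for scheme: C"

            // A fully qualified file URI keeps the drive letter inside the path.
            URI qualified = URI.create("file:///C:/projects/spark/model");
            System.out.println(qualified.getScheme()); // "file"
            System.out.println(qualified.getPath());   // "/C:/projects/spark/model"
        }
    }
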
[00:42:08] 2. Error: glm save/load (@test_mllib.R#298) ------------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:298
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 3. Error: spark.kmeans (@test_mllib.R#346) -------------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:346
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 4. Error: spark.mlp (@test_mllib.R#377) ----------------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:377
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 5. Error: spark.naiveBayes (@test_mllib.R#445) ---------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:445
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 6. Error: spark.survreg (@test_mllib.R#502) ------------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:502
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 7. Error: spark.isotonicRegression (@test_mllib.R#547) -------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:547
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 8. Error: spark.gaussianMixture (@test_mllib.R#609) ----------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:609
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 9. Error: spark.lda with libsvm (@test_mllib.R#642) ----------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:642
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] 10. Error: spark.lda with text input (@test_mllib.R#655) -----------------------
[00:42:08] org.apache.spark.sql.AnalysisException: Path does not exist: file:/C:/projects/spark/R/lib/SparkR/tests/testthat/data/mllib/sample_lda_data.txt;
[00:42:08]
[00:42:08] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:376)
[00:42:08]
[00:42:08] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:365)
[00:42:08]
[00:42:08] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
[00:42:08]
[00:42:08] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
[00:42:08]
[00:42:08] at scala.collection.immutable.List.foreach(List.scala:381)
[00:42:08]
[00:42:08] at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
[00:42:08]
[00:42:08] at scala.collection.immutable.List.flatMap(List.scala:344)
[00:42:08]
[00:42:08] at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:365)
[00:42:08]
[00:42:08] at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:149)
[00:42:08]
[00:42:08] at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:500)
[00:42:08]
[00:42:08] at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:500)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.text("data/mllib/sample_lda_data.txt") at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:655
[00:42:08] 2: dispatchFunc("read.text(path)", x, ...)
[00:42:08] 3: f(x, ...)
[00:42:08] 4: callJMethod(read, "text", paths)
[00:42:08] 5: invokeJava(isStatic = FALSE, objId$id, methodName, ...)
[00:42:08] 6: stop(readString(conn))
[00:42:08]
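Note: failures 10 and 11 fail differently: read.text is given the relative path data/mllib/sample_lda_data.txt, which gets resolved against the R process's working directory, here the installed test directory R/lib/SparkR/tests/testthat/, where no data/mllib/ tree exists, hence the AnalysisException. A small Java sketch of that resolution (java.net.URI again, not Spark's actual code path; Spark renders the result as file:/C:/... in the message above), assuming the working directory implied by the traceback:

    import java.net.URI;

    public class RelativePathResolution {
        public static void main(String[] args) {
            // The test run's working directory as a file URI (the trailing slash matters).
            URI cwd = URI.create("file:///C:/projects/spark/R/lib/SparkR/tests/testthat/");
            // The relative path from the test resolves under that directory...
            URI resolved = cwd.resolve("data/mllib/sample_lda_data.txt");
            // ...yielding the non-existent location reported in the AnalysisException.
            System.out.println(resolved);
        }
    }
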
[00:42:08] 11. Error: spark.posterior and spark.perplexity (@test_mllib.R#691) ------------
[00:42:08] org.apache.spark.sql.AnalysisException: Path does not exist: file:/C:/projects/spark/R/lib/SparkR/tests/testthat/data/mllib/sample_lda_data.txt;
[00:42:08]
[00:42:08] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:376)
[00:42:08]
[00:42:08] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:365)
[00:42:08]
[00:42:08] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
[00:42:08]
[00:42:08] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
[00:42:08]
[00:42:08] at scala.collection.immutable.List.foreach(List.scala:381)
[00:42:08]
[00:42:08] at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
[00:42:08]
[00:42:08] at scala.collection.immutable.List.flatMap(List.scala:344)
[00:42:08]
[00:42:08] at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:365)
[00:42:08]
[00:42:08] at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:149)
[00:42:08]
[00:42:08] at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:500)
[00:42:08]
[00:42:08] at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:500)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.text("data/mllib/sample_lda_data.txt") at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:691
[00:42:08] 2: dispatchFunc("read.text(path)", x, ...)
[00:42:08] 3: f(x, ...)
[00:42:08] 4: callJMethod(read, "text", paths)
[00:42:08] 5: invokeJava(isStatic = FALSE, objId$id, methodName, ...)
[00:42:08] 6: stop(readString(conn))
[00:42:08]
[00:42:08] 12. Error: spark.als (@test_mllib.R#724) ---------------------------------------
[00:42:08] java.io.IOException: No FileSystem for scheme: C
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2421)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
[00:42:08]
[00:42:08] at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:995)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:990)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:806)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[00:42:08]
[00:42:08] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.withScope(SparkContext.scala:686)
[00:42:08]
[00:42:08] at org.apache.spark.SparkContext.textFile(SparkContext.scala:804)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers$.load(RWrappers.scala:36)
[00:42:08]
[00:42:08] at org.apache.spark.ml.r.RWrappers.load(RWrappers.scala)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[00:42:08]
[00:42:08] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[00:42:08]
[00:42:08] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[00:42:08]
[00:42:08] at java.lang.reflect.Method.invoke(Method.java:498)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86)
[00:42:08]
[00:42:08] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38)
[00:42:08]
[00:42:08] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
[00:42:08]
[00:42:08] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
[00:42:08]
[00:42:08] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
[00:42:08]
[00:42:08] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
[00:42:08]
[00:42:08] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
[00:42:08]
[00:42:08] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
[00:42:08]
[00:42:08] at java.lang.Thread.run(Thread.java:745)
[00:42:08]
[00:42:08] 1: read.ml(modelPath) at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:724
[00:42:08] 2: callJStatic("org.apache.spark.ml.r.RWrappers", "load", path)
[00:42:08] 3: invokeJava(isStatic = TRUE, className, methodName, ...)
[00:42:08] 4: stop(readString(conn))
[00:42:08]
[00:42:08] DONE ===========================================================================