Created
September 5, 2016 13:43
-
-
Save HyukjinKwon/0c42b2c208e06c59525d91087252d9b0 to your computer and use it in GitHub Desktop.
[SPARK][R] test output https://ci.appveyor.com/project/HyukjinKwon/spark/build/69-SPARK-17339-after
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
[00:41:11] Failed ------------------------------------------------------------------------- | |
[00:41:11] 1. Error: spark.lda with text input (@test_mllib.R#655) ------------------------ | |
[00:41:11] org.apache.spark.sql.AnalysisException: Path does not exist: file:/C:/projects/spark/R/lib/SparkR/tests/testthat/data/mllib/sample_lda_data.txt; | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:376) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:365) | |
[00:41:11] | |
[00:41:11] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241) | |
[00:41:11] | |
[00:41:11] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241) | |
[00:41:11] | |
[00:41:11] at scala.collection.immutable.List.foreach(List.scala:381) | |
[00:41:11] | |
[00:41:11] at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241) | |
[00:41:11] | |
[00:41:11] at scala.collection.immutable.List.flatMap(List.scala:344) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:365) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:149) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:500) | |
[00:41:11] | |
[00:41:11] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
[00:41:11] | |
[00:41:11] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
[00:41:11] | |
[00:41:11] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
[00:41:11] | |
[00:41:11] at java.lang.reflect.Method.invoke(Method.java:498) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294) | |
[00:41:11] | |
[00:41:11] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294) | |
[00:41:11] | |
[00:41:11] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354) | |
[00:41:11] | |
[00:41:11] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111) | |
[00:41:11] | |
[00:41:11] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137) | |
[00:41:11] | |
[00:41:11] at java.lang.Thread.run(Thread.java:745) | |
[00:41:11] | |
[00:41:11] 1: read.text("data/mllib/sample_lda_data.txt") at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:655 | |
[00:41:11] 2: dispatchFunc("read.text(path)", x, ...) | |
[00:41:11] 3: f(x, ...) | |
[00:41:11] 4: callJMethod(read, "text", paths) | |
[00:41:11] 5: invokeJava(isStatic = FALSE, objId$id, methodName, ...) | |
[00:41:11] 6: stop(readString(conn)) | |
[00:41:11] | |
[00:41:11] 2. Error: spark.posterior and spark.perplexity (@test_mllib.R#691) ------------- | |
[00:41:11] org.apache.spark.sql.AnalysisException: Path does not exist: file:/C:/projects/spark/R/lib/SparkR/tests/testthat/data/mllib/sample_lda_data.txt; | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:376) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$12.apply(DataSource.scala:365) | |
[00:41:11] | |
[00:41:11] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241) | |
[00:41:11] | |
[00:41:11] at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241) | |
[00:41:11] | |
[00:41:11] at scala.collection.immutable.List.foreach(List.scala:381) | |
[00:41:11] | |
[00:41:11] at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241) | |
[00:41:11] | |
[00:41:11] at scala.collection.immutable.List.flatMap(List.scala:344) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:365) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:149) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:500) | |
[00:41:11] | |
[00:41:11] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) | |
[00:41:11] | |
[00:41:11] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) | |
[00:41:11] | |
[00:41:11] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
[00:41:11] | |
[00:41:11] at java.lang.reflect.Method.invoke(Method.java:498) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:141) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:86) | |
[00:41:11] | |
[00:41:11] at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:38) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294) | |
[00:41:11] | |
[00:41:11] at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294) | |
[00:41:11] | |
[00:41:11] at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382) | |
[00:41:11] | |
[00:41:11] at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354) | |
[00:41:11] | |
[00:41:11] at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111) | |
[00:41:11] | |
[00:41:11] at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137) | |
[00:41:11] | |
[00:41:11] at java.lang.Thread.run(Thread.java:745) | |
[00:41:11] | |
[00:41:11] 1: read.text("data/mllib/sample_lda_data.txt") at C:/projects/spark/R/lib/SparkR/tests/testthat/test_mllib.R:691 | |
[00:41:11] 2: dispatchFunc("read.text(path)", x, ...) | |
[00:41:11] 3: f(x, ...) | |
[00:41:11] 4: callJMethod(read, "text", paths) | |
[00:41:11] 5: invokeJava(isStatic = FALSE, objId$id, methodName, ...) | |
[00:41:11] 6: stop(readString(conn)) | |
[00:41:11] | |
[00:41:11] DONE ==== | |
[00:41:11] ======================================================================= |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment