Created August 15, 2021 16:19
Spark Worker error
Spark Executor Command: "/usr/lib/jvm/java-11-openjdk/bin/java" "-cp" "/opt/spark3/conf/:/opt/spark3/jars/*" "-Xmx1024M" "-Dspark.driver.port=35601" "org.apache.spark.executor.CoarseGrainedExecutorBackend" "--driver-url" "spark://CoarseGrainedScheduler@192.168.11.2:35601" "--executor-id" "0" "--hostname" "192.168.11.2" "--cores" "1" "--app-id" "app-20210816011025-0000" "--worker-url" "spark://Worker@192.168.11.2:39677" | |
======================================== | |
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties | |
21/08/16 01:10:26 INFO CoarseGrainedExecutorBackend: Started daemon with process name: 134769@laptop-archlinux | |
21/08/16 01:10:26 INFO SignalUtils: Registering signal handler for TERM | |
21/08/16 01:10:26 INFO SignalUtils: Registering signal handler for HUP | |
21/08/16 01:10:26 INFO SignalUtils: Registering signal handler for INT | |
21/08/16 01:10:26 WARN Utils: Your hostname, laptop-archlinux resolves to a loopback address: 127.0.0.1; using 192.168.11.2 instead (on interface enp0s20f0u1) | |
21/08/16 01:10:26 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address | |
WARNING: An illegal reflective access operation has occurred | |
WARNING: Illegal reflective access by org.apache.spark.unsafe.Platform (file:/opt/spark3/jars/spark-unsafe_2.12-3.1.2.jar) to constructor java.nio.DirectByteBuffer(long,int) | |
WARNING: Please consider reporting this to the maintainers of org.apache.spark.unsafe.Platform | |
WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations | |
WARNING: All illegal access operations will be denied in a future release | |
21/08/16 01:10:26 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable | |
21/08/16 01:10:26 INFO SecurityManager: Changing view acls to: ywatanabe,root | |
21/08/16 01:10:26 INFO SecurityManager: Changing modify acls to: ywatanabe,root | |
21/08/16 01:10:26 INFO SecurityManager: Changing view acls groups to: | |
21/08/16 01:10:26 INFO SecurityManager: Changing modify acls groups to: | |
21/08/16 01:10:26 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(ywatanabe, root); groups with view permissions: Set(); users with modify permissions: Set(ywatanabe, root); groups with modify permissions: Set() | |
21/08/16 01:10:26 INFO TransportClientFactory: Successfully created connection to /192.168.11.2:35601 after 55 ms (0 ms spent in bootstraps) | |
21/08/16 01:10:26 WARN TransportChannelHandler: Exception in connection from /192.168.11.2:35601 | |
java.io.InvalidClassException: scala.collection.mutable.WrappedArray$ofRef; local class incompatible: stream classdesc serialVersionUID = 3456489343829468865, local class serialVersionUID = 1028182004549731694 | |
at java.base/java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:689) | |
at java.base/java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:2012) | |
at java.base/java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1862) | |
at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2169) | |
at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1679) | |
at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2464) | |
at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2358) | |
at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2196) | |
at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1679) | |
at java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:493) | |
at java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:451) | |
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76) | |
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:109) | |
at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$deserialize$2(NettyRpcEnv.scala:299) | |
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62) | |
at org.apache.spark.rpc.netty.NettyRpcEnv.deserialize(NettyRpcEnv.scala:352) | |
at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$deserialize$1(NettyRpcEnv.scala:298) | |
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62) | |
at org.apache.spark.rpc.netty.NettyRpcEnv.deserialize(NettyRpcEnv.scala:298) | |
at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$7(NettyRpcEnv.scala:246) | |
at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$7$adapted(NettyRpcEnv.scala:246) | |
at org.apache.spark.rpc.netty.RpcOutboxMessage.onSuccess(Outbox.scala:90) | |
at org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:195) | |
at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:142) | |
at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:53) | |
at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) | |
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) | |
at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) | |
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) | |
at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) | |
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) | |
at org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) | |
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) | |
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) | |
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) | |
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) | |
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163) | |
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:714) | |
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:650) | |
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:576) | |
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493) | |
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989) | |
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) | |
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) | |
at java.base/java.lang.Thread.run(Thread.java:829) |
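
The failure is the `java.io.InvalidClassException` above: `scala.collection.mutable.WrappedArray$ofRef` declares no explicit `serialVersionUID`, so Java serialization derives one from the class layout, and that derived UID has changed between Scala 2.12 patch releases. In practice this almost always means the driver and the executor are loading different `scala-library` jars (for example, a locally installed Scala versus the one under `/opt/spark3/jars`; the official Spark 3.1.2 binaries are built against Scala 2.12.10). A minimal sketch for comparing the two sides, run once against each classpath; the object name here is made up for illustration:

```scala
import java.io.ObjectStreamClass
import scala.collection.mutable

// Illustrative diagnostic, not part of Spark: print the Scala version
// on the classpath and the serialVersionUID it computes for
// WrappedArray$ofRef, to match against the two UIDs in the exception.
object WrappedArrayUid {
  def main(args: Array[String]): Unit = {
    val desc = ObjectStreamClass.lookup(classOf[mutable.WrappedArray.ofRef[_]])
    println(s"Scala:            ${scala.util.Properties.versionString}")
    println(s"serialVersionUID: ${desc.getSerialVersionUID}")
  }
}
```

Compiling and running this once with the driver's classpath and once with `java -cp "/opt/spark3/jars/*"` should reproduce the two UIDs from the exception message, one on each side.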
I get the same failure.
scala/bug#5046 seems to be related, but I don't know how Spark triggers it.
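For what it's worth, the trigger is visible in the stack trace: the executor's `NettyRpcEnv` decodes the driver's RPC reply with plain Java serialization (`JavaSerializer`), and that reply contains a `WrappedArray`, so the very first registration handshake hits the UID check. Aligning the Scala patch version on both sides (or launching the driver with the jars under `/opt/spark3/jars`) should resolve it. If executors can register at all, a quick runtime check looks like this sketch, assuming a live `SparkSession` named `spark` (names are illustrative):

```scala
// Scala version seen by the driver JVM.
println(s"driver:   ${scala.util.Properties.versionString}")

// Scala version seen by an executor JVM: run one trivial task and
// report back. This requires executors to deserialize tasks, so on a
// cluster broken by this very mismatch, inspect the jars instead:
//   ls /opt/spark3/jars/scala-library-*.jar
val executorScala = spark.sparkContext
  .parallelize(Seq(1), 1)
  .map(_ => scala.util.Properties.versionString)
  .first()
println(s"executor: $executorScala")
```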