Created
May 20, 2019 23:20
-
-
Save rtyler/5cb697ceb4b82956b270fe341b9f4e2f to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
19/05/20 16:16:11 ERROR Executor: Exception in task 0.0 in stage 0.0 (TID 0)
java.lang.ClassCastException: cannot assign instance of scala.collection.immutable.List$SerializationProxy to field org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$dependencies_ of type scala.collection.Seq in instance of org.apache.spark.rdd.MapPartitionsRDD
	at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2287)
	at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1417)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2293)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2211)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2069)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1573)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2287)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2211)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2069)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1573)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:431)
	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:88)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
	at org.apache.spark.scheduler.Task.run(Task.scala:121)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
19/05/20 16:16:11 WARN TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.ClassCastException: cannot assign instance of scala.collection.immutable.List$SerializationProxy to field org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$dependencies_ of type scala.collection.Seq in instance of org.apache.spark.rdd.MapPartitionsRDD
	at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2287)
	at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1417)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2293)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2211)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2069)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1573)
	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2287)
	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2211)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2069)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1573)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:431)
	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:88)
	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
	at org.apache.spark.scheduler.Task.run(Task.scala:121)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.