Last active
February 14, 2017 09:46
-
-
Save gcpagano/e95a351dc75c4a6ecd2a50fdd7665e90 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
- create Dataset from GenericRecord *** FAILED ***
[info] org.apache.spark.SparkException: Task not serializable
[info] at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:298)
[info] at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:288)
[info] at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:108)
[info] at org.apache.spark.SparkContext.clean(SparkContext.scala:2037)
[info] at org.apache.spark.rdd.RDD$$anonfun$map$1.apply(RDD.scala:366)
[info] at org.apache.spark.rdd.RDD$$anonfun$map$1.apply(RDD.scala:365)
[info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[info] at org.apache.spark.rdd.RDD.withScope(RDD.scala:358)
[info] at org.apache.spark.rdd.RDD.map(RDD.scala:365)
[info] at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:439)
[info] at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)
[info] at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$37.apply$mcV$sp(AvroSuite.scala:1086)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$37.apply(AvroSuite.scala:1063)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$37.apply(AvroSuite.scala:1063)
[info] at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
[info] at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
[info] at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
[info] at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
[info] at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
[info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
[info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
[info] at scala.collection.immutable.List.foreach(List.scala:381)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
[info] at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
[info] at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
[info] at org.scalatest.Suite$class.run(Suite.scala:1424)
[info] at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
[info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
[info] at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
[info] at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
[info] at com.databricks.spark.avro.AvroSuite.org$scalatest$BeforeAndAfterAll$$super$run(AvroSuite.scala:53)
[info] at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
[info] at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
[info] at com.databricks.spark.avro.AvroSuite.run(AvroSuite.scala:53)
[info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
[info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
[info] at sbt.TestRunner.runTest$1(TestFramework.scala:76)
[info] at sbt.TestRunner.run(TestFramework.scala:85)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
[info] at sbt.TestFunction.apply(TestFramework.scala:207)
[info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
[info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
[info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
[info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
[info] at sbt.std.Transform$$anon$4.work(System.scala:63)
[info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
[info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
[info] at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
[info] at sbt.Execute.work(Execute.scala:237)
[info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
[info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
[info] at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
[info] at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info] at java.lang.Thread.run(Thread.java:745)
[info] Cause: java.io.NotSerializableException: org.apache.avro.Schema$RecordSchema
[info] Serialization stack:
[info] - object not serializable (class: org.apache.avro.Schema$RecordSchema, value: {"type":"record","name":"GenericRecordTest","namespace":"com.databricks.spark.avro","fields":[{"name":"field1","type":"string"}]})
[info] - field (class: org.apache.spark.sql.catalyst.expressions.Literal, name: value, type: class java.lang.Object)
[info] - object (class org.apache.spark.sql.catalyst.expressions.Literal, {"type":"record","name":"GenericRecordTest","namespace":"com.databricks.spark.avro","fields":[{"name":"field1","type":"string"}]})
[info] - writeObject data (class: scala.collection.immutable.List$SerializationProxy)
[info] - object (class scala.collection.immutable.List$SerializationProxy, scala.collection.immutable.List$SerializationProxy@28d01f4e)
[info] - writeReplace data (class: scala.collection.immutable.List$SerializationProxy)
[info] - object (class scala.collection.immutable.$colon$colon, List({"type":"record","name":"GenericRecordTest","namespace":"com.databricks.spark.avro","fields":[{"name":"field1","type":"string"}]}))
[info] - field (class: org.apache.spark.sql.catalyst.expressions.objects.NewInstance, name: arguments, type: interface scala.collection.Seq)
[info] - object (class org.apache.spark.sql.catalyst.expressions.objects.NewInstance, newInstance(class org.apache.avro.generic.GenericData$Record))
[info] - field (class: com.databricks.spark.avro.AvroTypeInference$InitializeAvroObject, name: objectInstance, type: class org.apache.spark.sql.catalyst.expressions.Expression)
[info] - object (class com.databricks.spark.avro.AvroTypeInference$InitializeAvroObject, initializeavroobject(newInstance(class org.apache.avro.generic.GenericData$Record), (0,'field1.toString)))
[info] - field (class: org.apache.spark.sql.catalyst.encoders.ExpressionEncoder, name: deserializer, type: class org.apache.spark.sql.catalyst.expressions.Expression)
[info] - object (class org.apache.spark.sql.catalyst.encoders.ExpressionEncoder, class[field1[0]: string])
[info] - field (class: org.apache.spark.sql.SparkSession$$anonfun$4, name: enc$2, type: class org.apache.spark.sql.catalyst.encoders.ExpressionEncoder)
[info] - object (class org.apache.spark.sql.SparkSession$$anonfun$4, <function1>)
[info] at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:40)
[info] at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:46)
[info] at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:100)
[info] at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:295)
[info] at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:288)
[info] at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:108)
[info] at org.apache.spark.SparkContext.clean(SparkContext.scala:2037)
[info] at org.apache.spark.rdd.RDD$$anonfun$map$1.apply(RDD.scala:366)
[info] at org.apache.spark.rdd.RDD$$anonfun$map$1.apply(RDD.scala:365)
[info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
[info] at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
[info] at org.apache.spark.rdd.RDD.withScope(RDD.scala:358)
[info] at org.apache.spark.rdd.RDD.map(RDD.scala:365)
[info] at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:439)
[info] at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)
[info] at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$37.apply$mcV$sp(AvroSuite.scala:1086)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$37.apply(AvroSuite.scala:1063)
[info] at com.databricks.spark.avro.AvroSuite$$anonfun$37.apply(AvroSuite.scala:1063)
[info] at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
[info] at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
[info] at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
[info] at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
[info] at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
[info] at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
[info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
[info] at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
[info] at scala.collection.immutable.List.foreach(List.scala:381)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
[info] at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
[info] at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
[info] at org.scalatest.Suite$class.run(Suite.scala:1424)
[info] at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
[info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
[info] at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
[info] at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
[info] at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
[info] at com.databricks.spark.avro.AvroSuite.org$scalatest$BeforeAndAfterAll$$super$run(AvroSuite.scala:53)
[info] at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
[info] at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
[info] at com.databricks.spark.avro.AvroSuite.run(AvroSuite.scala:53)
[info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
[info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
[info] at sbt.TestRunner.runTest$1(TestFramework.scala:76)
[info] at sbt.TestRunner.run(TestFramework.scala:85)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
[info] at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
[info] at sbt.TestFunction.apply(TestFramework.scala:207)
[info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
[info] at sbt.Tests$$anonfun$9.apply(Tests.scala:216)
[info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
[info] at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
[info] at sbt.std.Transform$$anon$4.work(System.scala:63)
[info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
[info] at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
[info] at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
[info] at sbt.Execute.work(Execute.scala:237)
[info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
[info] at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
[info] at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
[info] at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[info] at java.lang.Thread.run(Thread.java:745)
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.