{
"cells": [
{
"cell_type": "code",
"metadata": {
"collapsed": true,
"ExecuteTime": {
"end_time": "2024-06-04T11:45:04.012333Z",
"start_time": "2024-06-04T11:44:59.890518Z"
}
},
"source": [
"val scalaVersion = \"2.13.14\"\n",
"val scalaCompatVersion = \"2.13\"\n",
"val sparkVersion = \"3.5.1\"\n",
"val kotlinVersion = \"1.9.23\"\n",
"\n",
"USE {\n",
" dependencies(\n",
" \"org.apache.spark:spark-repl_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion\",\n",
" \"org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion\",\n",
" \"org.apache.spark:spark-sql_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-yarn_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-streaming_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-mllib_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-sql_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-graphx_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-launcher_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-catalyst_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-streaming_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.apache.spark:spark-core_$scalaCompatVersion:$sparkVersion\",\n",
" \"org.scala-lang:scala-library:$scalaVersion\",\n",
" \"org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1\",\n",
" \"org.scala-lang:scala-reflect:$scalaVersion\",\n",
" \"org.scala-lang:scala-compiler:$scalaVersion\",\n",
" \"commons-io:commons-io:2.11.0\",\n",
" )\n",
"}"
],
"outputs": [],
"execution_count": 3
},
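{
"metadata": {},
"cell_type": "markdown",
"source": [
"The `USE { dependencies(...) }` block is the Kotlin Jupyter kernel's DSL for adding Maven artifacts to the running session. All Spark modules use the Scala 2.13 (`_2.13`) artifacts; `spark-repl` is presumably included so that classes compiled in the REPL can be served to executors."
]
},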
{
"metadata": {
"ExecuteTime": {
"end_time": "2024-06-04T11:46:46.543165Z",
"start_time": "2024-06-04T11:46:42.840276Z"
}
},
"cell_type": "code",
"source": [
"import org.apache.spark.sql.SparkSession\n",
"\n",
"val spark = SparkSession.builder()\n",
" .appName(\"Notebook\")\n",
" .master(\"local[*]\")\n",
" .getOrCreate()"
],
"outputs": [],
"execution_count": 4
},
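{
"metadata": {},
"cell_type": "markdown",
"source": [
"`master(\"local[*]\")` runs Spark inside the notebook JVM with as many worker threads as there are cores, and `getOrCreate()` returns the existing session if one was already started."
]
},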
{
"metadata": {
"ExecuteTime": {
"end_time": "2024-06-04T11:50:53.955231Z",
"start_time": "2024-06-04T11:50:53.767268Z"
}
},
"cell_type": "code",
"source": [
"import scala.Product\n",
"import java.io.Serializable\n",
"\n",
"enum class EyeColor {\n",
" BLUE, BROWN, GREEN\n",
"}\n",
"\n",
"enum class Gender {\n",
" MALE, FEMALE, OTHER\n",
"}\n",
"\n",
"open class Person @JvmOverloads constructor(\n",
" var eyeColor: EyeColor = EyeColor.BLUE,\n",
" var name: String = \"\",\n",
" var gender: Gender = Gender.OTHER,\n",
" var length: Double = 0.0,\n",
" var age: Int = 0,\n",
"): Serializable"
],
"outputs": [],
"execution_count": 6
},
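{
"metadata": {},
"cell_type": "markdown",
"source": [
"`Encoders.bean` needs a JavaBean: a public no-arg constructor (here via `@JvmOverloads` with default values for every parameter) and getter/setter pairs (here via the `var` properties). To see which schema the encoder derives from the class, you can print it; a minimal sketch, not executed in this session:"
]
},
{
"metadata": {},
"cell_type": "code",
"source": [
"import org.apache.spark.sql.Encoders\n",
"\n",
"// Inspect the schema Spark derives from the Person bean (sketch, not run here)\n",
"Encoders.bean(Person::class.java).schema().printTreeString()"
],
"outputs": [],
"execution_count": null
},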
{
"metadata": {
"ExecuteTime": {
"end_time": "2024-06-04T11:52:09.949012Z",
"start_time": "2024-06-04T11:52:03.228868Z"
}
},
"cell_type": "code",
"source": [
"import org.apache.spark.sql.Encoders\n",
"\n",
"val ds = spark.createDataset(\n",
" listOf(\n",
" Person(\n",
" eyeColor = EyeColor.BLUE,\n",
" name = \"Alice\",\n",
" gender = Gender.FEMALE,\n",
" length = 1.70,\n",
" age = 25,\n",
" ),\n",
" Person(\n",
" eyeColor = EyeColor.BLUE,\n",
" name = \"Bob\",\n",
" gender = Gender.MALE,\n",
" length = 1.67,\n",
" age = 25,\n",
" ),\n",
" Person(\n",
" eyeColor = EyeColor.BROWN,\n",
" name = \"Charlie\",\n",
" gender = Gender.OTHER,\n",
" length = 1.80,\n",
" age = 17,\n",
" ),\n",
" ),\n",
" Encoders.bean(Person::class.java)\n",
")\n",
"ds.show()"
],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"+---+--------+------+------+-------+\n",
"|age|eyeColor|gender|length| name|\n",
"+---+--------+------+------+-------+\n",
"| 25| BLUE|FEMALE| 1.7| Alice|\n",
"| 25| BLUE| MALE| 1.67| Bob|\n",
"| 17| BROWN| OTHER| 1.8|Charlie|\n",
"+---+--------+------+------+-------+\n",
"\n"
]
}
],
"execution_count": 7
},
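{
"metadata": {},
"cell_type": "markdown",
"source": [
"Note that the enum properties are encoded as their string names. Untyped, `Column`-based operations are planned by Catalyst on the driver and serialize no user closures, so they are safe from a REPL; a hypothetical follow-up aggregation, not executed here:"
]
},
{
"metadata": {},
"cell_type": "code",
"source": [
"// Column-based operations ship no Kotlin closures to executors (sketch, not run here)\n",
"ds.groupBy(\"eyeColor\").count().show()"
],
"outputs": [],
"execution_count": null
},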
{
"metadata": {
"ExecuteTime": {
"end_time": "2024-06-04T11:52:34.782763Z",
"start_time": "2024-06-04T11:52:33.132106Z"
}
},
"cell_type": "code",
"source": [
"ds.filter { \n",
" it.name == \"Alice\"\n",
"}.show()"
],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0) (192.168.1.114 executor driver): java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD\n",
"\tat java.base/java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2076)\n",
"\tat java.base/java.io.ObjectStreamClass$FieldReflector.checkObjectFieldValueTypes(ObjectStreamClass.java:2039)\n",
"\tat java.base/java.io.ObjectStreamClass.checkObjFieldValueTypes(ObjectStreamClass.java:1293)\n",
"\tat java.base/java.io.ObjectInputStream.defaultCheckFieldValues(ObjectInputStream.java:2512)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2419)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:489)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:447)\n",
"\tat scala.collection.generic.DefaultSerializationProxy.readObject(DefaultSerializationProxy.scala:58)\n",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n",
"\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n",
"\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\n",
"\tat java.base/java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1046)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2357)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:489)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:447)\n",
"\tat org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:87)\n",
"\tat org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:129)\n",
"\tat org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:86)\n",
"\tat org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)\n",
"\tat org.apache.spark.scheduler.Task.run(Task.scala:141)\n",
"\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)\n",
"\tat org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)\n",
"\tat org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)\n",
"\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)\n",
"\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)\n",
"\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n",
"\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n",
"\tat java.base/java.lang.Thread.run(Thread.java:829)\n",
"\n",
"Driver stacktrace:\n",
"org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0) (192.168.1.114 executor driver): java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD\n",
"\tat java.base/java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2076)\n",
"\tat java.base/java.io.ObjectStreamClass$FieldReflector.checkObjectFieldValueTypes(ObjectStreamClass.java:2039)\n",
"\tat java.base/java.io.ObjectStreamClass.checkObjFieldValueTypes(ObjectStreamClass.java:1293)\n",
"\tat java.base/java.io.ObjectInputStream.defaultCheckFieldValues(ObjectInputStream.java:2512)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2419)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:489)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:447)\n",
"\tat scala.collection.generic.DefaultSerializationProxy.readObject(DefaultSerializationProxy.scala:58)\n",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n",
"\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n",
"\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\n",
"\tat java.base/java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1046)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2357)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:489)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:447)\n",
"\tat org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:87)\n",
"\tat org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:129)\n",
"\tat org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:86)\n",
"\tat org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)\n",
"\tat org.apache.spark.scheduler.Task.run(Task.scala:141)\n",
"\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)\n",
"\tat org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)\n",
"\tat org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)\n",
"\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)\n",
"\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)\n",
"\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n",
"\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n",
"\tat java.base/java.lang.Thread.run(Thread.java:829)\n",
"\n",
"Driver stacktrace:\n",
"\tat org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2856)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2792)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2791)\n",
"\tat scala.collection.immutable.List.foreach(List.scala:334)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2791)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1247)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1247)\n",
"\tat scala.Option.foreach(Option.scala:437)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1247)\n",
"\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:3060)\n",
"\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2994)\n",
"\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2983)\n",
"\tat org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)\n",
"\tat org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:989)\n",
"\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2398)\n",
"\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2419)\n",
"\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2438)\n",
"\tat org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:530)\n",
"\tat org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:483)\n",
"\tat org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:61)\n",
"\tat org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:4332)\n",
"\tat org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:3314)\n",
"\tat org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:4322)\n",
"\tat org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:546)\n",
"\tat org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:4320)\n",
"\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:125)\n",
"\tat org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)\n",
"\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)\n",
"\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)\n",
"\tat org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)\n",
"\tat org.apache.spark.sql.Dataset.withAction(Dataset.scala:4320)\n",
"\tat org.apache.spark.sql.Dataset.head(Dataset.scala:3314)\n",
"\tat org.apache.spark.sql.Dataset.take(Dataset.scala:3537)\n",
"\tat org.apache.spark.sql.Dataset.getRows(Dataset.scala:280)\n",
"\tat org.apache.spark.sql.Dataset.showString(Dataset.scala:315)\n",
"\tat org.apache.spark.sql.Dataset.show(Dataset.scala:838)\n",
"\tat org.apache.spark.sql.Dataset.show(Dataset.scala:797)\n",
"\tat org.apache.spark.sql.Dataset.show(Dataset.scala:806)\n",
"\tat Line_10_jupyter.<init>(Line_10.jupyter.kts:3)\n",
"\tat java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)\n",
"\tat java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)\n",
"\tat java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)\n",
"\tat java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)\n",
"\tat kotlin.script.experimental.jvm.BasicJvmScriptEvaluator.evalWithConfigAndOtherScriptsResults(BasicJvmScriptEvaluator.kt:122)\n",
"\tat kotlin.script.experimental.jvm.BasicJvmScriptEvaluator.invoke$suspendImpl(BasicJvmScriptEvaluator.kt:48)\n",
"\tat kotlin.script.experimental.jvm.BasicJvmScriptEvaluator.invoke(BasicJvmScriptEvaluator.kt)\n",
"\tat kotlin.script.experimental.jvm.BasicJvmReplEvaluator.eval(BasicJvmReplEvaluator.kt:49)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.InternalEvaluatorImpl$eval$resultWithDiagnostics$1.invokeSuspend(InternalEvaluatorImpl.kt:127)\n",
"\tat kotlin.coroutines.jvm.internal.BaseContinuationImpl.resumeWith(ContinuationImpl.kt:33)\n",
"\tat kotlinx.coroutines.DispatchedTask.run(DispatchedTask.kt:104)\n",
"\tat kotlinx.coroutines.EventLoopImplBase.processNextEvent(EventLoop.common.kt:277)\n",
"\tat kotlinx.coroutines.BlockingCoroutine.joinBlocking(Builders.kt:95)\n",
"\tat kotlinx.coroutines.BuildersKt__BuildersKt.runBlocking(Builders.kt:69)\n",
"\tat kotlinx.coroutines.BuildersKt.runBlocking(Unknown Source)\n",
"\tat kotlinx.coroutines.BuildersKt__BuildersKt.runBlocking$default(Builders.kt:48)\n",
"\tat kotlinx.coroutines.BuildersKt.runBlocking$default(Unknown Source)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.InternalEvaluatorImpl.eval(InternalEvaluatorImpl.kt:127)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.CellExecutorImpl$execute$1$result$1.invoke(CellExecutorImpl.kt:79)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.CellExecutorImpl$execute$1$result$1.invoke(CellExecutorImpl.kt:77)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl.withHost(ReplForJupyterImpl.kt:758)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.CellExecutorImpl.execute(CellExecutorImpl.kt:77)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.execution.CellExecutor$DefaultImpls.execute$default(CellExecutor.kt:12)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl.evaluateUserCode(ReplForJupyterImpl.kt:581)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl.access$evaluateUserCode(ReplForJupyterImpl.kt:136)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl$evalEx$1.invoke(ReplForJupyterImpl.kt:439)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl$evalEx$1.invoke(ReplForJupyterImpl.kt:436)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl.withEvalContext(ReplForJupyterImpl.kt:417)\n",
"\tat org.jetbrains.kotlinx.jupyter.repl.impl.ReplForJupyterImpl.evalEx(ReplForJupyterImpl.kt:436)\n",
"\tat org.jetbrains.kotlinx.jupyter.messaging.IdeCompatibleMessageRequestProcessor$processExecuteRequest$1$response$1$1.invoke(IdeCompatibleMessageRequestProcessor.kt:140)\n",
"\tat org.jetbrains.kotlinx.jupyter.messaging.IdeCompatibleMessageRequestProcessor$processExecuteRequest$1$response$1$1.invoke(IdeCompatibleMessageRequestProcessor.kt:139)\n",
"\tat org.jetbrains.kotlinx.jupyter.execution.JupyterExecutorImpl$Task.execute(JupyterExecutorImpl.kt:42)\n",
"\tat org.jetbrains.kotlinx.jupyter.execution.JupyterExecutorImpl$executorThread$1.invoke(JupyterExecutorImpl.kt:82)\n",
"\tat org.jetbrains.kotlinx.jupyter.execution.JupyterExecutorImpl$executorThread$1.invoke(JupyterExecutorImpl.kt:80)\n",
"\tat kotlin.concurrent.ThreadsKt$thread$thread$1.run(Thread.kt:30)\n",
"Caused by: java.lang.ClassCastException: cannot assign instance of java.lang.invoke.SerializedLambda to field org.apache.spark.rdd.MapPartitionsRDD.f of type scala.Function3 in instance of org.apache.spark.rdd.MapPartitionsRDD\n",
"\tat java.base/java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2076)\n",
"\tat java.base/java.io.ObjectStreamClass$FieldReflector.checkObjectFieldValueTypes(ObjectStreamClass.java:2039)\n",
"\tat java.base/java.io.ObjectStreamClass.checkObjFieldValueTypes(ObjectStreamClass.java:1293)\n",
"\tat java.base/java.io.ObjectInputStream.defaultCheckFieldValues(ObjectInputStream.java:2512)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2419)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:489)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:447)\n",
"\tat scala.collection.generic.DefaultSerializationProxy.readObject(DefaultSerializationProxy.scala:58)\n",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n",
"\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n",
"\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n",
"\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\n",
"\tat java.base/java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1046)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2357)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2496)\n",
"\tat java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2390)\n",
"\tat java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2228)\n",
"\tat java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1687)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:489)\n",
"\tat java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:447)\n",
"\tat org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:87)\n",
"\tat org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:129)\n",
"\tat org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:86)\n",
"\tat org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:166)\n",
"\tat org.apache.spark.scheduler.Task.run(Task.scala:141)\n",
"\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)\n",
"\tat org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)\n",
"\tat org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)\n",
"\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:94)\n",
"\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)\n",
"\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n",
"\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n",
"\tat java.base/java.lang.Thread.run(Thread.java:829)\n"
]
}
],
"execution_count": 8
},
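{
"metadata": {},
"cell_type": "markdown",
"source": [
"The `ClassCastException` above is a closure-serialization failure: `filter { ... }` resolves to the Scala overload of `Dataset.filter`, so the Kotlin lambda travels to the executor as a `java.lang.invoke.SerializedLambda`. Most likely the lambda's defining class (a REPL-generated snippet class, cf. `Line_10_jupyter` in the trace) cannot be resolved during deserialization, so the `SerializedLambda` placeholder is never resolved back into a real function object and the assignment to the `scala.Function3` field of `MapPartitionsRDD` fails. Spark's Java-API overload `filter(FilterFunction<T>)` is one alternative, but it still serializes a closure and may hit the same class-loading problem. A predicate expressed as a `Column` avoids closure serialization entirely; a hedged sketch, not verified in this session:"
]
},
{
"metadata": {},
"cell_type": "code",
"source": [
"import org.apache.spark.sql.functions.col\n",
"\n",
"// Hypothetical workaround (not verified in this session): a Column predicate is\n",
"// planned by Catalyst on the driver, so no Kotlin closure is serialized to executors.\n",
"ds.filter(col(\"name\").equalTo(\"Alice\")).show()"
],
"outputs": [],
"execution_count": null
}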
],
"metadata": {
"kernelspec": {
"display_name": "Kotlin",
"language": "kotlin",
"name": "kotlin"
},
"language_info": {
"name": "kotlin",
"version": "1.8.20",
"mimetype": "text/x-kotlin",
"file_extension": ".kt",
"pygments_lexer": "kotlin",
"codemirror_mode": "text/x-kotlin",
"nbconvert_exporter": ""
},
"ktnbPluginMetadata": {
"projectLibraries": []
}
},
"nbformat": 4,
"nbformat_minor": 0
}