Created: June 4, 2018 13:00
Save Mahesha999/c284458d51141884fbff6f9415570d11 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
18/06/04 17:47:48 ERROR Executor: Exception in task 0.0 in stage 4.0 (TID 4) | |
java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: java.lang.Long is not a valid external type for schema of string | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) AS m.attr1#0 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 0 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) AS m.attr2#1 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 1 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) AS m.attr3#2 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 2 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) AS m.attr4#3 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 3 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) AS m.attr5#4 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 4 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) AS m.attr6#5 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 5 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) AS m.attr7#6 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 6 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) AS m.attr8#7L | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 7 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) AS id#8 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 8 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) AS m.attr9#9 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 9 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) AS m.attr10#10 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 10 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) AS m.attr11#11 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 11 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279) | |
at org.apache.spark.sql.SparkSession$$anonfun$5.apply(SparkSession.scala:537) | |
at org.apache.spark.sql.SparkSession$$anonfun$5.apply(SparkSession.scala:537) | |
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370) | |
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370) | |
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source) | |
at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370) | |
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369) | |
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:147) | |
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79) | |
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47) | |
at org.apache.spark.scheduler.Task.run(Task.scala:85) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: java.lang.RuntimeException: java.lang.Long is not a valid external type for schema of string | |
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source) | |
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276) | |
... 16 more | |
18/06/04 17:47:48 ERROR TaskSetManager: Task 0 in stage 4.0 failed 1 times; aborting job | |
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 1 times, most recent failure: Lost task 0.0 in stage 4.0 (TID 4, localhost): java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: java.lang.Long is not a valid external type for schema of string | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) AS m.attr1#0 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 0 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) AS m.attr2#1 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 1 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) AS m.attr3#2 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 2 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) AS m.attr4#3 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 3 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) AS m.attr5#4 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 4 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) AS m.attr6#5 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 5 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) AS m.attr7#6 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 6 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) AS m.attr8#7L | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 7 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) AS id#8 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 8 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) AS m.attr9#9 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 9 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) AS m.attr10#10 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 10 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) AS m.attr11#11 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 11 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279) | |
at org.apache.spark.sql.SparkSession$$anonfun$5.apply(SparkSession.scala:537) | |
at org.apache.spark.sql.SparkSession$$anonfun$5.apply(SparkSession.scala:537) | |
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370) | |
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370) | |
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source) | |
at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370) | |
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369) | |
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:147) | |
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79) | |
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47) | |
at org.apache.spark.scheduler.Task.run(Task.scala:85) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: java.lang.RuntimeException: java.lang.Long is not a valid external type for schema of string | |
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source) | |
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276) | |
... 16 more | |
Driver stacktrace: | |
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1450) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1438) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1437) | |
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) | |
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) | |
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1437) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811) | |
at scala.Option.foreach(Option.scala:257) | |
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:811) | |
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1659) | |
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1618) | |
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1607) | |
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48) | |
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:632) | |
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1871) | |
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1884) | |
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1897) | |
at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:347) | |
at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:39) | |
at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2183) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57) | |
at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2532) | |
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2182) | |
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2189) | |
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1925) | |
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1924) | |
at org.apache.spark.sql.Dataset.withTypedCallback(Dataset.scala:2562) | |
at org.apache.spark.sql.Dataset.head(Dataset.scala:1924) | |
at org.apache.spark.sql.Dataset.take(Dataset.scala:2139) | |
at org.apache.spark.sql.Dataset.showString(Dataset.scala:239) | |
at org.apache.spark.sql.Dataset.show(Dataset.scala:526) | |
at org.apache.spark.sql.Dataset.show(Dataset.scala:506) | |
at Temp.main(Temp.java:128) | |
Caused by: java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: java.lang.Long is not a valid external type for schema of string | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) AS m.attr1#0 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 0 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1), NullType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 0, m.attr1) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) AS m.attr2#1 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 1 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 1, m.attr2) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) AS m.attr3#2 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 2 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 2, m.attr3) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) AS m.attr4#3 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 3 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 3, m.attr4) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) AS m.attr5#4 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 4 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 4, m.attr5) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) AS m.attr6#5 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 5 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 5, m.attr6) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) AS m.attr7#6 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 6 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 6, m.attr7) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) AS m.attr8#7L | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 7 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8), LongType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 7, m.attr8) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) AS id#8 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 8 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 8, id) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) AS m.attr9#9 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 9 | |
:- null | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9), NullType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 9, m.attr9) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) AS m.attr10#10 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 10 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 10, m.attr10) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) AS m.attr11#11 | |
+- if (assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt) null else staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) | |
:- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object).isNullAt | |
: :- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
: : +- input[0, org.apache.spark.sql.Row, true] | |
: +- 11 | |
:- null | |
+- staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType), true) | |
+- validateexternaltype(getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11), StringType) | |
+- getexternalrowfield(assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object), 11, m.attr11) | |
+- assertnotnull(input[0, org.apache.spark.sql.Row, true], top level row object) | |
+- input[0, org.apache.spark.sql.Row, true] | |
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:279) | |
at org.apache.spark.sql.SparkSession$$anonfun$5.apply(SparkSession.scala:537) | |
at org.apache.spark.sql.SparkSession$$anonfun$5.apply(SparkSession.scala:537) | |
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370) | |
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370) | |
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source) | |
at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) | |
at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370) | |
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:369) | |
at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:147) | |
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79) | |
at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47) | |
at org.apache.spark.scheduler.Task.run(Task.scala:85) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: java.lang.RuntimeException: java.lang.Long is not a valid external type for schema of string | |
at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source) | |
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:276) | |
... 16 more |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment