Skip to content

Instantly share code, notes, and snippets.

@buryat
Created May 27, 2016 17:20
Show Gist options
  • Save buryat/064548e0d1ea6ad99fca3882f0ef90cb to your computer and use it in GitHub Desktop.
16/05/27 16:38:51 INFO GenerateOrdering: Code generated in 67.482205 ms
16/05/27 16:38:51 INFO GenerateUnsafeProjection: Code generated in 13.754114 ms
16/05/27 16:39:05 INFO UnsafeExternalSorter: Thread 96 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:05 INFO UnsafeExternalSorter: Thread 110 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:05 INFO UnsafeExternalSorter: Thread 95 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:05 INFO UnsafeExternalSorter: Thread 93 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:05 INFO UnsafeExternalSorter: Thread 92 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:06 INFO UnsafeExternalSorter: Thread 90 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:06 INFO UnsafeExternalSorter: Thread 94 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:06 INFO UnsafeExternalSorter: Thread 91 spilling sort data of 2.1 GB to disk (0 time so far)
16/05/27 16:39:15 ERROR Executor: Managed memory leak detected; size = 53538066 bytes, TID = 454
16/05/27 16:39:15 ERROR Executor: Exception in task 28.0 in stage 2.0 (TID 454)
java.lang.OutOfMemoryError: Java heap space
at org.apache.spark.util.collection.unsafe.sort.UnsafeSortDataFormat.allocate(UnsafeSortDataFormat.java:86)
at org.apache.spark.util.collection.unsafe.sort.UnsafeSortDataFormat.allocate(UnsafeSortDataFormat.java:32)
at org.apache.spark.util.collection.TimSort$SortState.ensureCapacity(TimSort.java:951)
at org.apache.spark.util.collection.TimSort$SortState.mergeLo(TimSort.java:699)
at org.apache.spark.util.collection.TimSort$SortState.mergeAt(TimSort.java:525)
at org.apache.spark.util.collection.TimSort$SortState.mergeCollapse(TimSort.java:453)
at org.apache.spark.util.collection.TimSort$SortState.access$200(TimSort.java:325)
at org.apache.spark.util.collection.TimSort.sort(TimSort.java:153)
at org.apache.spark.util.collection.Sorter.sort(Sorter.scala:37)
at org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.getSortedIterator(UnsafeInMemorySorter.java:228)
at org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter.spill(UnsafeExternalSorter.java:186)
at org.apache.spark.memory.TaskMemoryManager.acquireExecutionMemory(TaskMemoryManager.java:175)
at org.apache.spark.memory.TaskMemoryManager.allocatePage(TaskMemoryManager.java:249)
at org.apache.spark.memory.MemoryConsumer.allocateArray(MemoryConsumer.java:83)
at org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter.growPointerArrayIfNecessary(UnsafeExternalSorter.java:295)
at org.apache.spark.util.collection.unsafe.sort.UnsafeExternalSorter.insertRecord(UnsafeExternalSorter.java:330)
at org.apache.spark.sql.execution.UnsafeExternalRowSorter.insertRow(UnsafeExternalRowSorter.java:91)
at org.apache.spark.sql.execution.UnsafeExternalRowSorter.sort(UnsafeExternalRowSorter.java:168)
at org.apache.spark.sql.execution.Sort$$anonfun$1.apply(Sort.scala:90)
at org.apache.spark.sql.execution.Sort$$anonfun$1.apply(Sort.scala:64)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$21.apply(RDD.scala:728)
at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$21.apply(RDD.scala:728)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:270)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment