Skip to content

Instantly share code, notes, and snippets.

@crockpotveggies
Created May 9, 2016 20:19
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save crockpotveggies/639783dd023ed1c83ddf5191d1761a64 to your computer and use it in GitHub Desktop.
ERROR [2016-05-09 19:53:01,015] org.apache.spark.util.SparkUncaughtExceptionHandler: [Container in shutdown] Uncaught exception in thread Thread[Executor task launch worker-0,5,main]
! java.lang.OutOfMemoryError: Cannot allocate 5335642403 + 364953600 bytes (> Pointer.maxBytes)
! at org.bytedeco.javacpp.Pointer.deallocator(Pointer.java:443) ~[javacpp-1.2-SNAPSHOT.jar:1.2-SNAPSHOT]
! at org.bytedeco.javacpp.Pointer.init(Pointer.java:118) ~[javacpp-1.2-SNAPSHOT.jar:1.2-SNAPSHOT]
! at org.bytedeco.javacpp.FloatPointer.allocateArray(Native Method) ~[javacpp-1.2-SNAPSHOT.jar:1.2-SNAPSHOT]
! at org.bytedeco.javacpp.FloatPointer.<init>(FloatPointer.java:68) ~[javacpp-1.2-SNAPSHOT.jar:1.2-SNAPSHOT]
! at org.nd4j.linalg.api.buffer.BaseDataBuffer.<init>(BaseDataBuffer.java:563) ~[nd4j-buffer-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.buffer.FloatBuffer.<init>(FloatBuffer.java:40) ~[nd4j-buffer-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.buffer.factory.DefaultDataBufferFactory.createFloat(DefaultDataBufferFactory.java:227) ~[nd4j-buffer-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.factory.Nd4j.createBuffer(Nd4j.java:1159) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.ndarray.BaseNDArray.<init>(BaseNDArray.java:225) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.cpu.nativecpu.NDArray.<init>(NDArray.java:107) ~[nd4j-native-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.cpu.nativecpu.CpuNDArrayFactory.create(CpuNDArrayFactory.java:239) ~[nd4j-native-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4028) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.shape.Shape.toOffsetZeroCopyHelper(Shape.java:147) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.shape.Shape.toOffsetZeroCopy(Shape.java:103) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.ndarray.BaseNDArray.dup(BaseNDArray.java:1420) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.nd4j.linalg.api.ndarray.BaseNDArray.mul(BaseNDArray.java:3060) ~[nd4j-api-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.layers.normalization.LocalResponseNormalization.activate(LocalResponseNormalization.java:181) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.layers.BaseLayer.activate(BaseLayer.java:358) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.multilayer.MultiLayerNetwork.activationFromPrevLayer(MultiLayerNetwork.java:521) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.multilayer.MultiLayerNetwork.feedForwardToLayer(MultiLayerNetwork.java:644) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.multilayer.MultiLayerNetwork.feedForward(MultiLayerNetwork.java:598) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.multilayer.MultiLayerNetwork.computeGradientAndScore(MultiLayerNetwork.java:1688) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.optimize.solvers.BaseOptimizer.gradientAndScore(BaseOptimizer.java:152) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.optimize.solvers.StochasticGradientDescent.optimize(StochasticGradientDescent.java:56) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.optimize.Solver.optimize(Solver.java:51) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.multilayer.MultiLayerNetwork.fit(MultiLayerNetwork.java:1365) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.nn.multilayer.MultiLayerNetwork.fit(MultiLayerNetwork.java:1402) ~[deeplearning4j-core-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.spark.impl.multilayer.IterativeReduceFlatMap.call(IterativeReduceFlatMap.java:102) ~[dl4j-spark-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.deeplearning4j.spark.impl.multilayer.IterativeReduceFlatMap.call(IterativeReduceFlatMap.java:49) ~[dl4j-spark-0.4-rc3.9-SNAPSHOT.jar:na]
! at org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$5$1.apply(JavaRDDLike.scala:170) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$5$1.apply(JavaRDDLike.scala:170) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$20.apply(RDD.scala:710) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$20.apply(RDD.scala:710) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:69) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.RDD.iterator(RDD.scala:268) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:306) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.rdd.RDD.iterator(RDD.scala:270) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.scheduler.Task.run(Task.scala:89) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214) ~[spark-core_2.10-1.6.1.jar:1.6.1]
! at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_65]
! at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_65]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment