Done saving /tier2/ahrens/davis/data/spark/20150217/7dpf_gfap_gc6f_misha_omr_photo_20150217_202934/mat/mxProj_z_2-9
15/02/25 20:20:50 ERROR TaskSchedulerImpl: Lost executor 0 on h05u09.int.janelia.org: remote Akka client disassociated
15/02/25 20:20:50 ERROR SparkDeploySchedulerBackend: Asked to remove non-existent executor 0
15/02/25 20:20:50 ERROR SparkDeploySchedulerBackend: Asked to remove non-existent executor 0
15/02/25 20:21:36 ERROR TaskSchedulerImpl: Lost executor 1 on h07u27.int.janelia.org: remote Akka client disassociated
15/02/25 20:21:36 ERROR SparkDeploySchedulerBackend: Asked to remove non-existent executor 1
15/02/25 20:21:36 ERROR SparkDeploySchedulerBackend: Asked to remove non-existent executor 1
15/02/25 20:34:20 ERROR TaskSetManager: Task 10634 in stage 56.0 failed 4 times; aborting job
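The two TaskSchedulerImpl lines are the earliest sign of trouble: executors 0 and 1 dropped their Akka connections at 20:20 and 20:21, some thirteen minutes before the task failures aborted the job. On a Spark 1.2-era standalone cluster that usually means the executor JVMs died (often from memory pressure, though the log alone does not say). As an illustrative sketch only, with values that are assumptions rather than anything taken from this session, one might relaunch with more executor headroom and a more tolerant Akka failure detector:

from pyspark import SparkConf, SparkContext

# Hypothetical settings for a fresh session; in a running notebook these
# belong in the launch configuration, since sc already exists there.
conf = (SparkConf()
        .set('spark.executor.memory', '8g')            # headroom against OOM kills
        .set('spark.akka.timeout', '300')              # seconds; default is 100
        .set('spark.akka.heartbeat.interval', '100'))  # seconds
sc = SparkContext(conf=conf)

The aborted job then surfaces on the driver as the Python traceback below.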
---------------------------------------------------------------------------
Py4JJavaError                             Traceback (most recent call last)
<ipython-input-16-92c10c225bc2> in <module>()
      1 for z in zR:
      2     mxProj = serDat.filterOnKeys(lambda k: k[2] in z)
----> 3     mxDims = mxProj.dims
      4     fName = 'z_' + str(mxDims.min[2]) + '-' + str(mxDims.max[2])
      5
/scratch/spark/tmp/spark-3e6a4353-7104-43f5-b1af-fb0e6c2f46f5/spark-b8c1f14f-cfa4-4b99-b24b-20303843871a/thunder_python-0.5.0_dev-py2.7.egg/thunder/rdds/series.pyc in dims(self)
     82         from thunder.rdds.keys import Dimensions
     83         if self._dims is None:
---> 84             entry = self.populateParamsFromFirstRecord()[0]
     85             n = size(entry)
     86             d = self.rdd.keys().mapPartitions(lambda i: [Dimensions(i, n)]).reduce(lambda x, y: x.mergeDims(y))

/scratch/spark/tmp/spark-3e6a4353-7104-43f5-b1af-fb0e6c2f46f5/spark-b8c1f14f-cfa4-4b99-b24b-20303843871a/thunder_python-0.5.0_dev-py2.7.egg/thunder/rdds/series.pyc in populateParamsFromFirstRecord(self)
    100         Returns the result of calling self.rdd.first().
    101         """
--> 102         record = super(Series, self).populateParamsFromFirstRecord()
    103         if self._index is None:
    104             val = record[1]

/scratch/spark/tmp/spark-3e6a4353-7104-43f5-b1af-fb0e6c2f46f5/spark-b8c1f14f-cfa4-4b99-b24b-20303843871a/thunder_python-0.5.0_dev-py2.7.egg/thunder/rdds/data.pyc in populateParamsFromFirstRecord(self)
     76         from numpy import asarray
     77 
---> 78         record = self.rdd.first()
     79         self._dtype = str(asarray(record[1]).dtype)
     80         return record

/usr/local/spark-current/python/pyspark/rdd.py in first(self)
   1137         ValueError: RDD is empty
   1138         """
-> 1139         rs = self.take(1)
   1140         if rs:
   1141             return rs[0]

/usr/local/spark-current/python/pyspark/rdd.py in take(self, num)
   1119 
   1120             p = range(partsScanned, min(partsScanned + numPartsToTry, totalParts))
-> 1121             res = self.context.runJob(self, takeUpToNumLeft, p, True)
   1122 
   1123             items += res

/usr/local/spark-current/python/pyspark/context.py in runJob(self, rdd, partitionFunc, partitions, allowLocal)
    825         # SparkContext#runJob.
    826         mappedRDD = rdd.mapPartitions(partitionFunc)
--> 827         it = self._jvm.PythonRDD.runJob(self._jsc.sc(), mappedRDD._jrdd, javaPartitions, allowLocal)
    828         return list(mappedRDD._collect_iterator_through_file(it))
    829 

/usr/local/spark-current/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py in __call__(self, *args)
    536         answer = self.gateway_client.send_command(command)
    537         return_value = get_return_value(answer, self.gateway_client,
--> 538                 self.target_id, self.name)
    539 
    540         for temp_arg in temp_args:

/usr/local/spark-current/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    298                 raise Py4JJavaError(
    299                         'An error occurred while calling {0}{1}{2}.\n'.
--> 300                         format(target_id, '.', name), value)
    301             else:
    302                 raise Py4JError(
Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.runJob.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 10634 in stage 56.0 failed 4 times, most recent failure: Lost task 10634.3 in stage 56.0 (TID 194817, h04u27.int.janelia.org): java.lang.IllegalStateException: Shutdown in progress
    at java.lang.ApplicationShutdownHooks.add(ApplicationShutdownHooks.java:66)
    at java.lang.Runtime.addShutdownHook(Runtime.java:211)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1414)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:254)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:187)
    at org.apache.spark.deploy.SparkHadoopUtil.getFileSystemThreadStatistics(SparkHadoopUtil.scala:176)
    at org.apache.spark.deploy.SparkHadoopUtil.getFSBytesReadOnThreadCallback(SparkHadoopUtil.scala:139)
    at org.apache.spark.rdd.NewHadoopRDD$$anon$1.<init>(NewHadoopRDD.scala:116)
    at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:107)
    at org.apache.spark.rdd.NewHadoopRDD.compute(NewHadoopRDD.scala:69)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:280)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:247)
    at org.apache.spark.rdd.MappedRDD.compute(MappedRDD.scala:31)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:280)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:247)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:280)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:247)
    at org.apache.spark.api.python.PythonRDD$WriterThread$$anonfun$run$1.apply$mcV$sp(PythonRDD.scala:242)
    at org.apache.spark.api.python.PythonRDD$WriterThread$$anonfun$run$1.apply(PythonRDD.scala:204)
    at org.apache.spark.api.python.PythonRDD$WriterThread$$anonfun$run$1.apply(PythonRDD.scala:204)
    at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1550)
    at org.apache.spark.api.python.PythonRDD$WriterThread.run(PythonRDD.scala:203)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1214)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1203)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1202)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1202)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:696)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:696)
    at scala.Option.foreach(Option.scala:236)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:696)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1420)
    at akka.actor.Actor$class.aroundReceive(Actor.scala:465)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessActor.aroundReceive(DAGScheduler.scala:1375)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:516)
    at akka.actor.ActorCell.invoke(ActorCell.scala:487)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:238)
    at akka.dispatch.Mailbox.run(Mailbox.scala:220)
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393)
    at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
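
The root cause buried at the bottom of the Java stack is java.lang.IllegalStateException: Shutdown in progress, thrown when NewHadoopRDD tried to register Hadoop's FileSystem shutdown hook on an executor JVM that was already shutting down. In other words, the task failures are a symptom of the dying executors logged above, not of the notebook code. That said, mxProj.dims forces rdd.first() and therefore a full Spark job on every pass through the loop, so a defensive version of the cell can at least keep a long sweep alive past one aborted job. A minimal sketch, assuming serDat and zR are defined in earlier cells not shown in this gist:

from py4j.protocol import Py4JJavaError

for z in zR:
    # bind z as a default argument so each lambda filters on its own range,
    # a guard against Python's late-binding closures if evaluation is deferred
    mxProj = serDat.filterOnKeys(lambda k, z=z: k[2] in z)
    try:
        mxDims = mxProj.dims  # triggers rdd.first(), i.e. a real Spark job
    except Py4JJavaError as exc:
        # the job aborted (here: executors lost mid-run); log it and move on
        print('skipping z = %s: %s' % (z, exc))
        continue
    fName = 'z_' + str(mxDims.min[2]) + '-' + str(mxDims.max[2])

This does not fix the executor loss, but it turns one aborted stage into a skipped z range instead of a dead notebook.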