Created
July 2, 2021 13:12
-
-
Save olavurmortensen/96295137fb8e0efd3cf1cf831cafe52c to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
---------------------------------------------------------------------------
FatalError                                Traceback (most recent call last)
<ipython-input-80-accb05dc05de> in <module>
----> 1 hl.len(mt.AD).summarize()
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/expr/expressions/base_expression.py in summarize(self, handler) | |
1102 if handler is None: | |
1103 handler = hl.utils.default_handler() | |
-> 1104 handler(self._summarize(name=prefix)) | |
1105 | |
1106 def _selector_and_agg_method(self): | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/expr/expressions/base_expression.py in _summarize(self, agg_res, name, header, top) | |
1075 raise ValueError("Cannot summarize a scalar expression") | |
1076 if agg_res is None: | |
-> 1077 count, agg_res = self._aggregation_method()(hl.tuple((hl.agg.count(), self._all_summary_aggs()))) | |
1078 summary_header = f'{count} records.' | |
1079 sum_fields, nested = self._summary_fields(agg_res, top) | |
<decorator-gen-1216> in aggregate_entries(self, expr, _localize) | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs) | |
612 def wrapper(__original_func, *args, **kwargs): | |
613 args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method) | |
--> 614 return __original_func(*args_, **kwargs_) | |
615 | |
616 return wrapper | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/matrixtable.py in aggregate_entries(self, expr, _localize) | |
2079 agg_ir = ir.MatrixAggregate(base._mir, expr._ir) | |
2080 if _localize: | |
-> 2081 return Env.backend().execute(agg_ir) | |
2082 else: | |
2083 return construct_expr(ir.LiftMeOut(agg_ir), expr.dtype) | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed) | |
96 raise HailUserError(message_and_trace) from None | |
97 | |
---> 98 raise e | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed) | |
72 # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1)) | |
73 try: | |
---> 74 result = json.loads(self._jhc.backend().executeJSON(jir)) | |
75 value = ir.typ._from_json(result['value']) | |
76 timings = result['timings'] | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/py4j/java_gateway.py in __call__(self, *args) | |
1255 answer = self.gateway_client.send_command(command) | |
1256 return_value = get_return_value( | |
-> 1257 answer, self.gateway_client, self.target_id, self.name) | |
1258 | |
1259 for temp_arg in temp_args: | |
/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/backend/py4j_backend.py in deco(*args, **kwargs) | |
30 raise FatalError('%s\n\nJava stack trace:\n%s\n' | |
31 'Hail version: %s\n' | |
---> 32 'Error summary: %s' % (deepest, full, hail.__version__, deepest), error_id) from None | |
33 except pyspark.sql.utils.CapturedException as e: | |
34 raise FatalError('%s\n\nJava stack trace:\n%s\n' | |
FatalError: HailException: array index out of bounds: index=1, length=1
----------
Python traceback:
File "<ipython-input-75-49c8349cc875>", line 1, in <module>
    mt = hl.split_multi_hts(mt)
File "<decorator-gen-1708>", line 2, in split_multi_hts
File "/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/methods/statgen.py", line 1827, in split_multi_hts
    [hl.sum(split.AD) - split.AD[split.a_index], split.AD[split.a_index]])
Java stack trace: | |
org.apache.spark.SparkException: Job aborted due to stage failure: Task 7 in stage 23.0 failed 1 times, most recent failure: Lost task 7.0 in stage 23.0 (TID 722, localhost, executor driver): is.hail.utils.HailException: array index out of bounds: index=1, length=1 | |
---------- | |
Python traceback: | |
File "<ipython-input-75-49c8349cc875>", line 1, in <module> | |
mt = hl.split_multi_hts(mt) | |
File "<decorator-gen-1708>", line 2, in split_multi_hts | |
File "/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/methods/statgen.py", line 1827, in split_multi_hts | |
[hl.sum(split.AD) - split.AD[split.a_index], split.AD[split.a_index]]) | |
at __C5100CompiledWithAggs.applyregion17_58(Emit.scala:765) | |
at __C5100CompiledWithAggs.apply(Emit.scala) | |
at is.hail.expr.ir.Interpret$$anonfun$is$hail$expr$ir$Interpret$$itF$1$1.apply$mcVJ$sp(Interpret.scala:921) | |
at is.hail.expr.ir.Interpret$$anonfun$is$hail$expr$ir$Interpret$$itF$1$1.apply(Interpret.scala:920) | |
at is.hail.expr.ir.Interpret$$anonfun$is$hail$expr$ir$Interpret$$itF$1$1.apply(Interpret.scala:920) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at is.hail.expr.ir.Interpret$.is$hail$expr$ir$Interpret$$itF$1(Interpret.scala:920) | |
at is.hail.expr.ir.Interpret$$anonfun$29.apply(Interpret.scala:939) | |
at is.hail.expr.ir.Interpret$$anonfun$29.apply(Interpret.scala:939) | |
at is.hail.rvd.RVD$$anonfun$20.apply(RVD.scala:691) | |
at is.hail.rvd.RVD$$anonfun$20.apply(RVD.scala:691) | |
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18.apply(ContextRDD.scala:248) | |
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18.apply(ContextRDD.scala:248) | |
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anonfun$2.apply(RichContextRDD.scala:59) | |
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anonfun$2.apply(RichContextRDD.scala:59) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:68) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439) | |
at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) | |
at is.hail.utils.package$.singletonElement(package.scala:644) | |
at is.hail.rvd.RVD$$anonfun$combine$1.apply(RVD.scala:724) | |
at is.hail.rvd.RVD$$anonfun$combine$1.apply(RVD.scala:724) | |
at org.apache.spark.SparkContext$$anonfun$36.apply(SparkContext.scala:2157) | |
at org.apache.spark.SparkContext$$anonfun$36.apply(SparkContext.scala:2157) | |
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
at org.apache.spark.scheduler.Task.run(Task.scala:121) | |
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:403) | |
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:409) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Driver stacktrace: | |
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1889) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1877) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1876) | |
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) | |
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) | |
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926) | |
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926) | |
at scala.Option.foreach(Option.scala:257) | |
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926) | |
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110) | |
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059) | |
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048) | |
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) | |
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737) | |
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061) | |
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2158) | |
at is.hail.rvd.RVD.combine(RVD.scala:724) | |
at is.hail.expr.ir.Interpret$.run(Interpret.scala:938) | |
at is.hail.expr.ir.Interpret$.alreadyLowered(Interpret.scala:53) | |
at is.hail.expr.ir.InterpretNonCompilable$.interpretAndCoerce$1(InterpretNonCompilable.scala:16) | |
at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:53) | |
at is.hail.expr.ir.InterpretNonCompilable$.apply(InterpretNonCompilable.scala:58) | |
at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.transform(LoweringPass.scala:67) | |
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15) | |
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15) | |
at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81) | |
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:15) | |
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:13) | |
at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81) | |
at is.hail.expr.ir.lowering.LoweringPass$class.apply(LoweringPass.scala:13) | |
at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.apply(LoweringPass.scala:62) | |
at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:14) | |
at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:12) | |
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33) | |
at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35) | |
at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:12) | |
at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28) | |
at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:354) | |
at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:338) | |
at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:335) | |
at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:25) | |
at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:23) | |
at is.hail.utils.package$.using(package.scala:618) | |
at is.hail.annotations.Region$.scoped(Region.scala:18) | |
at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:23) | |
at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:247) | |
at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:335) | |
at is.hail.backend.spark.SparkBackend$$anonfun$7.apply(SparkBackend.scala:379) | |
at is.hail.backend.spark.SparkBackend$$anonfun$7.apply(SparkBackend.scala:377) | |
at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:52) | |
at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:377) | |
at sun.reflect.GeneratedMethodAccessor182.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244) | |
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357) | |
at py4j.Gateway.invoke(Gateway.java:282) | |
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132) | |
at py4j.commands.CallCommand.execute(CallCommand.java:79) | |
at py4j.GatewayConnection.run(GatewayConnection.java:238) | |
at java.lang.Thread.run(Thread.java:748) | |
is.hail.utils.HailException: array index out of bounds: index=1, length=1 | |
---------- | |
Python traceback: | |
File "<ipython-input-75-49c8349cc875>", line 1, in <module> | |
mt = hl.split_multi_hts(mt) | |
File "<decorator-gen-1708>", line 2, in split_multi_hts | |
File "/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/methods/statgen.py", line 1827, in split_multi_hts | |
[hl.sum(split.AD) - split.AD[split.a_index], split.AD[split.a_index]]) | |
at __C5100CompiledWithAggs.applyregion17_58(Emit.scala:765) | |
at __C5100CompiledWithAggs.apply(Emit.scala) | |
at is.hail.expr.ir.Interpret$$anonfun$is$hail$expr$ir$Interpret$$itF$1$1.apply$mcVJ$sp(Interpret.scala:921) | |
at is.hail.expr.ir.Interpret$$anonfun$is$hail$expr$ir$Interpret$$itF$1$1.apply(Interpret.scala:920) | |
at is.hail.expr.ir.Interpret$$anonfun$is$hail$expr$ir$Interpret$$itF$1$1.apply(Interpret.scala:920) | |
at scala.collection.Iterator$class.foreach(Iterator.scala:891) | |
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) | |
at is.hail.expr.ir.Interpret$.is$hail$expr$ir$Interpret$$itF$1(Interpret.scala:920) | |
at is.hail.expr.ir.Interpret$$anonfun$29.apply(Interpret.scala:939) | |
at is.hail.expr.ir.Interpret$$anonfun$29.apply(Interpret.scala:939) | |
at is.hail.rvd.RVD$$anonfun$20.apply(RVD.scala:691) | |
at is.hail.rvd.RVD$$anonfun$20.apply(RVD.scala:691) | |
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18.apply(ContextRDD.scala:248) | |
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18.apply(ContextRDD.scala:248) | |
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anonfun$2.apply(RichContextRDD.scala:59) | |
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anonfun$2.apply(RichContextRDD.scala:59) | |
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441) | |
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:68) | |
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439) | |
at scala.collection.Iterator$$anon$12.next(Iterator.scala:445) | |
at is.hail.utils.package$.singletonElement(package.scala:644) | |
at is.hail.rvd.RVD$$anonfun$combine$1.apply(RVD.scala:724) | |
at is.hail.rvd.RVD$$anonfun$combine$1.apply(RVD.scala:724) | |
at org.apache.spark.SparkContext$$anonfun$36.apply(SparkContext.scala:2157) | |
at org.apache.spark.SparkContext$$anonfun$36.apply(SparkContext.scala:2157) | |
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90) | |
at org.apache.spark.scheduler.Task.run(Task.scala:121) | |
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:403) | |
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360) | |
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:409) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Hail version: 0.2.61-3c86d3ba497a
Error summary: HailException: array index out of bounds: index=1, length=1
----------
Python traceback:
File "<ipython-input-75-49c8349cc875>", line 1, in <module>
    mt = hl.split_multi_hts(mt)
File "<decorator-gen-1708>", line 2, in split_multi_hts
File "/opt/conda/envs/fargen-1-exome/lib/python3.7/site-packages/hail/methods/statgen.py", line 1827, in split_multi_hts
    [hl.sum(split.AD) - split.AD[split.a_index], split.AD[split.a_index]])
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment