FatalError Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
/usr/local/lib/python3.7/dist-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
/usr/local/lib/python3.7/dist-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
/usr/local/lib/python3.7/dist-packages/hail/table.py in __repr__(self)
1295
1296 def __repr__(self):
--> 1297 return self.__str__()
1298
1299 def data(self):
/usr/local/lib/python3.7/dist-packages/hail/table.py in __str__(self)
1292
1293 def __str__(self):
--> 1294 return self._ascii_str()
1295
1296 def __repr__(self):
/usr/local/lib/python3.7/dist-packages/hail/table.py in _ascii_str(self)
1318 return s
1319
--> 1320 rows, has_more, dtype = self.data()
1321 fields = list(dtype)
1322 trunc_fields = [trunc(f) for f in fields]
/usr/local/lib/python3.7/dist-packages/hail/table.py in data(self)
1302 row_dtype = t.row.dtype
1303 t = t.select(**{k: hl._showstr(v) for (k, v) in t.row.items()})
--> 1304 rows, has_more = t._take_n(self.n)
1305 self._data = (rows, has_more, row_dtype)
1306 return self._data
/usr/local/lib/python3.7/dist-packages/hail/table.py in _take_n(self, n)
1449 has_more = False
1450 else:
--> 1451 rows = self.take(n + 1)
1452 has_more = len(rows) > n
1453 rows = rows[:n]
in take(self, n, _localize)
/usr/local/lib/python3.7/dist-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
612 def wrapper(__original_func, *args, **kwargs):
613 args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614 return __original_func(*args_, **kwargs_)
615
616 return wrapper
/usr/local/lib/python3.7/dist-packages/hail/table.py in take(self, n, _localize)
2119 “”"
2120
→ 2121 return self.head(n).collect(_localize)
2122
2123 @typecheck_method(n=int)
in collect(self, _localize)
/usr/local/lib/python3.7/dist-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
612 def wrapper(__original_func, *args, **kwargs):
613 args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614 return __original_func(*args_, **kwargs_)
615
616 return wrapper
/usr/local/lib/python3.7/dist-packages/hail/table.py in collect(self, _localize)
1918 e = construct_expr(rows_ir, hl.tarray(t.row.dtype))
1919 if _localize:
--> 1920 return Env.backend().execute(e._ir)
1921 else:
1922 return e
/usr/local/lib/python3.7/dist-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
96 raise HailUserError(message_and_trace) from None
97
--> 98 raise e
/usr/local/lib/python3.7/dist-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
72 # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
73 try:
--> 74 result = json.loads(self._jhc.backend().executeJSON(jir))
75 value = ir.typ._from_json(result['value'])
76 timings = result['timings']
/usr/local/lib/python3.7/dist-packages/py4j/java_gateway.py in __call__(self, *args)
1255 answer = self.gateway_client.send_command(command)
1256 return_value = get_return_value(
--> 1257 answer, self.gateway_client, self.target_id, self.name)
1258
1259 for temp_arg in temp_args:
/usr/local/lib/python3.7/dist-packages/hail/backend/py4j_backend.py in deco(*args, **kwargs)
30 raise FatalError('%s\n\nJava stack trace:\n%s\n'
31 'Hail version: %s\n'
--> 32 'Error summary: %s' % (deepest, full, hail.__version__, deepest), error_id) from None
33 except pyspark.sql.utils.CapturedException as e:
34 raise FatalError('%s\n\nJava stack trace:\n%s\n'
FatalError: SparkException: Job aborted due to stage failure: Task 488 in stage 5.0 failed 4 times, most recent failure: Lost task 488.3 in stage 5.0 (TID 3904, saturn-521657d0-b18f-4cba-8b1b-badecf79009d-sw-w09z.c.biodata-catalyst.internal, executor 58): ExecutorLostFailure (executor 58 exited caused by one of the running tasks) Reason: Slave lost
Driver stacktrace:
Java stack trace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 488 in stage 5.0 failed 4 times, most recent failure: Lost task 488.3 in stage 5.0 (TID 3904, saturn-521657d0-b18f-4cba-8b1b-badecf79009d-sw-w09z.c.biodata-catalyst.internal, executor 58): ExecutorLostFailure (executor 58 exited caused by one of the running tasks) Reason: Slave lost
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1926)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1914)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1913)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1913)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:948)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:948)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:948)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2147)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2096)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2085)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:759)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2114)
at is.hail.sparkextras.ContextRDD.crunJobWithIndex(ContextRDD.scala:228)
at is.hail.rvd.RVD$.getKeyInfo(RVD.scala:1218)
at is.hail.rvd.RVD$.makeCoercer(RVD.scala:1293)
at is.hail.expr.ir.GenericTableValue.getRVDCoercer(GenericTableValue.scala:162)
at is.hail.expr.ir.GenericTableValue.toTableValue(GenericTableValue.scala:188)
at is.hail.io.vcf.MatrixVCFReader.apply(LoadVCF.scala:1773)
at is.hail.expr.ir.TableRead.execute(TableIR.scala:1100)
at is.hail.expr.ir.TableOrderBy.execute(TableIR.scala:2629)
at is.hail.expr.ir.TableSubset$class.execute(TableIR.scala:1324)
at is.hail.expr.ir.TableHead.execute(TableIR.scala:1332)
at is.hail.expr.ir.TableMapRows.execute(TableIR.scala:1845)
at is.hail.expr.ir.Interpret$.run(Interpret.scala:819)
at is.hail.expr.ir.Interpret$.alreadyLowered(Interpret.scala:53)
at is.hail.expr.ir.InterpretNonCompilable$.interpretAndCoerce$1(InterpretNonCompilable.scala:16)
at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:53)
at is.hail.expr.ir.InterpretNonCompilable$$anonfun$1.apply(InterpretNonCompilable.scala:25)
at is.hail.expr.ir.InterpretNonCompilable$$anonfun$1.apply(InterpretNonCompilable.scala:25)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at is.hail.expr.ir.InterpretNonCompilable$.rewriteChildren$1(InterpretNonCompilable.scala:25)
at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:54)
at is.hail.expr.ir.InterpretNonCompilable$.apply(InterpretNonCompilable.scala:58)
at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.transform(LoweringPass.scala:67)
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:15)
at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:13)
at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
at is.hail.expr.ir.lowering.LoweringPass$class.apply(LoweringPass.scala:13)
at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.apply(LoweringPass.scala:62)
at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:14)
at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:12)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:12)
at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28)
at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:354)
at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:338)
at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:335)
at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:25)
at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:23)
at is.hail.utils.package$.using(package.scala:618)
at is.hail.annotations.Region$.scoped(Region.scala:18)
at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:23)
at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:247)
at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:335)
at is.hail.backend.spark.SparkBackend$$anonfun$7.apply(SparkBackend.scala:379)
at is.hail.backend.spark.SparkBackend$$anonfun$7.apply(SparkBackend.scala:377)
at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:52)
at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:377)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:238)
at java.lang.Thread.run(Thread.java:748)
Hail version: 0.2.61-3c86d3ba497a
Error summary: SparkException: Job aborted due to stage failure: Task 488 in stage 5.0 failed 4 times, most recent failure: Lost task 488.3 in stage 5.0 (TID 3904, saturn-521657d0-b18f-4cba-8b1b-badecf79009d-sw-w09z.c.biodata-catalyst.internal, executor 58): ExecutorLostFailure (executor 58 exited caused by one of the running tasks) Reason: Slave lost
Driver stacktrace:
2021-01-28 18:33:37 Hail: INFO: Coerced sorted dataset
2021-01-28 18:33:37 Hail: INFO: Coerced dataset with out-of-order partitions.
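For context, the Python frames above show what launched the failing Spark job: ending a notebook cell with a Hail Table expression invokes Table.__repr__, which runs _take_n -> take(n + 1) -> head(n).collect(), and that collect forces the full VCF import and key-ordering scan (MatrixVCFReader -> RVD.getKeyInfo) during which the executors were lost. Below is a minimal sketch of the triggering pattern, assuming a notebook session; the VCF path and the sort field are hypothetical stand-ins (the trace only shows that a TableOrderBy node was present in the query):

import hail as hl

hl.init()  # Spark-backed Hail, version 0.2.61 per the trace

# Hypothetical input path; the real VCF is not visible in the trace.
mt = hl.import_vcf('gs://my-bucket/my-data.vcf.bgz')

# Hypothetical sort field, standing in for the TableOrderBy seen in the IR.
ht = mt.rows().order_by('rsid')

# Ending the cell with the table triggers __repr__ -> take(n + 1)
# -> head(n).collect(), launching the Spark job that failed above.
ht

The failure itself (ExecutorLostFailure ... Reason: Slave lost) means Spark lost worker executors mid-job. On clusters with preemptible workers this usually points to preemption or executors running out of memory rather than a bug in the query, so retrying on non-preemptible workers or with more memory per executor is a common first step.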