in king(call_expr, block_size)

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/methods/relatedness/king.py in king(call_expr, block_size)
    232         is_defined: hl.float(hl.is_defined(call_expr))
    233     })
--> 234     ref = hl.linalg.BlockMatrix.from_entry_expr(mt[is_hom_ref], block_size=block_size)
    235     het = hl.linalg.BlockMatrix.from_entry_expr(mt[is_het], block_size=block_size)
    236     var = hl.linalg.BlockMatrix.from_entry_expr(mt[is_hom_var], block_size=block_size)

in from_entry_expr(cls, entry_expr, mean_impute, center, normalize, axis, block_size)

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/linalg/blockmatrix.py in from_entry_expr(cls, entry_expr, mean_impute, center, normalize, axis, block_size)
    414         path = new_temp_file()
    415         cls.write_from_entry_expr(entry_expr, path, overwrite=False, mean_impute=mean_impute,
--> 416                                   center=center, normalize=normalize, axis=axis, block_size=block_size)
    417         return cls.read(path)
    418 

in write_from_entry_expr(entry_expr, path, overwrite, mean_impute, center, normalize, axis, block_size)

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/linalg/blockmatrix.py in write_from_entry_expr(entry_expr, path, overwrite, mean_impute, center, normalize, axis, block_size)
    719     if entry_expr in mt._fields_inverse:
    720         field = mt._fields_inverse[entry_expr]
--> 721         mt.select_entries(field)._write_block_matrix(path, overwrite, field, block_size)
    722     else:
    723         field = Env.get_uid()

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/matrixtable.py in _write_block_matrix(self, path, overwrite, entry_field, block_size)
   4119                    'overwrite': overwrite,
   4120                    'entryField': entry_field,
-> 4121                    'blockSize': block_size}))
   4122 
   4123     def _calculate_new_partitions(self, n_partitions):

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
     96                 raise HailUserError(message_and_trace) from None
     97 
---> 98             raise e

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
     72         # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
     73         try:
---> 74             result = json.loads(self._jhc.backend().executeJSON(jir))
     75             value = ir.typ._from_json(result['value'])
     76             timings = result['timings']

/usr/lib/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258 
   1259         for temp_arg in temp_args:
/opt/conda/miniconda3/lib/python3.6/site-packages/hail/backend/py4j_backend.py in deco(*args, **kwargs)
     30             raise FatalError('%s\n\nJava stack trace:\n%s\n'
     31                              'Hail version: %s\n'
---> 32                              'Error summary: %s' % (deepest, full, hail.__version__, deepest), error_id) from None
     33         except pyspark.sql.utils.CapturedException as e:
     34             raise FatalError('%s\n\nJava stack trace:\n%s\n'

FatalError: HailException: Cannot create BlockMatrix: filtered entry at row 8491008 and col 406

Java stack trace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 2073 in stage 14.0 failed 20 times, most recent failure: Lost task 2073.19 in stage 14.0 (TID 426052, my-project.internal, executor 1570): is.hail.utils.HailException: Cannot create BlockMatrix: filtered entry at row 8491008 and col 406
    at is.hail.utils.ErrorHandling$class.fatal(ErrorHandling.scala:11)
    at is.hail.utils.package$.fatal(package.scala:77)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6$$anonfun$apply$27.apply(BlockMatrix.scala:2083)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6$$anonfun$apply$27.apply(BlockMatrix.scala:2045)
    at is.hail.utils.package$.using(package.scala:618)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6.apply(BlockMatrix.scala:2045)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6.apply(BlockMatrix.scala:2044)
    at scala.collection.Iterator$class.foreach(Iterator.scala:891)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at is.hail.linalg.WriteBlocksRDD.compute(BlockMatrix.scala:2044)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
    at org.apache.spark.scheduler.Task.run(Task.scala:123)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1892)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1880)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1879)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1879)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:927)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:927)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:927)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2113)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2062)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2051)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:738)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:990)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:385)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:989)
    at is.hail.expr.ir.functions.MatrixWriteBlockMatrix.execute(MatrixWriteBlockMatrix.scala:47)
    at is.hail.expr.ir.functions.WrappedMatrixToValueFunction.execute(RelationalFunctions.scala:88)
    at is.hail.expr.ir.Interpret$.run(Interpret.scala:831)
    at is.hail.expr.ir.Interpret$.alreadyLowered(Interpret.scala:53)
    at is.hail.expr.ir.InterpretNonCompilable$.interpretAndCoerce$1(InterpretNonCompilable.scala:16)
    at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:53)
    at is.hail.expr.ir.InterpretNonCompilable$.apply(InterpretNonCompilable.scala:58)
    at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.transform(LoweringPass.scala:67)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
    at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:15)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:13)
    at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
    at is.hail.expr.ir.lowering.LoweringPass$class.apply(LoweringPass.scala:13)
    at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.apply(LoweringPass.scala:62)
    at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:14)
    at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:12)
    at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
    at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:12)
    at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28)
    at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:360)
    at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:344)
    at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:341)
    at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:25)
    at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:23)
    at is.hail.utils.package$.using(package.scala:618)
    at is.hail.annotations.RegionPool$.scoped(RegionPool.scala:13)
    at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:23)
    at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:254)
    at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:341)
    at is.hail.backend.spark.SparkBackend$$anonfun$7.apply(SparkBackend.scala:385)
    at is.hail.backend.spark.SparkBackend$$anonfun$7.apply(SparkBackend.scala:383)
    at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:52)
    at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:383)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)

is.hail.utils.HailException: Cannot create BlockMatrix: filtered entry at row 8491008 and col 406
    at is.hail.utils.ErrorHandling$class.fatal(ErrorHandling.scala:11)
    at is.hail.utils.package$.fatal(package.scala:77)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6$$anonfun$apply$27.apply(BlockMatrix.scala:2083)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6$$anonfun$apply$27.apply(BlockMatrix.scala:2045)
    at is.hail.utils.package$.using(package.scala:618)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6.apply(BlockMatrix.scala:2045)
    at is.hail.linalg.WriteBlocksRDD$$anonfun$compute$6.apply(BlockMatrix.scala:2044)
    at scala.collection.Iterator$class.foreach(Iterator.scala:891)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at is.hail.linalg.WriteBlocksRDD.compute(BlockMatrix.scala:2044)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
    at org.apache.spark.scheduler.Task.run(Task.scala:123)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)

Hail version: 0.2.62-84fa81b9ea3d
Error summary: HailException: Cannot create BlockMatrix: filtered entry at row 8491008 and col 406
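The error summary is the key line: `BlockMatrix.from_entry_expr`, which `hl.king` calls internally (frames 232-236 of king.py above), cannot write a BlockMatrix from a MatrixTable whose entry matrix has holes left by `filter_entries`. Below is a minimal sketch of the usual workaround for this error: restore filtered entries as present-but-missing entries with `unfilter_entries()` before calling `hl.king`. The read path, the `GT`/`GQ` fields, and the GQ-threshold filter are placeholders standing in for whatever upstream pipeline produced the filtered dataset.

```python
import hail as hl

hl.init()

# Placeholder input: any MatrixTable with a GT call field.
mt = hl.read_matrix_table('path/to/dataset.mt')

# Hypothetical upstream step that triggers the error: filter_entries()
# removes cells from the entry matrix outright, and
# BlockMatrix.from_entry_expr cannot write a matrix with holes in it.
mt = mt.filter_entries(mt.GQ >= 20)

# Workaround: turn filtered entries back into present-but-missing entries.
# king tracks missingness itself (the traceback shows it annotating
# is_defined = hl.float(hl.is_defined(call_expr))), so missing calls are
# acceptable where filtered ones abort the BlockMatrix write.
mt = mt.unfilter_entries()

# kinship is a MatrixTable of pairwise kinship estimates.
kinship = hl.king(mt.GT, block_size=4096)
```

The distinction matters because `filter_entries` does not set fields to missing; it deletes the (row, column) cell entirely, so no entry expression evaluated later can recover it. `unfilter_entries` reinstates those cells with all entry fields missing, which the expressions in king.py can then handle.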