FatalError                                Traceback (most recent call last)
in
----> 1 mtfinal4.make_table().export('./ACE2/cand_gene_cadd_riskpatients.txt')

in export(self, output, types_file, header, parallel, delimiter)

~/miniconda3/envs/hail/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    612     def wrapper(__original_func, *args, **kwargs):
    613         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614         return __original_func(*args_, **kwargs_)
    615
    616     return wrapper

~/miniconda3/envs/hail/lib/python3.6/site-packages/hail/table.py in export(self, output, types_file, header, parallel, delimiter)
   1036         parallel = ir.ExportType.default(parallel)
   1037         Env.backend().execute(
-> 1038             ir.TableWrite(self._tir, ir.TableTextWriter(output, types_file, header, parallel, delimiter)))
   1039
   1040     def group_by(self, *exprs, **named_exprs) -> 'GroupedTable':

~/miniconda3/envs/hail/lib/python3.6/site-packages/hail/backend/spark_backend.py in execute(self, ir, timed)
    294         jir = self._to_java_value_ir(ir)
    295         # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
--> 296         result = json.loads(self._jhc.backend().executeJSON(jir))
    297         value = ir.typ._from_json(result['value'])
    298         timings = result['timings']

~/miniconda3/envs/hail/lib/python3.6/site-packages/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258
   1259         for temp_arg in temp_args:

~/miniconda3/envs/hail/lib/python3.6/site-packages/hail/backend/spark_backend.py in deco(*args, **kwargs)
     39             raise FatalError('%s\n\nJava stack trace:\n%s\n'
     40                              'Hail version: %s\n'
---> 41                              'Error summary: %s' % (deepest, full, hail.__version__, deepest)) from None
     42     except pyspark.sql.utils.CapturedException as e:
     43         raise FatalError('%s\n\nJava stack trace:\n%s\n'

FatalError: FileNotFoundException: /mnt/share6/FOR_Takeo/WES/all_vari_annotation_MSC.txt (Input/output error)

Java stack trace:
java.lang.RuntimeException: error while applying lowering 'InterpretNonCompilable'
    at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:26)
    at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:18)
    at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
    at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:18)
    at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28)
    at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:317)
    at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:304)
    at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:303)
    at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:20)
    at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:18)
    at is.hail.utils.package$.using(package.scala:601)
    at is.hail.annotations.Region$.scoped(Region.scala:18)
    at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:18)
    at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:229)
    at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:303)
    at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:323)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)

org.apache.spark.SparkException: Job aborted.
    at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:100)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1096)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1094)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:1067)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:1032)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply$mcV$sp(PairRDDFunctions.scala:958)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:957)
    at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply$mcV$sp(RDD.scala:1499)
    at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1478)
    at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1478)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1478)
    at is.hail.utils.richUtils.RichRDD$.writeTable$extension(RichRDD.scala:78)
    at is.hail.expr.ir.TableValue.export(TableValue.scala:98)
    at is.hail.expr.ir.TableTextWriter.apply(TableWriter.scala:333)
    at is.hail.expr.ir.Interpret$.run(Interpret.scala:726)
    at is.hail.expr.ir.Interpret$.alreadyLowered(Interpret.scala:53)
    at is.hail.expr.ir.InterpretNonCompilable$.interpretAndCoerce$1(InterpretNonCompilable.scala:16)
    at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:53)
    at is.hail.expr.ir.InterpretNonCompilable$.apply(InterpretNonCompilable.scala:58)
    at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.transform(LoweringPass.scala:50)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
    at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:69)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:15)
    at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:13)
    at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:69)
    at is.hail.expr.ir.lowering.LoweringPass$class.apply(LoweringPass.scala:13)
    at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.apply(LoweringPass.scala:45)
    at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:20)
    at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:18)
    at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
    at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:18)
    at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28)
    at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:317)
    at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:304)
    at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:303)
    at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:20)
    at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:18)
    at is.hail.utils.package$.using(package.scala:601)
    at is.hail.annotations.Region$.scoped(Region.scala:18)
    at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:18)
    at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:229)
    at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:303)
    at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:323)
    ... (reflection and py4j gateway frames as above)

org.apache.spark.SparkException: Job aborted due to stage failure: Task 11491 in stage 12.0 failed 1 times, most recent failure: Lost task 11491.0 in stage 12.0 (TID 11607, localhost, executor driver): org.apache.spark.SparkException: Task failed while writing rows
    at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:155)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:83)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:403)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:409)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.FileNotFoundException: /mnt/share6/FOR_Takeo/WES/all_vari_annotation_MSC.txt (Input/output error)
    at java.io.FileInputStream.open0(Native Method)
    at java.io.FileInputStream.open(FileInputStream.java:195)
    at java.io.FileInputStream.<init>(FileInputStream.java:138)
    at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileInputStream.<init>(RawLocalFileSystem.java:106)
    at org.apache.hadoop.fs.RawLocalFileSystem.open(RawLocalFileSystem.java:202)
    at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSInputChecker.<init>(ChecksumFileSystem.java:143)
    at org.apache.hadoop.fs.ChecksumFileSystem.open(ChecksumFileSystem.java:346)
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:769)
    at is.hail.io.fs.HadoopFS.openNoCompression(HadoopFS.scala:68)
    at is.hail.expr.ir.GenericLines$$anonfun$3$$anon$3.<init>(GenericLines.scala:34)
    at is.hail.expr.ir.GenericLines$$anonfun$3.apply(GenericLines.scala:30)
    at is.hail.expr.ir.GenericLines$$anonfun$3.apply(GenericLines.scala:22)
    at is.hail.expr.ir.TextTableReader$$anonfun$25$$anonfun$apply$16.apply(TextTableReader.scala:411)
    at is.hail.expr.ir.TextTableReader$$anonfun$25$$anonfun$apply$16.apply(TextTableReader.scala:406)
    at is.hail.expr.ir.GenericTableValueRDD$$anonfun$compute$1.apply(GenericTableValue.scala:87)
    at is.hail.expr.ir.GenericTableValueRDD$$anonfun$compute$1.apply(GenericTableValue.scala:86)
    at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18$$anonfun$apply$19.apply(ContextRDD.scala:248)
    at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18$$anonfun$apply$19.apply(ContextRDD.scala:248)
    at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:188)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at scala.collection.Iterator$$anon$18.hasNext(Iterator.scala:762)
    at scala.collection.Iterator$$anon$16.hasNext(Iterator.scala:598)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:188)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
    at is.hail.utils.richUtils.RichIterator$$anon$5.isValid(RichIterator.scala:30)
    at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
    at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:331)
    at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
    at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
    at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:53)
    at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:151)
    at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:148)
    at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:305)
    at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:305)
    at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$9$$anonfun$apply$10.apply(ContextRDD.scala:208)
    at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$9$$anonfun$apply$10.apply(ContextRDD.scala:208)
    ... (this Iterator/leftJoinDistinct frame cycle repeats several more times, once per nested ordered join; identical frames elided)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$4.apply(SparkHadoopWriter.scala:128)
    at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$4.apply(SparkHadoopWriter.scala:127)
    at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1394)
    at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:139)
    ... 10 more

Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1889)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1877)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1876)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1876)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2110)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2059)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2048)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2114)
    at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:78)
    ... (saveAsTextFile / TableTextWriter / lowering / py4j frames identical to the "Job aborted." trace above)

org.apache.spark.SparkException: Task failed while writing rows
    ... (executor and thread-pool frames identical to those above)

java.io.FileNotFoundException: /mnt/share6/FOR_Takeo/WES/all_vari_annotation_MSC.txt (Input/output error)
    ... (reader and Iterator/leftJoinDistinct frames identical to the "Caused by" trace above)
Hail version: 0.2.46-6ef64c08b000
Error summary: FileNotFoundException: /mnt/share6/FOR_Takeo/WES/all_vari_annotation_MSC.txt (Input/output error)
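For context on what went wrong: `export()` is the action that forces the whole lazy pipeline, so during the write stage Hail re-opens the annotation text file it had imported earlier (the `Caused by` frames go through `TextTableReader` and `HadoopFS`), and the mount at `/mnt/share6` returned an I/O error at that moment. Below is a minimal recovery sketch, assuming the annotation table came from `hl.import_table` on that path; the `impute=True` option, the `.ht` checkpoint location, and the re-annotation step are illustrative placeholders, not the original pipeline.

```python
import hail as hl

ann_path = '/mnt/share6/FOR_Takeo/WES/all_vari_annotation_MSC.txt'

# Sanity check: can the driver still see the file on the mount?
if not hl.hadoop_exists(ann_path):
    raise FileNotFoundError('annotation file not reachable: ' + ann_path)

# Re-import the annotations (types and keys here are placeholders) and
# checkpoint them in Hail native format on reliable local storage, so
# downstream actions read the checkpoint instead of re-opening the
# text file on the network mount in every task.
ann_ht = hl.import_table(ann_path, impute=True)
ann_ht = ann_ht.checkpoint('./ACE2/all_vari_annotation_MSC.ht', overwrite=True)

# Rebuild the annotated table from ann_ht (the original construction of
# mtfinal4 is not shown in the traceback), then retry the export:
# mtfinal4 = ...
# mtfinal4.make_table().export('./ACE2/cand_gene_cadd_riskpatients.txt')
```

If even the `hl.hadoop_exists` check or a plain `hl.import_table(ann_path).show()` fails with the same "Input/output error", the problem is the `/mnt/share6` mount itself (for example a stale or disconnected network share) rather than anything in Hail, and remounting or copying the file to local disk would be the first thing to try.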