I was not using requester pays earlier; after adding the requester-pays flag I am now using it, and I am hitting the following error when applying VEP:
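For reference, this is roughly my setup; the cluster name, bucket, and config paths below are placeholders rather than my exact values:

# Cluster started with hailctl on Dataproc, now with the requester-pays flag:
#   hailctl dataproc start my-cluster --requester-pays-allow-all

import hail as hl

mt = hl.read_matrix_table('gs://my-bucket/dataset.mt')        # placeholder path
mt = hl.vep(mt, 'gs://hail-us-vep/vep85-loftee-gcloud.json')  # VEP config JSON
mt.rows().show()  # displaying the annotated rows triggers the error below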
FatalError Traceback (most recent call last)
/opt/conda/default/lib/python3.6/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
/opt/conda/default/lib/python3.6/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
/opt/conda/default/lib/python3.6/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
682 """A pprint that just redirects to the normal repr function."""
683 # Find newlines and replace them with p.break_()
--> 684 output = repr(obj)
685 lines = output.splitlines()
686 with p.group():
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in __repr__(self)
1267
1268 def __repr__(self):
-> 1269 return self.__str__()
1270
1271 def data(self):
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in __str__(self)
1264
1265 def __str__(self):
-> 1266 return self._ascii_str()
1267
1268 def __repr__(self):
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in _ascii_str(self)
1290 return s
1291
-> 1292 rows, has_more, dtype = self.data()
1293 fields = list(dtype)
1294 trunc_fields = [trunc(f) for f in fields]
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in data(self)
1274 row_dtype = t.row.dtype
1275 t = t.select(**{k: hl._showstr(v) for (k, v) in t.row.items()})
-> 1276 rows, has_more = t._take_n(self.n)
1277 self._data = (rows, has_more, row_dtype)
1278 return self._data
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in _take_n(self, n)
1421 has_more = False
1422 else:
-> 1423 rows = self.take(n + 1)
1424 has_more = len(rows) > n
1425 rows = rows[:n]
<decorator-gen-...> in take(self, n, _localize)
/opt/conda/default/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
612 def wrapper(__original_func, *args, **kwargs):
613 args, kwargs = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614 return __original_func(*args, **kwargs)
615
616 return wrapper
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in take(self, n, _localize)
2085 """
2086
-> 2087 return self.head(n).collect(_localize)
2088
2089 @typecheck_method(n=int)
<decorator-gen-...> in collect(self, _localize)
/opt/conda/default/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
612 def wrapper(__original_func, *args, **kwargs):
613 args, kwargs = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614 return __original_func(*args, **kwargs)
615
616 return wrapper
/opt/conda/default/lib/python3.6/site-packages/hail/table.py in collect(self, _localize)
1884 e = construct_expr(rows_ir, hl.tarray(t.row.dtype))
1885 if _localize:
-> 1886 return Env.backend().execute(e._ir)
1887 else:
1888 return e
/opt/conda/default/lib/python3.6/site-packages/hail/backend/spark_backend.py in execute(self, ir, timed)
294 jir = self._to_java_value_ir(ir)
295 # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
--> 296 result = json.loads(self._jhc.backend().executeJSON(jir))
297 value = ir.typ._from_json(result['value'])
298 timings = result['timings']
/usr/lib/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
1255 answer = self.gateway_client.send_command(command)
1256 return_value = get_return_value(
-> 1257 answer, self.gateway_client, self.target_id, self.name)
1258
1259 for temp_arg in temp_args:
/opt/conda/default/lib/python3.6/site-packages/hail/backend/spark_backend.py in deco(*args, **kwargs)
39 raise FatalError('%s\n\nJava stack trace:\n%s\n'
40 'Hail version: %s\n'
---> 41 'Error summary: %s' % (deepest, full, hail.__version__, deepest)) from None
42 except pyspark.sql.utils.CapturedException as e:
43 raise FatalError('%s\n\nJava stack trace:\n%s\n'
FatalError: IOException: error=2, No such file or directory
Java stack trace:
java.lang.RuntimeException: error while applying lowering 'InterpretNonCompilable'
at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:26)
at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:18)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:18)
at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28)
at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:317)
at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:304)
at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:303)
at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:20)
at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:18)
at is.hail.utils.package$.using(package.scala:601)
at is.hail.annotations.Region$.scoped(Region.scala:18)
at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:18)
at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:229)
at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:303)
at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:323)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:238)
at java.lang.Thread.run(Thread.java:748)
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 6.0 failed 20 times, most recent failure: Lost task 0.19 in stage 6.0 (TID 63, hailpy1-sw-rvrl.us-central1-c.c.cncd-analysts.internal, executor 2): java.io.IOException: Cannot run program "/vep": error=2, No such file or directory
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1048)
at is.hail.utils.richUtils.RichIterator$.pipe$extension(RichIterator.scala:55)
at is.hail.methods.VEP$$anonfun$7$$anonfun$apply$4.apply(VEP.scala:176)
at is.hail.methods.VEP$$anonfun$7$$anonfun$apply$4.apply(VEP.scala:172)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:299)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:357)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:308)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:299)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:357)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:308)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at is.hail.sparkextras.ContextRDD.iterator(ContextRDD.scala:378)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anonfun$apply$3.apply(RepartitionedOrderedRDD2.scala:60)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anonfun$apply$3.apply(RepartitionedOrderedRDD2.scala:59)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$18.hasNext(Iterator.scala:762)
at scala.collection.Iterator$$anon$16.hasNext(Iterator.scala:598)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:188)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$5.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:331)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:53)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:151)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:148)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:305)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:305)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$9$$anonfun$apply$10.apply(ContextRDD.scala:208)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$9$$anonfun$apply$10.apply(ContextRDD.scala:208)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.package$.getIteratorSizeWithMaxN(package.scala:388)
at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:355)
at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:353)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:123)
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
java.io.IOException: error=2, No such file or directory
at java.lang.UNIXProcess.forkAndExec(Native Method)
at java.lang.UNIXProcess.<init>(UNIXProcess.java:247)
at java.lang.ProcessImpl.start(ProcessImpl.java:134)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
at is.hail.utils.richUtils.RichIterator$.pipe$extension(RichIterator.scala:55)
at is.hail.methods.VEP$$anonfun$7$$anonfun$apply$4.apply(VEP.scala:176)
at is.hail.methods.VEP$$anonfun$7$$anonfun$apply$4.apply(VEP.scala:172)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:299)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:357)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:308)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:346)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:310)
at is.hail.sparkextras.ContextRDD.iterator(ContextRDD.scala:378)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anonfun$apply$3.apply(RepartitionedOrderedRDD2.scala:60)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anonfun$apply$3.apply(RepartitionedOrderedRDD2.scala:59)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$18.hasNext(Iterator.scala:762)
at scala.collection.Iterator$$anon$16.hasNext(Iterator.scala:598)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:188)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$5.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:331)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:53)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:151)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:148)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:305)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:305)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$9$$anonfun$apply$10.apply(ContextRDD.scala:208)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$9$$anonfun$apply$10.apply(ContextRDD.scala:208)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:31)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.utils.package$.getIteratorSizeWithMaxN(package.scala:388)
at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:355)
at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:353)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:123)
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Hail version: 0.2.46-6ef64c08b000
Error summary: IOException: error=2, No such file or directory