Cannot Get Sequence Information

I am trying to get the reference sequence at each locus in a Table. Here is the code I ran:

import hail as hl

# Attach the GRCh38 FASTA sequence to the built-in reference genome
grch38 = hl.get_reference("GRCh38")
grch38.add_sequence('gs://hail-common/references/Homo_sapiens_assembly38.fasta.gz',
                    'gs://hail-common/references/Homo_sapiens_assembly38.fasta.fai')
hl.get_sequence(samples_maf_ht.Chromosome, samples_maf_ht.Start_Position, reference_genome="GRCh38").show()
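
For reference, the same pattern in a self-contained form (a minimal sketch on a toy table; 'chr1' and the position are made-up placeholders, and note that GRCh38 contig names are 'chr'-prefixed, which may matter if a MAF-derived Chromosome column uses bare '1'-style names):

import hail as hl

grch38 = hl.get_reference("GRCh38")
if not grch38.has_sequence():
    grch38.add_sequence('gs://hail-common/references/Homo_sapiens_assembly38.fasta.gz',
                        'gs://hail-common/references/Homo_sapiens_assembly38.fasta.fai')

# Toy table with the same field names as samples_maf_ht (hypothetical values)
ht = hl.Table.parallelize(
    [{'Chromosome': 'chr1', 'Start_Position': 100000}],
    hl.tstruct(Chromosome=hl.tstr, Start_Position=hl.tint32),
)
ht = ht.annotate(ref_base=hl.get_sequence(ht.Chromosome, ht.Start_Position,
                                          reference_genome='GRCh38'))
ht.show()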

Running the .show() call on samples_maf_ht produced the following error:

FatalError                                Traceback (most recent call last)
File /opt/conda/lib/python3.10/site-packages/IPython/core/formatters.py:344, in BaseFormatter.__call__(self, obj)
    342     method = get_real_method(obj, self.print_method)
    343     if method is not None:
--> 344         return method()
    345     return None
    346 else:

File /opt/conda/lib/python3.10/site-packages/hail/table.py:2169, in Table._Show._repr_html_(self)
   2168 def _repr_html_(self):
-> 2169     return self._html_str()

File /opt/conda/lib/python3.10/site-packages/hail/table.py:2260, in Table._Show._html_str(self)
   2256 import html
   2258 types = self.types
-> 2260 rows, has_more, dtype = self.data()
   2261 fields = list(dtype)
   2263 default_td_style = (
   2264     'white-space: nowrap; ' 'max-width: 500px; ' 'overflow: hidden; ' 'text-overflow: ellipsis; '
   2265 )

File /opt/conda/lib/python3.10/site-packages/hail/table.py:2164, in Table._Show.data(self)
   2162     row_dtype = t.row.dtype
   2163     t = t.select(**{k: hl._showstr(v) for (k, v) in t.row.items()})
-> 2164     rows, has_more = t._take_n(self.n)
   2165     self._data = (rows, has_more, row_dtype)
   2166 return self._data

File /opt/conda/lib/python3.10/site-packages/hail/table.py:2310, in Table._take_n(self, n)
   2308     has_more = False
   2309 else:
-> 2310     rows = self.take(n + 1)
   2311     has_more = len(rows) > n
   2312     rows = rows[:n]

File <decorator-gen-1250>:2, in take(self, n, _localize)

File /opt/conda/lib/python3.10/site-packages/hail/typecheck/check.py:585, in _make_dec.<locals>.wrapper(__original_func, *args, **kwargs)
    582 @decorator
    583 def wrapper(__original_func: Callable[..., T], *args, **kwargs) -> T:
    584     args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 585     return __original_func(*args_, **kwargs_)

File /opt/conda/lib/python3.10/site-packages/hail/table.py:3027, in Table.take(self, n, _localize)
   2993 @typecheck_method(n=int, _localize=bool)
   2994 def take(self, n, _localize=True):
   2995     """Collect the first `n` rows of the table into a local list.
   2996 
   2997     Examples
   (...)
   3024         List of row structs.
   3025     """
-> 3027     return self.head(n).collect(_localize)

File <decorator-gen-1244>:2, in collect(self, _localize, _timed)

File /opt/conda/lib/python3.10/site-packages/hail/typecheck/check.py:585, in _make_dec.<locals>.wrapper(__original_func, *args, **kwargs)
    582 @decorator
    583 def wrapper(__original_func: Callable[..., T], *args, **kwargs) -> T:
    584     args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 585     return __original_func(*args_, **kwargs_)

File /opt/conda/lib/python3.10/site-packages/hail/table.py:2814, in Table.collect(self, _localize, _timed)
   2812 e = construct_expr(rows_ir, hl.tarray(t.row.dtype))
   2813 if _localize:
-> 2814     return Env.backend().execute(e._ir, timed=_timed)
   2815 else:
   2816     return e

File /opt/conda/lib/python3.10/site-packages/hail/backend/spark_backend.py:226, in SparkBackend.execute(self, ir, timed)
    223     except Exception as fatal:
    224         raise err from fatal
--> 226 raise err

File /opt/conda/lib/python3.10/site-packages/hail/backend/spark_backend.py:218, in SparkBackend.execute(self, ir, timed)
    216 def execute(self, ir: BaseIR, timed: bool = False) -> Any:
    217     try:
--> 218         return super().execute(ir, timed)
    219     except Exception as err:
    220         if self._copy_log_on_error:

File /opt/conda/lib/python3.10/site-packages/hail/backend/backend.py:190, in Backend.execute(self, ir, timed)
    188     result, timings = self._rpc(ActionTag.EXECUTE, payload)
    189 except FatalError as e:
--> 190     raise e.maybe_user_error(ir) from None
    191 if ir.typ == tvoid:
    192     value = None

File /opt/conda/lib/python3.10/site-packages/hail/backend/backend.py:188, in Backend.execute(self, ir, timed)
    186 payload = ExecutePayload(self._render_ir(ir), '{"name":"StreamBufferSpec"}', timed)
    187 try:
--> 188     result, timings = self._rpc(ActionTag.EXECUTE, payload)
    189 except FatalError as e:
    190     raise e.maybe_user_error(ir) from None

File /opt/conda/lib/python3.10/site-packages/hail/backend/py4j_backend.py:221, in Py4JBackend._rpc(self, action, payload)
    219 if resp.status_code >= 400:
    220     error_json = orjson.loads(resp.content)
--> 221     raise fatal_error_from_java_error_triplet(
    222         error_json['short'], error_json['expanded'], error_json['error_id']
    223     )
    224 return resp.content, resp.headers.get('X-Hail-Timings', '')

FatalError: NoSuchFileException: /tmp/fasta-reader-mNZHC7Qg5Eu84B1G6MThPT.fasta

Java stack trace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 33.0 failed 4 times, most recent failure: Lost task 0.3 in stage 33.0 (TID 138) (saturn-8e28782d-9b63-4684-a4f6-12b1c63b1df4-m.us-central1-b.c.terra-ff60cfda.internal executor 4): htsjdk.samtools.SAMException: Error opening file: /tmp/fasta-reader-mNZHC7Qg5Eu84B1G6MThPT.fasta
	at htsjdk.samtools.util.IOUtil.openFileForReading(IOUtil.java:702)
	at htsjdk.samtools.reference.FastaSequenceFile.<init>(FastaSequenceFile.java:64)
	at htsjdk.samtools.reference.ReferenceSequenceFileFactory.getReferenceSequenceFile(ReferenceSequenceFileFactory.java:144)
	at htsjdk.samtools.reference.ReferenceSequenceFileFactory.getReferenceSequenceFile(ReferenceSequenceFileFactory.java:100)
	at htsjdk.samtools.reference.ReferenceSequenceFileFactory.getReferenceSequenceFile(ReferenceSequenceFileFactory.java:88)
	at htsjdk.samtools.reference.ReferenceSequenceFileFactory.getReferenceSequenceFile(ReferenceSequenceFileFactory.java:77)
	at is.hail.io.reference.FASTAReader.newReader(FASTAReader.scala:80)
	at is.hail.io.reference.FASTAReader.<init>(FASTAReader.scala:83)
	at is.hail.io.reference.FASTAReaderConfig.reader(FASTAReader.scala:31)
	at is.hail.variant.ReferenceGenome.fastaReader(ReferenceGenome.scala:421)
	at is.hail.variant.ReferenceGenome.getSequence(ReferenceGenome.scala:428)
	at __C42324collect_distributed_array_table_head_recursive_count.__m42381getReferenceSequenceFromValidLocus(Unknown Source)
	at __C42324collect_distributed_array_table_head_recursive_count.__m42336split_StreamLen(Unknown Source)
	at __C42324collect_distributed_array_table_head_recursive_count.apply(Unknown Source)
	at __C42324collect_distributed_array_table_head_recursive_count.apply(Unknown Source)
	at is.hail.backend.BackendUtils.$anonfun$collectDArray$6(BackendUtils.scala:87)
	at is.hail.utils.package$.using(package.scala:664)
	at is.hail.annotations.RegionPool.scopedRegion(RegionPool.scala:166)
	at is.hail.backend.BackendUtils.$anonfun$collectDArray$5(BackendUtils.scala:86)
	at is.hail.backend.spark.SparkBackendComputeRDD.compute(SparkBackend.scala:910)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:136)
	at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:829)
Caused by: java.nio.file.NoSuchFileException: /tmp/fasta-reader-mNZHC7Qg5Eu84B1G6MThPT.fasta
	at java.base/sun.nio.fs.UnixException.translateToIOException(UnixException.java:92)
	at java.base/sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:111)
	at java.base/sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:116)
	at java.base/sun.nio.fs.UnixFileSystemProvider.newByteChannel(UnixFileSystemProvider.java:219)
	at java.base/java.nio.file.Files.newByteChannel(Files.java:371)
	at java.base/java.nio.file.Files.newByteChannel(Files.java:422)
	at java.base/java.nio.file.spi.FileSystemProvider.newInputStream(FileSystemProvider.java:420)
	at java.base/java.nio.file.Files.newInputStream(Files.java:156)
	at htsjdk.samtools.util.IOUtil.openFileForReading(IOUtil.java:698)
	... 29 more

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2673)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2609)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2608)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2608)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1182)
	at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1182)
	at scala.Option.foreach(Option.scala:407)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1182)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2861)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2803)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2792)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:952)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2236)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2257)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2276)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2301)
	at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1021)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:406)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:1020)
	at is.hail.backend.spark.SparkBackend.parallelizeAndComputeWithIndex(SparkBackend.scala:429)
	at is.hail.backend.BackendUtils.collectDArray(BackendUtils.scala:82)
	at __C42303Compiled.__m42307split_TailLoop(Emit.scala)
	at __C42303Compiled.__m42305split_ToArray(Emit.scala)
	at __C42303Compiled.apply(Emit.scala)
	at is.hail.expr.ir.CompileAndEvaluate$.$anonfun$_apply$7(CompileAndEvaluate.scala:82)
	at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:82)
	at is.hail.expr.ir.CompileAndEvaluate$.$anonfun$apply$1(CompileAndEvaluate.scala:17)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.expr.ir.CompileAndEvaluate$.apply(CompileAndEvaluate.scala:17)
	at is.hail.expr.ir.lowering.LowerTableIR$.applyTable(LowerTableIR.scala:1486)
	at is.hail.expr.ir.lowering.LowerTableIR$.lower$2(LowerTableIR.scala:1050)
	at is.hail.expr.ir.lowering.LowerTableIR$.applyTable(LowerTableIR.scala:1654)
	at is.hail.expr.ir.lowering.LowerTableIR$.lower$1(LowerTableIR.scala:728)
	at is.hail.expr.ir.lowering.LowerTableIR$.apply(LowerTableIR.scala:823)
	at is.hail.expr.ir.lowering.LowerToCDA$.lower(LowerToCDA.scala:27)
	at is.hail.expr.ir.lowering.LowerToCDA$.apply(LowerToCDA.scala:11)
	at is.hail.expr.ir.lowering.LowerToDistributedArrayPass.transform(LoweringPass.scala:91)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.evaluate$1(LowerOrInterpretNonCompilable.scala:27)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.rewrite$1(LowerOrInterpretNonCompilable.scala:59)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.apply(LowerOrInterpretNonCompilable.scala:64)
	at is.hail.expr.ir.lowering.LowerOrInterpretNonCompilablePass$.transform(LoweringPass.scala:83)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$3(LoweringPass.scala:32)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$1(LoweringPass.scala:32)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.expr.ir.lowering.LoweringPass.apply(LoweringPass.scala:30)
	at is.hail.expr.ir.lowering.LoweringPass.apply$(LoweringPass.scala:29)
	at is.hail.expr.ir.lowering.LowerOrInterpretNonCompilablePass$.apply(LoweringPass.scala:78)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1(LoweringPipeline.scala:21)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1$adapted(LoweringPipeline.scala:19)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:19)
	at is.hail.expr.ir.lowering.EvalRelationalLets$.execute$1(EvalRelationalLets.scala:13)
	at is.hail.expr.ir.lowering.EvalRelationalLets$.lower$1(EvalRelationalLets.scala:21)
	at is.hail.expr.ir.lowering.EvalRelationalLets$.apply(EvalRelationalLets.scala:35)
	at is.hail.expr.ir.lowering.EvalRelationalLetsPass.transform(LoweringPass.scala:168)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$3(LoweringPass.scala:32)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$1(LoweringPass.scala:32)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.expr.ir.lowering.LoweringPass.apply(LoweringPass.scala:30)
	at is.hail.expr.ir.lowering.LoweringPass.apply$(LoweringPass.scala:29)
	at is.hail.expr.ir.lowering.EvalRelationalLetsPass.apply(LoweringPass.scala:162)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1(LoweringPipeline.scala:21)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1$adapted(LoweringPipeline.scala:19)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:19)
	at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:45)
	at is.hail.backend.spark.SparkBackend._execute(SparkBackend.scala:600)
	at is.hail.backend.spark.SparkBackend.$anonfun$execute$4(SparkBackend.scala:636)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:84)
	at is.hail.backend.spark.SparkBackend.$anonfun$execute$3(SparkBackend.scala:631)
	at is.hail.backend.spark.SparkBackend.$anonfun$execute$3$adapted(SparkBackend.scala:630)
	at is.hail.backend.ExecuteContext$.$anonfun$scoped$3(ExecuteContext.scala:78)
	at is.hail.utils.package$.using(package.scala:664)
	at is.hail.backend.ExecuteContext$.$anonfun$scoped$2(ExecuteContext.scala:78)
	at is.hail.utils.package$.using(package.scala:664)
	at is.hail.annotations.RegionPool$.scoped(RegionPool.scala:13)
	at is.hail.backend.ExecuteContext$.scoped(ExecuteContext.scala:65)
	at is.hail.backend.spark.SparkBackend.$anonfun$withExecuteContext$2(SparkBackend.scala:407)
	at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:55)
	at is.hail.utils.ExecutionTimer$.logTime(ExecutionTimer.scala:62)
	at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:393)
	at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:630)
	at is.hail.backend.BackendHttpHandler.handle(BackendServer.scala:88)
	at jdk.httpserver/com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:77)
	at jdk.httpserver/sun.net.httpserver.AuthFilter.doFilter(AuthFilter.java:82)
	at jdk.httpserver/com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:80)
	at jdk.httpserver/sun.net.httpserver.ServerImpl$Exchange$LinkHandler.handle(ServerImpl.java:848)
	at jdk.httpserver/com.sun.net.httpserver.Filter$Chain.doFilter(Filter.java:77)
	at jdk.httpserver/sun.net.httpserver.ServerImpl$Exchange.run(ServerImpl.java:817)
	at jdk.httpserver/sun.net.httpserver.ServerImpl$DefaultExecutor.execute(ServerImpl.java:201)
	at jdk.httpserver/sun.net.httpserver.ServerImpl$Dispatcher.handle(ServerImpl.java:560)
	at jdk.httpserver/sun.net.httpserver.ServerImpl$Dispatcher.run(ServerImpl.java:526)
	at java.base/java.lang.Thread.run(Thread.java:829)

Hail version: 0.2.130-bea04d9c79b5
Error summary: NoSuchFileException: /tmp/fasta-reader-mNZHC7Qg5Eu84B1G6MThPT.fasta
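
In case it helps narrow things down, a driver-side sanity check can be run first (a sketch, assuming the reference setup above; hl.eval evaluates a single literal lookup, so it typically runs on the driver and does not exercise the Spark executor tasks that failed here):

import hail as hl

grch38 = hl.get_reference("GRCh38")
print(grch38.has_sequence())  # should print True once add_sequence has run

# A single lookup at a hypothetical locus, evaluated locally
print(hl.eval(hl.get_sequence('chr1', 100000, reference_genome='GRCh38')))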
