2024-01-18 10:18:38.509 Hail: INFO: SparkUI: http://ip-10-60-27-74.eu-west-2.compute.internal:8081
2024-01-18 10:18:38.708 Hail: INFO: Running Hail version 0.2.116-cd64e0876c94
2024-01-18 10:18:39.027 SparkSession$Builder: WARN: Using an existing SparkSession; the static sql configurations will not take effect.
2024-01-18 10:18:39.027 SparkSession$Builder: WARN: Using an existing SparkSession; some spark core configurations may not take effect.
2024-01-18 10:18:40.478 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.783 root: INFO: globbing path file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format [500k release]/chr1/ukb24310_c1_b92_v1.vcf.gz returned 0 files:
2024-01-18 10:18:40.784 Hail: WARN: 'file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format [500k release]/chr1/ukb24310_c1_b92_v1.vcf.gz' refers to no files
2024-01-18 10:18:40.785 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.786 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:18:40.790 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.790 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.792 root: ERROR: HailException: arguments refer to no files
From is.hail.utils.HailException: arguments refer to no files
    at is.hail.utils.ErrorHandling.fatal(ErrorHandling.scala:17)
    at is.hail.utils.ErrorHandling.fatal$(ErrorHandling.scala:17)
    at is.hail.utils.package$.fatal(package.scala:78)
    at is.hail.io.vcf.LoadVCF$.globAllVCFs(LoadVCF.scala:1151)
    at is.hail.io.vcf.MatrixVCFReader$.apply(LoadVCF.scala:1592)
    at is.hail.io.vcf.MatrixVCFReader$.fromJValue(LoadVCF.scala:1670)
    at is.hail.expr.ir.MatrixReader$.fromJson(MatrixIR.scala:89)
    at is.hail.expr.ir.IRParser$.matrix_ir_1(Parser.scala:1874)
    at is.hail.expr.ir.IRParser$.$anonfun$matrix_ir$1(Parser.scala:1790)
    at is.hail.utils.StackSafe$More.advance(StackSafe.scala:64)
    at is.hail.utils.StackSafe$.run(StackSafe.scala:16)
    at is.hail.utils.StackSafe$StackFrame.run(StackSafe.scala:32)
    at is.hail.expr.ir.IRParser$.$anonfun$parse_matrix_ir$1(Parser.scala:2153)
    at is.hail.expr.ir.IRParser$.parse(Parser.scala:2138)
    at is.hail.expr.ir.IRParser$.parse_matrix_ir(Parser.scala:2153)
    at is.hail.backend.spark.SparkBackend.$anonfun$parse_matrix_ir$2(SparkBackend.scala:707)
    at is.hail.backend.ExecuteContext$.$anonfun$scoped$3(ExecuteContext.scala:75)
    at is.hail.utils.package$.using(package.scala:635)
    at is.hail.backend.ExecuteContext$.$anonfun$scoped$2(ExecuteContext.scala:75)
    at is.hail.utils.package$.using(package.scala:635)
    at is.hail.annotations.RegionPool$.scoped(RegionPool.scala:17)
    at is.hail.backend.ExecuteContext$.scoped(ExecuteContext.scala:63)
    at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:351)
    at is.hail.backend.spark.SparkBackend.$anonfun$parse_matrix_ir$1(SparkBackend.scala:706)
    at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:52)
    at is.hail.utils.ExecutionTimer$.logTime(ExecutionTimer.scala:59)
    at is.hail.backend.spark.SparkBackend.parse_matrix_ir(SparkBackend.scala:705)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
    at java.lang.Thread.run(Thread.java:750)
2024-01-18 10:18:40.794 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.798 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.799 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:18:40.799 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.799 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.800 root: ERROR: PatternSyntaxException: error parsing regexp: Unclosed character class at pos 51: `DRAGEN population level WGS variants, pVCF format [[]500k release[]]`
From java.io.IOException: Illegal file pattern: error parsing regexp: Unclosed character class at pos 51: `DRAGEN population level WGS variants, pVCF format [[]500k release[]]`
    at org.apache.hadoop.fs.GlobFilter.init(GlobFilter.java:71)
    at org.apache.hadoop.fs.GlobFilter.<init>(GlobFilter.java:50)
    at org.apache.hadoop.fs.Globber.doGlob(Globber.java:265)
    at org.apache.hadoop.fs.Globber.glob(Globber.java:202)
    at org.apache.hadoop.fs.FileSystem.globStatus(FileSystem.java:2124)
    at is.hail.io.fs.HadoopFS.glob(HadoopFS.scala:169)
    at is.hail.io.fs.HadoopFS.$anonfun$globAll$1(HadoopFS.scala:148)
    at is.hail.io.fs.HadoopFS.$anonfun$globAll$1$adapted(HadoopFS.scala:147)
    at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:486)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:492)
    at scala.collection.Iterator.foreach(Iterator.scala:943)
    at scala.collection.Iterator.foreach$(Iterator.scala:943)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
    at scala.collection.generic.Growable.$plus$plus$eq(Growable.scala:62)
    at scala.collection.generic.Growable.$plus$plus$eq$(Growable.scala:53)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:105)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:49)
    at scala.collection.TraversableOnce.to(TraversableOnce.scala:366)
    at scala.collection.TraversableOnce.to$(TraversableOnce.scala:364)
    at scala.collection.AbstractIterator.to(Iterator.scala:1431)
    at scala.collection.TraversableOnce.toBuffer(TraversableOnce.scala:358)
    at scala.collection.TraversableOnce.toBuffer$(TraversableOnce.scala:358)
    at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1431)
    at scala.collection.TraversableOnce.toArray(TraversableOnce.scala:345)
    at scala.collection.TraversableOnce.toArray$(TraversableOnce.scala:339)
    at scala.collection.AbstractIterator.toArray(Iterator.scala:1431)
    at is.hail.io.fs.HadoopFS.globAll(HadoopFS.scala:153)
    [remaining frames identical to the HailException trace above, from is.hail.io.vcf.MatrixVCFReader$.apply(LoadVCF.scala:1592) through java.lang.Thread.run(Thread.java:750)]
org.apache.hadoop.shaded.com.google.re2j.PatternSyntaxException: error parsing regexp: Unclosed character class at pos 51: `DRAGEN population level WGS variants, pVCF format [[]500k release[]]`
    at org.apache.hadoop.fs.GlobPattern.error(GlobPattern.java:168)
    at org.apache.hadoop.fs.GlobPattern.set(GlobPattern.java:126)
    at org.apache.hadoop.fs.GlobPattern.<init>(GlobPattern.java:42)
    at org.apache.hadoop.fs.GlobFilter.init(GlobFilter.java:67)
    [remaining frames identical to the java.io.IOException trace above, from org.apache.hadoop.fs.GlobFilter.<init>(GlobFilter.java:50) through java.lang.Thread.run(Thread.java:750)]
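
Both errors above trace back to how the path is globbed: Hail hands it to Hadoop's glob matcher (org.apache.hadoop.fs.GlobPattern), which treats [ and ] as character-class syntax rather than literal characters. The brackets in "pVCF format [500k release]" are therefore why the unescaped path both matches nothing and, in the second error, fails to parse at all ("Unclosed character class"). A minimal sketch of one way to work around this, assuming the file really is visible under /mnt/project: backslash-escape the glob metacharacters before handing the path to hl.import_vcf. The helper below and its character set are assumptions for illustration, not Hail or DNAnexus API.

    import hail as hl

    # Assumption: these are the Hadoop glob metacharacters that matter for this path.
    GLOB_SPECIALS = '*?[]{}'

    def escape_hadoop_glob(path: str) -> str:
        # Prefix each metacharacter with a backslash so Hadoop treats it literally.
        return ''.join('\\' + c if c in GLOB_SPECIALS else c for c in path)

    raw_path = ('file:///mnt/project/Bulk/DRAGEN WGS/'
                'DRAGEN population level WGS variants, pVCF format [500k release]/'
                'chr1/ukb24310_c1_b92_v1.vcf.gz')

    # force_bgz=True is an assumption: the UKB pVCF blocks are block-gzipped
    # even though the extension is .vcf.gz.
    mt = hl.import_vcf(escape_hadoop_glob(raw_path), force_bgz=True)

This produces the \[500k release\] form seen in the entries below, so escaping removes the regex error; in this log, however, the escaped pattern still matches no files, which the check after the next block of entries looks into.
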
2024-01-18 10:18:40.847 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.852 root: INFO: globbing path file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz returned 0 files:
2024-01-18 10:18:40.853 Hail: WARN: 'file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz' refers to no files
2024-01-18 10:18:40.853 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.853 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:18:40.853 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.853 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.854 root: ERROR: HailException: arguments refer to no files
    [exception and stack trace identical to the HailException at 10:18:40.792 above]
2024-01-18 10:18:40.855 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.858 root: INFO: globbing path file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz returned 0 files:
2024-01-18 10:18:40.858 Hail: WARN: 'file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz' refers to no files
2024-01-18 10:18:40.859 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:18:40.859 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:18:40.859 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.859 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:18:40.859 root: ERROR: HailException: arguments refer to no files
    [exception and stack trace identical to the HailException at 10:18:40.792 above]
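
With the brackets escaped the pattern parses, yet the glob still returns 0 files, which points at the path itself rather than at the escaping. Before adjusting the pattern further it is worth confirming, with plain Python on the driver, that the file is visible at the FUSE mount at all. The paths below simply restate the one from the log; that /mnt/project is mounted on this node, and the b* block-naming wildcard, are assumptions.

    import glob
    import os

    local_dir = ('/mnt/project/Bulk/DRAGEN WGS/'
                 'DRAGEN population level WGS variants, pVCF format [500k release]/chr1')
    local_file = os.path.join(local_dir, 'ukb24310_c1_b92_v1.vcf.gz')

    print(os.path.isdir(local_dir))    # is the directory mounted and visible here?
    print(os.path.exists(local_file))  # does this particular block exist?

    # Python's glob also treats [...] as a character class, so escape the fixed
    # directory part before wildcarding over the block files.
    pattern = os.path.join(glob.escape(local_dir), 'ukb24310_c1_b*_v1.vcf.gz')
    print(sorted(glob.glob(pattern))[:5])

If os.path.exists is already False, no amount of glob escaping will help: either the project mount is not available on this node or that block name does not exist for chr1, and listing the folder (for example with dx ls) shows which blocks are actually there.
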
2024-01-18 10:21:07.447 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.456 root: INFO: globbing path file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format [500k release]/chr1/ukb24310_c1_b92_v1.vcf.gz returned 0 files:
2024-01-18 10:21:07.456 Hail: WARN: 'file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format [500k release]/chr1/ukb24310_c1_b92_v1.vcf.gz' refers to no files
2024-01-18 10:21:07.457 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.457 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:21:07.457 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.457 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.458 root: ERROR: HailException: arguments refer to no files
    [exception and stack trace identical to the HailException at 10:18:40.792 above]
2024-01-18 10:21:07.459 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.461 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.461 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:21:07.461 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.462 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.462 root: ERROR: PatternSyntaxException: error parsing regexp: Unclosed character class at pos 51: `DRAGEN population level WGS variants, pVCF format [[]500k release[]]`
    [exception chain and stack traces identical to the PatternSyntaxException at 10:18:40.800 above]
2024-01-18 10:21:07.506 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.509 root: INFO: globbing path file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz returned 0 files:
2024-01-18 10:21:07.509 Hail: WARN: 'file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz' refers to no files
2024-01-18 10:21:07.510 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.510 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:21:07.510 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.510 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.511 root: ERROR: HailException: arguments refer to no files
    [exception and stack trace identical to the HailException at 10:18:40.792 above]
2024-01-18 10:21:07.512 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.519 root: INFO: globbing path file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \\[500k release\\]/chr1/ukb24310_c1_b92_v1.vcf.gz returned 0 files:
2024-01-18 10:21:07.519 Hail: WARN: 'file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, pVCF format \\[500k release\\]/chr1/ukb24310_c1_b92_v1.vcf.gz' refers to no files
2024-01-18 10:21:07.519 root: INFO: RegionPool: initialized for thread 23: Thread-4
2024-01-18 10:21:07.519 root: INFO: TaskReport: stage=0, partition=0, attempt=0, peakBytes=0, peakBytesReadable=0.00 B, chunks requested=0, cache hits=0
2024-01-18 10:21:07.519 root: INFO: RegionPool: FREE: 0 allocated (0 blocks / 0 chunks), regions.size = 0, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.519 root: INFO: RegionPool: FREE: 64.0K allocated (64.0K blocks / 0 chunks), regions.size = 1, 0 current java objects, thread 23: Thread-4
2024-01-18 10:21:07.520 root: ERROR: HailException: arguments refer to no files
    [exception and stack trace identical to the HailException at 10:18:40.792 above]
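
One last detail from the final entries: that attempt sent two literal backslashes (\\[ ... \\]) to Hail, one more than a backslash-escaped Hadoop glob needs, which usually comes from escaping the string twice in Python source. Reading the double backslash as over-escaping is my interpretation of the log, not something the log states. A small, Hail-free sanity check on the literals:

    # Both literals contain exactly one real backslash before each bracket,
    # i.e. the form shown in the 10:18:40.852 and 10:21:07.509 entries.
    p1 = ("file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, "
          "pVCF format \\[500k release\\]/chr1/ukb24310_c1_b92_v1.vcf.gz")
    p2 = ("file:///mnt/project/Bulk/DRAGEN WGS/DRAGEN population level WGS variants, "
          r"pVCF format \[500k release\]/chr1/ukb24310_c1_b92_v1.vcf.gz")
    assert p1 == p2
    assert p1.count("\\") == 2   # one backslash before '[' and one before ']'

    # Writing "\\\\[" (or r"\\[") instead puts two real backslashes in the string,
    # which matches the 10:21:07.519 entries and is a different pattern.

Whether the single-backslash form then finds the file still depends on the file actually existing at that path, as checked after the 10:18 entries above.
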