Unable to utilize my vcf file

Hello, I am trying to import my vcf file, but I am getting an error whenever I am trying to apply any function to my file.

i_dir = '/Users/jeji/ASD_pQTL'
vcf_path = '/'.join([i_dir, "SNUBH_ASD_210210.vcf.gz"])
mt = hl.import_vcf(vcf_path, reference_genome = 'GRCh38', force_bgz = True, min_partitions = n_cores*4)

This is how I imported my file.
This is the error I get.
For example, when I try:

mt.show()

This is the error code I get:

---------------------------------------------------------------------------
FatalError                                Traceback (most recent call last)
/var/folders/6t/k806qy3120z5c974tdcphgrm0000gn/T/ipykernel_36666/2845130735.py in <module>
----> 1 mt.show()

<decorator-gen-1272> in show(self, n_rows, n_cols, include_row_fields, width, truncate, types, handler)

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/matrixtable.py in show(self, n_rows, n_cols, include_row_fields, width, truncate, types, handler)
   2631         col_key_type = self.col_key.dtype
   2632         if len(col_key_type) == 1 and col_key_type[0] in (hl.tstr, hl.tint32, hl.tint64):
-> 2633             cols = self.col_key[0].take(displayed_n_cols)
   2634             entries = {repr(cols[i]): t.entries[i]
   2635                        for i in range(0, displayed_n_cols)}

<decorator-gen-606> in take(self, n, _localize)

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/expr/expressions/base_expression.py in take(self, n, _localize)
   1003         e = t.take(n, _localize=False).map(lambda r: r[name])
   1004         if _localize:
-> 1005             return hl.eval(e)
   1006         return e
   1007 

<decorator-gen-750> in eval(expression)

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/expr/expressions/expression_utils.py in eval(expression)
    192     Any
    193     """
--> 194     return eval_timed(expression)[0]
    195 
    196 

<decorator-gen-748> in eval_timed(expression)

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/expr/expressions/expression_utils.py in eval_timed(expression)
    156         if ir_type != expression.dtype:
    157             raise ExpressionException(f'Expression type and IR type differed: \n{ir_type}\n vs \n{expression_type}')
--> 158         (tupled_ans, timing) = Env.backend().execute(tupled_expression._ir, True)
    159     else:
    160         uid = Env.get_uid()

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
    108                 raise HailUserError(message_and_trace) from None
    109 
--> 110             raise e

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
     84         # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
     85         try:
---> 86             result_tuple = self._jhc.backend().executeEncode(jir, stream_codec)
     87             (result, timings) = (result_tuple._1(), result_tuple._2())
     88             value = ir.typ._from_encoding(result)

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/py4j/java_gateway.py in __call__(self, *args)
   1302 
   1303         answer = self.gateway_client.send_command(command)
-> 1304         return_value = get_return_value(
   1305             answer, self.gateway_client, self.target_id, self.name)
   1306 

/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/site-packages/hail/backend/py4j_backend.py in deco(*args, **kwargs)
     27                 raise FatalError('Error summary: %s' % (deepest,), error_id) from None
     28             else:
---> 29                 raise FatalError('%s\n\nJava stack trace:\n%s\n'
     30                                  'Hail version: %s\n'
     31                                  'Error summary: %s' % (deepest, full, hail.__version__, deepest), error_id) from None

FatalError: IllegalArgumentException: requirement failed

Java stack trace:
java.lang.IllegalArgumentException: requirement failed
	at scala.Predef$.require(Predef.scala:268)
	at is.hail.rvd.RVDPartitioner.<init>(RVDPartitioner.scala:52)
	at is.hail.rvd.RVDPartitioner.extendKeySamePartitions(RVDPartitioner.scala:141)
	at is.hail.expr.ir.LoweredTableReader$$anon$2.coerce(TableIR.scala:383)
	at is.hail.expr.ir.GenericTableValue.toTableStage(GenericTableValue.scala:162)
	at is.hail.io.vcf.MatrixVCFReader.lower(LoadVCF.scala:1790)
	at is.hail.expr.ir.lowering.LowerTableIR$.lower$1(LowerTableIR.scala:407)
	at is.hail.expr.ir.lowering.LowerTableIR$.apply(LowerTableIR.scala:1199)
	at is.hail.expr.ir.lowering.LowerToCDA$.lower(LowerToCDA.scala:69)
	at is.hail.expr.ir.lowering.LowerToCDA$.apply(LowerToCDA.scala:18)
	at is.hail.expr.ir.lowering.LowerToDistributedArrayPass.transform(LoweringPass.scala:77)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.evaluate$1(LowerOrInterpretNonCompilable.scala:27)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.rewrite$1(LowerOrInterpretNonCompilable.scala:67)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.rewrite$1(LowerOrInterpretNonCompilable.scala:53)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.apply(LowerOrInterpretNonCompilable.scala:72)
	at is.hail.expr.ir.lowering.LowerOrInterpretNonCompilablePass$.transform(LoweringPass.scala:69)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$3(LoweringPass.scala:16)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$1(LoweringPass.scala:16)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
	at is.hail.expr.ir.lowering.LoweringPass.apply(LoweringPass.scala:14)
	at is.hail.expr.ir.lowering.LoweringPass.apply$(LoweringPass.scala:13)
	at is.hail.expr.ir.lowering.LowerOrInterpretNonCompilablePass$.apply(LoweringPass.scala:64)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1(LoweringPipeline.scala:15)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1$adapted(LoweringPipeline.scala:13)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
	at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:13)
	at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:47)
	at is.hail.backend.spark.SparkBackend._execute(SparkBackend.scala:381)
	at is.hail.backend.spark.SparkBackend.$anonfun$executeEncode$2(SparkBackend.scala:417)
	at is.hail.backend.ExecuteContext$.$anonfun$scoped$3(ExecuteContext.scala:47)
	at is.hail.utils.package$.using(package.scala:638)
	at is.hail.backend.ExecuteContext$.$anonfun$scoped$2(ExecuteContext.scala:47)
	at is.hail.utils.package$.using(package.scala:638)
	at is.hail.annotations.RegionPool$.scoped(RegionPool.scala:17)
	at is.hail.backend.ExecuteContext$.scoped(ExecuteContext.scala:46)
	at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:275)
	at is.hail.backend.spark.SparkBackend.$anonfun$executeEncode$1(SparkBackend.scala:414)
	at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:52)
	at is.hail.backend.spark.SparkBackend.executeEncode(SparkBackend.scala:413)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.lang.Thread.run(Thread.java:748)



Hail version: 0.2.79-f141af259254
Error summary: IllegalArgumentException: requirement failed

I am not sure where I should approach the problem from. Any solutions? Thanks in advance :slight_smile:

This seems like probably a hail bug. Two things:

  1. Is this VCF private? Or is it public data?
  2. Can you try making the first line of your hail script:
hl._set_flags(no_whole_stage_codegen='1')

and see if that works?

Can you also share the hail log file with us?

  1. It is a private VCF data!

  2. Yes, it works when I initialize hail by that code!

and I have attached my log file. Thanks :smiley:
hail20211217.log (312.6 KB)

Hi all,

I am running into a similar issue. If I use hl._set_flags(no_whole_stage_codegen='1') however and initialise that way, I am then unable to initialise hail with build 'GRCh38'. Is there a way around this?

Thanks

Hey @skose!

I’m sorry to hear Hail is giving you a hard time. Can you share the full error message you get when you start hail with GRCh38 as the default genome reference?

Hi @danking

Thanks for the reply. I initialise hail as per usual but then get a java error. The file is bgzipped and tabixed. When I try mt.show() it gets an error. I can do analysis on the file such as kinship etc, but when I try to show anything it does that. The initial thread says to initialise with hl._set_flags(no_whole_stage_codegen='1'), but then I cannot do the init with the reference build I wish to use ('GRCh38').

$ ipython 
Python 3.6.13 | packaged by conda-forge | (default, Sep 23 2021, 07:55:15) 
Type 'copyright', 'credits' or 'license' for more information
IPython 7.16.2 -- An enhanced Interactive Python. Type '?' for help.

In [1]: import hail as hl

In [2]: build='GRCh38'

In [3]: hl.init(default_reference=build)
WARNING: An illegal reflective access operation has occurred
WARNING: Illegal reflective access by org.apache.spark.unsafe.Platform (file:/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/pyspark/jars/spark-unsafe_2.12-3.1.2.jar) to constructor java.nio.DirectByteBuffer(long,int)
WARNING: Please consider reporting this to the maintainers of org.apache.spark.unsafe.Platform
WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
WARNING: All illegal access operations will be denied in a future release
2022-01-10 06:01:49 WARN  NativeCodeLoader:60 - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
2022-01-10 06:01:50 WARN  Hail:43 - This Hail JAR was compiled for Spark 3.1.1, running with Spark 3.1.2.
  Compatibility is not guaranteed.
Running on Apache Spark version 3.1.2
SparkUI available at http://25229cgnlt:4040
Welcome to
     __  __     <>__
    / /_/ /__  __/ /
   / __  / _ `/ / /
  /_/ /_/\_,_/_/_/   version 0.2.78-b17627756568
LOGGING: writing to /Users/adminskose/Desktop/hail-20220110-0601-0.2.78-b17627756568.log

In [4]: mt = hl.import_vcf('final.vcf.gz',force_bgz=True)

In [5]: mt.show()
2022-01-10 06:03:41 Hail: INFO: Coerced prefix-sorted dataset     (10 + 5) / 15]
---------------------------------------------------------------------------
FatalError                                Traceback (most recent call last)
<ipython-input-5-6513529f97bb> in <module>
----> 1 mt.show()

<decorator-gen-1268> in show(self, n_rows, n_cols, include_row_fields, width, truncate, types, handler)

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/matrixtable.py in show(self, n_rows, n_cols, include_row_fields, width, truncate, types, handler)
   2631         col_key_type = self.col_key.dtype
   2632         if len(col_key_type) == 1 and col_key_type[0] in (hl.tstr, hl.tint32, hl.tint64):
-> 2633             cols = self.col_key[0].take(displayed_n_cols)
   2634             entries = {repr(cols[i]): t.entries[i]
   2635                        for i in range(0, displayed_n_cols)}

<decorator-gen-606> in take(self, n, _localize)

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/expr/expressions/base_expression.py in take(self, n, _localize)
   1003         e = t.take(n, _localize=False).map(lambda r: r[name])
   1004         if _localize:
-> 1005             return hl.eval(e)
   1006         return e
   1007 

<decorator-gen-750> in eval(expression)

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/expr/expressions/expression_utils.py in eval(expression)
    192     Any
    193     """
--> 194     return eval_timed(expression)[0]
    195 
    196 

<decorator-gen-748> in eval_timed(expression)

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    575     def wrapper(__original_func, *args, **kwargs):
    576         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577         return __original_func(*args_, **kwargs_)
    578 
    579     return wrapper

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/expr/expressions/expression_utils.py in eval_timed(expression)
    156         if ir_type != expression.dtype:
    157             raise ExpressionException(f'Expression type and IR type differed: \n{ir_type}\n vs \n{expression_type}')
--> 158         (tupled_ans, timing) = Env.backend().execute(tupled_expression._ir, True)
    159     else:
    160         uid = Env.get_uid()

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
    108                 raise HailUserError(message_and_trace) from None
    109 
--> 110             raise e

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
     84         # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
     85         try:
---> 86             result_tuple = self._jhc.backend().executeEncode(jir, stream_codec)
     87             (result, timings) = (result_tuple._1(), result_tuple._2())
     88             value = ir.typ._from_encoding(result)

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/py4j/java_gateway.py in __call__(self, *args)
   1303         answer = self.gateway_client.send_command(command)
   1304         return_value = get_return_value(
-> 1305             answer, self.gateway_client, self.target_id, self.name)
   1306 
   1307         for temp_arg in temp_args:

/Applications/anaconda3/envs/bioinfo/lib/python3.6/site-packages/hail/backend/py4j_backend.py in deco(*args, **kwargs)
     29                 raise FatalError('%s\n\nJava stack trace:\n%s\n'
     30                                  'Hail version: %s\n'
---> 31                                  'Error summary: %s' % (deepest, full, hail.__version__, deepest), error_id) from None
     32         except pyspark.sql.utils.CapturedException as e:
     33             raise FatalError('%s\n\nJava stack trace:\n%s\n'

FatalError: IllegalArgumentException: requirement failed

Java stack trace:
java.lang.IllegalArgumentException: requirement failed
	at scala.Predef$.require(Predef.scala:268)
	at is.hail.rvd.RVDPartitioner.<init>(RVDPartitioner.scala:52)
	at is.hail.rvd.RVDPartitioner.extendKeySamePartitions(RVDPartitioner.scala:141)
	at is.hail.expr.ir.LoweredTableReader$$anon$2.coerce(TableIR.scala:387)
	at is.hail.expr.ir.GenericTableValue.toTableStage(GenericTableValue.scala:162)
	at is.hail.io.vcf.MatrixVCFReader.lower(LoadVCF.scala:1790)
	at is.hail.expr.ir.lowering.LowerTableIR$.lower$1(LowerTableIR.scala:403)
	at is.hail.expr.ir.lowering.LowerTableIR$.apply(LowerTableIR.scala:1195)
	at is.hail.expr.ir.lowering.LowerToCDA$.lower(LowerToCDA.scala:69)
	at is.hail.expr.ir.lowering.LowerToCDA$.apply(LowerToCDA.scala:18)
	at is.hail.expr.ir.lowering.LowerToDistributedArrayPass.transform(LoweringPass.scala:77)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.evaluate$1(LowerOrInterpretNonCompilable.scala:27)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.rewrite$1(LowerOrInterpretNonCompilable.scala:67)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.rewrite$1(LowerOrInterpretNonCompilable.scala:53)
	at is.hail.expr.ir.LowerOrInterpretNonCompilable$.apply(LowerOrInterpretNonCompilable.scala:72)
	at is.hail.expr.ir.lowering.LowerOrInterpretNonCompilablePass$.transform(LoweringPass.scala:69)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$3(LoweringPass.scala:16)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
	at is.hail.expr.ir.lowering.LoweringPass.$anonfun$apply$1(LoweringPass.scala:16)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:81)
	at is.hail.expr.ir.lowering.LoweringPass.apply(LoweringPass.scala:14)
	at is.hail.expr.ir.lowering.LoweringPass.apply$(LoweringPass.scala:13)
	at is.hail.expr.ir.lowering.LowerOrInterpretNonCompilablePass$.apply(LoweringPass.scala:64)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1(LoweringPipeline.scala:15)
	at is.hail.expr.ir.lowering.LoweringPipeline.$anonfun$apply$1$adapted(LoweringPipeline.scala:13)
	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
	at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:13)
	at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:47)
	at is.hail.backend.spark.SparkBackend._execute(SparkBackend.scala:381)
	at is.hail.backend.spark.SparkBackend.$anonfun$executeEncode$2(SparkBackend.scala:417)
	at is.hail.backend.ExecuteContext$.$anonfun$scoped$3(ExecuteContext.scala:47)
	at is.hail.utils.package$.using(package.scala:638)
	at is.hail.backend.ExecuteContext$.$anonfun$scoped$2(ExecuteContext.scala:47)
	at is.hail.utils.package$.using(package.scala:638)
	at is.hail.annotations.RegionPool$.scoped(RegionPool.scala:17)
	at is.hail.backend.ExecuteContext$.scoped(ExecuteContext.scala:46)
	at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:275)
	at is.hail.backend.spark.SparkBackend.$anonfun$executeEncode$1(SparkBackend.scala:414)
	at is.hail.utils.ExecutionTimer$.time(ExecutionTimer.scala:52)
	at is.hail.backend.spark.SparkBackend.executeEncode(SparkBackend.scala:413)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.base/java.lang.Thread.run(Thread.java:834)



Hail version: 0.2.78-b17627756568
Error summary: IllegalArgumentException: requirement failed

You can set flags after initializing. If you set flags before initializing, that will initialize automatically (and prevent you from re-initializing with different parameters). The solution here is to move the _set_flags line after the hl.init line, I think.