I'm using hailctl to launch a Google Cloud Dataproc cluster, and after connecting to the notebook I get this error immediately when I call hl.init().
I'm guessing I need to upgrade something to make the versions compatible again? Any help is appreciated, I'm stuck!
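For reference, I start the cluster with `hailctl dataproc start <cluster-name>` and connect with `hailctl dataproc connect <cluster-name> notebook` (cluster name and extra flags omitted here; nothing unusual in them). Then the very first cell fails:

```python
import hail as hl

hl.init()  # raises the Py4JJavaError below
```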
Traceback:
Py4JJavaError Traceback (most recent call last)
in <cell line: 1>()
----> 1 hl.init()
in init(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, backend, driver_cores, driver_memory, worker_cores, worker_memory, gcs_requester_pays_configuration, regions)
/opt/conda/miniconda3/lib/python3.10/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
575 def wrapper(__original_func, *args, **kwargs):
576 args, kwargs = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577 return __original_func(*args, **kwargs)
578
579 return wrapper
/opt/conda/miniconda3/lib/python3.10/site-packages/hail/context.py in init(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, backend, driver_cores, driver_memory, worker_cores, worker_memory, gcs_requester_pays_configuration, regions)
346 ))
347 if backend == 'spark':
--> 348 return init_spark(
349 sc=sc,
350 app_name=app_name,
in init_spark(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, gcs_requester_pays_configuration)
/opt/conda/miniconda3/lib/python3.10/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
575 def wrapper(__original_func, *args, **kwargs):
576 args, kwargs = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577 return __original_func(*args, **kwargs)
578
579 return wrapper
/opt/conda/miniconda3/lib/python3.10/site-packages/hail/context.py in init_spark(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, gcs_requester_pays_configuration)
425 app_name = app_name or 'Hail'
426 gcs_requester_pays_project, gcs_requester_pays_buckets = convert_gcs_requester_pays_configuration_to_hadoop_conf_style(gcs_requester_pays_configuration)
--> 427 backend = SparkBackend(
428 idempotent, sc, spark_conf, app_name, master, local, log,
429 quiet, append, min_block_size, branching_factor, tmpdir, local_tmpdir,
/opt/conda/miniconda3/lib/python3.10/site-packages/hail/backend/spark_backend.py in __init__(self, idempotent, sc, spark_conf, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmpdir, local_tmpdir, skip_logging_configuration, optimizer_iterations, gcs_requester_pays_project, gcs_requester_pays_buckets)
182 self._jbackend, log, True, append, branching_factor, skip_logging_configuration, optimizer_iterations)
183 else:
--> 184 self._jbackend = hail_package.backend.spark.SparkBackend.apply(
185 jsc, app_name, master, local, True, min_block_size, tmpdir, local_tmpdir,
186 gcs_requester_pays_project, gcs_requester_pays_buckets)
/usr/lib/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py in __call__(self, *args)
1319
1320 answer = self.gateway_client.send_command(command)
--> 1321 return_value = get_return_value(
1322 answer, self.gateway_client, self.target_id, self.name)
1323
/usr/lib/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
327 "An error occurred while calling {0}{1}{2}.\n".
328 format(target_id, ".", name), value)
Py4JJavaError: An error occurred while calling z:is.hail.backend.spark.SparkBackend.apply.
: java.lang.IllegalArgumentException: requirement failed
at scala.Predef$.require(Predef.scala:268)
at is.hail.backend.spark.SparkBackend$.apply(SparkBackend.scala:227)
at is.hail.backend.spark.SparkBackend.apply(SparkBackend.scala)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
at java.base/java.lang.Thread.run(Thread.java:829)
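In case it helps with diagnosis: the bare `requirement failed` is from a `require(...)` inside `is.hail.backend.spark.SparkBackend.apply`, and my guess is it's a Hail/Spark version mismatch on the Dataproc image. Here's the quick check I can run in the notebook before `hl.init()` to compare the versions involved (just a sketch of my own debugging, assuming a version mismatch really is the problem):

```python
import hail as hl
import pyspark
from pyspark.sql import SparkSession

# Version of the pip-installed Hail package; safe to call before hl.init().
print("hail:", hl.version())

# PySpark version installed on the driver alongside Hail.
print("pyspark (driver):", pyspark.__version__)

# Spark version the cluster is actually running.
print("spark (cluster):", SparkSession.builder.getOrCreate().version)
```

Happy to post the output of that if it's useful.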