Error while calling z:is.hail.backend.spark.SparkBackend.apply

Hi all!

I’m working on getting Hail 0.2.93 running on a Spark Environment (Version 3.1.1) with Scala 2.12.13 but am getting hit with the following error. Can anyone point me in the right direction on what might be going on?

After running:

import hail as hl
hl.init(sc)

I get the following:

An error was encountered:
An error occurred while calling z:is.hail.backend.spark.SparkBackend.apply.
: org.apache.spark.SparkException: Only one SparkContext should be running in this JVM (see SPARK-2243). The currently running SparkContext was created at:
org.apache.spark.SparkContext.getOrCreate(SparkContext.scala)
org.apache.livy.rsc.driver.SparkEntries.sc(SparkEntries.java:53)
org.apache.livy.rsc.driver.SparkEntries.sparkSession(SparkEntries.java:67)
org.apache.livy.repl.AbstractSparkInterpreter.postStart(AbstractSparkInterpreter.scala:69)
org.apache.livy.repl.SparkInterpreter.$anonfun$start$1(SparkInterpreter.scala:90)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.livy.repl.AbstractSparkInterpreter.restoreContextClassLoader(AbstractSparkInterpreter.scala:340)
org.apache.livy.repl.SparkInterpreter.start(SparkInterpreter.scala:63)
org.apache.livy.repl.Session.$anonfun$start$1(Session.scala:128)
scala.concurrent.Future$.$anonfun$apply$1(Future.scala:659)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:750)
at org.apache.spark.SparkContext$.$anonfun$assertNoOtherContextIsRunning$2(SparkContext.scala:2672)
at scala.Option.foreach(Option.scala:407)
at org.apache.spark.SparkContext$.assertNoOtherContextIsRunning(SparkContext.scala:2669)
at org.apache.spark.SparkContext$.markPartiallyConstructed(SparkContext.scala:2759)
at org.apache.spark.SparkContext.&lt;init&gt;(SparkContext.scala:96)
at is.hail.backend.spark.SparkBackend$.configureAndCreateSparkContext(SparkBackend.scala:146)
at is.hail.backend.spark.SparkBackend$.apply(SparkBackend.scala:222)
at is.hail.backend.spark.SparkBackend.apply(SparkBackend.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:238)
at java.lang.Thread.run(Thread.java:750)

Traceback (most recent call last):
  File "&lt;string&gt;", line 2, in init
  File "/usr/local/lib/python3.7/site-packages/hail/typecheck/check.py", line 577, in wrapper
    return original_func(*args, **kwargs)
  File "/usr/local/lib/python3.7/site-packages/hail/context.py", line 320, in init
    skip_logging_configuration=skip_logging_configuration
  File "&lt;string&gt;", line 2, in init_spark
  File "/usr/local/lib/python3.7/site-packages/hail/typecheck/check.py", line 577, in wrapper
    return original_func(*args, **kwargs)
  File "/usr/local/lib/python3.7/site-packages/hail/context.py", line 379, in init_spark
    skip_logging_configuration, optimizer_iterations)
  File "/usr/local/lib/python3.7/site-packages/hail/backend/spark_backend.py", line 181, in __init__
    jsc, app_name, master, local, True, min_block_size, tmpdir, local_tmpdir)
  File "/usr/lib/spark/python/lib/py4j-0.10.9-src.zip/py4j/java_gateway.py", line 1305, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/usr/lib/spark/python/lib/pyspark.zip/pyspark/sql/utils.py", line 111, in deco
    return f(*a, **kw)
  File "/usr/lib/spark/python/lib/py4j-0.10.9-src.zip/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
py4j.protocol.Py4JJavaError: An error occurred while calling z:is.hail.backend.spark.SparkBackend.apply.
: org.apache.spark.SparkException: Only one SparkContext should be running in this JVM (see SPARK-2243). The currently running SparkContext was created at:
org.apache.spark.SparkContext.getOrCreate(SparkContext.scala)
org.apache.livy.rsc.driver.SparkEntries.sc(SparkEntries.java:53)
org.apache.livy.rsc.driver.SparkEntries.sparkSession(SparkEntries.java:67)
org.apache.livy.repl.AbstractSparkInterpreter.postStart(AbstractSparkInterpreter.scala:69)
org.apache.livy.repl.SparkInterpreter.$anonfun$start$1(SparkInterpreter.scala:90)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.livy.repl.AbstractSparkInterpreter.restoreContextClassLoader(AbstractSparkInterpreter.scala:340)
org.apache.livy.repl.SparkInterpreter.start(SparkInterpreter.scala:63)
org.apache.livy.repl.Session.$anonfun$start$1(Session.scala:128)
scala.concurrent.Future$.$anonfun$apply$1(Future.scala:659)
scala.util.Success.$anonfun$map$1(Try.scala:255)
scala.util.Success.map(Try.scala:213)
scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:750)
at org.apache.spark.SparkContext$.$anonfun$assertNoOtherContextIsRunning$2(SparkContext.scala:2672)
at scala.Option.foreach(Option.scala:407)
at org.apache.spark.SparkContext$.assertNoOtherContextIsRunning(SparkContext.scala:2669)
at org.apache.spark.SparkContext$.markPartiallyConstructed(SparkContext.scala:2759)
at org.apache.spark.SparkContext.&lt;init&gt;(SparkContext.scala:96)
at is.hail.backend.spark.SparkBackend$.configureAndCreateSparkContext(SparkBackend.scala:146)
at is.hail.backend.spark.SparkBackend$.apply(SparkBackend.scala:222)
at is.hail.backend.spark.SparkBackend.apply(SparkBackend.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:238)
at java.lang.Thread.run(Thread.java:750)

Hey @ryer! This is a known bug that was fixed in Hail 0.2.95 — upgrading should resolve it. The fix: [query] fix init(sc=sc): pass the spark context to init_spark (#11828) · hail-is/hail@2ef3722 · GitHub