… continued.
The error below repeats several times; the full sequence ends with the final traceback shown at the bottom (a RecursionError).
Initializing Hail with default parameters...
Initializing Hail with default parameters...
Initializing Hail with default parameters...
Initializing Hail with default parameters...
Initializing Hail with default parameters...
Traceback (most recent call last):
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/py4j_backend.py", line 25, in deco
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/protocol.py", line 326, in get_return_value
raise Py4JJavaError(
py4j.protocol.Py4JJavaError: An error occurred while calling o34.exists.
: java.net.ConnectException: Call From Bens-MBP/192.168.132.162 to localhost:9000 failed on connection exception: java.net.ConnectException: Connection refused; For more details see: http://wiki.apache.org/hadoop/ConnectionRefused
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:913)
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:828)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1616)
at org.apache.hadoop.ipc.Client.call(Client.java:1558)
at org.apache.hadoop.ipc.Client.call(Client.java:1455)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:242)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:129)
at com.sun.proxy.$Proxy33.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:965)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy34.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1739)
at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1753)
at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1750)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1765)
at is.hail.io.fs.HadoopFS.fileStatus(HadoopFS.scala:189)
at is.hail.io.fs.FS.exists(FS.scala:465)
at is.hail.io.fs.FS.exists$(FS.scala:463)
at is.hail.io.fs.HadoopFS.exists(HadoopFS.scala:81)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:716)
at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:205)
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:586)
at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:711)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:833)
at org.apache.hadoop.ipc.Client$Connection.access$3800(Client.java:414)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1677)
at org.apache.hadoop.ipc.Client.call(Client.java:1502)
... 36 more
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/py4j_backend.py", line 25, in deco
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/protocol.py", line 326, in get_return_value
raise Py4JJavaError(
py4j.protocol.Py4JJavaError: An error occurred while calling z:is.hail.backend.spark.SparkBackend.apply.
: java.lang.IllegalArgumentException: requirement failed
at scala.Predef$.require(Predef.scala:268)
at is.hail.backend.spark.SparkBackend$.apply(SparkBackend.scala:232)
at is.hail.backend.spark.SparkBackend.apply(SparkBackend.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
at java.lang.Thread.run(Thread.java:748)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/py4j_backend.py", line 25, in deco
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/protocol.py", line 326, in get_return_value
raise Py4JJavaError(
py4j.protocol.Py4JJavaError: An error occurred while calling z:is.hail.backend.spark.SparkBackend.apply.
: java.lang.IllegalArgumentException: requirement failed
at scala.Predef$.require(Predef.scala:268)
at is.hail.backend.spark.SparkBackend$.apply(SparkBackend.scala:232)
at is.hail.backend.spark.SparkBackend.apply(SparkBackend.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
at java.lang.Thread.run(Thread.java:748)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/py4j_backend.py", line 25, in deco
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/protocol.py", line 326, in get_return_value
.....
File "<decorator-gen-1734>", line 2, in init
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/typecheck/check.py", line 584, in wrapper
return __original_func(*args_, **kwargs_)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/context.py", line 353, in init
return init_spark(
^^^^^^^^^^^
File "<decorator-gen-1736>", line 2, in init_spark
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/typecheck/check.py", line 584, in wrapper
return __original_func(*args_, **kwargs_)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/context.py", line 436, in init_spark
backend = SparkBackend(
^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/spark_backend.py", line 208, in __init__
self._jbackend = hail_package.backend.spark.SparkBackend.apply(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/java_gateway.py", line 1321, in __call__
return_value = get_return_value(
^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/py4j_backend.py", line 33, in deco
tpl = Env.jutils().handleForPython(e.java_exception)
^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 58, in jutils
return Env.py4j_backend('Env.jutils').utils_package_object()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 93, in py4j_backend
b = Env.backend()
^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 88, in backend
return Env.hc()._backend
^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 66, in hc
init()
File "<decorator-gen-1734>", line 2, in init
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/typecheck/check.py", line 584, in wrapper
return __original_func(*args_, **kwargs_)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/context.py", line 353, in init
return init_spark(
^^^^^^^^^^^
File "<decorator-gen-1736>", line 2, in init_spark
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/typecheck/check.py", line 584, in wrapper
return __original_func(*args_, **kwargs_)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/context.py", line 436, in init_spark
backend = SparkBackend(
^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/spark_backend.py", line 208, in __init__
self._jbackend = hail_package.backend.spark.SparkBackend.apply(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/java_gateway.py", line 1321, in __call__
return_value = get_return_value(
^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/py4j_backend.py", line 33, in deco
tpl = Env.jutils().handleForPython(e.java_exception)
^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 58, in jutils
return Env.py4j_backend('Env.jutils').utils_package_object()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 93, in py4j_backend
b = Env.backend()
^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 88, in backend
return Env.hc()._backend
^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/utils/java.py", line 66, in hc
init()
File "<decorator-gen-1734>", line 2, in init
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/typecheck/check.py", line 584, in wrapper
return __original_func(*args_, **kwargs_)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/context.py", line 353, in init
return init_spark(
^^^^^^^^^^^
File "<decorator-gen-1736>", line 2, in init_spark
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/typecheck/check.py", line 584, in wrapper
return __original_func(*args_, **kwargs_)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/context.py", line 436, in init_spark
backend = SparkBackend(
^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/hail/backend/spark_backend.py", line 141, in __init__
conf = pyspark.SparkConf()
^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/pyspark/conf.py", line 131, in __init__
self._jconf = _jvm.SparkConf(loadDefaults)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/java_gateway.py", line 1571, in __call__
(new_args, temp_args) = self._get_args(args)
^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/java_gateway.py", line 1556, in _get_args
if converter.can_convert(arg):
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/bensaini/anaconda3/lib/python3.11/site-packages/py4j/java_collections.py", line 490, in can_convert
return isinstance(object, Set)
^^^^^^^^^^^^^^^^^^^^^^^
File "<frozen abc>", line 119, in __instancecheck__
RecursionError: maximum recursion depth exceeded in comparison