Yep, here is one:
Py4JError Traceback (most recent call last)
in ()
----> 1 chr = chr.filter_cols(hl.is_defined(sample_table[chr.col_key]))
in filter_cols(self, expr, keep)
./Hail/hail-master/python/hail/typecheck/check.py in _typecheck(orig_func, *args, **kwargs)
484 def _typecheck(orig_func, *args, **kwargs):
485 args_, kwargs_ = check_all(orig_func, args, kwargs, checkers, is_method=True)
--> 486 return orig_func(*args_, **kwargs_)
487
488 return decorator(_typecheck)
./Hail/hail-master/python/hail/matrixtable.py in filter_cols(self, expr, keep)
1394
1395 m = MatrixTable(base._jvds.filterColsExpr(expr._ast.to_hql(), keep))
-> 1396 return cleanup(m)
1397
1398 @typecheck_method(expr=expr_bool, keep=bool)
./Hail/hail-master/python/hail/matrixtable.py in cleanup(matrix)
2336 def cleanup(matrix):
2337 remaining_uids = [uid for uid in all_uids if uid in matrix._fields]
-> 2338 return matrix.drop(*remaining_uids)
2339
2340 return left, cleanup
in drop(self, *exprs)
./Hail/hail-master/python/hail/typecheck/check.py in _typecheck(orig_func, *args, **kwargs)
484 def _typecheck(orig_func, *args, **kwargs):
485 args_, kwargs_ = check_all(orig_func, args, kwargs, checkers, is_method=True)
--> 486 return orig_func(*args_, **kwargs_)
487
488 return decorator(_typecheck)
./Hail/hail-master/python/hail/matrixtable.py in drop(self, *exprs)
1237 col_fields = [field for field in fields_to_drop if self._fields[field]._indices == self._col_indices]
1238 if col_fields:
-> 1239 m = m._select_cols("MatrixTable.drop", m.col.drop(*col_fields))
1240
1241 entry_fields = [field for field in fields_to_drop if self._fields[field]._indices == self._entry_indices]
in _select_cols(self, caller, s)
./Hail/hail-master/python/hail/typecheck/check.py in _typecheck(orig_func, *args, **kwargs)
484 def _typecheck(orig_func, *args, **kwargs):
485 args_, kwargs_ = check_all(orig_func, args, kwargs, checkers, is_method=True)
--> 486 return orig_func(*args_, **kwargs_)
487
488 return decorator(_typecheck)
./Hail/hail-master/python/hail/matrixtable.py in _select_cols(self, caller, s)
2693 base, cleanup = self._process_joins(s)
2694 analyze(caller, s, self._col_indices, {self._row_axis})
-> 2695 return cleanup(MatrixTable(base._jvds.selectCols(s._ast.to_hql())))
2696
2697 @typecheck_method(caller=str, s=expr_struct())
./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py in __call__(self, *args)
1131 answer = self.gateway_client.send_command(command)
1132 return_value = get_return_value(
-> 1133 answer, self.gateway_client, self.target_id, self.name)
1134
1135 for temp_arg in temp_args:
./Hail/hail-master/python/hail/utils/java.py in deco(*args, **kwargs)
182 import pyspark
183 try:
--> 184 return f(*args, **kwargs)
185 except py4j.protocol.Py4JJavaError as e:
186 s = e.java_exception.toString()
./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
321 raise Py4JError(
322 "An error occurred while calling {0}{1}{2}. Trace:\n{3}\n".
--> 323 format(target_id, ".", name, value))
324 else:
325 raise Py4JError(
Py4JError: An error occurred while calling o95.selectCols. Trace:
py4j.Py4JException: Method selectCols([class java.lang.String]) does not exist
at py4j.reflection.ReflectionEngine.getMethod(ReflectionEngine.java:318)
at py4j.reflection.ReflectionEngine.getMethod(ReflectionEngine.java:326)
at py4j.Gateway.invoke(Gateway.java:272)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:748)
And another:
ERROR:root:Exception while sending command.
Traceback (most recent call last):
File "./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1035, in send_command
raise Py4JNetworkError("Answer from Java side is empty")
py4j.protocol.Py4JNetworkError: Answer from Java side is empty
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 883, in send_command
response = connection.send_command(command)
File "./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py", line 1040, in send_command
"Error while receiving", e, proto.ERROR_ON_RECEIVE)
py4j.protocol.Py4JNetworkError: Error while receiving
Py4JError Traceback (most recent call last)
in ()
----> 1 chr = chr.filter_cols(hl.is_defined(sample_table[chr.col_key]))
in filter_cols(self, expr, keep)
./Hail/hail-master/python/hail/typecheck/check.py in _typecheck(orig_func, *args, **kwargs)
484 def _typecheck(orig_func, *args, **kwargs):
485 args_, kwargs_ = check_all(orig_func, args, kwargs, checkers, is_method=True)
--> 486 return orig_func(*args_, **kwargs_)
487
488 return decorator(_typecheck)
./Hail/hail-master/python/hail/matrixtable.py in filter_cols(self, expr, keep)
1390 Filtered matrix table.
1391 """
-> 1392 base, cleanup = self._process_joins(expr)
1393 analyze('MatrixTable.filter_cols', expr, self._col_indices, {self._row_axis})
1394
./Hail/hail-master/python/hail/matrixtable.py in _process_joins(self, *exprs)
2330 for j in sorted(list(e._joins), key = lambda j: j.idx): # Make sure joins happen in order
2331 if j.uid not in used_uids:
-> 2332 left = j.join_function(left)
2333 all_uids.extend(j.temp_vars)
2334 used_uids.add(j.uid)
./Hail/hail-master/python/hail/table.py in <lambda>(left)
1237 exprs[i] is src.col_key[i] for i in range(len(exprs))]):
1238 # key is already correct
-> 1239 joiner = lambda left: MatrixTable(left._jvds.annotateColsTable(right._jt, uid))
1240 else:
1241 index_uid = Env.get_uid()
./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/java_gateway.py in __call__(self, *args)
1131 answer = self.gateway_client.send_command(command)
1132 return_value = get_return_value(
-> 1133 answer, self.gateway_client, self.target_id, self.name)
1134
1135 for temp_arg in temp_args:
./Hail/hail-master/python/hail/utils/java.py in deco(*args, **kwargs)
182 import pyspark
183 try:
--> 184 return f(*args, **kwargs)
185 except py4j.protocol.Py4JJavaError as e:
186 s = e.java_exception.toString()
./Hail/spark-2.2.0-bin-hadoop2.7/python/lib/py4j-0.10.4-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
325 raise Py4JError(
326 "An error occurred while calling {0}{1}{2}".
--> 327 format(target_id, ".", name))
328 else:
329 type = answer[1]
Py4JError: An error occurred while calling o39.annotateColsTable
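
For reference, here is a minimal sketch of the call both traces start from (the `filter_cols` line at the top of each traceback). The file paths, and the assumption that `sample_table` is keyed by the same sample identifier as the MatrixTable's column key, are illustrative placeholders rather than details from the actual session:

```python
import hail as hl

hl.init()

# Hypothetical inputs: any MatrixTable plus a Table keyed by the same sample ID.
chr = hl.read_matrix_table('data/chr20.mt')       # placeholder path
sample_table = hl.read_table('data/samples.ht')   # placeholder path

# The line from both tracebacks: keep only the columns whose key is present
# in sample_table (a semi-join on the column key).
chr = chr.filter_cols(hl.is_defined(sample_table[chr.col_key]))
```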