NoClassDefFoundError with add_sequence

Hi Hail team,

I’m getting a strange error when trying to run the following code:

# Attach the GRCh37 FASTA sequence to the reference genome
rg = hl.get_reference('GRCh37')
rg.add_sequence(
    'gs://hail-common/references/human_g1k_v37.fasta.gz',
    'gs://hail-common/references/human_g1k_v37.fasta.fai'
)
# Annotate each locus with its trinucleotide context (one base on each side)
context_ht = context_ht.annotate(
    context=hl.get_sequence(
        context_ht.locus.contig,
        context_ht.locus.position,
        before=1,
        after=1,
        reference_genome=rg,
    )
)
context_ht.context.show(5)

Error:

Error summary: NoClassDefFoundError: Could not initialize class __C209RGContainer_GRCh37

Am I doing something wrong? I’d appreciate any insight.
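
For context, the Java trace below shows the failure happening inside Hail's compiled code on the executors ("Lost task 207.19 ... executor 5"), not on the driver. One check I can think of is a driver-side evaluation of a single lookup, continuing from the snippet above. This is a hedged sketch using standard Hail calls (hl.eval, ReferenceGenome.has_sequence); I haven't confirmed it isolates the problem:

# Sanity check on the driver only: is the sequence attached, and does a
# single lookup compile and run outside the distributed job?
print(rg.has_sequence())  # expect True after add_sequence above
print(hl.eval(
    hl.get_sequence('1', 10000, before=1, after=1, reference_genome=rg)
))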

Full error:

2020-08-27 20:40:00 Hail: INFO: reading 890 of 62164 data partitions
---------------------------------------------------------------------------
FatalError                                Traceback (most recent call last)
/opt/conda/miniconda3/lib/python3.6/site-packages/IPython/core/formatters.py in __call__(self, obj)
    700                 type_pprinters=self.type_printers,
    701                 deferred_pprinters=self.deferred_printers)
--> 702             printer.pretty(obj)
    703             printer.flush()
    704             return stream.getvalue()

/opt/conda/miniconda3/lib/python3.6/site-packages/IPython/lib/pretty.py in pretty(self, obj)
    392                         if cls is not object \
    393                                 and callable(cls.__dict__.get('__repr__')):
--> 394                             return _repr_pprint(obj, self, cycle)
    395 
    396             return _default_pprint(obj, self, cycle)

/opt/conda/miniconda3/lib/python3.6/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
    698     """A pprint that just redirects to the normal repr function."""
    699     # Find newlines and replace them with p.break_()
--> 700     output = repr(obj)
    701     lines = output.splitlines()
    702     with p.group():

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in __repr__(self)
   1283 
   1284         def __repr__(self):
-> 1285             return self.__str__()
   1286 
   1287         def data(self):

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in __str__(self)
   1280 
   1281         def __str__(self):
-> 1282             return self._ascii_str()
   1283 
   1284         def __repr__(self):

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in _ascii_str(self)
   1306                 return s
   1307 
-> 1308             rows, has_more, dtype = self.data()
   1309             fields = list(dtype)
   1310             trunc_fields = [trunc(f) for f in fields]

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in data(self)
   1290                 row_dtype = t.row.dtype
   1291                 t = t.select(**{k: hl._showstr(v) for (k, v) in t.row.items()})
-> 1292                 rows, has_more = t._take_n(self.n)
   1293                 self._data = (rows, has_more, row_dtype)
   1294             return self._data

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in _take_n(self, n)
   1437             has_more = False
   1438         else:
-> 1439             rows = self.take(n + 1)
   1440             has_more = len(rows) > n
   1441             rows = rows[:n]

<decorator-gen-1109> in take(self, n, _localize)

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    612     def wrapper(__original_func, *args, **kwargs):
    613         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614         return __original_func(*args_, **kwargs_)
    615 
    616     return wrapper

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in take(self, n, _localize)
   2101         """
   2102 
-> 2103         return self.head(n).collect(_localize)
   2104 
   2105     @typecheck_method(n=int)

<decorator-gen-1103> in collect(self, _localize)

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
    612     def wrapper(__original_func, *args, **kwargs):
    613         args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 614         return __original_func(*args_, **kwargs_)
    615 
    616     return wrapper

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/table.py in collect(self, _localize)
   1900         e = construct_expr(rows_ir, hl.tarray(t.row.dtype))
   1901         if _localize:
-> 1902             return Env.backend().execute(e._ir)
   1903         else:
   1904             return e

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/backend/spark_backend.py in execute(self, ir, timed)
    294         jir = self._to_java_value_ir(ir)
    295         # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
--> 296         result = json.loads(self._jhc.backend().executeJSON(jir))
    297         value = ir.typ._from_json(result['value'])
    298         timings = result['timings']

/usr/lib/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1255         answer = self.gateway_client.send_command(command)
   1256         return_value = get_return_value(
-> 1257             answer, self.gateway_client, self.target_id, self.name)
   1258 
   1259         for temp_arg in temp_args:

/opt/conda/miniconda3/lib/python3.6/site-packages/hail/backend/spark_backend.py in deco(*args, **kwargs)
     39             raise FatalError('%s\n\nJava stack trace:\n%s\n'
     40                              'Hail version: %s\n'
---> 41                              'Error summary: %s' % (deepest, full, hail.__version__, deepest)) from None
     42         except pyspark.sql.utils.CapturedException as e:
     43             raise FatalError('%s\n\nJava stack trace:\n%s\n'

FatalError: NoClassDefFoundError: Could not initialize class __C209RGContainer_GRCh37

Java stack trace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 207 in stage 6.0 failed 20 times, most recent failure: Lost task 207.19 in stage 6.0 (TID 483, kc-w-6.c.broad-mpg-gnomad.internal, executor 5): java.lang.NoClassDefFoundError: Could not initialize class __C209RGContainer_GRCh37
	at __C194Compiled.__m208isValidLocus(Unknown Source)
	at __C194Compiled.apply(Unknown Source)
	at is.hail.expr.ir.TableMapRows$$anonfun$69$$anonfun$apply$3.apply$mcJJ$sp(TableIR.scala:1506)
	at is.hail.expr.ir.TableMapRows$$anonfun$69$$anonfun$apply$3.apply(TableIR.scala:1505)
	at is.hail.expr.ir.TableMapRows$$anonfun$69$$anonfun$apply$3.apply(TableIR.scala:1505)
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:463)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
	at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:66)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
	at is.hail.utils.package$.getIteratorSizeWithMaxN(package.scala:395)
	at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
	at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
	at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:355)
	at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:353)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:123)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1892)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1880)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1879)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1879)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:927)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:927)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:927)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2113)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2062)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2051)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:738)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101)
	at is.hail.sparkextras.ContextRDD.runJob(ContextRDD.scala:351)
	at is.hail.rvd.RVD$$anonfun$13.apply(RVD.scala:526)
	at is.hail.rvd.RVD$$anonfun$13.apply(RVD.scala:526)
	at is.hail.utils.PartitionCounts$.incrementalPCSubsetOffset(PartitionCounts.scala:73)
	at is.hail.rvd.RVD.head(RVD.scala:525)
	at is.hail.expr.ir.TableSubset$class.execute(TableIR.scala:954)
	at is.hail.expr.ir.TableHead.execute(TableIR.scala:960)
	at is.hail.expr.ir.TableMapRows.execute(TableIR.scala:1475)
	at is.hail.expr.ir.Interpret$.run(Interpret.scala:819)
	at is.hail.expr.ir.Interpret$.alreadyLowered(Interpret.scala:53)
	at is.hail.expr.ir.InterpretNonCompilable$.interpretAndCoerce$1(InterpretNonCompilable.scala:16)
	at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:53)
	at is.hail.expr.ir.InterpretNonCompilable$$anonfun$1.apply(InterpretNonCompilable.scala:25)
	at is.hail.expr.ir.InterpretNonCompilable$$anonfun$1.apply(InterpretNonCompilable.scala:25)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at is.hail.expr.ir.InterpretNonCompilable$.rewriteChildren$1(InterpretNonCompilable.scala:25)
	at is.hail.expr.ir.InterpretNonCompilable$.is$hail$expr$ir$InterpretNonCompilable$$rewrite$1(InterpretNonCompilable.scala:54)
	at is.hail.expr.ir.InterpretNonCompilable$.apply(InterpretNonCompilable.scala:58)
	at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.transform(LoweringPass.scala:67)
	at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
	at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3$$anonfun$1.apply(LoweringPass.scala:15)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:69)
	at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:15)
	at is.hail.expr.ir.lowering.LoweringPass$$anonfun$apply$3.apply(LoweringPass.scala:13)
	at is.hail.utils.ExecutionTimer.time(ExecutionTimer.scala:69)
	at is.hail.expr.ir.lowering.LoweringPass$class.apply(LoweringPass.scala:13)
	at is.hail.expr.ir.lowering.InterpretNonCompilablePass$.apply(LoweringPass.scala:62)
	at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:14)
	at is.hail.expr.ir.lowering.LoweringPipeline$$anonfun$apply$1.apply(LoweringPipeline.scala:12)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
	at is.hail.expr.ir.lowering.LoweringPipeline.apply(LoweringPipeline.scala:12)
	at is.hail.expr.ir.CompileAndEvaluate$._apply(CompileAndEvaluate.scala:28)
	at is.hail.backend.spark.SparkBackend.is$hail$backend$spark$SparkBackend$$_execute(SparkBackend.scala:334)
	at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:321)
	at is.hail.backend.spark.SparkBackend$$anonfun$execute$1.apply(SparkBackend.scala:320)
	at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:20)
	at is.hail.expr.ir.ExecuteContext$$anonfun$scoped$1.apply(ExecuteContext.scala:18)
	at is.hail.utils.package$.using(package.scala:609)
	at is.hail.annotations.Region$.scoped(Region.scala:18)
	at is.hail.expr.ir.ExecuteContext$.scoped(ExecuteContext.scala:18)
	at is.hail.backend.spark.SparkBackend.withExecuteContext(SparkBackend.scala:230)
	at is.hail.backend.spark.SparkBackend.execute(SparkBackend.scala:320)
	at is.hail.backend.spark.SparkBackend.executeJSON(SparkBackend.scala:340)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.lang.Thread.run(Thread.java:748)

java.lang.NoClassDefFoundError: Could not initialize class __C209RGContainer_GRCh37
	at __C194Compiled.__m208isValidLocus(Unknown Source)
	at __C194Compiled.apply(Unknown Source)
	at is.hail.expr.ir.TableMapRows$$anonfun$69$$anonfun$apply$3.apply$mcJJ$sp(TableIR.scala:1506)
	at is.hail.expr.ir.TableMapRows$$anonfun$69$$anonfun$apply$3.apply(TableIR.scala:1505)
	at is.hail.expr.ir.TableMapRows$$anonfun$69$$anonfun$apply$3.apply(TableIR.scala:1505)
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:463)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
	at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:66)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
	at is.hail.utils.package$.getIteratorSizeWithMaxN(package.scala:395)
	at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
	at is.hail.rvd.RVD$$anonfun$13$$anonfun$apply$10.apply(RVD.scala:526)
	at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:355)
	at is.hail.sparkextras.ContextRDD$$anonfun$runJob$1.apply(ContextRDD.scala:353)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
	at org.apache.spark.scheduler.Task.run(Task.scala:123)
	at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)




Hail version: 0.2.55-0d4ce0df2457
Error summary: NoClassDefFoundError: Could not initialize class __C209RGContainer_GRCh37

Never mind, I somehow fixed this!
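
In case it helps anyone who lands here later: I honestly can't say which step mattered, but restarting the Hail session and re-attaching the sequence before touching the table is roughly what I would try first. A hedged sketch, not a confirmed fix; the "stale session" explanation is my guess:

import hail as hl

# Guess, not a confirmed diagnosis: tear down the session so any stale
# compiled reference-genome classes are discarded, then re-attach the
# sequence before running anything on the table.
hl.stop()
hl.init()

rg = hl.get_reference('GRCh37')
if not rg.has_sequence():  # add_sequence errors if one is already attached
    rg.add_sequence(
        'gs://hail-common/references/human_g1k_v37.fasta.gz',
        'gs://hail-common/references/human_g1k_v37.fasta.fai',
    )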

This is weird and probably a bug.