Hi there! I ran into a ConnectionPoolTimeoutException a couple of times today: first while trying to summarize a Hail table, and then again while trying to write that table to a database within DNAnexus. I’d hoped that saving the table and reading it back in would speed things up, but that clearly didn’t solve the problem.

Showing a few rows of this table took about 15-20 minutes, which was expected, since I’d thrown a whole bunch of annotations and filters at it first. I then added a couple more annotations I’d forgotten earlier and tried to summarize the table, then write/read it with the checkpoint command; that’s when I started getting this timeout error. Memory and storage don’t seem to be the issue as far as I can tell. Does this look like it originates on the Hail side of things, or is it more likely a DNAnexus issue? Thanks!
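For context, here’s a minimal sketch of the sequence of calls that triggers this. The path, output URL, and the specific annotations/filters are placeholders, not the real ones; only the final checkpoint call matches the traceback below:

```python
import hail as hl

hl.init()  # skip if Hail is already initialized in the JupyterLab session

# Hypothetical path; the real table lives in a DNAnexus database
annotated_samples_cast = hl.read_table('dnax://mydb/annotated_samples.ht')

# Stand-ins for the annotations and filters applied to the table
annotated_samples_cast = annotated_samples_cast.annotate(new_anno=hl.rand_unif(0, 1))
annotated_samples_cast = annotated_samples_cast.filter(annotated_samples_cast.new_anno > 0.01)

annotated_samples_cast.show(5)   # this took ~15-20 minutes, as expected
annotated_samples_cast.summarize()  # first timeout happened around here

# Hypothetical output URL; this is the call in the traceback below
url = 'dnax://mydb/annotated_samples_checkpoint.ht'
annotated_samples_cast = annotated_samples_cast.checkpoint(url)
```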
First half of the error (too large to fit in one post):
2022-10-11 19:28:09 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:28:13 Hail: INFO: Coerced sorted dataset
2022-10-11 19:29:27 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:29:30 Hail: INFO: Coerced sorted dataset
2022-10-11 19:31:55 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:34:05 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:34:12 Hail: INFO: Coerced sorted dataset
2022-10-11 19:34:37 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:34:40 Hail: INFO: Coerced sorted dataset
2022-10-11 19:35:03 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:35:05 Hail: INFO: Coerced sorted dataset
2022-10-11 19:35:25 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:35:27 Hail: INFO: Coerced sorted dataset
2022-10-11 19:35:46 Hail: INFO: Ordering unsorted dataset with network shuffle
2022-10-11 19:35:50 Hail: INFO: Coerced sorted dataset
---------------------------------------------------------------------------
FatalError Traceback (most recent call last)
<ipython-input-44-aea2b527eab8> in <module>
----> 1 annotated_samples_cast.checkpoint(url)
<decorator-gen-1135> in checkpoint(self, output, overwrite, stage_locally, _codec_spec, _read_if_exists, _intervals, _filter_intervals)
/opt/conda/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
575 def wrapper(__original_func, *args, **kwargs):
576 args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577 return __original_func(*args_, **kwargs_)
578
579 return wrapper
/opt/conda/lib/python3.6/site-packages/hail/table.py in checkpoint(self, output, overwrite, stage_locally, _codec_spec, _read_if_exists, _intervals, _filter_intervals)
1236
1237 if not _read_if_exists or not hl.hadoop_exists(f'{output}/_SUCCESS'):
-> 1238 self.write(output=output, overwrite=overwrite, stage_locally=stage_locally, _codec_spec=_codec_spec)
1239 return hl.read_table(output, _intervals=_intervals, _filter_intervals=_filter_intervals)
1240
<decorator-gen-1137> in write(self, output, overwrite, stage_locally, _codec_spec)
/opt/conda/lib/python3.6/site-packages/hail/typecheck/check.py in wrapper(__original_func, *args, **kwargs)
575 def wrapper(__original_func, *args, **kwargs):
576 args_, kwargs_ = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 577 return __original_func(*args_, **kwargs_)
578
579 return wrapper
/opt/conda/lib/python3.6/site-packages/hail/table.py in write(self, output, overwrite, stage_locally, _codec_spec)
1269 """
1270
-> 1271 Env.backend().execute(ir.TableWrite(self._tir, ir.TableNativeWriter(output, overwrite, stage_locally, _codec_spec)))
1272
1273 def _show(self, n, width, truncate, types):
/opt/conda/lib/python3.6/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
108 raise HailUserError(message_and_trace) from None
109
--> 110 raise e
/opt/conda/lib/python3.6/site-packages/hail/backend/py4j_backend.py in execute(self, ir, timed)
84 # print(self._hail_package.expr.ir.Pretty.apply(jir, True, -1))
85 try:
---> 86 result_tuple = self._jhc.backend().executeEncode(jir, stream_codec)
87 (result, timings) = (result_tuple._1(), result_tuple._2())
88 value = ir.typ._from_encoding(result)
/cluster/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py in __call__(self, *args)
1255 answer = self.gateway_client.send_command(command)
1256 return_value = get_return_value(
-> 1257 answer, self.gateway_client, self.target_id, self.name)
1258
1259 for temp_arg in temp_args:
/opt/conda/lib/python3.6/site-packages/hail/backend/py4j_backend.py in deco(*args, **kwargs)
29 raise FatalError('%s\n\nJava stack trace:\n%s\n'
30 'Hail version: %s\n'
---> 31 'Error summary: %s' % (deepest, full, hail.__version__, deepest), error_id) from None
32 except pyspark.sql.utils.CapturedException as e:
33 raise FatalError('%s\n\nJava stack trace:\n%s\n'
FatalError: ConnectionPoolTimeoutException: Timeout waiting for connection from pool
Java stack trace:
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 126.0 failed 4 times, most recent failure: Lost task 0.3 in stage 126.0 (TID 5038, ip-10-60-148-154.eu-west-2.compute.internal, executor 0): com.amazonaws.AmazonClientException: Unable to execute HTTP request: Timeout waiting for connection from pool
at com.amazonaws.http.AmazonHttpClient.executeHelper(AmazonHttpClient.java:454)
at com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:232)
at com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:3528)
at com.amazonaws.services.s3.AmazonS3Client.getObjectMetadata(AmazonS3Client.java:976)
at com.amazonaws.services.s3.AmazonS3Client.getObjectMetadata(AmazonS3Client.java:956)
at org.apache.hadoop.fs.s3a.S3AFileSystem.getFileStatus(S3AFileSystem.java:1096)
at org.apache.hadoop.fs.s3a.S3AFileSystem.open(S3AFileSystem.java:577)
at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:769)
at is.hail.io.fs.HadoopFS.openNoCompression(HadoopFS.scala:83)
at is.hail.io.fs.FS$class.open(FS.scala:139)
at is.hail.io.fs.HadoopFS.open(HadoopFS.scala:70)
at is.hail.io.fs.FS$class.open(FS.scala:151)
at is.hail.io.fs.HadoopFS.open(HadoopFS.scala:70)
at is.hail.io.fs.FS$class.open(FS.scala:148)
at is.hail.io.fs.HadoopFS.open(HadoopFS.scala:70)
at is.hail.HailContext$$anon$1.compute(HailContext.scala:274)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
at is.hail.sparkextras.ContextRDD.iterator(ContextRDD.scala:390)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anon$1$$anonfun$1.apply(RepartitionedOrderedRDD2.scala:66)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anon$1$$anonfun$1.apply(RepartitionedOrderedRDD2.scala:66)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anon$1.dropLeft(RepartitionedOrderedRDD2.scala:76)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anon$1.<init>(RepartitionedOrderedRDD2.scala:73)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1.apply(RepartitionedOrderedRDD2.scala:62)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1.apply(RepartitionedOrderedRDD2.scala:61)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anonfun$5.apply(RichContextRDDRegionValue.scala:187)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anonfun$5.apply(RichContextRDDRegionValue.scala:187)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:196)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:331)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anon$1.dropLeft(RepartitionedOrderedRDD2.scala:76)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1$$anon$1.<init>(RepartitionedOrderedRDD2.scala:73)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1.apply(RepartitionedOrderedRDD2.scala:62)
at is.hail.sparkextras.RepartitionedOrderedRDD2$$anonfun$compute$1.apply(RepartitionedOrderedRDD2.scala:61)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anonfun$5.apply(RichContextRDDRegionValue.scala:187)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anonfun$5.apply(RichContextRDDRegionValue.scala:187)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:196)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:331)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:196)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:327)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:196)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:327)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at is.hail.io.RichContextRDDLong$$anonfun$boundary$extension$2$$anon$1.hasNext(RichContextRDDRegionValue.scala:196)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1002)
at is.hail.utils.richUtils.RichIterator$$anon$7.isValid(RichIterator.scala:30)
at is.hail.utils.StagingIterator.isValid(FlipbookIterator.scala:48)
at is.hail.utils.FlipbookIterator$$anon$9.setValue(FlipbookIterator.scala:327)
at is.hail.utils.FlipbookIterator$$anon$9.<init>(FlipbookIterator.scala:344)
at is.hail.utils.FlipbookIterator.leftJoinDistinct(FlipbookIterator.scala:323)
at is.hail.annotations.OrderedRVIterator.leftJoinDistinct(OrderedRVIterator.scala:43)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:152)
at is.hail.rvd.KeyedRVD$$anonfun$orderedLeftJoinDistinct$1.apply(KeyedRVD.scala:149)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$czipPartitions$1$$anonfun$apply$24.apply(ContextRDD.scala:316)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitions$1$$anonfun$apply$10$$anonfun$apply$11.apply(ContextRDD.scala:218)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
at is.hail.io.RichContextRDDRegionValue$.writeRowsPartition(RichContextRDDRegionValue.scala:35)
at is.hail.io.RichContextRDDLong$$anonfun$writeRows$extension$1.apply(RichContextRDDRegionValue.scala:229)
at is.hail.io.RichContextRDDLong$$anonfun$writeRows$extension$1.apply(RichContextRDDRegionValue.scala:229)
at is.hail.utils.richUtils.RichContextRDD$.writeParts(RichContextRDD.scala:42)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$3.apply(RichContextRDD.scala:108)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$3.apply(RichContextRDD.scala:106)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18.apply(ContextRDD.scala:259)
at is.hail.sparkextras.ContextRDD$$anonfun$cmapPartitionsWithIndex$1$$anonfun$apply$18.apply(ContextRDD.scala:259)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anonfun$2.apply(RichContextRDD.scala:60)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anonfun$2.apply(RichContextRDD.scala:60)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:435)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:441)
at is.hail.utils.richUtils.RichContextRDD$$anonfun$cleanupRegions$1$$anon$1.hasNext(RichContextRDD.scala:69)
at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439)
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310)
at scala.collection.AbstractIterator.to(Iterator.scala:1334)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1334)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1334)
at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:945)
at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$13.apply(RDD.scala:945)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2101)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:123)
at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:408)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:414)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.http.conn.ConnectionPoolTimeoutException: Timeout waiting for connection from pool
at org.apache.http.impl.conn.PoolingClientConnectionManager.leaseConnection(PoolingClientConnectionManager.java:231)
at org.apache.http.impl.conn.PoolingClientConnectionManager$1.getConnection(PoolingClientConnectionManager.java:200)
at sun.reflect.GeneratedMethodAccessor111.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.amazonaws.http.conn.ClientConnectionRequestFactory$Handler.invoke(ClientConnectionRequestFactory.java:70)
at com.amazonaws.http.conn.$Proxy14.getConnection(Unknown Source)
at org.apache.http.impl.client.DefaultRequestDirector.execute(DefaultRequestDirector.java:422)
at org.apache.http.impl.client.AbstractHttpClient.doExecute(AbstractHttpClient.java:835)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56)
at com.amazonaws.http.AmazonHttpClient.executeHelper(AmazonHttpClient.java:384)
... 194 more