Start and end position per partition

I have another basic question – I just ran into this error with a function (part_min_and_max) that grabs the first and last key of each partition via `keys[0]` and `keys[-1]`:

  File "/tmp/25aa70a4f6c747838d0efc284b465c92/prepare_vcf_data_release.py", line 974, in <module>
    main(args)
  File "/tmp/25aa70a4f6c747838d0efc284b465c92/prepare_vcf_data_release.py", line 845, in main
    start_stop_list = ht._map_partitions(
  File "<decorator-gen-1127>", line 2, in collect
  File "/opt/conda/default/lib/python3.8/site-packages/hail/typecheck/check.py", line 577, in wrapper
    return __original_func(*args_, **kwargs_)
  File "/opt/conda/default/lib/python3.8/site-packages/hail/table.py", line 1920, in collect
    return Env.backend().execute(e._ir)
  File "/opt/conda/default/lib/python3.8/site-packages/hail/backend/py4j_backend.py", line 108, in execute
    raise HailUserError(message_and_trace) from None
hail.utils.java.HailUserError: Error summary: HailException: array index out of bounds: index=0, length=0
------------
Hail stack trace:
  File "/tmp/25aa70a4f6c747838d0efc284b465c92/prepare_vcf_data_release.py", line 974, in <module>
    main(args)

  File "/tmp/25aa70a4f6c747838d0efc284b465c92/prepare_vcf_data_release.py", line 845, in main
    start_stop_list = ht._map_partitions(

  File "/opt/conda/default/lib/python3.8/site-packages/hail/table.py", line 3518, in _map_partitions
    body = f(expr)

  File "/tmp/25aa70a4f6c747838d0efc284b465c92/prepare_vcf_data_release.py", line 846, in <lambda>
    lambda p: hl.array([part_min_and_max(p)])

  File "/tmp/25aa70a4f6c747838d0efc284b465c92/prepare_vcf_data_release.py", line 843, in part_min_and_max
    return hl.struct(start=keys[0], end=keys[-1])

  File "/opt/conda/default/lib/python3.8/site-packages/hail/expr/expressions/typed_expressions.py", line 834, in __getitem__
    return super().__getitem__(item)

  File "/opt/conda/default/lib/python3.8/site-packages/hail/expr/expressions/typed_expressions.py", line 481, in __getitem__
    return self._method("indexArray", self.dtype.element_type, item)

  File "/opt/conda/default/lib/python3.8/site-packages/hail/expr/expressions/base_expression.py", line 596, in _method
    x = ir.Apply(name, ret_type, self._ir, *(a._ir for a in args))

  File "/opt/conda/default/lib/python3.8/site-packages/hail/ir/ir.py", line 2263, in __init__
    self.save_error_info()

ERROR: (gcloud.dataproc.jobs.submit.pyspark) Job [25aa70a4f6c747838d0efc284b465c92] failed with error:
Google Cloud Dataproc Agent reports job failure. If logs are available, they can be found at:
https://console.cloud.google.com/dataproc/jobs/25aa70a4f6c747838d0efc284b465c92?project=maclab-ukbb&region=us-central1
gcloud dataproc jobs wait '25aa70a4f6c747838d0efc284b465c92' --region 'us-central1' --project 'maclab-ukbb'
https://console.cloud.google.com/storage/browser/dataproc-1aca38e4-67fe-4b64-b451-258ef1aea4d1-us-central1/google-cloud-dataproc-metainfo/3bfd808f-a289-49a2-9626-f1ead2ba216c/jobs/25aa70a4f6c747838d0efc284b465c92/
gs://dataproc-1aca38e4-67fe-4b64-b451-258ef1aea4d1-us-central1/google-cloud-dataproc-metainfo/3bfd808f-a289-49a2-9626-f1ead2ba216c/jobs/25aa70a4f6c747838d0efc284b465c92/driveroutput
Traceback (most recent call last):
  File "/Users/kchao/anaconda3/envs/hail/bin/hailctl", line 8, in <module>
    sys.exit(main())
  File "/Users/kchao/anaconda3/envs/hail/lib/python3.7/site-packages/hailtop/hailctl/__main__.py", line 100, in main
    cli.main(args)
  File "/Users/kchao/anaconda3/envs/hail/lib/python3.7/site-packages/hailtop/hailctl/dataproc/cli.py", line 122, in main
    jmp[args.module].main(args, pass_through_args)
  File "/Users/kchao/anaconda3/envs/hail/lib/python3.7/site-packages/hailtop/hailctl/dataproc/submit.py", line 78, in main
    gcloud.run(cmd)
  File "/Users/kchao/anaconda3/envs/hail/lib/python3.7/site-packages/hailtop/hailctl/dataproc/gcloud.py", line 9, in run
    return subprocess.check_call(["gcloud"] + command)
  File "/Users/kchao/anaconda3/envs/hail/lib/python3.7/subprocess.py", line 328, in check_call
    raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['gcloud', 'dataproc', 'jobs', 'submit', 'pyspark', 'prepare_vcf_data_release.py', '--cluster=chr9', '--files=', '--py-files=/var/folders/xq/8jnhrt2s2h58ts2v0br5g8gm0000gp/T/pyscripts_y547yk4z.zip', '--properties=', '--', '--prepare_release_vcf', '--slack_channel', '@kc (she/her)', '--contig', 'chr9']' returned non-zero exit status 1.

What would cause this to throw an array index out of bounds error? The error summary says `index=0, length=0`, so it looks like `keys` was an empty array for at least one partition — could an empty partition cause this, and if so, is there a recommended way to guard against (or drop) empty partitions before indexing? The code is in `ukbb_qc/prepare_vcf_data_release.py` on the `freeze_7` branch of broadinstitute/ukbb_qc on GitHub; I will email the full log.