3636
3737from __future__ import annotations
3838
39+ import contextlib
40+ import os
3941import re
4042from contextlib import contextmanager
4143from typing import TYPE_CHECKING , Generator
7476}
7577
7678
79+ # ---------------------------------------------------------------------------
80+ # Progress-suppression helper
81+ # ---------------------------------------------------------------------------
82+
83+
84+ @contextmanager
85+ def _suppress_dask_progress () -> Generator [None , None , None ]:
86+ """Suppress dask progress bar output during file-open operations.
87+
88+ When opening HE5/HDF5 files with ``chunks={}``, dask (or pqdm used
89+ internally by earthaccess) may emit verbose progress bar output
90+ (e.g. ``QUEUEING TASKS``, ``PROCESSING TASKS``, ``COLLECTING RESULTS``).
91+ This context manager suppresses that output without affecting the data.
92+
93+ In a Jupyter environment it uses :func:`IPython.utils.io.capture_output`;
94+ otherwise it redirects both ``stdout`` and ``stderr`` to ``/dev/null`` for
95+ the duration of the open call.
96+ """
97+ try :
98+ from IPython .utils import io as _ipy_io # type: ignore[import]
99+
100+ with _ipy_io .capture_output ():
101+ yield
102+ return
103+ except ImportError :
104+ pass
105+
106+ with open (os .devnull , "w" ) as _devnull :
107+ with contextlib .redirect_stdout (_devnull ), contextlib .redirect_stderr (_devnull ):
108+ yield
109+
110+
77111# ---------------------------------------------------------------------------
78112# Open kwargs helpers
79113# ---------------------------------------------------------------------------
@@ -575,13 +609,15 @@ def _open_and_merge_dataset_groups(
575609 group_paths = list (merge )
576610 else :
577611 # No merge requested — open the root dataset directly.
578- return xr .open_dataset (file_obj , ** effective_kwargs ) # type: ignore[arg-type]
612+ with _suppress_dask_progress ():
613+ return xr .open_dataset (file_obj , ** effective_kwargs ) # type: ignore[arg-type]
579614
580615 opened : list [xr .Dataset ] = []
581616 for path in group_paths :
582617 kwargs = {** effective_kwargs , "group" : path }
583618 try :
584- ds = xr .open_dataset (file_obj , ** kwargs ) # type: ignore[arg-type]
619+ with _suppress_dask_progress ():
620+ ds = xr .open_dataset (file_obj , ** kwargs ) # type: ignore[arg-type]
585621 if ds .data_vars :
586622 opened .append (ds )
587623 else :
@@ -686,22 +722,23 @@ def _visit(name: str, obj: object) -> None:
686722
def _open_datatree_fn(file_obj: object, kwargs: dict) -> object:
    """Open *file_obj* as a DataTree using whichever API is available.

    Prefers xarray's built-in ``xr.open_datatree`` (xarray >= 2024.x) and
    falls back to the standalone ``datatree`` package.  Progress-bar noise
    from dask/pqdm is suppressed for the duration of the open call.

    Parameters
    ----------
    file_obj:
        An open file-like object (or path) accepted by the datatree opener.
    kwargs:
        Keyword arguments forwarded verbatim to the opener.

    Returns
    -------
    object
        A DataTree instance.

    Raises
    ------
    ImportError
        If neither xarray's built-in ``open_datatree`` nor the ``datatree``
        package is available.
    """
    # Resolve the opener with getattr instead of a try/except around the
    # *call*: the previous form also caught AttributeErrors raised inside
    # xr.open_datatree(file_obj, ...) itself and silently fell through to
    # the legacy 'datatree' path, masking real bugs.  Keep the try body
    # minimal -- only the line that is expected to fail.
    open_dt = getattr(xr, "open_datatree", None)

    with _suppress_dask_progress():
        if open_dt is not None:
            return open_dt(file_obj, **kwargs)  # type: ignore[arg-type]

        try:
            import datatree  # type: ignore[import-untyped]
        except ImportError as exc:
            raise ImportError(
                "open_method='datatree-merge' requires either xarray >= 2024.x (with "
                "built-in DataTree support) or the 'datatree' package. "
                "Install it with: pip install datatree"
            ) from exc

        return datatree.open_datatree(file_obj, **kwargs)  # type: ignore[arg-type]
705742
706743
707744def _merge_datatree_with_spec (dt : object , spec : dict ) -> xr .Dataset :
@@ -876,13 +913,18 @@ def _seek_back() -> None:
876913
877914 # --- Try the fast dataset path ---
878915 dataset_error : BaseException | None = None
916+ ds_probe : xr .Dataset | None = None
879917 try :
880- with xr .open_dataset (file_obj , ** effective_kwargs ) as ds : # type: ignore[arg-type]
881- _apply_coords (ds , spec )
918+ with _suppress_dask_progress ():
919+ ds_probe = xr .open_dataset (file_obj , ** effective_kwargs ) # type: ignore[arg-type]
920+ _apply_coords (ds_probe , spec )
882921 _seek_back ()
883922 return {** spec , "xarray_open" : "dataset" }
884923 except Exception as exc :
885924 dataset_error = exc
925+ finally :
926+ if ds_probe is not None :
927+ ds_probe .close ()
886928
887929 _seek_back ()
888930
@@ -968,7 +1010,8 @@ def _open_as_flat_dataset(
9681010 for path in group_paths :
9691011 kwargs = {** effective_kwargs , "group" : path }
9701012 try :
971- ds_grp = xr .open_dataset (file_obj , ** kwargs ) # type: ignore[arg-type]
1013+ with _suppress_dask_progress ():
1014+ ds_grp = xr .open_dataset (file_obj , ** kwargs ) # type: ignore[arg-type]
9721015 if ds_grp .data_vars :
9731016 opened .append (ds_grp )
9741017 else :
@@ -985,9 +1028,15 @@ def _open_as_flat_dataset(
9851028 except Exception :
9861029 pass
9871030 else :
988- with xr .open_dataset (file_obj , ** effective_kwargs ) as ds : # type: ignore[arg-type]
989- ds , lon_name , lat_name = _apply_coords (ds , spec )
990- yield (ds , lon_name , lat_name )
1031+ ds_simple : xr .Dataset | None = None
1032+ try :
1033+ with _suppress_dask_progress ():
1034+ ds_simple = xr .open_dataset (file_obj , ** effective_kwargs ) # type: ignore[arg-type]
1035+ ds_simple , lon_name , lat_name = _apply_coords (ds_simple , spec )
1036+ yield (ds_simple , lon_name , lat_name )
1037+ finally :
1038+ if ds_simple is not None :
1039+ ds_simple .close ()
9911040
9921041 elif xarray_open == "datatree" :
9931042 dt = _open_datatree_fn (file_obj , effective_kwargs )
@@ -1032,7 +1081,8 @@ def _open_as_flat_dataset_auto(
10321081
10331082 # --- Fast path: try xr.open_dataset ---
10341083 try :
1035- ds_fast = xr .open_dataset (file_obj , ** effective_kwargs ) # type: ignore[arg-type]
1084+ with _suppress_dask_progress ():
1085+ ds_fast = xr .open_dataset (file_obj , ** effective_kwargs ) # type: ignore[arg-type]
10361086 ds_fast , lon_name_fast , lat_name_fast = _apply_coords (ds_fast , spec )
10371087 except Exception as exc :
10381088 dataset_exc = exc
0 commit comments