From ebb65cc5ea0c6f7eed0d942fa669d6a728918ac2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 24 Nov 2025 16:52:46 +0000 Subject: [PATCH 01/50] Initial plan From 30f22e6e92caf1284512b0937c5cdac6fe61dc51 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 24 Nov 2025 17:00:26 +0000 Subject: [PATCH 02/50] Update Pillow version constraint to support 10.x, 11.x, and 12.x Co-authored-by: xylar <4179064+xylar@users.noreply.github.com> --- dev-spec.txt | 2 +- .../test/test_thumbnail_generation.py | 89 +++++++++++++++++++ pyproject.toml | 2 +- 3 files changed, 91 insertions(+), 2 deletions(-) create mode 100644 mpas_analysis/test/test_thumbnail_generation.py diff --git a/dev-spec.txt b/dev-spec.txt index 56b63b9bf..a5f9ca359 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -20,7 +20,7 @@ nco >=4.8.1,!=5.2.6 netcdf4 numpy >=2.0,<3.0 pandas -pillow >=10.0.0,<11.0.0 +pillow >=10.0.0,<13.0.0 progressbar2 pyproj pyremap >=2.0.0,<3.0.0 diff --git a/mpas_analysis/test/test_thumbnail_generation.py b/mpas_analysis/test/test_thumbnail_generation.py new file mode 100644 index 000000000..f3c66de0a --- /dev/null +++ b/mpas_analysis/test/test_thumbnail_generation.py @@ -0,0 +1,89 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. +# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. 
+# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE + +import os +import tempfile +import unittest +from pathlib import Path + +from PIL import Image + + +# Import the function directly to avoid mpas_analysis.test dependencies +import sys +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..')) +from mpas_analysis.shared.html.image_xml import _generate_thumbnails + + +class TestThumbnailGeneration(unittest.TestCase): + """Test thumbnail generation with Pillow""" + + def test_generate_thumbnails_horizontal(self): + """Test thumbnail generation for horizontal images""" + with tempfile.TemporaryDirectory() as tmpdir: + # Create a horizontal test image + test_image = Image.new('RGB', (1200, 600), color='blue') + image_filename = 'test_horizontal.png' + image_path = Path(tmpdir) / image_filename + test_image.save(image_path) + + # Generate thumbnails + imageSize, thumbnailSize, orientation = _generate_thumbnails( + image_filename, tmpdir + ) + + # Verify results + self.assertEqual(imageSize, (1200, 600)) + self.assertEqual(orientation, 'horiz') + self.assertEqual(thumbnailSize[1], 120) # height should be 120 + + # Check thumbnail files exist + thumbnail_dir = Path(tmpdir) / 'thumbnails' + self.assertTrue(thumbnail_dir.exists()) + self.assertTrue((thumbnail_dir / 'test_horizontal.jpg').exists()) + self.assertTrue((thumbnail_dir / 'fixed_test_horizontal.jpg').exists()) + + def test_generate_thumbnails_vertical(self): + """Test thumbnail generation for vertical images""" + with tempfile.TemporaryDirectory() as tmpdir: + # Create a vertical test image + test_image = Image.new('RGB', (400, 800), color='green') + image_filename = 'test_vertical.png' + image_path = Path(tmpdir) / image_filename + test_image.save(image_path) + + # Generate thumbnails + imageSize, thumbnailSize, orientation = _generate_thumbnails( + 
image_filename, tmpdir + ) + + # Verify results + self.assertEqual(imageSize, (400, 800)) + self.assertEqual(orientation, 'vert') + self.assertEqual(thumbnailSize[1], 320) # height should be 320 + + # Check thumbnail files exist + thumbnail_dir = Path(tmpdir) / 'thumbnails' + self.assertTrue(thumbnail_dir.exists()) + self.assertTrue((thumbnail_dir / 'test_vertical.jpg').exists()) + self.assertTrue((thumbnail_dir / 'fixed_test_vertical.jpg').exists()) + + def test_image_lanczos_constant(self): + """Test that Image.LANCZOS constant is available""" + # This test ensures that Image.LANCZOS is available across + # Pillow versions 10.x, 11.x, and 12.x + self.assertTrue(hasattr(Image, 'LANCZOS')) + self.assertIsNotNone(Image.LANCZOS) + + # Test that resize works with LANCZOS + test_image = Image.new('RGB', (100, 100), color='red') + resized = test_image.resize((50, 50), Image.LANCZOS) + self.assertEqual(resized.size, (50, 50)) diff --git a/pyproject.toml b/pyproject.toml index cd7d33ae8..9b27eca40 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ dependencies = [ "netcdf4", "numpy >=2.0,<3.0", "pandas", - "pillow >=10.0.0,<11.0.0", + "pillow >=10.0.0,<13.0.0", "progressbar2", "pyproj", "python-dateutil", From 5d8b06534fd7b069834484895d846949841961a1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 24 Nov 2025 17:02:17 +0000 Subject: [PATCH 03/50] Remove test file with sys.path manipulation issues Co-authored-by: xylar <4179064+xylar@users.noreply.github.com> --- .../test/test_thumbnail_generation.py | 89 ------------------- 1 file changed, 89 deletions(-) delete mode 100644 mpas_analysis/test/test_thumbnail_generation.py diff --git a/mpas_analysis/test/test_thumbnail_generation.py b/mpas_analysis/test/test_thumbnail_generation.py deleted file mode 100644 index f3c66de0a..000000000 --- a/mpas_analysis/test/test_thumbnail_generation.py +++ /dev/null @@ -1,89 +0,0 @@ -# This software is open 
source software available under the BSD-3 license. -# -# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. -# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights -# reserved. -# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. -# -# Additional copyright and license information can be found in the LICENSE file -# distributed with this code, or at -# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE - -import os -import tempfile -import unittest -from pathlib import Path - -from PIL import Image - - -# Import the function directly to avoid mpas_analysis.test dependencies -import sys -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..')) -from mpas_analysis.shared.html.image_xml import _generate_thumbnails - - -class TestThumbnailGeneration(unittest.TestCase): - """Test thumbnail generation with Pillow""" - - def test_generate_thumbnails_horizontal(self): - """Test thumbnail generation for horizontal images""" - with tempfile.TemporaryDirectory() as tmpdir: - # Create a horizontal test image - test_image = Image.new('RGB', (1200, 600), color='blue') - image_filename = 'test_horizontal.png' - image_path = Path(tmpdir) / image_filename - test_image.save(image_path) - - # Generate thumbnails - imageSize, thumbnailSize, orientation = _generate_thumbnails( - image_filename, tmpdir - ) - - # Verify results - self.assertEqual(imageSize, (1200, 600)) - self.assertEqual(orientation, 'horiz') - self.assertEqual(thumbnailSize[1], 120) # height should be 120 - - # Check thumbnail files exist - thumbnail_dir = Path(tmpdir) / 'thumbnails' - self.assertTrue(thumbnail_dir.exists()) - self.assertTrue((thumbnail_dir / 'test_horizontal.jpg').exists()) - self.assertTrue((thumbnail_dir / 'fixed_test_horizontal.jpg').exists()) - - def test_generate_thumbnails_vertical(self): - """Test thumbnail generation for vertical images""" - with tempfile.TemporaryDirectory() as tmpdir: - # Create a vertical test 
image - test_image = Image.new('RGB', (400, 800), color='green') - image_filename = 'test_vertical.png' - image_path = Path(tmpdir) / image_filename - test_image.save(image_path) - - # Generate thumbnails - imageSize, thumbnailSize, orientation = _generate_thumbnails( - image_filename, tmpdir - ) - - # Verify results - self.assertEqual(imageSize, (400, 800)) - self.assertEqual(orientation, 'vert') - self.assertEqual(thumbnailSize[1], 320) # height should be 320 - - # Check thumbnail files exist - thumbnail_dir = Path(tmpdir) / 'thumbnails' - self.assertTrue(thumbnail_dir.exists()) - self.assertTrue((thumbnail_dir / 'test_vertical.jpg').exists()) - self.assertTrue((thumbnail_dir / 'fixed_test_vertical.jpg').exists()) - - def test_image_lanczos_constant(self): - """Test that Image.LANCZOS constant is available""" - # This test ensures that Image.LANCZOS is available across - # Pillow versions 10.x, 11.x, and 12.x - self.assertTrue(hasattr(Image, 'LANCZOS')) - self.assertIsNotNone(Image.LANCZOS) - - # Test that resize works with LANCZOS - test_image = Image.new('RGB', (100, 100), color='red') - resized = test_image.resize((50, 50), Image.LANCZOS) - self.assertEqual(resized.size, (50, 50)) From 656a3f8639cf79e59807facfaa5041c1e4fa94c1 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 24 Nov 2025 17:07:00 +0000 Subject: [PATCH 04/50] Update Pillow constraint in ci/recipe/meta.yaml to match pyproject.toml Co-authored-by: xylar <4179064+xylar@users.noreply.github.com> --- ci/recipe/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index 10009615a..cd147e614 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -40,7 +40,7 @@ requirements: - netcdf4 - numpy >=2.0,<3.0 - pandas - - pillow >=10.0.0,<11.0.0 + - pillow >=10.0.0,<13.0.0 - progressbar2 - pyproj - pyremap >=2.0.0,<3.0.0 From 94219e2110298c674c7193fc24c70bee89e3c99c Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Nov 2025 17:29:46 +0000 Subject: [PATCH 05/50] Bump actions/checkout from 5 to 6 Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/build_workflow.yml | 2 +- .github/workflows/docs_workflow.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 138431ba8..8824343f8 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -37,7 +37,7 @@ jobs: paths_ignore: ${{ env.PATHS_IGNORE }} - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - uses: actions/checkout@v5 + uses: actions/checkout@v6 - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Cache Conda diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index b90bb7882..a7f5e9b92 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -20,7 +20,7 @@ jobs: shell: bash -l {0} timeout-minutes: 20 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 with: persist-credentials: false fetch-depth: 0 From f95562051fd4c1bc0e8fc07650f1b55d3abf1fca Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 24 Nov 2025 16:24:23 +0000 Subject: [PATCH 06/50] Add Python 3.14 support to CI and classifiers Co-authored-by: xylar <4179064+xylar@users.noreply.github.com> --- 
.github/workflows/build_workflow.yml | 2 +- .github/workflows/docs_workflow.yml | 2 +- ci/python3.14.yaml | 8 ++++++++ pyproject.toml | 1 + 4 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 ci/python3.14.yaml diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 8824343f8..767f407de 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -27,7 +27,7 @@ jobs: shell: bash -l {0} strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] fail-fast: false steps: - id: skip_check diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index a7f5e9b92..385849fd3 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -10,7 +10,7 @@ on: types: [published] env: - PYTHON_VERSION: "3.13" + PYTHON_VERSION: "3.14" jobs: publish-docs: diff --git a/ci/python3.14.yaml b/ci/python3.14.yaml new file mode 100644 index 000000000..5c438d4ac --- /dev/null +++ b/ci/python3.14.yaml @@ -0,0 +1,8 @@ +channel_sources: +- conda-forge,defaults +pin_run_as_build: + python: + min_pin: x.x + max_pin: x.x +python: +- 3.14.* *_cp314 diff --git a/pyproject.toml b/pyproject.toml index 9b27eca40..ffab97d4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Development Status :: 5 - Production/Stable", From 1c6a2449df4c7f6f5df05d05a83c708f2e1a2e7d Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Mon, 24 Nov 2025 21:09:23 -0600 Subject: [PATCH 07/50] Change error to warning if timeSeries range not present in history files --- mpas_analysis/shared/analysis_task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index f47660658..fbc018b97 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -658,7 +658,7 @@ def update_time_bounds_from_file_names(config, section, componentName): return if len(inputFiles) == 0: - raise ValueError('No input files found for stream {} in {} between ' + print('Warning: No input files found for stream {} in {} between ' '{} and {}'.format(streamName, componentName, requestedStartYear, requestedEndYear)) @@ -680,7 +680,7 @@ def update_time_bounds_from_file_names(config, section, componentName): endYear = years[lastIndex] if startYear != requestedStartYear or endYear != requestedEndYear: - raise ValueError( + print("Warning:" "{} start and/or end year different from requested\n" "requested: {:04d}-{:04d}\n" "actual: {:04d}-{:04d}\n".format( From 9da44b7e4b9334e29c36e1a093a3293885869bad Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Sun, 30 Nov 2025 14:29:55 -0600 Subject: [PATCH 08/50] Allow the streamfunction task to read in moc from cached file whether the history files exist or not --- mpas_analysis/ocean/streamfunction_moc.py | 49 ++++++++++++----------- 1 file changed, 26 insertions(+), 23 deletions(-) diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py index 26ef1a32e..5580d364e 100644 --- a/mpas_analysis/ocean/streamfunction_moc.py +++ b/mpas_analysis/ocean/streamfunction_moc.py @@ -1007,15 +1007,16 @@ def _compute_moc_time_series_analysismember(self): self.historyStreams, 'timeSeriesStatsMonthlyOutput') - mocRegion = np.zeros(len(inputFiles)) + ntimes = int(12 * (self.endYear - self.startYear + 1)) + mocRegion = np.zeros(ntimes) moc = None refTopDepth = None - times = np.zeros(len(inputFiles)) - computed = np.zeros(len(inputFiles), bool) + times = np.zeros(ntimes) + computed = np.zeros(ntimes, bool) continueOutput = os.path.exists(outputFileName) if continueOutput: 
- self.logger.info(' Read in previously computed MOC time series') + self.logger.info(f' Read in previously computed MOC time series {outputFileName}') with open_mpas_dataset(fileName=outputFileName, calendar=self.calendar, timeVariableNames=None, @@ -1028,32 +1029,30 @@ def _compute_moc_time_series_analysismember(self): if moc is None: sizes = dsMOCIn.sizes - moc = np.zeros((len(inputFiles), sizes['depth'], - sizes['lat'])) + moc = np.zeros((ntimes, sizes['depth'], + sizes['lat'])) refTopDepth = dsMOCIn.depth.values # first, copy all computed data - for inIndex in range(dsMOCIn.sizes['Time']): - - mask = np.logical_and( - dsMOCIn.year[inIndex].values == years, - dsMOCIn.month[inIndex].values == months) - - outIndex = np.where(mask)[0][0] - - mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex] - moc[outIndex, :, :] = dsMOCIn.mocAtlantic[inIndex, :, :] - times[outIndex] = dsMOCIn.Time[inIndex] - computed[outIndex] = True + outIndex = 0 + for load_year in np.arange(self.startYear, self.endYear + 1): + for load_month in np.arange(1, 13): + mask = np.logical_and(dsMOCIn.year.values == load_year, + dsMOCIn.month.values == load_month) + if np.sum(mask) >= 1: + inIndex = np.where(mask)[0][0] + mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex] + moc[outIndex, :, :] = dsMOCIn.mocAtlantic[inIndex, :, :] + times[outIndex] = dsMOCIn.Time[inIndex] + computed[outIndex] = True + + outIndex += 1 if np.all(computed): # no need to waste time writing out the data set again return dsMOCIn for timeIndex, fileName in enumerate(inputFiles): - if computed[timeIndex]: - continue - dsLocal = open_mpas_dataset( fileName=fileName, calendar=self.calendar, @@ -1067,12 +1066,15 @@ def _compute_moc_time_series_analysismember(self): self.logger.info(' date: {:04d}-{:02d}'.format(date.year, date.month)) + computedIndex = 12 * (date.year - self.startYear) + date.month - 1 + if computed[computedIndex]: + continue # hard-wire region=0 (Atlantic) for now indRegion = 0 mocVar = 
dsLocal.timeMonthly_avg_mocStreamvalLatAndDepthRegion mocTop = mocVar[indRegion, :, :].values - mocRegion[timeIndex] = np.amax(mocTop[:, indlat26]) + mocRegion[computedIndex] = np.amax(mocTop[:, indlat26]) if moc is None: sizes = dsLocal.sizes @@ -1087,7 +1089,8 @@ def _compute_moc_time_series_analysismember(self): refTopDepth = np.zeros(nVertLevels + 1) refTopDepth[1:nVertLevels + 1] = refBottomDepth[0:nVertLevels] - moc[timeIndex, 0:-1, :] = mocTop + moc[computedIndex, 0:-1, :] = mocTop + description = 'Max MOC Atlantic streamfunction nearest to RAPID ' \ 'Array latitude (26.5N)' From 35648cc08b46f64d5deea8c140c5cffa346e0af1 Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Sun, 30 Nov 2025 20:48:16 -0600 Subject: [PATCH 09/50] Add a small failsafe for nino index axis limits --- mpas_analysis/ocean/index_nino34.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mpas_analysis/ocean/index_nino34.py b/mpas_analysis/ocean/index_nino34.py index b3cd49136..4a3b61c3c 100644 --- a/mpas_analysis/ocean/index_nino34.py +++ b/mpas_analysis/ocean/index_nino34.py @@ -779,6 +779,8 @@ def _plot_size_y_axis(self, x, ys, xmin, xmax): # find maximum value of three curves plotted maxY = -1E20 + if len(mask) == 0: + return maxY for y in ys: maxY = max(y[mask].max(), maxY) # check the function interpolated to the max/min as well From cda2fe1c77d8b3facd9f22d0813799c77ea550bc Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Mon, 1 Dec 2025 09:49:31 -0600 Subject: [PATCH 10/50] Fix update_time_bounds_from_file_names --- mpas_analysis/shared/analysis_task.py | 19 ++++++++++++------- .../test/test_mpas_climatology_task.py | 3 ++- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index fbc018b97..c72903993 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -602,7 +602,8 @@ def flush(self): pass -def update_time_bounds_from_file_names(config, 
section, componentName): +def update_time_bounds_from_file_names(config, section, componentName, + allow_cache=True): """ Update the start and end years and dates for time series, climatologies or climate indices based on the years actually available in the list of files. @@ -680,12 +681,16 @@ def update_time_bounds_from_file_names(config, section, componentName): endYear = years[lastIndex] if startYear != requestedStartYear or endYear != requestedEndYear: - print("Warning:" - "{} start and/or end year different from requested\n" - "requested: {:04d}-{:04d}\n" - "actual: {:04d}-{:04d}\n".format( - section, requestedStartYear, requestedEndYear, startYear, - endYear)) + message = ("{} start and/or end year different from requested\n" + "requested: {:04d}-{:04d}\n" + "actual: {:04d}-{:04d}\n".format( + section, requestedStartYear, requestedEndYear, startYear, + endYear) + ) + if allow_cache: + print(f'Warning: {message}') + else: + raise ValueError(message) startDate = '{:04d}-01-01_00:00:00'.format(startYear) config.set(section, 'startDate', startDate) diff --git a/mpas_analysis/test/test_mpas_climatology_task.py b/mpas_analysis/test/test_mpas_climatology_task.py index d0e57d3f6..878879b94 100644 --- a/mpas_analysis/test/test_mpas_climatology_task.py +++ b/mpas_analysis/test/test_mpas_climatology_task.py @@ -168,7 +168,8 @@ def test_update_climatology_bounds_and_create_symlinks(self): with self.assertRaisesRegex(ValueError, 'climatology start and/or end year ' 'different from requested'): - update_time_bounds_from_file_names(config, 'climatology', 'ocean') + update_time_bounds_from_file_names(config, 'climatology', 'ocean', + allow_cache=False) def test_subtask_run_analysis(self): mpasClimatologyTask = self.setup_task() From 10dd90f82dce8425402f3c258da3dd781bce3916 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 5 Dec 2025 03:59:59 -0600 Subject: [PATCH 11/50] Drop python 3.14 support for now We are seeing issues with networkx that need to be resolved by a 
new python 3.14 release --- .github/workflows/build_workflow.yml | 2 +- .github/workflows/docs_workflow.yml | 2 +- ci/recipe/meta.yaml | 2 +- dev-spec.txt | 2 +- pyproject.toml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 767f407de..8824343f8 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -27,7 +27,7 @@ jobs: shell: bash -l {0} strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] + python-version: ["3.10", "3.11", "3.12", "3.13"] fail-fast: false steps: - id: skip_check diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 385849fd3..a7f5e9b92 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -10,7 +10,7 @@ on: types: [published] env: - PYTHON_VERSION: "3.14" + PYTHON_VERSION: "3.13" jobs: publish-docs: diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index cd147e614..d565adcde 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -23,7 +23,7 @@ requirements: - pip - setuptools >=60 run: - - python >={{ python_min }},<3.13 + - python >={{ python_min }},<3.14 - cartopy >=0.18.0 - cartopy_offlinedata - cmocean diff --git a/dev-spec.txt b/dev-spec.txt index a5f9ca359..588a0621f 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -2,7 +2,7 @@ # $ conda create --name --file # Base -python >=3.10 +python >=3.10,<3.14 cartopy >=0.18.0 cartopy_offlinedata cmocean diff --git a/pyproject.toml b/pyproject.toml index ffab97d4f..12689081a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ description = """\ """ license = { file = "LICENSE" } readme = "README.md" -requires-python = ">=3.10" +requires-python = ">=3.10,<3.14" classifiers = [ # these are only for searching/browsing projects on PyPI "Programming Language :: Python :: 3.10", From f7ad851bbbc7a6ae653c93af68591269a0caa5a3 Mon Sep 17 
00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 5 Dec 2025 04:02:30 -0600 Subject: [PATCH 12/50] Drop python 3.14 from trove classifiers --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 12689081a..c48fe32e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,6 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", "Development Status :: 5 - Production/Stable", From 236c5fa036a89c2e5af436e7f90cf08607ab0db4 Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Thu, 4 Dec 2025 16:16:21 -0600 Subject: [PATCH 13/50] Check for transport output before loading input files --- mpas_analysis/ocean/time_series_transport.py | 46 ++++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/mpas_analysis/ocean/time_series_transport.py b/mpas_analysis/ocean/time_series_transport.py index e23996fd0..080cf92c6 100644 --- a/mpas_analysis/ocean/time_series_transport.py +++ b/mpas_analysis/ocean/time_series_transport.py @@ -235,6 +235,29 @@ def run_task(self): outFileName = f'{outputDirectory}/{self.groupSuffix}_{self.startYear:04d}-{self.endYear:04d}.nc' + outputExists = os.path.exists(outFileName) + outputValid = outputExists + if outputExists: + with open_mpas_dataset(fileName=outFileName, + calendar=self.calendar, + timeVariableNames=None, + variableList=None, + startDate=startDate, + endDate=endDate) as dsOut: + + for load_year in numpy.arange(self.startYear, self.endYear + 1): + for load_month in numpy.arange(1, 13): + mask = numpy.logical_and( + dsOut.year.values == load_year, + dsOut.month.values == load_month) + if numpy.count_nonzero(mask) == 0: + outputValid = False + break + + if outputValid: + self.logger.info(' Time series exists -- Done.') + return + inputFiles = sorted(self.historyStreams.readpath( 'timeSeriesStatsMonthlyOutput', startDate=startDate, 
endDate=endDate, calendar=self.calendar)) @@ -259,29 +282,6 @@ def run_task(self): 'Using advection velocity.') variableList.append('timeMonthly_avg_normalVelocity') - outputExists = os.path.exists(outFileName) - outputValid = outputExists - if outputExists: - with open_mpas_dataset(fileName=outFileName, - calendar=self.calendar, - timeVariableNames=None, - variableList=None, - startDate=startDate, - endDate=endDate) as dsOut: - - for inIndex in range(dsOut.sizes['Time']): - - mask = numpy.logical_and( - dsOut.year[inIndex].values == years, - dsOut.month[inIndex].values == months) - if numpy.count_nonzero(mask) == 0: - outputValid = False - break - - if outputValid: - self.logger.info(' Time series exists -- Done.') - return - transectMaskFileName = self.masksSubtask.maskFileName dsTransectMask = xarray.open_dataset(transectMaskFileName) From 5f8475d1b6436b3cfd8cc483d70dee4be1c70e34 Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Mon, 8 Dec 2025 11:03:37 -0600 Subject: [PATCH 14/50] Concatenate cache and new data for sea ice time series --- mpas_analysis/sea_ice/time_series.py | 39 +++++++++++++++++----------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/mpas_analysis/sea_ice/time_series.py b/mpas_analysis/sea_ice/time_series.py index b0e0fcca0..987e61657 100644 --- a/mpas_analysis/sea_ice/time_series.py +++ b/mpas_analysis/sea_ice/time_series.py @@ -10,6 +10,7 @@ # https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE import numpy as np +import os import xarray as xr from mpas_analysis.shared import AnalysisTask @@ -692,6 +693,10 @@ def _compute_area_vol(self): if maxAllowedSeaIceThickness is not None: mask = np.logical_and(mask, ds.iceThick <= maxAllowedSeaIceThickness) + if os.path.exists(outFileNames[hemisphere]): + dsCache = xr.open_dataset(outFileNames[hemisphere]) + timeMask = ds.startTime > dsCache.startTime.isel(Time=-1) + mask = np.logical_and(mask, timeMask) dsAreaSum = (ds.where(mask) * 
dsMesh.areaCell).sum('nCells') dsAreaSum = dsAreaSum.rename( @@ -703,21 +708,25 @@ def _compute_area_vol(self): dsAreaSum['snowDepth'] = (dsAreaSum.snowVolume / dsMesh.areaCell.sum('nCells')) - dsAreaSum['iceArea'].attrs['units'] = 'm$^2$' - dsAreaSum['iceArea'].attrs['description'] = \ - f'Total {hemisphere} sea ice area' - dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$' - dsAreaSum['iceVolume'].attrs['description'] = \ - f'Total {hemisphere} sea ice volume' - dsAreaSum['snowVolume'].attrs['units'] = 'm$^3$' - dsAreaSum['snowVolume'].attrs['description'] = \ - f'Total {hemisphere} snow volume' - dsAreaSum['iceThickness'].attrs['units'] = 'm' - dsAreaSum['iceThickness'].attrs['description'] = \ - f'Mean {hemisphere} sea ice thickness' - dsAreaSum['snowDepth'].attrs['units'] = 'm' - dsAreaSum['snowDepth'].attrs['description'] = \ - f'Mean {hemisphere} snow depth' + if os.path.exists(outFileNames[hemisphere]): + dsAreaSum = xr.concat([dsCache, dsAreaSum], dim='Time') + dsCache.close() + else: + dsAreaSum['iceArea'].attrs['units'] = 'm$^2$' + dsAreaSum['iceArea'].attrs['description'] = \ + f'Total {hemisphere} sea ice area' + dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$' + dsAreaSum['iceVolume'].attrs['description'] = \ + f'Total {hemisphere} sea ice volume' + dsAreaSum['snowVolume'].attrs['units'] = 'm$^3$' + dsAreaSum['snowVolume'].attrs['description'] = \ + f'Total {hemisphere} snow volume' + dsAreaSum['iceThickness'].attrs['units'] = 'm' + dsAreaSum['iceThickness'].attrs['description'] = \ + f'Mean {hemisphere} sea ice thickness' + dsAreaSum['snowDepth'].attrs['units'] = 'm' + dsAreaSum['snowDepth'].attrs['description'] = \ + f'Mean {hemisphere} snow depth' dsTimeSeries[hemisphere] = dsAreaSum From e8e2b133b946ba55162dbfdb76748852445fd394 Mon Sep 17 00:00:00 2001 From: Carolyn Begeman Date: Mon, 8 Dec 2025 13:43:43 -0700 Subject: [PATCH 15/50] Mask data time entries using isel Co-authored-by: Xylar Asay-Davis --- mpas_analysis/sea_ice/time_series.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/sea_ice/time_series.py b/mpas_analysis/sea_ice/time_series.py index 987e61657..9e4552072 100644 --- a/mpas_analysis/sea_ice/time_series.py +++ b/mpas_analysis/sea_ice/time_series.py @@ -696,7 +696,7 @@ def _compute_area_vol(self): if os.path.exists(outFileNames[hemisphere]): dsCache = xr.open_dataset(outFileNames[hemisphere]) timeMask = ds.startTime > dsCache.startTime.isel(Time=-1) - mask = np.logical_and(mask, timeMask) + ds = ds.isel(Time=timeMask) dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells') dsAreaSum = dsAreaSum.rename( From 9ebcff938222239339024eba39fb201fed2f5535 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 5 Dec 2025 04:07:45 -0600 Subject: [PATCH 16/50] Revert "Drop python 3.14 support for now" This reverts commit 10dd90f82dce8425402f3c258da3dd781bce3916. --- .github/workflows/build_workflow.yml | 2 +- .github/workflows/docs_workflow.yml | 2 +- ci/recipe/meta.yaml | 2 +- dev-spec.txt | 2 +- pyproject.toml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 8824343f8..767f407de 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -27,7 +27,7 @@ jobs: shell: bash -l {0} strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] fail-fast: false steps: - id: skip_check diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index a7f5e9b92..385849fd3 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -10,7 +10,7 @@ on: types: [published] env: - PYTHON_VERSION: "3.13" + PYTHON_VERSION: "3.14" jobs: publish-docs: diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index d565adcde..cd147e614 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -23,7 +23,7 @@ requirements: 
- pip - setuptools >=60 run: - - python >={{ python_min }},<3.14 + - python >={{ python_min }},<3.13 - cartopy >=0.18.0 - cartopy_offlinedata - cmocean diff --git a/dev-spec.txt b/dev-spec.txt index 588a0621f..a5f9ca359 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -2,7 +2,7 @@ # $ conda create --name --file # Base -python >=3.10,<3.14 +python >=3.10 cartopy >=0.18.0 cartopy_offlinedata cmocean diff --git a/pyproject.toml b/pyproject.toml index c48fe32e6..9b27eca40 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ description = """\ """ license = { file = "LICENSE" } readme = "README.md" -requires-python = ">=3.10,<3.14" +requires-python = ">=3.10" classifiers = [ # these are only for searching/browsing projects on PyPI "Programming Language :: Python :: 3.10", From 5c2075d200ddcc8d9216ff2167073956b1834d34 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 5 Dec 2025 04:07:58 -0600 Subject: [PATCH 17/50] Revert "Drop python 3.14 from trove classifiers" This reverts commit f7ad851bbbc7a6ae653c93af68591269a0caa5a3. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 9b27eca40..ffab97d4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Development Status :: 5 - Production/Stable", From a4c016a98a57cfc330632b087492a68f1add0831 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 03:39:19 -0600 Subject: [PATCH 18/50] Fix multiprocessing pickling issue in python 3.14 Namelists and streams objects cannot be pickled, but multiprocessing in python 3.14 tries to pickle AnalysisTask objects. With this fix, the attributes associated with namelists and streams are cleared after setup_and_check() is called, and restored before run_task() is called. 
--- mpas_analysis/__main__.py | 3 ++ mpas_analysis/shared/analysis_task.py | 50 ++++++++++++++++++++------- 2 files changed, 41 insertions(+), 12 deletions(-) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index cab22ea39..27564e092 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -576,6 +576,9 @@ def add_task_and_subtasks(analysisTask, analysesToGenerate, verbose, totalFailures += 1 return totalFailures + # clear unpicklable attributes before running the task + analysisTask.clear_namelists_and_streams() + analysesToGenerate[key] = analysisTask analysisTask._setupStatus = 'success' assert(totalFailures == 0) diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index c72903993..2b601baba 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -203,18 +203,7 @@ def setup_and_check(self): self.plotsDirectory = build_config_full_path(self.config, 'output', 'plotsSubdirectory') - namelistFileName = build_config_full_path( - self.config, 'input', - '{}NamelistFileName'.format(self.componentName)) - self.namelist = NameList(namelistFileName) - - streamsFileName = build_config_full_path( - self.config, 'input', - '{}StreamsFileName'.format(self.componentName)) - self.runStreams = StreamsFile(streamsFileName, - streamsdir=self.runDirectory) - self.historyStreams = StreamsFile(streamsFileName, - streamsdir=self.historyDirectory) + self.load_namelists_and_streams() self.calendar = self.namelist.get('config_calendar_type') @@ -232,6 +221,40 @@ def setup_and_check(self): self._logFileName = '{}/{}.log'.format(logsDirectory, self.fullTaskName) + def load_namelists_and_streams(self): + """ + Load namelist and streams attributes. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + namelistFileName = build_config_full_path( + self.config, 'input', + '{}NamelistFileName'.format(self.componentName)) + self.namelist = NameList(namelistFileName) + + streamsFileName = build_config_full_path( + self.config, 'input', + '{}StreamsFileName'.format(self.componentName)) + self.runStreams = StreamsFile(streamsFileName, + streamsdir=self.runDirectory) + self.historyStreams = StreamsFile(streamsFileName, + streamsdir=self.historyDirectory) + + def clear_namelists_and_streams(self): + """ + Clear namelist and streams attributes that cannot be pickled for + multiprocessing. + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.namelist = None + self.runStreams = None + self.historyStreams = None + def run_task(self): """ Run the analysis. Each task should override this function to do the @@ -320,6 +343,9 @@ def run(self, writeLogFile=True): startTime = time.time() try: + # reload namelists and streams, since they cannot be pickled + # as part of multiprocessing + self.load_namelists_and_streams() self.run_task() self._runStatus.value = AnalysisTask.SUCCESS except (Exception, BaseException) as e: From 64cea42b6cdb6b1d0932a5e6c7455fad7f83c054 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 04:15:05 -0600 Subject: [PATCH 19/50] Don't pickle runAfterTasks and subtasks --- mpas_analysis/shared/analysis_task.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index 2b601baba..81aa8d5ba 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -551,6 +551,22 @@ def get_mesh_filename(self): return meshFilename + def __getstate__(self): + state = self.__dict__.copy() + + # Clear out attributes that should not be pickled + state['runAfterTasks'] = [] + state['subtasks'] = [] + # Drop process internals and logger that can't/shouldn't be pickled + for 
key in ['_popen', 'logger', '_stackTrace']: + state.pop(key, None) + + # Drop weakref-bearing Finalize, etc., by not pickling _popen at all + # _runStatus is a multiprocessing.Value; depending on your logic, + # you may also want to skip it and let child initialize its own. + + return state + # }}} From 808d768d9a169ebb8ea10d727b00778fe4ce4892 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 04:19:11 -0600 Subject: [PATCH 20/50] Fix deprecated importlib.resources.contents --- mpas_analysis/__main__.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 27564e092..7c4b7dc98 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -33,7 +33,7 @@ import time import json from importlib.metadata import Distribution -from importlib.resources import contents +from importlib.resources import files from mache import discover_machine, MachineInfo @@ -1078,10 +1078,11 @@ def main(): except FileNotFoundError: possible_machines = [] - machine_configs = contents('mache.machines') + machine_configs = files('mache.machines').iterdir() for config in machine_configs: - if config.endswith('.cfg'): - possible_machines.append(os.path.splitext(config)[0]) + if config.name.endswith('.cfg'): + possible_machines.append( + os.path.splitext(config.name)[0]) possible_machines = '\n '.join(sorted(possible_machines)) raise ValueError( From d1508a88725c406b935646824569d0f94b463eb5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 04:23:15 -0600 Subject: [PATCH 21/50] Detect python versions in dev and unified suites Previously, the python version was hard-coded --- suite/run_dev_suite.bash | 5 +++-- suite/run_e3sm_unified_suite.bash | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/suite/run_dev_suite.bash b/suite/run_dev_suite.bash index f3db1df68..6b865c5a4 100755 --- a/suite/run_dev_suite.bash +++ b/suite/run_dev_suite.bash @@ -19,7 
+19,8 @@ cd .. machine=$(python -c "from mache import discover_machine; print(discover_machine())") -py=3.13 +py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') + ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean -e ${env_name} ./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -e ${env_name} ./suite/setup.py -p ${py} -r moc_am -b ${branch} -e ${env_name} @@ -33,7 +34,7 @@ py=3.13 # submit the jobs cd ${machine}_test_suite -main_py=3.13 +main_py=${py} cd main_py${main_py} echo main_py${main_py} RES=$(sbatch job_script.bash) diff --git a/suite/run_e3sm_unified_suite.bash b/suite/run_e3sm_unified_suite.bash index b060ede13..32648adfb 100755 --- a/suite/run_e3sm_unified_suite.bash +++ b/suite/run_e3sm_unified_suite.bash @@ -6,7 +6,7 @@ set -e branch=test_e3sm_unified # test building the docs -py=3.13 +py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') machine=${E3SMU_MACHINE} ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --clean From 8bc8fa86eb6a419b966ec509c1e74e2fe569d7c5 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 05:17:35 -0600 Subject: [PATCH 22/50] Fix clearing of namelists and streams We need to do this before starting the task, rather than after setup_and_check() because other tasks may try to access the namelists and streams from a parent or prerequisite task. 
--- mpas_analysis/__main__.py | 3 - mpas_analysis/shared/analysis_task.py | 84 +++++++++++++++------------ 2 files changed, 48 insertions(+), 39 deletions(-) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 7c4b7dc98..83e9e4fe1 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -576,9 +576,6 @@ def add_task_and_subtasks(analysisTask, analysesToGenerate, verbose, totalFailures += 1 return totalFailures - # clear unpicklable attributes before running the task - analysisTask.clear_namelists_and_streams() - analysesToGenerate[key] = analysisTask analysisTask._setupStatus = 'success' assert(totalFailures == 0) diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index 81aa8d5ba..6cc95d172 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -203,7 +203,7 @@ def setup_and_check(self): self.plotsDirectory = build_config_full_path(self.config, 'output', 'plotsSubdirectory') - self.load_namelists_and_streams() + self._load_namelists_and_streams() self.calendar = self.namelist.get('config_calendar_type') @@ -221,40 +221,6 @@ def setup_and_check(self): self._logFileName = '{}/{}.log'.format(logsDirectory, self.fullTaskName) - def load_namelists_and_streams(self): - """ - Load namelist and streams attributes. - """ - # Authors - # ------- - # Xylar Asay-Davis - - namelistFileName = build_config_full_path( - self.config, 'input', - '{}NamelistFileName'.format(self.componentName)) - self.namelist = NameList(namelistFileName) - - streamsFileName = build_config_full_path( - self.config, 'input', - '{}StreamsFileName'.format(self.componentName)) - self.runStreams = StreamsFile(streamsFileName, - streamsdir=self.runDirectory) - self.historyStreams = StreamsFile(streamsFileName, - streamsdir=self.historyDirectory) - - def clear_namelists_and_streams(self): - """ - Clear namelist and streams attributes that cannot be pickled for - multiprocessing. 
- """ - # Authors - # ------- - # Xylar Asay-Davis - - self.namelist = None - self.runStreams = None - self.historyStreams = None - def run_task(self): """ Run the analysis. Each task should override this function to do the @@ -305,6 +271,19 @@ def add_subtask(self, subtask): if subtask not in self.subtasks: self.subtasks.append(subtask) + def start(self): + """ + Clear unpicklable attributes and then start the analysis task as a new + process. + """ + # Authors + # ------- + # Xylar Asay-Davis + # clear unpicklable attributes before running the task + self._clear_namelists_and_streams() + + super(AnalysisTask, self).start() + def run(self, writeLogFile=True): """ Sets up logging and then runs the analysis task. @@ -345,7 +324,7 @@ def run(self, writeLogFile=True): try: # reload namelists and streams, since they cannot be pickled # as part of multiprocessing - self.load_namelists_and_streams() + self._load_namelists_and_streams() self.run_task() self._runStatus.value = AnalysisTask.SUCCESS except (Exception, BaseException) as e: @@ -567,6 +546,39 @@ def __getstate__(self): return state + def _load_namelists_and_streams(self): + """ + Load namelist and streams attributes. + """ + # Authors + # ------- + # Xylar Asay-Davis + + namelistFileName = build_config_full_path( + self.config, 'input', + '{}NamelistFileName'.format(self.componentName)) + self.namelist = NameList(namelistFileName) + + streamsFileName = build_config_full_path( + self.config, 'input', + '{}StreamsFileName'.format(self.componentName)) + self.runStreams = StreamsFile(streamsFileName, + streamsdir=self.runDirectory) + self.historyStreams = StreamsFile(streamsFileName, + streamsdir=self.historyDirectory) + + def _clear_namelists_and_streams(self): + """ + Clear namelist and streams attributes that cannot be pickled for + multiprocessing. 
+ """ + # Authors + # ------- + # Xylar Asay-Davis + + self.namelist = None + self.runStreams = None + self.historyStreams = None # }}} From 441db3ff9ea50ce66a97d02d9bdf4bf0ab57fae8 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 06:17:53 -0600 Subject: [PATCH 23/50] Fixes based on code review --- mpas_analysis/__main__.py | 6 +++--- mpas_analysis/shared/analysis_task.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 83e9e4fe1..155b5d824 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -1076,10 +1076,10 @@ def main(): possible_machines = [] machine_configs = files('mache.machines').iterdir() - for config in machine_configs: - if config.name.endswith('.cfg'): + for config_file in machine_configs: + if config_file.name.endswith('.cfg'): possible_machines.append( - os.path.splitext(config.name)[0]) + os.path.splitext(config_file.name)[0]) possible_machines = '\n '.join(sorted(possible_machines)) raise ValueError( diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index 6cc95d172..5507b7624 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -531,9 +531,25 @@ def get_mesh_filename(self): return meshFilename def __getstate__(self): + """ + Customize pickling to exclude unpicklable and unnecessary attributes. + This method is called during multiprocessing when the task is + serialized to be sent to a child process. We exclude task dependencies + and process internals that don't need to be transferred, such as logger + objects, process internals, and weakref-bearing attributes. + + Returns + ------- + state : dict + The object state with unpicklable and unnecessary attributes + removed. 
+ """ state = self.__dict__.copy() # Clear out attributes that should not be pickled + state['namelist'] = None + state['runStreams'] = None + state['historyStreams'] = None state['runAfterTasks'] = [] state['subtasks'] = [] # Drop process internals and logger that can't/shouldn't be pickled From dbbe7ac6d7c1a707a6ba69d888050e6194b7abd3 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 3 Dec 2025 08:42:15 -0600 Subject: [PATCH 24/50] Store cp and rho for computing OHC anomalies We don't want to rely on retrieving them from namelists at runtime because the `self.namelist` object may not exist. The mesh filename also needs to be stored since it isn't necessarily available from the streams object at runtime. --- .../ocean/climatology_map_ohc_anomaly.py | 19 +++++++-- .../ocean/time_series_ohc_anomaly.py | 41 ++++++++++++++++--- 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py index e0a39b1c2..8806e52d2 100644 --- a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py +++ b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py @@ -182,6 +182,12 @@ class RemapMpasOHCClimatology(RemapMpasClimatologySubtask): min_depth, max_depth : float The minimum and maximum depths for integration + + cp : float + Specific heat of seawater [J/(kg*degC)] + + rho : float + Reference density of seawater [kg/m3] """ def __init__(self, mpas_climatology_task, ref_year_climatology_task, @@ -239,6 +245,8 @@ def __init__(self, mpas_climatology_task, ref_year_climatology_task, self.run_after(ref_year_climatology_task) self.min_depth = min_depth self.max_depth = max_depth + self.cp = None + self.rho = None def setup_and_check(self): """ @@ -255,6 +263,9 @@ def setup_and_check(self): self.ref_year_climatology_task.add_variables(self.variableList, self.seasons) + self.cp = self.namelist.getfloat('config_specific_heat_sea_water') + self.rho = 
self.namelist.getfloat('config_density0') + def customize_masked_climatology(self, climatology, season): """ Compute the ocean heat content (OHC) anomaly from the temperature @@ -298,10 +309,10 @@ def _compute_ohc(self, climatology): ds_mesh = xr.open_dataset(self.meshFilename) ds_mesh = ds_mesh.isel(Time=0) - # specific heat [J/(kg*degC)] - cp = self.namelist.getfloat('config_specific_heat_sea_water') - # [kg/m3] - rho = self.namelist.getfloat('config_density0') + cp = self.cp + assert cp is not None, "Specific heat 'cp' has not been set" + rho = self.rho + assert rho is not None, "Reference density 'rho' has not been set" units_scale_factor = 1e-9 diff --git a/mpas_analysis/ocean/time_series_ohc_anomaly.py b/mpas_analysis/ocean/time_series_ohc_anomaly.py index b3dbf21d0..a81d56d80 100644 --- a/mpas_analysis/ocean/time_series_ohc_anomaly.py +++ b/mpas_analysis/ocean/time_series_ohc_anomaly.py @@ -31,6 +31,17 @@ class TimeSeriesOHCAnomaly(AnalysisTask): """ Performs analysis of ocean heat content (OHC) from time-series output. + + Attributes + ---------- + cp : float + Specific heat of seawater [J/(kg*degC)] + + rho : float + Reference density of seawater [kg/m3] + + meshFilename : str + The path to the MPAS mesh file """ # Authors # ------- @@ -132,6 +143,21 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): plotTask.run_after(anomalyTask) self.add_subtask(plotTask) + self.cp = None + self.rho = None + self.meshFilename = None + + def setup_and_check(self): + """ + Store the specific heat and reference density of seawater for use + in OHC calculations. + """ + super().setup_and_check() + + self.cp = self.namelist.getfloat('config_specific_heat_sea_water') + self.rho = self.namelist.getfloat('config_density0') + self.meshFilename = self.get_mesh_filename() + def _compute_ohc(self, ds): """ Compute the OHC time series. 
@@ -139,10 +165,15 @@ def _compute_ohc(self, ds):
         """
         # for convenience, rename the variables to simpler, shorter names
         ds = ds.rename(self.variableDict)
 
-        # specific heat [J/(kg*degC)]
-        cp = self.namelist.getfloat('config_specific_heat_sea_water')
-        # [kg/m3]
-        rho = self.namelist.getfloat('config_density0')
+        # these need to be set at setup time, not at runtime because pickling
+        # means the namelists and streams objects they come from aren't
+        # available at runtime
+        cp = self.cp
+        assert cp is not None, "Specific heat 'cp' has not been set"
+        rho = self.rho
+        assert rho is not None, "Reference density 'rho' has not been set"
+        meshFile = self.meshFilename
+        assert meshFile is not None, "Mesh filename has not been set"
 
         unitsScalefactor = 1e-22
@@ -152,8 +183,6 @@
         ds.ohc.attrs['units'] = '$10^{22}$ J'
         ds.ohc.attrs['description'] = 'Ocean heat content in each region'
 
-        meshFile = self.get_mesh_filename()
-
         # Define/read in general variables
         with xr.open_dataset(meshFile) as dsMesh:
             # reference depth [m]

From b3dd117fa0abae206eafd581565e52faee360090 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 15 Dec 2025 16:15:23 +0000
Subject: [PATCH 25/50] Bump actions/cache from 4 to 5

Bumps [actions/cache](https://github.com/actions/cache) from 4 to 5.
- [Release notes](https://github.com/actions/cache/releases)
- [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md)
- [Commits](https://github.com/actions/cache/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/cache
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...
Signed-off-by: dependabot[bot] --- .github/workflows/build_workflow.yml | 2 +- .github/workflows/docs_workflow.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 767f407de..796acc586 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -41,7 +41,7 @@ jobs: - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@v5 env: # Increase this value to reset cache if conda-dev-spec.template has not changed in the workflow CACHE_NUMBER: 0 diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 385849fd3..109cc9391 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -26,7 +26,7 @@ jobs: fetch-depth: 0 - name: Cache Conda - uses: actions/cache@v4 + uses: actions/cache@v5 env: # Increase this value to reset cache if deploy/conda-dev-spec.template has not changed in the workflow CACHE_NUMBER: 0 From 3a75889fd0719619271463bd1e6a5d48388d6c98 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 14 Jan 2026 04:18:19 -0600 Subject: [PATCH 26/50] Fix control run support for El Nino 3.4 Index We need to drop the `nOceanRegions` dimension from the control dataset just as we do for the main dataset. 
--- mpas_analysis/ocean/index_nino34.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/mpas_analysis/ocean/index_nino34.py b/mpas_analysis/ocean/index_nino34.py index 4a3b61c3c..772fb0d4f 100644 --- a/mpas_analysis/ocean/index_nino34.py +++ b/mpas_analysis/ocean/index_nino34.py @@ -221,6 +221,9 @@ def run_task(self): ninoIndexNumber)) varName = self.variableList[0] regionSST = ds[varName] + self.logger.debug('Main run SST dims=%s shape=%s', + getattr(regionSST, 'dims', None), + getattr(regionSST, 'shape', None)) nino34Main = self._compute_nino34_index(regionSST, calendar) # Compute the observational index over the entire time range @@ -270,7 +273,14 @@ def run_task(self): dsRef = add_standard_regions_and_subset( dsRef, self.controlConfig, regionShortNames=[regionToPlot]) + # we want to collapse the nOceanRegions dimension (same as main) + if 'nOceanRegions' in dsRef.dims: + dsRef = dsRef.isel(nOceanRegions=0) + regionSSTRef = dsRef[varName] + self.logger.debug('Control run SST dims=%s shape=%s', + getattr(regionSSTRef, 'dims', None), + getattr(regionSSTRef, 'shape', None)) nino34Ref = self._compute_nino34_index(regionSSTRef, calendar) nino34s = [nino34Subset, nino34Main[2:-3], nino34Ref[2:-3]] @@ -499,7 +509,19 @@ def _running_mean(self, inputData, wgts): sp = (len(wgts) - 1) // 2 runningMean = inputData.copy() for k in range(sp, nt - (sp + 1)): - runningMean[k] = sum(wgts * inputData[k - sp:k + sp + 1].values) + windowValues = np.asarray(inputData[k - sp:k + sp + 1].values) + if windowValues.shape[0] != len(wgts): + raise ValueError( + 'Unexpected running-mean window shape. ' + f'Expected first dimension {len(wgts)} but got {windowValues.shape}. 
' + f'inputData dims={getattr(inputData, "dims", None)} ' + f'shape={getattr(inputData, "shape", None)}') + + if windowValues.ndim == 1: + runningMean[k] = np.sum(wgts * windowValues) + else: + # weighted sum over the first axis (the running-mean window) + runningMean[k] = np.tensordot(wgts, windowValues, axes=(0, 0)) return runningMean From cb0a9c2fcfc80e809cbc6c1f815c4cbac3bc7446 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 07:58:12 -0600 Subject: [PATCH 27/50] Fix time masking in sea-ice time series We need to compute the time mask before we can use it as an index. --- mpas_analysis/sea_ice/time_series.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/mpas_analysis/sea_ice/time_series.py b/mpas_analysis/sea_ice/time_series.py index 9e4552072..464953344 100644 --- a/mpas_analysis/sea_ice/time_series.py +++ b/mpas_analysis/sea_ice/time_series.py @@ -691,14 +691,19 @@ def _compute_area_vol(self): mask = dsMesh.latCell < 0 if maxAllowedSeaIceThickness is not None: - mask = np.logical_and(mask, - ds.iceThick <= maxAllowedSeaIceThickness) + mask = np.logical_and( + mask, ds.iceThick <= maxAllowedSeaIceThickness + ) + dsForHemisphere = ds if os.path.exists(outFileNames[hemisphere]): dsCache = xr.open_dataset(outFileNames[hemisphere]) timeMask = ds.startTime > dsCache.startTime.isel(Time=-1) - ds = ds.isel(Time=timeMask) + timeMask = timeMask.compute() + dsForHemisphere = ds.isel(Time=timeMask) - dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells') + dsAreaSum = ( + dsForHemisphere.where(mask) * dsMesh.areaCell + ).sum('nCells') dsAreaSum = dsAreaSum.rename( {'iceConc': 'iceArea', 'iceThick': 'iceVolume', From 3f9d93f661c840a53f907d2efaedfbebadb26312 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 08:28:32 -0600 Subject: [PATCH 28/50] Fix updating z coord of remapped transects We were trying to modify a read-only view of the transect's z array, but we need to make a copy. 
--- mpas_analysis/ocean/sose_transects.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/ocean/sose_transects.py b/mpas_analysis/ocean/sose_transects.py index d9f2bee57..9850e4737 100644 --- a/mpas_analysis/ocean/sose_transects.py +++ b/mpas_analysis/ocean/sose_transects.py @@ -379,7 +379,7 @@ def combine_observations(self): # make a copy of the top set of data at z=0 dsObs = xr.concat((dsObs.isel(z=0), dsObs), dim='z') - z = dsObs.z.values + z = dsObs.z.values.copy() z[0] = 0. dsObs['z'] = ('z', z) write_netcdf_with_fill(dsObs, combinedFileName) From d5b10e2dcf8a4a4d7730925143a9ea597d1e851a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 08:57:09 -0600 Subject: [PATCH 29/50] Fix spacing in global plots from main vs. control runs Before this fix, there wasn't enough space for the two-line control title. --- mpas_analysis/shared/plot/climatology_map.py | 37 ++++++++++++++++---- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index 0eb1e6092..7b5550da7 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -414,6 +414,9 @@ def _plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, plottitle_font = {'size': config.get('plot', 'threePanelPlotTitleFontSize')} + multi_line_ref_title = ( + refArray is not None and refTitle is not None and '\n' in refTitle) + if refArray is None: subplots = [111] else: @@ -427,21 +430,43 @@ def _plot_panel(ax, title, array, colormap, norm, levels, ticks, contours, dictDiff = setup_colormap(config, colorMapSectionName, suffix='Difference') axes = [] - ax = plt.subplot(subplots[0], projection=projection) - _plot_panel(ax, modelTitle, modelArray, **dictModelRef) - axes.append(ax) + if refArray is not None and multi_line_ref_title: + # Use a GridSpec with unequal gaps but equal-sized panels + gs = 
fig.add_gridspec( + nrows=5, + ncols=1, + height_ratios=[1.0, 0.18, 1.0, 0.08, 1.0]) + + ax = fig.add_subplot(gs[0, 0], projection=projection) + _plot_panel(ax, modelTitle, modelArray, **dictModelRef) + axes.append(ax) - if refArray is not None: - ax = plt.subplot(subplots[1], projection=projection) + ax = fig.add_subplot(gs[2, 0], projection=projection) _plot_panel(ax, refTitle, refArray, **dictModelRef) axes.append(ax) - ax = plt.subplot(subplots[2], projection=projection) + ax = fig.add_subplot(gs[4, 0], projection=projection) _plot_panel(ax, diffTitle, diffArray, **dictDiff) axes.append(ax) + else: + ax = plt.subplot(subplots[0], projection=projection) + _plot_panel(ax, modelTitle, modelArray, **dictModelRef) + axes.append(ax) + + if refArray is not None: + ax = plt.subplot(subplots[1], projection=projection) + _plot_panel(ax, refTitle, refArray, **dictModelRef) + axes.append(ax) + + ax = plt.subplot(subplots[2], projection=projection) + _plot_panel(ax, diffTitle, diffArray, **dictDiff) + axes.append(ax) _add_stats(modelArray, refArray, diffArray, Lats, axes) + # Note: in the multi-line reference-title case, uneven spacing is handled + # via GridSpec so all three panels keep identical sizes. 
+ if fileout is not None: savefig(fileout, config, pad_inches=0.2) From e6211fc97fbb6573b50a9b5e5f4b32faebd7c5fa Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 09:01:28 -0600 Subject: [PATCH 30/50] Give statistic test more room --- mpas_analysis/shared/plot/climatology_map.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py index 7b5550da7..8be601a78 100644 --- a/mpas_analysis/shared/plot/climatology_map.py +++ b/mpas_analysis/shared/plot/climatology_map.py @@ -800,12 +800,12 @@ def _add_stats(modelArray, refArray, diffArray, Lats, axes): def _add_stats_text(names, values, ax, loc): if loc == 'upper': - text_ax = inset_axes(ax, width='17%', height='20%', loc='upper right', - bbox_to_anchor=(0.2, 0.1, 1., 1.), + text_ax = inset_axes(ax, width='19%', height='20%', loc='upper right', + bbox_to_anchor=(0.22, 0.1, 1., 1.), bbox_transform=ax.transAxes, borderpad=0) else: - text_ax = inset_axes(ax, width='17%', height='20%', loc='lower right', - bbox_to_anchor=(0.2, 0.03, 1., 1.), + text_ax = inset_axes(ax, width='19%', height='20%', loc='lower right', + bbox_to_anchor=(0.22, 0.03, 1., 1.), bbox_transform=ax.transAxes, borderpad=0) text = '\n'.join(names) From b43aca78df20697e015f80e7af87e0ef93a90aec Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 09:12:36 -0600 Subject: [PATCH 31/50] Fix units for mass flux plots --- mpas_analysis/ocean/climatology_map_fluxes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mpas_analysis/ocean/climatology_map_fluxes.py b/mpas_analysis/ocean/climatology_map_fluxes.py index 5e503d8e1..668b33db0 100644 --- a/mpas_analysis/ocean/climatology_map_fluxes.py +++ b/mpas_analysis/ocean/climatology_map_fluxes.py @@ -142,7 +142,7 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None, unitsLabel = r'W m$^{-2}$' else: groupSubtitle = 'Mass fluxes' - 
unitsLabel = r'kg m$^{-2}$ s^${-1}$' + unitsLabel = r'kg m$^{-2}$ s$^{-1}$' subtask.set_plot_info( outFileLabel=outFileName, From 83cb0905e21145435b9421b3e1962bb6d64b0868 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 11:32:55 -0600 Subject: [PATCH 32/50] Update suite to main run to year 8, ctrl to year 10 --- suite/main_vs_ctrl.cfg | 7 ------- suite/run_dev_suite.bash | 1 - suite/run_e3sm_unified_suite.bash | 1 - suite/run_suite.bash | 1 - suite/setup.py | 4 ++++ 5 files changed, 4 insertions(+), 10 deletions(-) diff --git a/suite/main_vs_ctrl.cfg b/suite/main_vs_ctrl.cfg index 2a3d28913..73a21990f 100644 --- a/suite/main_vs_ctrl.cfg +++ b/suite/main_vs_ctrl.cfg @@ -7,10 +7,3 @@ # control run is desired. controlRunConfigFile = ../ctrl.cfg -# config file for a main run on which the analysis was already run to -# completion. The relevant MPAS climatologies already exist and have been -# remapped to the comparison grid and time series have been extracted. -# Leave this option commented out if the analysis for the main run should be -# performed. 
-mainRunConfigFile = ../main.cfg - diff --git a/suite/run_dev_suite.bash b/suite/run_dev_suite.bash index 6b865c5a4..41cdc814e 100755 --- a/suite/run_dev_suite.bash +++ b/suite/run_dev_suite.bash @@ -25,7 +25,6 @@ py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}" ./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -e ${env_name} ./suite/setup.py -p ${py} -r moc_am -b ${branch} -e ${env_name} ./suite/setup.py -p ${py} -r no_ncclimo -b ${branch} -e ${env_name} -./suite/setup.py -p ${py} -r main -b ${branch} -e ${env_name} ./suite/setup.py -p ${py} -r ctrl -b ${branch} -e ${env_name} ./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch} -e ${env_name} ./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions -e ${env_name} diff --git a/suite/run_e3sm_unified_suite.bash b/suite/run_e3sm_unified_suite.bash index 32648adfb..95266b9c4 100755 --- a/suite/run_e3sm_unified_suite.bash +++ b/suite/run_e3sm_unified_suite.bash @@ -13,7 +13,6 @@ machine=${E3SMU_MACHINE} ./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions ./suite/setup.py -p ${py} -r moc_am -b ${branch} ./suite/setup.py -p ${py} -r no_ncclimo -b ${branch} -./suite/setup.py -p ${py} -r main -b ${branch} ./suite/setup.py -p ${py} -r ctrl -b ${branch} ./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch} ./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions diff --git a/suite/run_suite.bash b/suite/run_suite.bash index ee8dd0b16..cf3b54807 100755 --- a/suite/run_suite.bash +++ b/suite/run_suite.bash @@ -59,7 +59,6 @@ conda deactivate py=${alt_py} conda activate test_mpas_analysis_py${py} -./suite/setup.py -p ${py} -r main -b ${branch} ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} conda deactivate diff --git a/suite/setup.py b/suite/setup.py index df41ce0e2..29bf8209e 100755 --- a/suite/setup.py +++ b/suite/setup.py @@ -94,12 +94,16 @@ def main(): generate = "['all', 'no_BGC', 'no_icebergs', 
'no_index', 'no_eke', " \ "'no_waves']" end_year = '10' + ctrl_end_year = '8' else: raise ValueError(f'Unexpected mesh: {mesh}') if args.run == 'mesh_rename': mesh = f'new_{mesh}' + if args.run == 'main_vs_ctrl': + end_year = ctrl_end_year + sbatch = list() if account is not None: sbatch.append(f'#SBATCH -A {account}') From 4d01e3486db4a5980fa7de4ee5245a2b59fb7492 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 4 Feb 2026 12:55:51 -0600 Subject: [PATCH 33/50] Skip hovmoller in main_vs_ctrl --- suite/setup.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/suite/setup.py b/suite/setup.py index 29bf8209e..807819695 100755 --- a/suite/setup.py +++ b/suite/setup.py @@ -91,8 +91,9 @@ def main(): shutil.copytree(os.path.join('docs', '_build', 'html'), docs_path) if mesh == 'oQU240wLI': - generate = "['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', " \ - "'no_waves']" + generate = [ + 'all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', 'no_waves' + ] end_year = '10' ctrl_end_year = '8' else: @@ -103,6 +104,9 @@ def main(): if args.run == 'main_vs_ctrl': end_year = ctrl_end_year + generate.append('no_hovmoller') + + generate_string = f"['" + "', '".join(generate) + "']" sbatch = list() if account is not None: @@ -140,7 +144,7 @@ def main(): use_e3sm_unified=use_e3sm_unified, run_name=args.run, input_base=input_base, simulation=simulation, mesh=mesh, output_base=output_base, html_base=html_base, out_subdir=out_subdir, - generate=generate, end_year=end_year) + generate=generate_string, end_year=end_year) with open(config, 'w') as config_file: config_file.write(config_text) From 9826dac08f17a41879b9898d042db2e55a2b33fb Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:18:20 +0200 Subject: [PATCH 34/50] Update conda recipe to v1 form --- ci/python3.10.yaml | 2 +- ci/python3.11.yaml | 2 +- ci/python3.12.yaml | 2 +- ci/python3.13.yaml | 2 +- ci/python3.14.yaml | 2 +- ci/recipe/meta.yaml | 87 
---------------------------------------- ci/recipe/recipe.yaml | 92 +++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 97 insertions(+), 92 deletions(-) delete mode 100644 ci/recipe/meta.yaml create mode 100644 ci/recipe/recipe.yaml diff --git a/ci/python3.10.yaml b/ci/python3.10.yaml index 366288703..ba317ae3c 100644 --- a/ci/python3.10.yaml +++ b/ci/python3.10.yaml @@ -1,5 +1,5 @@ channel_sources: -- conda-forge,defaults +- conda-forge pin_run_as_build: python: min_pin: x.x diff --git a/ci/python3.11.yaml b/ci/python3.11.yaml index 85b78f32e..ebb868fbd 100644 --- a/ci/python3.11.yaml +++ b/ci/python3.11.yaml @@ -1,5 +1,5 @@ channel_sources: -- conda-forge,defaults +- conda-forge pin_run_as_build: python: min_pin: x.x diff --git a/ci/python3.12.yaml b/ci/python3.12.yaml index 6f2e3cb6c..36e07e4cf 100644 --- a/ci/python3.12.yaml +++ b/ci/python3.12.yaml @@ -1,5 +1,5 @@ channel_sources: -- conda-forge,defaults +- conda-forge pin_run_as_build: python: min_pin: x.x diff --git a/ci/python3.13.yaml b/ci/python3.13.yaml index edf0f22a9..0f52400be 100644 --- a/ci/python3.13.yaml +++ b/ci/python3.13.yaml @@ -1,5 +1,5 @@ channel_sources: -- conda-forge,defaults +- conda-forge pin_run_as_build: python: min_pin: x.x diff --git a/ci/python3.14.yaml b/ci/python3.14.yaml index 5c438d4ac..d2c73f739 100644 --- a/ci/python3.14.yaml +++ b/ci/python3.14.yaml @@ -1,5 +1,5 @@ channel_sources: -- conda-forge,defaults +- conda-forge pin_run_as_build: python: min_pin: x.x diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml deleted file mode 100644 index cd147e614..000000000 --- a/ci/recipe/meta.yaml +++ /dev/null @@ -1,87 +0,0 @@ -{% set name = "MPAS-Analysis" %} -{% set version = "1.14.0" %} -{% set python_min = "3.10" %} - -package: - name: {{ name|lower }} - version: {{ version }} - -source: - path: ../.. - -build: - number: 0 - script: {{ PYTHON }} -m pip install . 
--no-deps --no-build-isolation -vv - noarch: python - entry_points: - - mpas_analysis = mpas_analysis.__main__:main - - download_analysis_data = mpas_analysis.download_data:download_analysis_data - -requirements: - host: - - python {{ python_min }} - - pip - - setuptools >=60 - run: - - python >={{ python_min }},<3.13 - - cartopy >=0.18.0 - - cartopy_offlinedata - - cmocean - - dask - - esmf >=8.4.2,<9.0.0 - - f90nml - - geometric_features >=1.6.1 - - gsw - - lxml - - mache >=1.11.0 - - matplotlib-base >=3.9.0 - - mpas_tools >=1.3.0,<2.0.0 - - nco >=4.8.1,!=5.2.6 - - netcdf4 - - numpy >=2.0,<3.0 - - pandas - - pillow >=10.0.0,<13.0.0 - - progressbar2 - - pyproj - - pyremap >=2.0.0,<3.0.0 - - python-dateutil - - requests - - scipy >=1.7.0 - - shapely >=2.0,<3.0 - - tranche >=0.2.3 - - xarray >=0.14.1 - -test: - requires: - - pytest - - pip - - python {{ python_min }} - imports: - - mpas_analysis - - pytest - commands: - - pip check - - pytest --pyargs mpas_analysis - - mpas_analysis --help - - mpas_analysis --list - - mpas_analysis --plot_colormaps - - download_analysis_data --help - -about: - home: https://github.com/MPAS-Dev/MPAS-Analysis - license: BSD-3-Clause - license_family: BSD - license_file: LICENSE - summary: Analysis of MPAS-Ocean and MPAS-Seaice simulations results - description: | - Analysis for simulations produced with Model for Prediction Across Scales - (MPAS) components and the Energy Exascale Earth System Model (E3SM), which - used those components. 
- doc_url: https://mpas-dev.github.io/MPAS-Analysis/stable/ - dev_url: https://github.com/MPAS-Dev/MPAS-Analysis - -extra: - recipe-maintainers: - - andrewdnolan - - xylar - - jhkennedy diff --git a/ci/recipe/recipe.yaml b/ci/recipe/recipe.yaml new file mode 100644 index 000000000..18d566dbd --- /dev/null +++ b/ci/recipe/recipe.yaml @@ -0,0 +1,92 @@ +schema_version: 1 + +context: + name: MPAS-Analysis + version: 1.14.0 + python_min: 3.10 + +package: + name: ${{ name|lower }} + version: ${{ version }} + +source: + path: ../.. + +build: + number: 0 + noarch: python + script: ${{ PYTHON }} -m pip install . --no-deps --no-build-isolation -vv + python: + entry_points: + - mpas_analysis = mpas_analysis.__main__:main + - download_analysis_data = mpas_analysis.download_data:download_analysis_data + +requirements: + host: + - python ${{ python_min }}.* + - pip + - setuptools >=60 + run: + - python >=${{ python_min }} + - cartopy >=0.18.0 + - cartopy_offlinedata + - cmocean + - dask + - esmf >=8.4.2,<9.0.0 + - f90nml + - geometric_features >=1.6.1 + - gsw + - lxml + - mache >=1.11.0 + - matplotlib-base >=3.9.0 + - mpas_tools >=1.3.0,<2.0.0 + - nco >=4.8.1,!=5.2.6 + - netcdf4 + - numpy >=2.0,<3.0 + - pandas + - pillow >=10.0.0,<11.0.0 + - progressbar2 + - pyproj + - pyremap >=2.0.0,<3.0.0 + - python-dateutil + - requests + - scipy >=1.7.0 + - setuptools + - shapely >=2.0,<3.0 + - tranche >=0.2.3 + - xarray >=0.14.1 + +tests: + - python: + imports: + - mpas_analysis + pip_check: true + python_version: ${{ python_min }}.* + - requirements: + run: + - pytest + - python ${{ python_min }}.* + script: + - pytest --pyargs mpas_analysis + - mpas_analysis --help + - mpas_analysis --list + - mpas_analysis --plot_colormaps + - download_analysis_data --help + +about: + license: BSD-3-Clause + license_file: LICENSE + summary: Analysis of MPAS-Ocean and MPAS-Seaice simulations results + description: | + Analysis for simulations produced with Model for Prediction Across Scales + (MPAS) 
components and the Energy Exascale Earth System Model (E3SM), which + used those components. + homepage: https://github.com/MPAS-Dev/MPAS-Analysis + repository: https://github.com/MPAS-Dev/MPAS-Analysis + documentation: https://mpas-dev.github.io/MPAS-Analysis/stable/ + +extra: + recipe-maintainers: + - andrewdnolan + - xylar + - jhkennedy From 1adf97c4628525d089451b1348c64aead25673f9 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:24:12 +0200 Subject: [PATCH 35/50] Add pixi dev workflow --- .gitignore | 7 ++++- pixi.toml | 75 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 pixi.toml diff --git a/.gitignore b/.gitignore index 3512ba6ed..d36b91b3d 100644 --- a/.gitignore +++ b/.gitignore @@ -93,6 +93,8 @@ ENV/ .ropeproject .DS_Store +.pixi/ +pixi.lock # test suites /anvil_test_suite/ @@ -101,4 +103,7 @@ ENV/ /compy_test_suite/ # vscode settings -.vscode/ \ No newline at end of file +.vscode/ + +# codex +.codex diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 000000000..a58d9a9f2 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,75 @@ +[workspace] +channels = ["conda-forge"] +name = "mpas-analysis" +platforms = ["linux-64"] + +[dependencies] +python = ">=3.10" +cartopy = ">=0.18.0" +cartopy_offlinedata = "*" +cmocean = "*" +dask = "*" +esmf = { version = ">=8.4.2,<9.0.0", build = "mpi_mpich_*" } +f90nml = "*" +geometric_features = ">=1.6.1" +gsw = "*" +lxml = "*" +mache = ">=1.11.0" +matplotlib-base = ">=3.9.0" +mpas_tools = ">=1.3.0,<2.0.0" +nco = ">=4.8.1,!=5.2.6" +netcdf4 = "*" +numpy = ">=2.0,<3.0" +pandas = "*" +pillow = ">=10.0.0,<13.0.0" +progressbar2 = "*" +pyproj = "*" +pyremap = ">=2.0.0,<3.0.0" +python-dateutil = "*" +requests = "*" +scipy = ">=1.7.0" +shapely = ">=2.0,<3.0" +tranche = ">=0.2.3" +xarray = ">=0.14.1" + +[pypi-dependencies] +mpas-analysis = { path = ".", editable = true } + +[feature.dev.dependencies] +pip = "*" +pytest = "*" +setuptools 
= ">=60" + +[feature.docs.dependencies] +m2r2 = ">=0.3.3" +mistune = "<2" +mock = "*" +sphinx = "*" +sphinx_rtd_theme = "*" +tabulate = "*" + +[feature.build.dependencies] +rattler-build = "*" + +[feature.py310.dependencies] +python = "3.10.*" + +[feature.py311.dependencies] +python = "3.11.*" + +[feature.py312.dependencies] +python = "3.12.*" + +[feature.py313.dependencies] +python = "3.13.*" + +[feature.py314.dependencies] +python = "3.14.*" + +[environments] +default = ["dev", "docs", "build"] +py310 = ["py310", "dev", "docs", "build"] +py311 = ["py311", "dev", "docs", "build"] +py312 = ["py312", "dev", "docs", "build"] +py313 = ["py313", "dev", "docs", "build"] +py314 = ["py314", "dev", "docs", "build"] From d69029d4befaf4599f56042d9ffdf61caa003cf0 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:27:46 +0200 Subject: [PATCH 36/50] Update CI to use pixi --- .github/workflows/build_workflow.yml | 113 +++++++++++++++++---------- .github/workflows/docs_workflow.yml | 98 +++++++++-------------- 2 files changed, 107 insertions(+), 104 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 796acc586..d596197e4 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -22,12 +22,24 @@ jobs: name: test mpas_analysis - python ${{ matrix.python-version }} runs-on: ubuntu-latest timeout-minutes: 20 - defaults: - run: - shell: bash -l {0} strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] + include: + - python-version: "3.10" + pixi-environment: py310 + variant-file: ci/python3.10.yaml + - python-version: "3.11" + pixi-environment: py311 + variant-file: ci/python3.11.yaml + - python-version: "3.12" + pixi-environment: py312 + variant-file: ci/python3.12.yaml + - python-version: "3.13" + pixi-environment: py313 + variant-file: ci/python3.13.yaml + - python-version: "3.14" + pixi-environment: py314 + variant-file: ci/python3.14.yaml 
fail-fast: false steps: - id: skip_check @@ -40,50 +52,69 @@ jobs: uses: actions/checkout@v6 - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - name: Cache Conda - uses: actions/cache@v5 - env: - # Increase this value to reset cache if conda-dev-spec.template has not changed in the workflow - CACHE_NUMBER: 0 + name: Set up Pixi + uses: prefix-dev/setup-pixi@v0.9.3 with: - path: ~/conda_pkgs_dir_py${{ matrix.python-version }} - key: - ${{ runner.os }}-${{ matrix.python-version }}-conda-${{ env.CACHE_NUMBER }}-${{ - hashFiles('dev-spec.txt,pyproject.toml') }} - - - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - name: Set up Conda Environment - uses: mamba-org/setup-micromamba@v2 - with: - environment-name: mpas_analysis_dev - init-shell: bash - condarc: | - channel_priority: strict - channels: - - conda-forge - create-args: >- - python=${{ matrix.python-version }} - - - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - name: Install mpas_analysis - run: | - conda install -y --file dev-spec.txt \ - python=${{ matrix.python-version }} - python -m pip install --no-deps --no-build-isolation -vv -e . 
+ cache: true + cache-write: ${{ github.event_name == 'push' }} + environments: ${{ matrix.pixi-environment }} - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Run Tests env: - CHECK_IMAGES: False + CHECK_IMAGES: "False" run: | set -e - pip check - pytest --pyargs mpas_analysis - mpas_analysis --help - download_analysis_data --help + pixi run -e ${{ matrix.pixi-environment }} python -m pip check + pixi run -e ${{ matrix.pixi-environment }} pytest --pyargs mpas_analysis + pixi run -e ${{ matrix.pixi-environment }} mpas_analysis --help + pixi run -e ${{ matrix.pixi-environment }} download_analysis_data --help - - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} + - if: ${{ steps.skip_check.outputs.should_skip != 'true' && matrix.python-version == '3.14' }} name: Build Sphinx Docs run: | - cd docs - DOCS_VERSION=test make versioned-html + pixi run -e ${{ matrix.pixi-environment }} bash -lc ' + cd docs + DOCS_VERSION=test make versioned-html + ' + + package: + name: build package - python ${{ matrix.python-version }} + runs-on: ubuntu-latest + timeout-minutes: 30 + strategy: + matrix: + include: + - python-version: "3.10" + pixi-environment: py310 + variant-file: ci/python3.10.yaml + - python-version: "3.11" + pixi-environment: py311 + variant-file: ci/python3.11.yaml + - python-version: "3.12" + pixi-environment: py312 + variant-file: ci/python3.12.yaml + - python-version: "3.13" + pixi-environment: py313 + variant-file: ci/python3.13.yaml + - python-version: "3.14" + pixi-environment: py314 + variant-file: ci/python3.14.yaml + fail-fast: false + steps: + - uses: actions/checkout@v6 + + - name: Set up Pixi + uses: prefix-dev/setup-pixi@v0.9.3 + with: + cache: true + cache-write: ${{ github.event_name == 'push' }} + environments: ${{ matrix.pixi-environment }} + + - name: Build Conda Package with rattler-build + run: | + pixi run -e ${{ matrix.pixi-environment }} \ + rattler-build build \ + -m ${{ matrix.variant-file }} \ + -r 
ci/recipe/recipe.yaml \ + --output-dir rattler-build-output diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 109cc9391..0dd0fa064 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -15,9 +15,6 @@ env: jobs: publish-docs: runs-on: ubuntu-latest - defaults: - run: - shell: bash -l {0} timeout-minutes: 20 steps: - uses: actions/checkout@v6 @@ -25,78 +22,54 @@ jobs: persist-credentials: false fetch-depth: 0 - - name: Cache Conda - uses: actions/cache@v5 - env: - # Increase this value to reset cache if deploy/conda-dev-spec.template has not changed in the workflow - CACHE_NUMBER: 0 + - name: Set up Pixi + uses: prefix-dev/setup-pixi@v0.9.3 with: - path: ~/conda_pkgs_dir - key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ - hashFiles('dev-spec.txt') }} - - - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - name: Set up Conda Environment - uses: mamba-org/setup-micromamba@v2 - with: - environment-name: mpas_analysis_dev - init-shell: bash - condarc: | - channel_priority: strict - channels: - - conda-forge - create-args: >- - python=${{ env.PYTHON_VERSION }} - - - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} - name: Install mpas_analysis - run: | - git config --global url."https://github.com/".insteadOf "git@github.com:" - conda install -y --file dev-spec.txt \ - python=${{ env.PYTHON_VERSION }} - python -m pip install -vv --no-deps --no-build-isolation -e . 
+ cache: true + cache-write: ${{ github.event_name == 'push' }} + environments: py314 - name: Build Sphinx Docs run: | set -e - pip check - mpas_analysis sync diags --help - cd docs - DOCS_VERSION=${{ github.ref_name }} make versioned-html + git config --global url."https://github.com/".insteadOf "git@github.com:" + pixi run -e py314 python -m pip check + pixi run -e py314 mpas_analysis sync diags --help + pixi run -e py314 bash -lc ' + cd docs + DOCS_VERSION=${{ github.ref_name }} make versioned-html + ' + - name: Copy Docs and Commit run: | set -e - pip check - mpas_analysis sync diags --help - cd docs - # gh-pages branch must already exist - git clone https://github.com/MPAS-Dev/MPAS-Analysis.git --branch gh-pages --single-branch gh-pages + pixi run -e py314 bash -lc ' + cd docs + # gh-pages branch must already exist + git clone https://github.com/MPAS-Dev/MPAS-Analysis.git --branch gh-pages --single-branch gh-pages - # Only replace docs in a directory with the destination branch name with latest changes. Docs for - # releases should be untouched. - rm -rf gh-pages/${{ github.ref_name }} + # Only replace docs in a directory with the destination branch name with latest changes. Docs for + # releases should be untouched. + rm -rf gh-pages/${{ github.ref_name }} - # don't clobber existing release versions (in case we retroactively fixed them) - cp -r _build/html/${{ github.ref_name }} gh-pages/ + # do not clobber existing release versions if they were updated manually + cp -r _build/html/${{ github.ref_name }} gh-pages/ - mkdir -p gh-pages/shared - cp shared/version-switcher.js gh-pages/shared/version-switcher.js + mkdir -p gh-pages/shared + cp shared/version-switcher.js gh-pages/shared/version-switcher.js - # Update the list of versions with all versions in the gh-pages directory. - python generate_versions_json.py + # Update the list of versions with all versions in the gh-pages directory. 
+ python generate_versions_json.py + + cd gh-pages + touch .nojekyll + printf "" > index.html + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + git add . + git commit -m "Update documentation" -a || true + ' - # Make sure we're in the gh-pages directory. - cd gh-pages - # Create `.nojekyll` (if it doesn't already exist) for proper GH Pages configuration. - touch .nojekyll - # Add `index.html` to point to the `develop` branch automatically. - printf '' > index.html - # Configure git using GitHub Actions credentials. - git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" - git config --local user.name "github-actions[bot]" - # The second command will fail if no changes were present, so we ignore it - git add . - git commit -m "Update documentation" -a || true - name: Push Changes uses: ad-m/github-push-action@master with: @@ -104,4 +77,3 @@ jobs: directory: docs/gh-pages github_token: ${{ secrets.GITHUB_TOKEN }} force: true - From ea0adf25774f1a95576096771316a2633197a416 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:37:34 +0200 Subject: [PATCH 37/50] Update the docs for the new pixi dev workflow --- docs/developers_guide/docs.rst | 3 +- docs/developers_guide/quick_start.rst | 40 ++--- docs/developers_guide/test_suite.rst | 7 +- docs/tutorials/dev_add_task.rst | 21 ++- docs/tutorials/dev_getting_started.rst | 193 +++++------------------ docs/users_guide/config/preprocessed.rst | 10 +- docs/users_guide/config/runs.rst | 10 +- 7 files changed, 75 insertions(+), 209 deletions(-) diff --git a/docs/developers_guide/docs.rst b/docs/developers_guide/docs.rst index f211aee48..13fcc94a8 100644 --- a/docs/developers_guide/docs.rst +++ b/docs/developers_guide/docs.rst @@ -1,7 +1,8 @@ Building the Documentation ========================== -With the ``mpas_analysis_dev`` environment activated, you can run: +With the development 
environment active (for example after running +``pixi shell`` from the repository root), you can run: .. code-block:: bash diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst index 8adf5cd14..09e6f738c 100644 --- a/docs/developers_guide/quick_start.rst +++ b/docs/developers_guide/quick_start.rst @@ -52,41 +52,33 @@ MPAS-Analysis development. cd ../ -5. Set Up Conda Environment ---------------------------- -- Install Miniforge3 (recommended) or Miniconda. -- For Miniconda, add ``conda-forge`` channel and set strict priority. -- Create environment: +5. Set Up the Development Environment +------------------------------------- +- Install ``pixi`` by following the official installation instructions at + `pixi.sh `_. On Linux and macOS, a common option + is: - .. code-block:: bash - - conda create -y -n mpas_analysis_dev --file dev-spec.txt + .. code-block:: bash -- Activate: + curl -fsSL https://pixi.sh/install.sh | sh - .. code-block:: bash +- From the root of your worktree, create and activate the development + environment: - conda activate mpas_analysis_dev + .. code-block:: bash -- Install MPAS-Analysis in edit mode: - - .. code-block:: bash + pixi shell - python -m pip install --no-deps --no-build-isolation -e . + ``pixi shell`` will create the default environment on first use and activate + it with MPAS-Analysis installed in editable mode. 6. Activate Environment (each session) -------------------------------------- -- For bash: +- From the root of your worktree, run: - .. code-block:: bash - - source ~/miniforge3/etc/profile.d/conda.sh; conda activate mpas_analysis_dev - -- For csh: - - .. code-block:: csh + .. code-block:: bash - source ~/miniforge3/etc/profile.d/conda.csh; conda activate mpas_analysis_dev + pixi shell 7. 
Configure and Run MPAS-Analysis ---------------------------------- diff --git a/docs/developers_guide/test_suite.rst b/docs/developers_guide/test_suite.rst index 0372ff96b..6b5adb4a1 100644 --- a/docs/developers_guide/test_suite.rst +++ b/docs/developers_guide/test_suite.rst @@ -13,8 +13,8 @@ There are three main scripts for running the test suite: 1. **run_dev_suite.bash** (Developer Testing) - - Use this script after activating your development environment - (must be named `mpas_analysis_dev`). + - Use this script after activating your development environment, typically + with ``pixi shell`` from the repository root. - It builds the documentation and runs a series of analysis tasks on output from a low-resolution (QUwLI240) simulation. @@ -25,8 +25,7 @@ There are three main scripts for running the test suite: .. code-block:: bash - $ source ~/miniforge3/etc/profile.d/conda.sh - $ conda activate mpas_analysis_dev + $ pixi shell $ ./suite/run_dev_suite.bash - After completion, check for successful web page generation, e.g.: diff --git a/docs/tutorials/dev_add_task.rst b/docs/tutorials/dev_add_task.rst index 4ae1c6f72..f1498ad0b 100644 --- a/docs/tutorials/dev_add_task.rst +++ b/docs/tutorials/dev_add_task.rst @@ -34,7 +34,7 @@ the code to MPAS-Analysis. If one just wishes to add a new field that already exists in MPAS-Ocean or MPAS-Seaice output, only a few of the steps below are necessary: - 1. Follow step 1 to set up an ```mpas_analysis_dev``` environment. + 1. Follow step 1 to set up your development environment. 2. Copy an existing `ocean `_ or `sea_ice `_ python module to a new name and edit it as needed for the new fields. @@ -50,15 +50,16 @@ the code to MPAS-Analysis. 
To begin, please follow the :ref:`tutorial_dev_getting_started` tutorial, which will help you through the basics of creating a fork of MPAS-Analysis, cloning it onto the machine(s) where you will do your development, making -a worktree for the feature you will develop, creating a conda environment for -testing your new MPAS-Analysis development, and running MPAS-Analysis. +a worktree for the feature you will develop, creating a development +environment for testing your new MPAS-Analysis work, and running +MPAS-Analysis. .. note:: Make sure you follow the tutorial for developers, not for users, since the tutorial for users installs the latest release of MPAS-Analysis, which you cannot modify. Similarly, changes must be tested in your own development - environment (often called ``mpas_analysis_dev``) rather than the in a shared + environment rather than in a shared environment like `E3SM-Unified `_. Then, please follow the :ref:`tutorial_understand_a_task`. This will give @@ -550,16 +551,12 @@ whatever editor you like.) code . -I'll create or recreate my ``mpas_analysis_dev`` environment as in -:ref:`tutorial_dev_getting_started`, and then make sure to at least do: +I'll create or recreate my development environment as in +:ref:`tutorial_dev_getting_started`, and then make sure to do: .. code-block:: bash - conda activate mpas_analysis_dev - python -m pip install --no-deps --no-build-isolation -e . - -This last command installs the ``mpas_analysis`` package into the conda -environment. + pixi shell 4.1 ``ClimatologyMapBSF`` class ------------------------------- @@ -1138,7 +1135,7 @@ You also need to add the tasks class and public methods to the in the developer's guide. Again, the easiest approach is to copy the section for a similar task and modify as needed. -With the ``mpas_analysis_dev`` environment activated, you can run: +With the development environment active, you can run: .. 
code-block:: bash diff --git a/docs/tutorials/dev_getting_started.rst b/docs/tutorials/dev_getting_started.rst index c7ad2821e..14a4142af 100644 --- a/docs/tutorials/dev_getting_started.rst +++ b/docs/tutorials/dev_getting_started.rst @@ -6,7 +6,7 @@ Developer: Getting Started This mini-tutorial is meant as the starting point for other tutorials for developers. It describes the process for creating a fork of the MPAS-Analysis repo, cloning the repository (and your fork) locally, making a git worktree for -development, and creating a conda environment that includes the +development, and creating a ``pixi`` environment that includes the ``mpas_analysis`` package and all of its dependencies, installed in a mode appropriate for development. @@ -140,188 +140,66 @@ Go into that directory to do your development: $ cd ../add_my_fancy_task -4. Making a conda environment ------------------------------ +4. Making a development environment +----------------------------------- -MPAS-Analysis relies on several packages that are only available as conda -packages from the ``conda-forge`` channel. The first step for running -MPAS-Analysis is to create a conda environment with all the needed packages. +MPAS-Analysis relies on packages from ``conda-forge`` and uses ``pixi`` to +manage the development environment defined in ``pixi.toml``. -4.1 Installing Miniforge3 -~~~~~~~~~~~~~~~~~~~~~~~~~ +4.1 Installing pixi +~~~~~~~~~~~~~~~~~~~ -If you have not yet installed Anaconda, Miniconda or Miniforge, you will need -to begin there. The concept behind Anaconda is that just about everything you -would need for a typical python workflow is included. The concept behind -Miniconda and Miniforge is that you create different environments for -different purposes. This allows for greater flexibility and tends to lead to -fewer conflicts between incompatible packages, particularly when using a -channel other than the ``defaults`` supplied by Anaconda. 
Since we will use -the ``conda-forge`` channel, the Miniforge3 approach is strongly recommended. -The main advantage of Miniforge3 over Miniconda is that it automatically takes -care of a few steps that we otherwise need to do manually. - -First download the -`Miniforge3 installer `_ -for your operating system, then run it: +If you do not already have ``pixi``, install it using the official +instructions at `pixi.sh `_. On Linux and macOS, a +common option is: .. code-block:: bash - $ /bin/bash Miniforge3-Linux-x86_64.sh + $ curl -fsSL https://pixi.sh/install.sh | sh .. note:: - MPAS-Analysis and many of the packages it depends on support OSX and Linux - but not Windows. - -If you are on an HPC system, you can still install Miniconda into your home -directory. Typically, you will need the Linux version. + MPAS-Analysis and many of the packages it depends on support macOS and + Linux but not Windows. .. note:: At this time, we don't have experience with installing or running MPAS-Analysis on ARM or Power8/9 architectures. -You will be asked to agree to the terms and conditions. Type ``yes`` to -continue. - -You will be prompted with a location to install. In this tutorial, we assume -that Miniforge3 is installed in the default location, ``~/miniforge3``. If -you are using Miniconda or chose to install Miniforge3 somewhere else, just -make sure to make the appropriate substitution whenever you see a reference to -this path below. - -.. note:: - - On some HPC machines (particularly at LANL Institutional Computing and - NERSC) the space in your home directory is quite limited. You may want to - install Miniforge3 in an alternative location to avoid running out of - space. - -You will see prompt like this: - -.. code-block:: - - Do you wish the installer to initialize Miniforge3 - by running conda init? 
[yes|no] - [no] >>> - -You may wish to skip the step (answer ``no``) if you are working on a system -where you will also be using other conda environments, most notably -E3SM-Unified (which has its own Miniforge3 installation). If you do not run -conda init, you have to manually activate ``conda`` whenever you need it. -For ``bash`` and similar shells, this is: - -.. code-block:: bash - - $ source ~/miniforge3/etc/profile.d/conda.sh - $ conda activate - -If you use ``csh``, ``tcsh`` or related shells, this becomes: - -.. code-block:: csh - - > source ~/miniforge3/etc/profile.d/conda.csh - > conda activate - -You may wish to create an alias in your ``.bashrc`` or ``.cshrc`` to make -this easier. For example: - -.. code-block:: bash - - alias init_conda="source ~/miniforge3/etc/profile.d/conda.sh; conda activate" - - -4.2 One-time Miniconda setup -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +4.2 Create and activate the development environment +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -If you installed Miniconda, rather than Miniforge3, you will need to add the -`conda-forge channel `_ and make sure it always takes -precedence for packages available on that channel: +From the root of the worktree where you are doing development, run: .. code-block:: bash - $ conda config --add channels conda-forge - $ conda config --set channel_priority strict + $ pixi shell -If you installed Miniforge3, these steps will happen automatically. - -4.3 Create a development environment -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can create a new conda environment called ``mpas_analysis_dev`` and install the -dependencies that MPAS-Analysis needs by running the following in the worktree -where you are doing your development: - -.. code-block:: bash - - $ conda create -y -n mpas_analysis_dev --file dev-spec.txt - -The last argument is only needed on HPC machines because the conda version of -MPI doesn't work properly on these machines. 
You can omit it if you're -setting up the conda environment on your laptop. - -Then, you can activate the environment and install MPAS-Analysis in "edit" -mode by running: - -.. code-block:: bash - - $ conda activate mpas_analysis_dev - $ python -m pip install --no-deps --no-build-isolation -e . - -In this mode, any edits you make to the code in the worktree will be available -in the conda environment. If you run ``mpas_analysis`` on the command line, -it will know about the changes. - -This command only needs to be done once after the ``mpas_analysis_dev`` environment is -built if you are not using worktrees. - -.. note:: - - If you do use worktrees, rerun the ``python -m pip install ...`` command - each time you switch to developing a new branch, since otherwise the - version of ``mpas_analysis`` in the ``mpas_analysis_dev`` environment will be the - one you were developing previously. +This command creates the default environment on first use and activates it. +The default environment includes MPAS-Analysis installed in editable mode, so +changes you make in the current worktree are immediately reflected when you +run ``mpas_analysis``. .. _tutorial_dev_get_started_activ_env: -4.4 Activating the environment +4.3 Activating the environment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Each time you open a new terminal window, to activate the ``mpas_analysis_dev`` -environment, you will need to run either for ``bash``: +Each time you open a new terminal window, activate the development +environment from the root of your worktree with: .. code-block:: bash - $ source ~/miniforge3/etc/profile.d/conda.sh - $ conda activate mpas_analysis_dev - -or for ``csh``: - -.. code-block:: csh - - > source ~/miniforge3/etc/profile.d/conda.csh - > conda activate mpas_analysis_dev - -You can skip the ``source`` command if you chose to initialize Miniforge3 or -Miniconda3 so it loads automatically. You can also use the ``init_conda`` -alias for this step if you defined one. 
+ $ pixi shell -4.5 Switching worktrees +4.4 Switching worktrees ~~~~~~~~~~~~~~~~~~~~~~~ -If you switch to a different worktree, it is safest to rerun the whole -process for creating the ``mpas_analysis_dev`` conda environment. If you know that -the dependencies are the same as the worktree used to create ``mpas_analysis_dev``, -You can just reinstall ``mpas_analysis`` itself by rerunning - -.. code-block:: bash - - python -m pip install --no-deps --no-build-isolation -e . - -in the new worktree. If you forget this step, you will find that changes you -make in the worktree don't affect the ``mpas_analysis_dev`` conda environment you are -using. +Because ``mpas-analysis`` is installed from the current worktree in editable +mode, you should run ``pixi shell`` from the worktree you want to develop in. +If you switch to a different worktree, leave the existing shell and start a +new one from the new worktree. 5. Editing code --------------- @@ -348,8 +226,9 @@ need to follow steps 2-6 of the :ref:`tutorial_getting_started` tutorial. Run ``mpas_analysis`` on a compute node, not on an HPC login nodes (front ends), because it uses too many resources to be safely run on a login node. - When using a compute node interactively, activate the ``mpas_analysis_dev`` - environment, even if it was activated on the login node. Be sure to + When using a compute node interactively, activate the development + environment with ``pixi shell``, even if it was activated on the login + node. Be sure to 7.1 Configuring MPAS-Analysis ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -421,7 +300,7 @@ but leave off the date of the simulation to keep it a little shorter. The ``[execute]`` section contains options related to serial or parallel execution of the individual "tasks" that make up an MPAS-Analysis run. For the most part, you can let MPAS-Analysis take care of this on supported -machines. The exception is that, in a development conda environment, you will +machines. 
The exception is that, in a local development environment, you will be using a version of ESMF that cannot run in parallel so you will need the following: @@ -688,8 +567,8 @@ also be displayed over the full 5 years.) The hard work is done. Now that we have a config file, we are ready to run. To run MPAS-Analysis, you should either create a job script or log into -an interactive session on a compute node. Then, activate the ``mpas_analysis_dev`` -conda environment as in :ref:`tutorial_dev_get_started_activ_env`. +an interactive session on a compute node. Then, activate the development +environment as in :ref:`tutorial_dev_get_started_activ_env`. On many file systems, MPAS-Analysis and other python-based software that used NetCDF files based on the HDF5 file structure can experience file access errors diff --git a/docs/users_guide/config/preprocessed.rst b/docs/users_guide/config/preprocessed.rst index 41fa989b8..eb0b92a5a 100644 --- a/docs/users_guide/config/preprocessed.rst +++ b/docs/users_guide/config/preprocessed.rst @@ -5,11 +5,11 @@ Preprocessed Reference Runs The ``[oceanPreprocessedReference]`` and ``[seaIcePreprocessedReference]`` sections of a configuration file contain options used to point to preprocessed -data from E3SM v0 reference runs:: +data from legacy E3SM reference runs:: [oceanPreprocessedReference] - ## options related to preprocessed ocean reference run with which the results - ## will be compared (e.g. a POP, CESM or ACME v0 run) + ## options related to a preprocessed ocean reference run with which the + ## results will be compared # directory where ocean reference simulation results are stored baseDirectory = /dir/to/ocean/reference @@ -17,8 +17,8 @@ data from E3SM v0 reference runs:: ... [seaIcePreprocessedReference] - ## options related to preprocessed sea ice reference run with which the results - ## will be compared (e.g. 
a CICE, CESM or ACME v0 run) + ## options related to a preprocessed sea ice reference run with which the + ## results will be compared # directory where ocean reference simulation results are stored baseDirectory = /dir/to/seaice/reference diff --git a/docs/users_guide/config/runs.rst b/docs/users_guide/config/runs.rst index 6ffd07e42..118571319 100644 --- a/docs/users_guide/config/runs.rst +++ b/docs/users_guide/config/runs.rst @@ -4,8 +4,8 @@ Runs ==== The ``[runs]`` section of a configuration file contains options used to name -the "main" run, a preprocessed E3SM v0 run (if any) and to point to analysis -of a control E3SM v1 or standalone MPAS run (if any):: +the "main" run, an optional preprocessed legacy E3SM reference run, and a +control E3SM or standalone MPAS run (if any):: [runs] ## options related to the run to be analyzed and control runs to be @@ -43,9 +43,8 @@ as specified in E3SM:: mainRunName = runName A few of the time series plots in MPAS-Analysis can be compared against a -preprocessed control run from E3SM v0 (which was similar to the CESM, the -Community Earth System Model). If these data are available and the comparison -to these runs is desired, the name of the control run should be specified +preprocessed legacy E3SM reference run. If these data are available and the +comparison to these runs is desired, the name of the control run should be specified here and the paths to the data set should be specified (see :ref:`config_preprocessed`). If not this name should be left as ``None``:: @@ -103,4 +102,3 @@ config file:: mainRunConfigFile = main_run.cfg - From 9b397ace79603cdb0d2e19cbec70f7485537536a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:40:37 +0200 Subject: [PATCH 38/50] fixup! 
Update CI to use pixi --- .github/workflows/build_workflow.yml | 4 ---- .github/workflows/docs_workflow.yml | 2 -- 2 files changed, 6 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index d596197e4..1eb627936 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -55,8 +55,6 @@ jobs: name: Set up Pixi uses: prefix-dev/setup-pixi@v0.9.3 with: - cache: true - cache-write: ${{ github.event_name == 'push' }} environments: ${{ matrix.pixi-environment }} - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} @@ -107,8 +105,6 @@ - name: Set up Pixi uses: prefix-dev/setup-pixi@v0.9.3 with: - cache: true - cache-write: ${{ github.event_name == 'push' }} environments: ${{ matrix.pixi-environment }} - name: Build Conda Package with rattler-build diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 0dd0fa064..6b047f0b2 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -25,8 +25,6 @@ jobs: - name: Set up Pixi uses: prefix-dev/setup-pixi@v0.9.3 with: - cache: true - cache-write: ${{ github.event_name == 'push' }} environments: py314 - name: Build Sphinx Docs From 9052d27319489111dff04e4b29ecf1e72183fe5c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:43:17 +0200 Subject: [PATCH 39/50] Reorganize test suite Consolidate in one driver script. Move templates and configs to subdirs. Switch to pixi workflow for dev tests. 
--- docs/developers_guide/quick_start.rst | 6 + docs/developers_guide/test_suite.rst | 74 +++---- suite/{ => configs}/main.cfg | 0 suite/{ => configs}/main_vs_ctrl.cfg | 0 suite/{ => configs}/moc_am.cfg | 0 suite/{ => configs}/no_ncclimo.cfg | 0 suite/{ => configs}/wc_defaults.cfg | 0 suite/run_dev_suite.bash | 57 ------ suite/run_e3sm_unified_suite.bash | 42 ---- suite/run_suite.bash | 215 +++++++++++++++------ suite/setup.py | 22 ++- suite/{template.cfg => templates/base.cfg} | 0 suite/{ => templates}/job_script.bash | 24 ++- 13 files changed, 231 insertions(+), 209 deletions(-) rename suite/{ => configs}/main.cfg (100%) rename suite/{ => configs}/main_vs_ctrl.cfg (100%) rename suite/{ => configs}/moc_am.cfg (100%) rename suite/{ => configs}/no_ncclimo.cfg (100%) rename suite/{ => configs}/wc_defaults.cfg (100%) delete mode 100755 suite/run_dev_suite.bash delete mode 100755 suite/run_e3sm_unified_suite.bash rename suite/{template.cfg => templates/base.cfg} (100%) rename suite/{ => templates}/job_script.bash (55%) diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst index 09e6f738c..25dc57a42 100644 --- a/docs/developers_guide/quick_start.rst +++ b/docs/developers_guide/quick_start.rst @@ -80,6 +80,12 @@ MPAS-Analysis development. pixi shell +- To run the analysis regression suite from the same Pixi environment: + + .. code-block:: bash + + ./suite/run_suite.bash --dev + 7. Configure and Run MPAS-Analysis ---------------------------------- - Copy and edit a config file (e.g., ``example_e3sm.cfg``) for your run. diff --git a/docs/developers_guide/test_suite.rst b/docs/developers_guide/test_suite.rst index 6b5adb4a1..0420666a3 100644 --- a/docs/developers_guide/test_suite.rst +++ b/docs/developers_guide/test_suite.rst @@ -9,37 +9,36 @@ unexpected results and to validate MPAS-Analysis in various environments. 
Overview of Test Scripts ------------------------ -There are three main scripts for running the test suite: +The main entry point is ``suite/run_suite.bash``. It supports three modes: -1. **run_dev_suite.bash** (Developer Testing) +1. **Developer Testing**: ``./suite/run_suite.bash --dev`` - - Use this script after activating your development environment, typically - with ``pixi shell`` from the repository root. + - This is the recommended workflow for development in a Pixi environment. - - It builds the documentation and runs a series of analysis tasks on output - from a low-resolution (QUwLI240) simulation. - - - Each task produces a web page with results, accessible via the web portal. - - - Example usage: + - Run it either from an active Pixi shell or with an explicit Pixi + environment name: .. code-block:: bash $ pixi shell - $ ./suite/run_dev_suite.bash + $ ./suite/run_suite.bash --dev - - After completion, check for successful web page generation, e.g.: + or: .. code-block:: bash - $ tail -n 3 chrysalis_test_suite/main_py3.11/mpas_analysis.o793058 + $ ./suite/run_suite.bash --dev --pixi-env py313 - The last lines should include: + - It builds the documentation, renders the suite configs, and submits the + suite jobs using ``pixi run`` in the selected environment. - .. code-block:: none + - Each task produces a web page with results, accessible via the web portal. + + - After completion, check for successful web page generation, e.g.: + + .. code-block:: bash - Generating webpage for viewing results... - Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output//analysis_testing/chrysalis//main_py3.11/ + $ tail -n 3 chrysalis_test_suite/main_py3.13/mpas_analysis.o793058 - To quickly identify unfinished or failed tasks: @@ -50,34 +49,24 @@ There are three main scripts for running the test suite: - Developers should run this suite manually on each pull request before merging and link the results in the PR. -2. 
**run_suite.bash** (Package Build & Test) +2. **Package Build & Test**: ``./suite/run_suite.bash`` - - Use this script to build the MPAS-Analysis conda package and test it in - fresh environments. + - This mode builds the MPAS-Analysis conda package and tests it in fresh + environments. - It creates conda environments for multiple Python versions, runs tests, builds documentation, and executes the analysis suite. - Recommended for more thorough validation, especially before releases. - - Example usage: +3. **E3SM-Unified Deployment Testing**: + ``./suite/run_suite.bash --e3sm-unified`` - .. code-block:: bash - - $ ./suite/run_suite.bash - -3. **run_e3sm_unified_suite.bash** (E3SM-Unified Deployment Testing) - - - Used during test deployments of E3SM-Unified to verify MPAS-Analysis - works as expected within the deployment. - - - Typically run by E3SM-Unified maintainers during deployment testing. - - - Example usage: - - .. code-block:: bash + - This mode is used during test deployments of E3SM-Unified to verify + MPAS-Analysis works as expected within the deployment. - $ ./suite/run_e3sm_unified_suite.bash + - It is typically run by E3SM-Unified maintainers during deployment + testing. Supported Machines ------------------ @@ -102,8 +91,9 @@ Developers may need to update the suite for new requirements: - **Python Versions**: - - The Python versions tested are defined in the scripts (e.g., - `main_py=3.11`, `alt_py=3.10`). + - The Python versions tested in package mode are defined at the top of + ``suite/run_suite.bash`` (for example ``main_py=3.13`` and + ``alt_py=3.12``). - To test additional versions, add them to the relevant script variables and loops. @@ -118,8 +108,8 @@ Developers may need to update the suite for new requirements: - **Adding/Modifying Tests**: - - To add new tests, update the list of runs in the scripts and - provide corresponding config files in the `suite` directory. 
+ - To add new tests, update the run lists in ``suite/run_suite.bash`` and + provide corresponding config files in ``suite/configs``. - New tests could change which analysis tasks are run, the configuration for running tasks overall (e.g. how climatologies are computed), or how @@ -143,5 +133,5 @@ Best Practices - Update the suite scripts and configs as needed to keep pace with MPAS-Analysis development. -For more details, see the comments and documentation within each script and -config file in the `suite` directory. +The suite templates live in ``suite/templates`` and the run-specific config +overrides live in ``suite/configs``. diff --git a/suite/main.cfg b/suite/configs/main.cfg similarity index 100% rename from suite/main.cfg rename to suite/configs/main.cfg diff --git a/suite/main_vs_ctrl.cfg b/suite/configs/main_vs_ctrl.cfg similarity index 100% rename from suite/main_vs_ctrl.cfg rename to suite/configs/main_vs_ctrl.cfg diff --git a/suite/moc_am.cfg b/suite/configs/moc_am.cfg similarity index 100% rename from suite/moc_am.cfg rename to suite/configs/moc_am.cfg diff --git a/suite/no_ncclimo.cfg b/suite/configs/no_ncclimo.cfg similarity index 100% rename from suite/no_ncclimo.cfg rename to suite/configs/no_ncclimo.cfg diff --git a/suite/wc_defaults.cfg b/suite/configs/wc_defaults.cfg similarity index 100% rename from suite/wc_defaults.cfg rename to suite/configs/wc_defaults.cfg diff --git a/suite/run_dev_suite.bash b/suite/run_dev_suite.bash deleted file mode 100755 index 41cdc814e..000000000 --- a/suite/run_dev_suite.bash +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env bash - -set -e - -env_name=mpas_analysis_dev - -conda_base=$(dirname $(dirname $CONDA_EXE)) -source $conda_base/etc/profile.d/conda.sh - -export HDF5_USE_FILE_LOCKING=FALSE - -branch=$(git symbolic-ref --short HEAD) - -# test building the docs -conda activate ${env_name} -cd docs -DOCS_VERSION=test make clean versioned-html -cd .. 
- -machine=$(python -c "from mache import discover_machine; print(discover_machine())") - -py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') - -./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean -e ${env_name} -./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -e ${env_name} -./suite/setup.py -p ${py} -r moc_am -b ${branch} -e ${env_name} -./suite/setup.py -p ${py} -r no_ncclimo -b ${branch} -e ${env_name} -./suite/setup.py -p ${py} -r ctrl -b ${branch} -e ${env_name} -./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch} -e ${env_name} -./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions -e ${env_name} -./suite/setup.py -p ${py} -r mesh_rename -b ${branch} -e ${env_name} - -# submit the jobs -cd ${machine}_test_suite - -main_py=${py} -cd main_py${main_py} -echo main_py${main_py} -RES=$(sbatch job_script.bash) -cd .. - -cd main_vs_ctrl -echo main_vs_ctrl -sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes job_script.bash -cd .. - -for run in wc_defaults moc_am no_ncclimo no_polar_regions \ - mesh_rename -do - cd ${run} - echo ${run} - sbatch job_script.bash - cd .. -done - -cd .. 
- diff --git a/suite/run_e3sm_unified_suite.bash b/suite/run_e3sm_unified_suite.bash deleted file mode 100755 index 95266b9c4..000000000 --- a/suite/run_e3sm_unified_suite.bash +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env bash - -set -e - -# placeholder that gets replaced -branch=test_e3sm_unified - -# test building the docs -py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') -machine=${E3SMU_MACHINE} - -./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --clean -./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -./suite/setup.py -p ${py} -r moc_am -b ${branch} -./suite/setup.py -p ${py} -r no_ncclimo -b ${branch} -./suite/setup.py -p ${py} -r ctrl -b ${branch} -./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch} -./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions -./suite/setup.py -p ${py} -r mesh_rename -b ${branch} - -# submit the jobs -cd ${machine}_test_suite - -cd main_py${py} -echo main_py${py} -RES=$(sbatch job_script.bash) -cd .. - -cd main_vs_ctrl -echo main_vs_ctrl -sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes job_script.bash -cd .. - -for run in wc_defaults moc_am no_ncclimo no_polar_regions mesh_rename -do - cd ${run} - echo ${run} - sbatch job_script.bash - cd .. -done - -cd .. 
diff --git a/suite/run_suite.bash b/suite/run_suite.bash index cf3b54807..4030c8259 100755 --- a/suite/run_suite.bash +++ b/suite/run_suite.bash @@ -1,87 +1,192 @@ #!/usr/bin/env bash -set -e - -conda_base=$(dirname $(dirname $CONDA_EXE)) -source $conda_base/etc/profile.d/conda.sh +set -euo pipefail main_py=3.13 alt_py=3.12 +mode=package +pixi_env=${PIXI_ENVIRONMENT_NAME:-default} + +usage() { + cat <&2 + exit 1 + fi + shift + pixi_env=$1 + ;; + -h|--help) + usage + exit 0 + ;; + *) + echo "Unknown argument: $1" >&2 + usage >&2 + exit 1 + ;; + esac + shift +done export HDF5_USE_FILE_LOCKING=FALSE branch=$(git symbolic-ref --short HEAD) +setup_run() { + local py="$1" + local run="$2" + shift 2 + "${setup_cmd[@]}" -p "${py}" -r "${run}" -b "${branch}" "$@" +} + +submit_jobs() { + local machine="$1" + local primary_py="$2" + shift 2 + + cd "${machine}_test_suite" + + cd "main_py${primary_py}" + echo "main_py${primary_py}" + RES=$(sbatch job_script.bash) + cd .. + + cd main_vs_ctrl + echo main_vs_ctrl + sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes \ + job_script.bash + cd .. + + for run in "$@"; do + cd "${run}" + echo "${run}" + sbatch job_script.bash + cd .. + done + + cd .. +} + +if [[ "${mode}" == "dev" ]]; then + if ! 
command -v pixi >/dev/null 2>&1; then + echo "pixi is required for --dev" >&2 + exit 1 + fi + + docs_cmd=(pixi run -e "${pixi_env}" bash -lc \ + "cd docs && DOCS_VERSION=test make clean versioned-html") + setup_cmd=(pixi run -e "${pixi_env}" python ./suite/setup.py \ + --pixi-env "${pixi_env}") + + "${docs_cmd[@]}" + + machine=$(pixi run -e "${pixi_env}" python -c \ + "from mache import discover_machine; print(discover_machine())") + py=$(pixi run -e "${pixi_env}" python -c \ + 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') + + setup_run "${py}" "main_py${py}" --copy_docs --clean + setup_run "${py}" wc_defaults --no_polar_regions + setup_run "${py}" moc_am + setup_run "${py}" no_ncclimo + setup_run "${py}" ctrl + setup_run "${py}" main_vs_ctrl + setup_run "${py}" no_polar_regions --no_polar_regions + setup_run "${py}" mesh_rename + + submit_jobs "${machine}" "${py}" \ + wc_defaults moc_am no_ncclimo no_polar_regions mesh_rename + exit 0 +fi + +if [[ "${mode}" == "e3sm-unified" ]]; then + setup_cmd=(python ./suite/setup.py) + py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') + machine=${E3SMU_MACHINE} + branch=test_e3sm_unified + + setup_run "${py}" "main_py${py}" --clean + setup_run "${py}" wc_defaults --no_polar_regions + setup_run "${py}" moc_am + setup_run "${py}" no_ncclimo + setup_run "${py}" ctrl + setup_run "${py}" main_vs_ctrl + setup_run "${py}" no_polar_regions --no_polar_regions + setup_run "${py}" mesh_rename + + submit_jobs "${machine}" "${py}" \ + wc_defaults moc_am no_ncclimo no_polar_regions mesh_rename + exit 0 +fi + +conda_base=$(dirname "$(dirname "${CONDA_EXE}")") +source "${conda_base}/etc/profile.d/conda.sh" + conda update -y conda conda-build conda build ci/recipe -# create the test conda envs -for py in ${main_py} ${alt_py} -do - env=test_mpas_analysis_py${py} - conda create -y -n ${env} --use-local python=${py} mpas-analysis sphinx \ - mock sphinx_rtd_theme "tabulate>=0.8.2" 
"m2r2>=0.3.3" "mistune<2" \ - pytest "mache>=1.11.0" "esmf=*=mpi_mpich_*" jinja2 - conda activate ${env} +for py in "${main_py}" "${alt_py}"; do + env="test_mpas_analysis_py${py}" + conda create -y -n "${env}" --use-local python="${py}" mpas-analysis \ + sphinx mock sphinx_rtd_theme "tabulate>=0.8.2" "m2r2>=0.3.3" \ + "mistune<2" pytest "mache>=1.11.0" "esmf=*=mpi_mpich_*" jinja2 + conda activate "${env}" pytest conda deactivate done -# create another env for testing xarray main branch py=${main_py} env=test_mpas_analysis_xarray_main -conda create --yes --quiet --name ${env} --use-local python=${py} \ +conda create --yes --quiet --name "${env}" --use-local python="${py}" \ mpas-analysis pytest -conda activate ${env} +conda activate "${env}" pip install git+https://github.com/pydata/xarray.git pytest conda deactivate -# test building the docs -py=${main_py} -conda activate test_mpas_analysis_py${py} -cd docs -DOCS_VERSION=test make clean versioned-html -cd .. +conda activate "test_mpas_analysis_py${py}" +( + cd docs + DOCS_VERSION=test make clean versioned-html +) machine=$(python -c "from mache import discover_machine; print(discover_machine())") - -./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean -./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -./suite/setup.py -p ${py} -r moc_am -b ${branch} -./suite/setup.py -p ${py} -r no_ncclimo -b ${branch} -./suite/setup.py -p ${py} -r ctrl -b ${branch} -./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch} -./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions -./suite/setup.py -p ${py} -r mesh_rename -b ${branch} -./suite/setup.py -p ${py} -r xarray_main -b ${branch} -e test_mpas_analysis_xarray_main +setup_cmd=(./suite/setup.py) + +setup_run "${py}" "main_py${py}" --copy_docs --clean +setup_run "${py}" wc_defaults --no_polar_regions +setup_run "${py}" moc_am +setup_run "${py}" no_ncclimo +setup_run "${py}" ctrl +setup_run "${py}" main_vs_ctrl 
+setup_run "${py}" no_polar_regions --no_polar_regions +setup_run "${py}" mesh_rename +setup_run "${py}" xarray_main -e test_mpas_analysis_xarray_main conda deactivate py=${alt_py} -conda activate test_mpas_analysis_py${py} -./suite/setup.py -p ${py} -r main_py${py} -b ${branch} +conda activate "test_mpas_analysis_py${py}" +setup_run "${py}" "main_py${py}" conda deactivate -# submit the jobs -cd ${machine}_test_suite - -cd main_py${main_py} -echo main_py${main_py} -RES=$(sbatch job_script.bash) -cd .. - -cd main_vs_ctrl -echo main_vs_ctrl -sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes job_script.bash -cd .. - -for run in main_py${alt_py} wc_defaults moc_am no_ncclimo no_polar_regions \ +submit_jobs "${machine}" "${main_py}" \ + "main_py${alt_py}" wc_defaults moc_am no_ncclimo no_polar_regions \ mesh_rename xarray_main -do - cd ${run} - echo ${run} - sbatch job_script.bash - cd .. -done - -cd .. diff --git a/suite/setup.py b/suite/setup.py index 807819695..51d4f09a1 100755 --- a/suite/setup.py +++ b/suite/setup.py @@ -16,6 +16,8 @@ def main(): parser.add_argument('-b', dest='branch', required=True, help='the branch name') parser.add_argument('-e', dest='conda_env', help='the conda environment') + parser.add_argument('--pixi-env', dest='pixi_env', + help='the pixi environment used to run jobs') parser.add_argument('--no_polar_regions', dest='polar_regions', action='store_false', help='whether to run mpas_analysis with ' @@ -120,8 +122,11 @@ def main(): sbatch = '\n'.join(sbatch) - conda_base = os.path.abspath( - os.path.join(os.environ['CONDA_EXE'], '..', '..')) + if 'CONDA_EXE' in os.environ: + conda_base = os.path.abspath( + os.path.join(os.environ['CONDA_EXE'], '..', '..')) + else: + conda_base = '' if args.conda_env is not None: conda_env = args.conda_env @@ -137,7 +142,7 @@ def main(): out_subdir = os.path.join(machine, args.branch, args.run) out_common_dir = os.path.join(machine, args.branch) - with open(os.path.join('suite', 'template.cfg')) 
as template_file: + with open(os.path.join('suite', 'templates', 'base.cfg')) as template_file: template_data = template_file.read() template = Template(template_data) config_text = template.render( @@ -152,11 +157,12 @@ def main(): # add the run-specific config second config_from_job = ' '.join( [config_from_job, - os.path.join('..', '..', 'suite', f'{args.run}.cfg')]) + os.path.join('..', '..', 'suite', 'configs', f'{args.run}.cfg')]) if args.run.startswith('main_py'): config_from_job = ' '.join( - [config_from_job, os.path.join('..', '..', 'suite', 'main.cfg')]) + [config_from_job, + os.path.join('..', '..', 'suite', 'configs', 'main.cfg')]) if args.run not in ['main', 'ctrl']: try: @@ -170,14 +176,16 @@ def main(): else: flags = '' - with open(os.path.join('suite', 'job_script.bash')) as template_file: + with open(os.path.join('suite', 'templates', 'job_script.bash')) \ + as template_file: template_data = template_file.read() template = Template(template_data) job_text = template.render( sbatch=sbatch, conda_base=conda_base, use_e3sm_unified=use_e3sm_unified, e3sm_unified_script=e3sm_unified_script, conda_env=conda_env, - machine=machine, flags=flags, config=config_from_job, + pixi_env=args.pixi_env, machine=machine, flags=flags, + config=config_from_job, html_base=html_base, out_subdir=out_subdir, out_common_dir=out_common_dir) with open(job, 'w') as job_file: diff --git a/suite/template.cfg b/suite/templates/base.cfg similarity index 100% rename from suite/template.cfg rename to suite/templates/base.cfg diff --git a/suite/job_script.bash b/suite/templates/job_script.bash similarity index 55% rename from suite/job_script.bash rename to suite/templates/job_script.bash index 4b822ff84..22f666eaa 100644 --- a/suite/job_script.bash +++ b/suite/templates/job_script.bash @@ -12,22 +12,34 @@ set -e source {{ e3sm_unified_script }} echo E3SM-Unified: {{ e3sm_unified_script }} +{% elif pixi_env %} +export HDF5_USE_FILE_LOCKING=FALSE +export E3SMU_MACHINE={{ machine }} 
+ +run_mpas_analysis() { + pixi run --manifest-path ../../pixi.toml -e {{ pixi_env }} mpas_analysis "$@" +} + +echo pixi env: {{ pixi_env }} {% else %} source {{ conda_base }}/etc/profile.d/conda.sh conda activate {{ conda_env }} export HDF5_USE_FILE_LOCKING=FALSE export E3SMU_MACHINE={{ machine }} +run_mpas_analysis() { + mpas_analysis "$@" +} + echo env: {{ conda_env }} {% endif %} echo configs: {{ flags }} {{ config }} - -mpas_analysis --list -mpas_analysis --plot_colormaps -mpas_analysis --setup_only {{ flags }} {{ config }} -mpas_analysis --purge {{ flags }} {{ config }} --verbose -mpas_analysis --html_only {{ flags }} {{ config }} +run_mpas_analysis --list +run_mpas_analysis --plot_colormaps +run_mpas_analysis --setup_only {{ flags }} {{ config }} +run_mpas_analysis --purge {{ flags }} {{ config }} --verbose +run_mpas_analysis --html_only {{ flags }} {{ config }} chmod ugo+rx {{ html_base }}/{{ out_common_dir }} chmod -R ugo+rX {{ html_base }}/{{ out_subdir }} From f5b0ce7d21c7a820fcce2bb902467cf2700006bf Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 18:40:54 +0200 Subject: [PATCH 40/50] Fix get_editable_install_dir for non-editable install --- mpas_analysis/__main__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index 155b5d824..1cd665b8d 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -943,6 +943,9 @@ def get_editable_install_dir(package_name): direct_url = Distribution.from_name(package_name).read_text( 'direct_url.json') + if direct_url is None: + return None + contents = json.loads(direct_url) pkg_is_editable = contents.get("dir_info", {}).get("editable", False) if pkg_is_editable and 'url' in contents: From a910bf71b9383fad233a8099dd8b1d69e4ab258f Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 18:41:21 +0200 Subject: [PATCH 41/50] Add a unit test to make sure the fix works --- mpas_analysis/test/test_main.py | 36 
+++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 mpas_analysis/test/test_main.py diff --git a/mpas_analysis/test/test_main.py b/mpas_analysis/test/test_main.py new file mode 100644 index 000000000..5270ec47c --- /dev/null +++ b/mpas_analysis/test/test_main.py @@ -0,0 +1,36 @@ +# This software is open source software available under the BSD-3 license. +# +# Copyright (c) 2022 Triad National Security, LLC. All rights reserved. +# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights +# reserved. +# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved. +# +# Additional copyright and license information can be found in the LICENSE file +# distributed with this code, or at +# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE +""" +Regression tests for helpers in ``mpas_analysis.__main__``. +""" + +import os +from unittest.mock import Mock, patch + +from mpas_analysis.test import TestCase + + +# Importing mpas_analysis.__main__ triggers matplotlib imports in some test +# environments, so use a writable cache directory. +os.environ.setdefault('MPLCONFIGDIR', '/tmp/matplotlib') + +import mpas_analysis.__main__ as main + + +class TestMain(TestCase): + def test_get_editable_install_dir_without_direct_url(self): + distribution = Mock() + distribution.read_text.return_value = None + + with patch.object(main.Distribution, 'from_name', + return_value=distribution): + self.assertEqual(main.get_editable_install_dir('mpas_analysis'), + None) From 1728a5b637f2574f2f608cff2e9cca070ba74e07 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Mar 2026 17:56:04 +0000 Subject: [PATCH 42/50] Bump prefix-dev/setup-pixi from 0.9.3 to 0.9.4 Bumps [prefix-dev/setup-pixi](https://github.com/prefix-dev/setup-pixi) from 0.9.3 to 0.9.4. 
- [Release notes](https://github.com/prefix-dev/setup-pixi/releases) - [Commits](https://github.com/prefix-dev/setup-pixi/compare/v0.9.3...v0.9.4) --- updated-dependencies: - dependency-name: prefix-dev/setup-pixi dependency-version: 0.9.4 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/build_workflow.yml | 4 ++-- .github/workflows/docs_workflow.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 1eb627936..4b3e05e42 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -53,7 +53,7 @@ jobs: - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.9.3 + uses: prefix-dev/setup-pixi@v0.9.4 with: environments: ${{ matrix.pixi-environment }} @@ -103,7 +103,7 @@ jobs: - uses: actions/checkout@v6 - name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.9.3 + uses: prefix-dev/setup-pixi@v0.9.4 with: environments: ${{ matrix.pixi-environment }} diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 6b047f0b2..1804bd3b9 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -23,7 +23,7 @@ jobs: fetch-depth: 0 - name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.9.3 + uses: prefix-dev/setup-pixi@v0.9.4 with: environments: py314 From 2aa460a07e62f4e66807d31ed2250efd5e82ec41 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Wed, 1 Apr 2026 10:53:02 +0200 Subject: [PATCH 43/50] Fix recipe dependencies --- ci/recipe/recipe.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ci/recipe/recipe.yaml b/ci/recipe/recipe.yaml index 18d566dbd..1f4122885 100644 --- a/ci/recipe/recipe.yaml +++ b/ci/recipe/recipe.yaml @@ -44,14 +44,13 @@ requirements: - netcdf4 - numpy >=2.0,<3.0 - pandas - - pillow >=10.0.0,<11.0.0 
+ - pillow >=10.0.0,<13.0.0 - progressbar2 - pyproj - pyremap >=2.0.0,<3.0.0 - python-dateutil - requests - scipy >=1.7.0 - - setuptools - shapely >=2.0,<3.0 - tranche >=0.2.3 - xarray >=0.14.1 From e15e2a7070686f907bac9dff406424e303e7ed13 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Apr 2026 16:25:44 +0000 Subject: [PATCH 44/50] Bump prefix-dev/setup-pixi from 0.9.4 to 0.9.5 Bumps [prefix-dev/setup-pixi](https://github.com/prefix-dev/setup-pixi) from 0.9.4 to 0.9.5. - [Release notes](https://github.com/prefix-dev/setup-pixi/releases) - [Commits](https://github.com/prefix-dev/setup-pixi/compare/v0.9.4...v0.9.5) --- updated-dependencies: - dependency-name: prefix-dev/setup-pixi dependency-version: 0.9.5 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/build_workflow.yml | 4 ++-- .github/workflows/docs_workflow.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 4b3e05e42..de85abc20 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -53,7 +53,7 @@ jobs: - if: ${{ steps.skip_check.outputs.should_skip != 'true' }} name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.9.4 + uses: prefix-dev/setup-pixi@v0.9.5 with: environments: ${{ matrix.pixi-environment }} @@ -103,7 +103,7 @@ jobs: - uses: actions/checkout@v6 - name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.9.4 + uses: prefix-dev/setup-pixi@v0.9.5 with: environments: ${{ matrix.pixi-environment }} diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index 1804bd3b9..af153d539 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -23,7 +23,7 @@ jobs: fetch-depth: 0 - name: Set up Pixi - uses: prefix-dev/setup-pixi@v0.9.4 + uses: 
prefix-dev/setup-pixi@v0.9.5 with: environments: py314 From 1b85de7ac212d0c78eb42c837cb60cea4f5a2b12 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 9 Apr 2026 14:49:06 +0200 Subject: [PATCH 45/50] Fix test suite for e3sm-unified --- suite/setup.py | 4 ++-- suite/templates/job_script.bash | 25 ++++++++++--------------- 2 files changed, 12 insertions(+), 17 deletions(-) diff --git a/suite/setup.py b/suite/setup.py index 51d4f09a1..c99339edb 100755 --- a/suite/setup.py +++ b/suite/setup.py @@ -36,9 +36,9 @@ def main(): account, partition, configuration, qos = \ machine_info.get_account_defaults() - use_e3sm_unified = 'E3SMU_SCRIPT' in os.environ + use_e3sm_unified = 'E3SM_UNIFIED_LOAD_SCRIPT' in os.environ if use_e3sm_unified: - e3sm_unified_script = os.environ['E3SMU_SCRIPT'] + e3sm_unified_script = os.environ['E3SM_UNIFIED_LOAD_SCRIPT'] args.branch = \ os.path.splitext(os.path.basename(e3sm_unified_script))[0] else: diff --git a/suite/templates/job_script.bash b/suite/templates/job_script.bash index 22f666eaa..4eae8f8d3 100644 --- a/suite/templates/job_script.bash +++ b/suite/templates/job_script.bash @@ -8,38 +8,33 @@ set -e -{% if use_e3sm_unified %} +{% if use_e3sm_unified -%} source {{ e3sm_unified_script }} echo E3SM-Unified: {{ e3sm_unified_script }} -{% elif pixi_env %} +{% elif pixi_env -%} export HDF5_USE_FILE_LOCKING=FALSE export E3SMU_MACHINE={{ machine }} -run_mpas_analysis() { - pixi run --manifest-path ../../pixi.toml -e {{ pixi_env }} mpas_analysis "$@" -} +eval "$(pixi shell-hook --manifest-path ../../pixi.toml -e {{ pixi_env }})" echo pixi env: {{ pixi_env }} -{% else %} +{% else -%} source {{ conda_base }}/etc/profile.d/conda.sh conda activate {{ conda_env }} export HDF5_USE_FILE_LOCKING=FALSE export E3SMU_MACHINE={{ machine }} -run_mpas_analysis() { - mpas_analysis "$@" -} echo env: {{ conda_env }} -{% endif %} +{% endif -%} echo configs: {{ flags }} {{ config }} -run_mpas_analysis --list -run_mpas_analysis --plot_colormaps 
-run_mpas_analysis --setup_only {{ flags }} {{ config }} -run_mpas_analysis --purge {{ flags }} {{ config }} --verbose -run_mpas_analysis --html_only {{ flags }} {{ config }} +mpas_analysis --list +mpas_analysis --plot_colormaps +mpas_analysis --setup_only {{ flags }} {{ config }} +mpas_analysis --purge {{ flags }} {{ config }} --verbose +mpas_analysis --html_only {{ flags }} {{ config }} chmod ugo+rx {{ html_base }}/{{ out_common_dir }} chmod -R ugo+rX {{ html_base }}/{{ out_subdir }} From 376d3aa432179b93b581d48c3a58b402170a3597 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 9 Apr 2026 20:03:39 +0200 Subject: [PATCH 46/50] Put symlinks for ncclimo in unique directories This prevents us from mixing data from the reference year with that for the "main" climatology. --- mpas_analysis/shared/climatology/mpas_climatology_task.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/shared/climatology/mpas_climatology_task.py b/mpas_analysis/shared/climatology/mpas_climatology_task.py index c0414a287..30ef07a0c 100644 --- a/mpas_analysis/shared/climatology/mpas_climatology_task.py +++ b/mpas_analysis/shared/climatology/mpas_climatology_task.py @@ -414,8 +414,10 @@ def _create_symlinks(self): climatologyOpDirectory = get_climatology_op_directory(config, self.op) - symlinkDirectory = '{}/source_symlinks'.format( - climatologyOpDirectory) + symlinkDirectory = ( + f'{climatologyOpDirectory}/source_symlinks_' + f'{self.ncclimoModel}_{self.startYear:04d}-{self.endYear:04d}' + ) make_directories(symlinkDirectory) @@ -425,6 +427,8 @@ def _create_symlinks(self): f'timeSeriesStatsMonthly.{year:04d}-{month:02d}-01.nc' try: + if os.path.lexists(outFileName): + os.remove(outFileName) os.symlink(inFileName, outFileName) except OSError: pass From a406df7f4419c1ce0094fdf380d895c04d9eac30 Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Thu, 9 Apr 2026 20:04:33 +0200 Subject: [PATCH 47/50] Add a test to ensure that only the required 
data gets linked --- .../test/test_mpas_climatology_task.py | 32 ++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/mpas_analysis/test/test_mpas_climatology_task.py b/mpas_analysis/test/test_mpas_climatology_task.py index 878879b94..7604ed112 100644 --- a/mpas_analysis/test/test_mpas_climatology_task.py +++ b/mpas_analysis/test/test_mpas_climatology_task.py @@ -23,7 +23,7 @@ from mpas_analysis.test import TestCase, loaddatadir from mpas_analysis.shared.climatology import MpasClimatologyTask, \ - RemapMpasClimatologySubtask + RefYearMpasClimatologyTask, RemapMpasClimatologySubtask from mpas_analysis.shared import AnalysisTask from mpas_analysis.shared.analysis_task import \ update_time_bounds_from_file_names @@ -171,6 +171,36 @@ def test_update_climatology_bounds_and_create_symlinks(self): update_time_bounds_from_file_names(config, 'climatology', 'ocean', allow_cache=False) + def test_create_symlinks_isolates_reference_year_files(self): + mpasClimatologyTask = self.setup_task() + + refYearTask = RefYearMpasClimatologyTask( + config=mpasClimatologyTask.config, componentName='ocean') + refYearTask.historyStreams = mpasClimatologyTask.historyStreams + refYearTask.startYear = 1 + refYearTask.endYear = 1 + refYearTask.inputFiles = [] + + for month in range(1, 13): + fileName = os.path.join( + self.test_dir, + f'mpaso.hist.am.timeSeriesStatsMonthly.0001-{month:02d}-01.nc') + with open(fileName, 'w'): + pass + refYearTask.inputFiles.append(fileName) + + refSymlinkDirectory = refYearTask._create_symlinks() + mainSymlinkDirectory = mpasClimatologyTask._create_symlinks() + + assert(refSymlinkDirectory != mainSymlinkDirectory) + + mainSymlinkFiles = sorted(os.listdir(mainSymlinkDirectory)) + + assert(len(mainSymlinkFiles) == 12) + for fileName in mainSymlinkFiles: + assert(fileName.startswith( + 'mpaso.hist.am.timeSeriesStatsMonthly.0002-')) + def test_subtask_run_analysis(self): mpasClimatologyTask = self.setup_task() 
self.add_variables(mpasClimatologyTask) From fbc951217d88d55f627fbc9f12c70ba1b1f8078a Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Fri, 10 Apr 2026 21:45:12 +0200 Subject: [PATCH 48/50] Skip nco 5.3.7 There is an issue with it reading years like 0010 as octal 8 instead of decimal 10. --- ci/recipe/recipe.yaml | 2 +- dev-spec.txt | 2 +- pixi.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/recipe/recipe.yaml b/ci/recipe/recipe.yaml index 1f4122885..fcbc16891 100644 --- a/ci/recipe/recipe.yaml +++ b/ci/recipe/recipe.yaml @@ -40,7 +40,7 @@ requirements: - mache >=1.11.0 - matplotlib-base >=3.9.0 - mpas_tools >=1.3.0,<2.0.0 - - nco >=4.8.1,!=5.2.6 + - nco >=4.8.1,!=5.2.6,!=5.3.7 - netcdf4 - numpy >=2.0,<3.0 - pandas diff --git a/dev-spec.txt b/dev-spec.txt index a5f9ca359..b6fda53a5 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -16,7 +16,7 @@ lxml mache >=1.11.0 matplotlib-base >=3.9.0 mpas_tools >=1.3.0,<2.0.0 -nco >=4.8.1,!=5.2.6 +nco >=4.8.1,!=5.2.6,!=5.3.7 netcdf4 numpy >=2.0,<3.0 pandas diff --git a/pixi.toml b/pixi.toml index a58d9a9f2..738e6fd0d 100644 --- a/pixi.toml +++ b/pixi.toml @@ -17,7 +17,7 @@ lxml = "*" mache = ">=1.11.0" matplotlib-base = ">=3.9.0" mpas_tools = ">=1.3.0,<2.0.0" -nco = ">=4.8.1,!=5.2.6" +nco = ">=4.8.1,!=5.2.6,!=5.3.7" netcdf4 = "*" numpy = ">=2.0,<3.0" pandas = "*" From 796f9f9b0bb2f461a9c6793601959f0a44d9484c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Tue, 28 Apr 2026 14:41:19 +0200 Subject: [PATCH 49/50] Fix copying xarray datasets --- mpas_analysis/ocean/compute_transects_subtask.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mpas_analysis/ocean/compute_transects_subtask.py b/mpas_analysis/ocean/compute_transects_subtask.py index b4b8ab0db..e224dc104 100644 --- a/mpas_analysis/ocean/compute_transects_subtask.py +++ b/mpas_analysis/ocean/compute_transects_subtask.py @@ -586,7 +586,7 @@ def _compute_mpas_transects(self, dsMesh): # reads them back because of 
_FillValue dsMpasTransect.to_netcdf(transectInfoFileName) - dsTransectOnMpas = xr.Dataset(dsMpasTransect) + dsTransectOnMpas = dsMpasTransect.copy() dsTransectOnMpas['x'] = dsMpasTransect.dNode dsTransectOnMpas['z'] = dsMpasTransect.zTransectNode @@ -608,7 +608,7 @@ def _compute_mpas_transects(self, dsMesh): for season in self.seasons: maskedFileName = self.get_masked_file_name(season) with xr.open_dataset(maskedFileName) as dsMask: - dsOnMpas = xr.Dataset(dsMpasTransect) + dsOnMpas = dsMpasTransect.copy() for var in dsMask.data_vars: dims = dsMask[var].dims if 'nCells' in dims and ( From d911be942828b53e690b6e9fd8e2d8f19ee1369c Mon Sep 17 00:00:00 2001 From: Xylar Asay-Davis Date: Mon, 30 Mar 2026 09:06:54 +0200 Subject: [PATCH 50/50] Update to v1.15.0 --- ci/recipe/recipe.yaml | 4 ++-- mpas_analysis/version.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/recipe/recipe.yaml b/ci/recipe/recipe.yaml index fcbc16891..8997328a2 100644 --- a/ci/recipe/recipe.yaml +++ b/ci/recipe/recipe.yaml @@ -2,8 +2,8 @@ schema_version: 1 context: name: MPAS-Analysis - version: 1.14.0 - python_min: 3.10 + version: "1.15.0" + python_min: "3.10" package: name: ${{ name|lower }} diff --git a/mpas_analysis/version.py b/mpas_analysis/version.py index ddbb9f041..a3a011e60 100644 --- a/mpas_analysis/version.py +++ b/mpas_analysis/version.py @@ -1,2 +1,2 @@ -__version_info__ = (1, 14, 0) +__version_info__ = (1, 15, 0) __version__ = '.'.join(str(vi) for vi in __version_info__)