diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml
index 138431ba8..de85abc20 100644
--- a/.github/workflows/build_workflow.yml
+++ b/.github/workflows/build_workflow.yml
@@ -22,12 +22,24 @@ jobs:
name: test mpas_analysis - python ${{ matrix.python-version }}
runs-on: ubuntu-latest
timeout-minutes: 20
- defaults:
- run:
- shell: bash -l {0}
strategy:
matrix:
- python-version: ["3.10", "3.11", "3.12", "3.13"]
+ include:
+ - python-version: "3.10"
+ pixi-environment: py310
+ variant-file: ci/python3.10.yaml
+ - python-version: "3.11"
+ pixi-environment: py311
+ variant-file: ci/python3.11.yaml
+ - python-version: "3.12"
+ pixi-environment: py312
+ variant-file: ci/python3.12.yaml
+ - python-version: "3.13"
+ pixi-environment: py313
+ variant-file: ci/python3.13.yaml
+ - python-version: "3.14"
+ pixi-environment: py314
+ variant-file: ci/python3.14.yaml
fail-fast: false
steps:
- id: skip_check
@@ -37,53 +49,68 @@ jobs:
paths_ignore: ${{ env.PATHS_IGNORE }}
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
- uses: actions/checkout@v5
+ uses: actions/checkout@v6
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
- name: Cache Conda
- uses: actions/cache@v4
- env:
- # Increase this value to reset cache if conda-dev-spec.template has not changed in the workflow
- CACHE_NUMBER: 0
+ name: Set up Pixi
+ uses: prefix-dev/setup-pixi@v0.9.5
with:
- path: ~/conda_pkgs_dir_py${{ matrix.python-version }}
- key:
- ${{ runner.os }}-${{ matrix.python-version }}-conda-${{ env.CACHE_NUMBER }}-${{
- hashFiles('dev-spec.txt,pyproject.toml') }}
-
- - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
- name: Set up Conda Environment
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-name: mpas_analysis_dev
- init-shell: bash
- condarc: |
- channel_priority: strict
- channels:
- - conda-forge
- create-args: >-
- python=${{ matrix.python-version }}
-
- - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
- name: Install mpas_analysis
- run: |
- conda install -y --file dev-spec.txt \
- python=${{ matrix.python-version }}
- python -m pip install --no-deps --no-build-isolation -vv -e .
+ environments: ${{ matrix.pixi-environment }}
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
name: Run Tests
env:
- CHECK_IMAGES: False
+ CHECK_IMAGES: "False"
run: |
set -e
- pip check
- pytest --pyargs mpas_analysis
- mpas_analysis --help
- download_analysis_data --help
+ pixi run -e ${{ matrix.pixi-environment }} python -m pip check
+ pixi run -e ${{ matrix.pixi-environment }} pytest --pyargs mpas_analysis
+ pixi run -e ${{ matrix.pixi-environment }} mpas_analysis --help
+ pixi run -e ${{ matrix.pixi-environment }} download_analysis_data --help
- - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
+ - if: ${{ steps.skip_check.outputs.should_skip != 'true' && matrix.python-version == '3.14' }}
name: Build Sphinx Docs
run: |
- cd docs
- DOCS_VERSION=test make versioned-html
+ pixi run -e ${{ matrix.pixi-environment }} bash -lc '
+ cd docs
+ DOCS_VERSION=test make versioned-html
+ '
+
+ package:
+ name: build package - python ${{ matrix.python-version }}
+ runs-on: ubuntu-latest
+ timeout-minutes: 30
+ strategy:
+ matrix:
+ include:
+ - python-version: "3.10"
+ pixi-environment: py310
+ variant-file: ci/python3.10.yaml
+ - python-version: "3.11"
+ pixi-environment: py311
+ variant-file: ci/python3.11.yaml
+ - python-version: "3.12"
+ pixi-environment: py312
+ variant-file: ci/python3.12.yaml
+ - python-version: "3.13"
+ pixi-environment: py313
+ variant-file: ci/python3.13.yaml
+ - python-version: "3.14"
+ pixi-environment: py314
+ variant-file: ci/python3.14.yaml
+ fail-fast: false
+ steps:
+ - uses: actions/checkout@v6
+
+ - name: Set up Pixi
+ uses: prefix-dev/setup-pixi@v0.9.5
+ with:
+ environments: ${{ matrix.pixi-environment }}
+
+ - name: Build Conda Package with rattler-build
+ run: |
+ pixi run -e ${{ matrix.pixi-environment }} \
+ rattler-build build \
+ -m ${{ matrix.variant-file }} \
+ -r ci/recipe/recipe.yaml \
+ --output-dir rattler-build-output
diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml
index b90bb7882..af153d539 100644
--- a/.github/workflows/docs_workflow.yml
+++ b/.github/workflows/docs_workflow.yml
@@ -10,93 +10,64 @@ on:
types: [published]
env:
- PYTHON_VERSION: "3.13"
+ PYTHON_VERSION: "3.14"
jobs:
publish-docs:
runs-on: ubuntu-latest
- defaults:
- run:
- shell: bash -l {0}
timeout-minutes: 20
steps:
- - uses: actions/checkout@v5
+ - uses: actions/checkout@v6
with:
persist-credentials: false
fetch-depth: 0
- - name: Cache Conda
- uses: actions/cache@v4
- env:
- # Increase this value to reset cache if deploy/conda-dev-spec.template has not changed in the workflow
- CACHE_NUMBER: 0
+ - name: Set up Pixi
+ uses: prefix-dev/setup-pixi@v0.9.5
with:
- path: ~/conda_pkgs_dir
- key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{
- hashFiles('dev-spec.txt') }}
-
- - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
- name: Set up Conda Environment
- uses: mamba-org/setup-micromamba@v2
- with:
- environment-name: mpas_analysis_dev
- init-shell: bash
- condarc: |
- channel_priority: strict
- channels:
- - conda-forge
- create-args: >-
- python=${{ env.PYTHON_VERSION }}
-
- - if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
- name: Install mpas_analysis
- run: |
- git config --global url."https://github.com/".insteadOf "git@github.com:"
- conda install -y --file dev-spec.txt \
- python=${{ env.PYTHON_VERSION }}
- python -m pip install -vv --no-deps --no-build-isolation -e .
+ environments: py314
- name: Build Sphinx Docs
run: |
set -e
- pip check
- mpas_analysis sync diags --help
- cd docs
- DOCS_VERSION=${{ github.ref_name }} make versioned-html
+ git config --global url."https://github.com/".insteadOf "git@github.com:"
+ pixi run -e py314 python -m pip check
+ pixi run -e py314 mpas_analysis sync diags --help
+ pixi run -e py314 bash -lc '
+ cd docs
+ DOCS_VERSION=${{ github.ref_name }} make versioned-html
+ '
+
- name: Copy Docs and Commit
run: |
set -e
- pip check
- mpas_analysis sync diags --help
- cd docs
- # gh-pages branch must already exist
- git clone https://github.com/MPAS-Dev/MPAS-Analysis.git --branch gh-pages --single-branch gh-pages
+ pixi run -e py314 bash -lc '
+ cd docs
+ # gh-pages branch must already exist
+ git clone https://github.com/MPAS-Dev/MPAS-Analysis.git --branch gh-pages --single-branch gh-pages
- # Only replace docs in a directory with the destination branch name with latest changes. Docs for
- # releases should be untouched.
- rm -rf gh-pages/${{ github.ref_name }}
+ # Only replace docs in a directory with the destination branch name with latest changes. Docs for
+ # releases should be untouched.
+ rm -rf gh-pages/${{ github.ref_name }}
- # don't clobber existing release versions (in case we retroactively fixed them)
- cp -r _build/html/${{ github.ref_name }} gh-pages/
+ # do not clobber existing release versions if they were updated manually
+ cp -r _build/html/${{ github.ref_name }} gh-pages/
- mkdir -p gh-pages/shared
- cp shared/version-switcher.js gh-pages/shared/version-switcher.js
+ mkdir -p gh-pages/shared
+ cp shared/version-switcher.js gh-pages/shared/version-switcher.js
- # Update the list of versions with all versions in the gh-pages directory.
- python generate_versions_json.py
+ # Update the list of versions with all versions in the gh-pages directory.
+ python generate_versions_json.py
+
+ cd gh-pages
+ touch .nojekyll
+ printf "" > index.html
+ git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
+ git config --local user.name "github-actions[bot]"
+ git add .
+ git commit -m "Update documentation" -a || true
+ '
- # Make sure we're in the gh-pages directory.
- cd gh-pages
- # Create `.nojekyll` (if it doesn't already exist) for proper GH Pages configuration.
- touch .nojekyll
- # Add `index.html` to point to the `develop` branch automatically.
- printf '' > index.html
- # Configure git using GitHub Actions credentials.
- git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
- git config --local user.name "github-actions[bot]"
- # The second command will fail if no changes were present, so we ignore it
- git add .
- git commit -m "Update documentation" -a || true
- name: Push Changes
uses: ad-m/github-push-action@master
with:
@@ -104,4 +75,3 @@ jobs:
directory: docs/gh-pages
github_token: ${{ secrets.GITHUB_TOKEN }}
force: true
-
diff --git a/.gitignore b/.gitignore
index 3512ba6ed..d36b91b3d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -93,6 +93,8 @@ ENV/
.ropeproject
.DS_Store
+.pixi/
+pixi.lock
# test suites
/anvil_test_suite/
@@ -101,4 +103,7 @@ ENV/
/compy_test_suite/
# vscode settings
-.vscode/
\ No newline at end of file
+.vscode/
+
+# codex
+.codex
diff --git a/ci/python3.10.yaml b/ci/python3.10.yaml
index 366288703..ba317ae3c 100644
--- a/ci/python3.10.yaml
+++ b/ci/python3.10.yaml
@@ -1,5 +1,5 @@
channel_sources:
-- conda-forge,defaults
+- conda-forge
pin_run_as_build:
python:
min_pin: x.x
diff --git a/ci/python3.11.yaml b/ci/python3.11.yaml
index 85b78f32e..ebb868fbd 100644
--- a/ci/python3.11.yaml
+++ b/ci/python3.11.yaml
@@ -1,5 +1,5 @@
channel_sources:
-- conda-forge,defaults
+- conda-forge
pin_run_as_build:
python:
min_pin: x.x
diff --git a/ci/python3.12.yaml b/ci/python3.12.yaml
index 6f2e3cb6c..36e07e4cf 100644
--- a/ci/python3.12.yaml
+++ b/ci/python3.12.yaml
@@ -1,5 +1,5 @@
channel_sources:
-- conda-forge,defaults
+- conda-forge
pin_run_as_build:
python:
min_pin: x.x
diff --git a/ci/python3.13.yaml b/ci/python3.13.yaml
index edf0f22a9..0f52400be 100644
--- a/ci/python3.13.yaml
+++ b/ci/python3.13.yaml
@@ -1,5 +1,5 @@
channel_sources:
-- conda-forge,defaults
+- conda-forge
pin_run_as_build:
python:
min_pin: x.x
diff --git a/ci/python3.14.yaml b/ci/python3.14.yaml
new file mode 100644
index 000000000..d2c73f739
--- /dev/null
+++ b/ci/python3.14.yaml
@@ -0,0 +1,8 @@
+channel_sources:
+- conda-forge
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.14.* *_cp314
diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml
deleted file mode 100644
index 10009615a..000000000
--- a/ci/recipe/meta.yaml
+++ /dev/null
@@ -1,87 +0,0 @@
-{% set name = "MPAS-Analysis" %}
-{% set version = "1.14.0" %}
-{% set python_min = "3.10" %}
-
-package:
- name: {{ name|lower }}
- version: {{ version }}
-
-source:
- path: ../..
-
-build:
- number: 0
- script: {{ PYTHON }} -m pip install . --no-deps --no-build-isolation -vv
- noarch: python
- entry_points:
- - mpas_analysis = mpas_analysis.__main__:main
- - download_analysis_data = mpas_analysis.download_data:download_analysis_data
-
-requirements:
- host:
- - python {{ python_min }}
- - pip
- - setuptools >=60
- run:
- - python >={{ python_min }},<3.13
- - cartopy >=0.18.0
- - cartopy_offlinedata
- - cmocean
- - dask
- - esmf >=8.4.2,<9.0.0
- - f90nml
- - geometric_features >=1.6.1
- - gsw
- - lxml
- - mache >=1.11.0
- - matplotlib-base >=3.9.0
- - mpas_tools >=1.3.0,<2.0.0
- - nco >=4.8.1,!=5.2.6
- - netcdf4
- - numpy >=2.0,<3.0
- - pandas
- - pillow >=10.0.0,<11.0.0
- - progressbar2
- - pyproj
- - pyremap >=2.0.0,<3.0.0
- - python-dateutil
- - requests
- - scipy >=1.7.0
- - shapely >=2.0,<3.0
- - tranche >=0.2.3
- - xarray >=0.14.1
-
-test:
- requires:
- - pytest
- - pip
- - python {{ python_min }}
- imports:
- - mpas_analysis
- - pytest
- commands:
- - pip check
- - pytest --pyargs mpas_analysis
- - mpas_analysis --help
- - mpas_analysis --list
- - mpas_analysis --plot_colormaps
- - download_analysis_data --help
-
-about:
- home: https://github.com/MPAS-Dev/MPAS-Analysis
- license: BSD-3-Clause
- license_family: BSD
- license_file: LICENSE
- summary: Analysis of MPAS-Ocean and MPAS-Seaice simulations results
- description: |
- Analysis for simulations produced with Model for Prediction Across Scales
- (MPAS) components and the Energy Exascale Earth System Model (E3SM), which
- used those components.
- doc_url: https://mpas-dev.github.io/MPAS-Analysis/stable/
- dev_url: https://github.com/MPAS-Dev/MPAS-Analysis
-
-extra:
- recipe-maintainers:
- - andrewdnolan
- - xylar
- - jhkennedy
diff --git a/ci/recipe/recipe.yaml b/ci/recipe/recipe.yaml
new file mode 100644
index 000000000..8997328a2
--- /dev/null
+++ b/ci/recipe/recipe.yaml
@@ -0,0 +1,91 @@
+schema_version: 1
+
+context:
+ name: MPAS-Analysis
+ version: "1.15.0"
+ python_min: "3.10"
+
+package:
+ name: ${{ name|lower }}
+ version: ${{ version }}
+
+source:
+ path: ../..
+
+build:
+ number: 0
+ noarch: python
+ script: ${{ PYTHON }} -m pip install . --no-deps --no-build-isolation -vv
+ python:
+ entry_points:
+ - mpas_analysis = mpas_analysis.__main__:main
+ - download_analysis_data = mpas_analysis.download_data:download_analysis_data
+
+requirements:
+ host:
+ - python ${{ python_min }}.*
+ - pip
+ - setuptools >=60
+ run:
+ - python >=${{ python_min }}
+ - cartopy >=0.18.0
+ - cartopy_offlinedata
+ - cmocean
+ - dask
+ - esmf >=8.4.2,<9.0.0
+ - f90nml
+ - geometric_features >=1.6.1
+ - gsw
+ - lxml
+ - mache >=1.11.0
+ - matplotlib-base >=3.9.0
+ - mpas_tools >=1.3.0,<2.0.0
+ - nco >=4.8.1,!=5.2.6,!=5.3.7
+ - netcdf4
+ - numpy >=2.0,<3.0
+ - pandas
+ - pillow >=10.0.0,<13.0.0
+ - progressbar2
+ - pyproj
+ - pyremap >=2.0.0,<3.0.0
+ - python-dateutil
+ - requests
+ - scipy >=1.7.0
+ - shapely >=2.0,<3.0
+ - tranche >=0.2.3
+ - xarray >=0.14.1
+
+tests:
+ - python:
+ imports:
+ - mpas_analysis
+ pip_check: true
+ python_version: ${{ python_min }}.*
+ - requirements:
+ run:
+ - pytest
+ - python ${{ python_min }}.*
+ script:
+ - pytest --pyargs mpas_analysis
+ - mpas_analysis --help
+ - mpas_analysis --list
+ - mpas_analysis --plot_colormaps
+ - download_analysis_data --help
+
+about:
+ license: BSD-3-Clause
+ license_file: LICENSE
+ summary: Analysis of MPAS-Ocean and MPAS-Seaice simulations results
+ description: |
+ Analysis for simulations produced with Model for Prediction Across Scales
+ (MPAS) components and the Energy Exascale Earth System Model (E3SM), which
+ used those components.
+ homepage: https://github.com/MPAS-Dev/MPAS-Analysis
+ repository: https://github.com/MPAS-Dev/MPAS-Analysis
+ documentation: https://mpas-dev.github.io/MPAS-Analysis/stable/
+
+extra:
+ recipe-maintainers:
+ - andrewdnolan
+ - xylar
+ - jhkennedy
diff --git a/dev-spec.txt b/dev-spec.txt
index 56b63b9bf..b6fda53a5 100644
--- a/dev-spec.txt
+++ b/dev-spec.txt
@@ -16,11 +16,11 @@ lxml
mache >=1.11.0
matplotlib-base >=3.9.0
mpas_tools >=1.3.0,<2.0.0
-nco >=4.8.1,!=5.2.6
+nco >=4.8.1,!=5.2.6,!=5.3.7
netcdf4
numpy >=2.0,<3.0
pandas
-pillow >=10.0.0,<11.0.0
+pillow >=10.0.0,<13.0.0
progressbar2
pyproj
pyremap >=2.0.0,<3.0.0
diff --git a/docs/developers_guide/docs.rst b/docs/developers_guide/docs.rst
index f211aee48..13fcc94a8 100644
--- a/docs/developers_guide/docs.rst
+++ b/docs/developers_guide/docs.rst
@@ -1,7 +1,8 @@
Building the Documentation
==========================
-With the ``mpas_analysis_dev`` environment activated, you can run:
+With the development environment active (for example after running
+``pixi shell`` from the repository root), you can run:
.. code-block:: bash
diff --git a/docs/developers_guide/quick_start.rst b/docs/developers_guide/quick_start.rst
index 8adf5cd14..25dc57a42 100644
--- a/docs/developers_guide/quick_start.rst
+++ b/docs/developers_guide/quick_start.rst
@@ -52,41 +52,39 @@ MPAS-Analysis development.
cd ../
-5. Set Up Conda Environment
----------------------------
-- Install Miniforge3 (recommended) or Miniconda.
-- For Miniconda, add ``conda-forge`` channel and set strict priority.
-- Create environment:
+5. Set Up the Development Environment
+-------------------------------------
+- Install ``pixi`` by following the official installation instructions at
+  `pixi.sh <https://pixi.sh>`_. On Linux and macOS, a common option
+ is:
- .. code-block:: bash
-
- conda create -y -n mpas_analysis_dev --file dev-spec.txt
+ .. code-block:: bash
-- Activate:
+ curl -fsSL https://pixi.sh/install.sh | sh
- .. code-block:: bash
+- From the root of your worktree, create and activate the development
+ environment:
- conda activate mpas_analysis_dev
+ .. code-block:: bash
-- Install MPAS-Analysis in edit mode:
+ pixi shell
- .. code-block:: bash
-
- python -m pip install --no-deps --no-build-isolation -e .
+ ``pixi shell`` will create the default environment on first use and activate
+ it with MPAS-Analysis installed in editable mode.
6. Activate Environment (each session)
--------------------------------------
-- For bash:
+- From the root of your worktree, run:
- .. code-block:: bash
+ .. code-block:: bash
- source ~/miniforge3/etc/profile.d/conda.sh; conda activate mpas_analysis_dev
+ pixi shell
-- For csh:
+- To run the analysis regression suite from the same Pixi environment:
- .. code-block:: csh
+ .. code-block:: bash
- source ~/miniforge3/etc/profile.d/conda.csh; conda activate mpas_analysis_dev
+ ./suite/run_suite.bash --dev
7. Configure and Run MPAS-Analysis
----------------------------------
diff --git a/docs/developers_guide/test_suite.rst b/docs/developers_guide/test_suite.rst
index 0372ff96b..0420666a3 100644
--- a/docs/developers_guide/test_suite.rst
+++ b/docs/developers_guide/test_suite.rst
@@ -9,38 +9,36 @@ unexpected results and to validate MPAS-Analysis in various environments.
Overview of Test Scripts
------------------------
-There are three main scripts for running the test suite:
+The main entry point is ``suite/run_suite.bash``. It supports three modes:
-1. **run_dev_suite.bash** (Developer Testing)
+1. **Developer Testing**: ``./suite/run_suite.bash --dev``
- - Use this script after activating your development environment
- (must be named `mpas_analysis_dev`).
+ - This is the recommended workflow for development in a Pixi environment.
- - It builds the documentation and runs a series of analysis tasks on output
- from a low-resolution (QUwLI240) simulation.
-
- - Each task produces a web page with results, accessible via the web portal.
-
- - Example usage:
+ - Run it either from an active Pixi shell or with an explicit Pixi
+ environment name:
.. code-block:: bash
- $ source ~/miniforge3/etc/profile.d/conda.sh
- $ conda activate mpas_analysis_dev
- $ ./suite/run_dev_suite.bash
+ $ pixi shell
+ $ ./suite/run_suite.bash --dev
- - After completion, check for successful web page generation, e.g.:
+ or:
.. code-block:: bash
- $ tail -n 3 chrysalis_test_suite/main_py3.11/mpas_analysis.o793058
+ $ ./suite/run_suite.bash --dev --pixi-env py313
- The last lines should include:
+ - It builds the documentation, renders the suite configs, and submits the
+ suite jobs using ``pixi run`` in the selected environment.
- .. code-block:: none
+ - Each task produces a web page with results, accessible via the web portal.
+
+ - After completion, check for successful web page generation, e.g.:
+
+ .. code-block:: bash
- Generating webpage for viewing results...
- Web page: https://web.lcrc.anl.gov/public/e3sm/diagnostic_output//analysis_testing/chrysalis//main_py3.11/
+ $ tail -n 3 chrysalis_test_suite/main_py3.13/mpas_analysis.o793058
- To quickly identify unfinished or failed tasks:
@@ -51,34 +49,24 @@ There are three main scripts for running the test suite:
- Developers should run this suite manually on each pull request before
merging and link the results in the PR.
-2. **run_suite.bash** (Package Build & Test)
+2. **Package Build & Test**: ``./suite/run_suite.bash``
- - Use this script to build the MPAS-Analysis conda package and test it in
- fresh environments.
+ - This mode builds the MPAS-Analysis conda package and tests it in fresh
+ environments.
- It creates conda environments for multiple Python versions, runs tests,
builds documentation, and executes the analysis suite.
- Recommended for more thorough validation, especially before releases.
- - Example usage:
+3. **E3SM-Unified Deployment Testing**:
+ ``./suite/run_suite.bash --e3sm-unified``
- .. code-block:: bash
-
- $ ./suite/run_suite.bash
-
-3. **run_e3sm_unified_suite.bash** (E3SM-Unified Deployment Testing)
-
- - Used during test deployments of E3SM-Unified to verify MPAS-Analysis
- works as expected within the deployment.
-
- - Typically run by E3SM-Unified maintainers during deployment testing.
-
- - Example usage:
-
- .. code-block:: bash
+ - This mode is used during test deployments of E3SM-Unified to verify
+ MPAS-Analysis works as expected within the deployment.
- $ ./suite/run_e3sm_unified_suite.bash
+ - It is typically run by E3SM-Unified maintainers during deployment
+ testing.
Supported Machines
------------------
@@ -103,8 +91,9 @@ Developers may need to update the suite for new requirements:
- **Python Versions**:
- - The Python versions tested are defined in the scripts (e.g.,
- `main_py=3.11`, `alt_py=3.10`).
+ - The Python versions tested in package mode are defined at the top of
+ ``suite/run_suite.bash`` (for example ``main_py=3.13`` and
+ ``alt_py=3.12``).
- To test additional versions, add them to the relevant script variables and
loops.
@@ -119,8 +108,8 @@ Developers may need to update the suite for new requirements:
- **Adding/Modifying Tests**:
- - To add new tests, update the list of runs in the scripts and
- provide corresponding config files in the `suite` directory.
+ - To add new tests, update the run lists in ``suite/run_suite.bash`` and
+ provide corresponding config files in ``suite/configs``.
- New tests could change which analysis tasks are run, the configuration for
running tasks overall (e.g. how climatologies are computed), or how
@@ -144,5 +133,5 @@ Best Practices
- Update the suite scripts and configs as needed to keep pace with
MPAS-Analysis development.
-For more details, see the comments and documentation within each script and
-config file in the `suite` directory.
+The suite templates live in ``suite/templates`` and the run-specific config
+overrides live in ``suite/configs``.
diff --git a/docs/tutorials/dev_add_task.rst b/docs/tutorials/dev_add_task.rst
index 4ae1c6f72..f1498ad0b 100644
--- a/docs/tutorials/dev_add_task.rst
+++ b/docs/tutorials/dev_add_task.rst
@@ -34,7 +34,7 @@ the code to MPAS-Analysis.
If one just wishes to add a new field that already exists in MPAS-Ocean or
MPAS-Seaice output, only a few of the steps below are necessary:
- 1. Follow step 1 to set up an ```mpas_analysis_dev``` environment.
+ 1. Follow step 1 to set up your development environment.
2. Copy an existing `ocean `_
or `sea_ice `_
python module to a new name and edit it as needed for the new fields.
@@ -50,15 +50,16 @@ the code to MPAS-Analysis.
To begin, please follow the :ref:`tutorial_dev_getting_started` tutorial, which
will help you through the basics of creating a fork of MPAS-Analysis,
cloning it onto the machine(s) where you will do your development, making
-a worktree for the feature you will develop, creating a conda environment for
-testing your new MPAS-Analysis development, and running MPAS-Analysis.
+a worktree for the feature you will develop, creating a development
+environment for testing your new MPAS-Analysis work, and running
+MPAS-Analysis.
.. note::
Make sure you follow the tutorial for developers, not for users, since the
tutorial for users installs the latest release of MPAS-Analysis, which you
cannot modify. Similarly, changes must be tested in your own development
- environment (often called ``mpas_analysis_dev``) rather than the in a shared
+ environment rather than in a shared
environment like `E3SM-Unified `_.
Then, please follow the :ref:`tutorial_understand_a_task`. This will give
@@ -550,16 +551,12 @@ whatever editor you like.)
code .
-I'll create or recreate my ``mpas_analysis_dev`` environment as in
-:ref:`tutorial_dev_getting_started`, and then make sure to at least do:
+I'll create or recreate my development environment as in
+:ref:`tutorial_dev_getting_started`, and then make sure to do:
.. code-block:: bash
- conda activate mpas_analysis_dev
- python -m pip install --no-deps --no-build-isolation -e .
-
-This last command installs the ``mpas_analysis`` package into the conda
-environment.
+ pixi shell
4.1 ``ClimatologyMapBSF`` class
-------------------------------
@@ -1138,7 +1135,7 @@ You also need to add the tasks class and public methods to the
in the developer's guide. Again, the easiest approach is to copy the section
for a similar task and modify as needed.
-With the ``mpas_analysis_dev`` environment activated, you can run:
+With the development environment active, you can run:
.. code-block:: bash
diff --git a/docs/tutorials/dev_getting_started.rst b/docs/tutorials/dev_getting_started.rst
index c7ad2821e..14a4142af 100644
--- a/docs/tutorials/dev_getting_started.rst
+++ b/docs/tutorials/dev_getting_started.rst
@@ -6,7 +6,7 @@ Developer: Getting Started
This mini-tutorial is meant as the starting point for other tutorials for
developers. It describes the process for creating a fork of the MPAS-Analysis
repo, cloning the repository (and your fork) locally, making a git worktree for
-development, and creating a conda environment that includes the
+development, and creating a ``pixi`` environment that includes the
``mpas_analysis`` package and all of its dependencies, installed in a mode
appropriate for development.
@@ -140,188 +140,66 @@ Go into that directory to do your development:
$ cd ../add_my_fancy_task
-4. Making a conda environment
------------------------------
+4. Making a development environment
+-----------------------------------
-MPAS-Analysis relies on several packages that are only available as conda
-packages from the ``conda-forge`` channel. The first step for running
-MPAS-Analysis is to create a conda environment with all the needed packages.
+MPAS-Analysis relies on packages from ``conda-forge`` and uses ``pixi`` to
+manage the development environment defined in ``pixi.toml``.
-4.1 Installing Miniforge3
-~~~~~~~~~~~~~~~~~~~~~~~~~
+4.1 Installing pixi
+~~~~~~~~~~~~~~~~~~~
-If you have not yet installed Anaconda, Miniconda or Miniforge, you will need
-to begin there. The concept behind Anaconda is that just about everything you
-would need for a typical python workflow is included. The concept behind
-Miniconda and Miniforge is that you create different environments for
-different purposes. This allows for greater flexibility and tends to lead to
-fewer conflicts between incompatible packages, particularly when using a
-channel other than the ``defaults`` supplied by Anaconda. Since we will use
-the ``conda-forge`` channel, the Miniforge3 approach is strongly recommended.
-The main advantage of Miniforge3 over Miniconda is that it automatically takes
-care of a few steps that we otherwise need to do manually.
-
-First download the
-`Miniforge3 installer `_
-for your operating system, then run it:
+If you do not already have ``pixi``, install it using the official
+instructions at `pixi.sh <https://pixi.sh>`_. On Linux and macOS, a
+common option is:
.. code-block:: bash
- $ /bin/bash Miniforge3-Linux-x86_64.sh
+ $ curl -fsSL https://pixi.sh/install.sh | sh
.. note::
- MPAS-Analysis and many of the packages it depends on support OSX and Linux
- but not Windows.
-
-If you are on an HPC system, you can still install Miniconda into your home
-directory. Typically, you will need the Linux version.
+ MPAS-Analysis and many of the packages it depends on support macOS and
+ Linux but not Windows.
.. note::
At this time, we don't have experience with installing or running
MPAS-Analysis on ARM or Power8/9 architectures.
-You will be asked to agree to the terms and conditions. Type ``yes`` to
-continue.
-
-You will be prompted with a location to install. In this tutorial, we assume
-that Miniforge3 is installed in the default location, ``~/miniforge3``. If
-you are using Miniconda or chose to install Miniforge3 somewhere else, just
-make sure to make the appropriate substitution whenever you see a reference to
-this path below.
-
-.. note::
-
- On some HPC machines (particularly at LANL Institutional Computing and
- NERSC) the space in your home directory is quite limited. You may want to
- install Miniforge3 in an alternative location to avoid running out of
- space.
-
-You will see prompt like this:
-
-.. code-block::
-
- Do you wish the installer to initialize Miniforge3
- by running conda init? [yes|no]
- [no] >>>
-
-You may wish to skip the step (answer ``no``) if you are working on a system
-where you will also be using other conda environments, most notably
-E3SM-Unified (which has its own Miniforge3 installation). If you do not run
-conda init, you have to manually activate ``conda`` whenever you need it.
-For ``bash`` and similar shells, this is:
-
-.. code-block:: bash
-
- $ source ~/miniforge3/etc/profile.d/conda.sh
- $ conda activate
-
-If you use ``csh``, ``tcsh`` or related shells, this becomes:
-
-.. code-block:: csh
-
- > source ~/miniforge3/etc/profile.d/conda.csh
- > conda activate
-
-You may wish to create an alias in your ``.bashrc`` or ``.cshrc`` to make
-this easier. For example:
-
-.. code-block:: bash
-
- alias init_conda="source ~/miniforge3/etc/profile.d/conda.sh; conda activate"
-
-
-4.2 One-time Miniconda setup
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+4.2 Create and activate the development environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-If you installed Miniconda, rather than Miniforge3, you will need to add the
-`conda-forge channel `_ and make sure it always takes
-precedence for packages available on that channel:
+From the root of the worktree where you are doing development, run:
.. code-block:: bash
- $ conda config --add channels conda-forge
- $ conda config --set channel_priority strict
+ $ pixi shell
-If you installed Miniforge3, these steps will happen automatically.
-
-4.3 Create a development environment
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can create a new conda environment called ``mpas_analysis_dev`` and install the
-dependencies that MPAS-Analysis needs by running the following in the worktree
-where you are doing your development:
-
-.. code-block:: bash
-
- $ conda create -y -n mpas_analysis_dev --file dev-spec.txt
-
-The last argument is only needed on HPC machines because the conda version of
-MPI doesn't work properly on these machines. You can omit it if you're
-setting up the conda environment on your laptop.
-
-Then, you can activate the environment and install MPAS-Analysis in "edit"
-mode by running:
-
-.. code-block:: bash
-
- $ conda activate mpas_analysis_dev
- $ python -m pip install --no-deps --no-build-isolation -e .
-
-In this mode, any edits you make to the code in the worktree will be available
-in the conda environment. If you run ``mpas_analysis`` on the command line,
-it will know about the changes.
-
-This command only needs to be done once after the ``mpas_analysis_dev`` environment is
-built if you are not using worktrees.
-
-.. note::
-
- If you do use worktrees, rerun the ``python -m pip install ...`` command
- each time you switch to developing a new branch, since otherwise the
- version of ``mpas_analysis`` in the ``mpas_analysis_dev`` environment will be the
- one you were developing previously.
+This command creates the default environment on first use and activates it.
+The default environment includes MPAS-Analysis installed in editable mode, so
+changes you make in the current worktree are immediately reflected when you
+run ``mpas_analysis``.
.. _tutorial_dev_get_started_activ_env:
-4.4 Activating the environment
+4.3 Activating the environment
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Each time you open a new terminal window, to activate the ``mpas_analysis_dev``
-environment, you will need to run either for ``bash``:
+Each time you open a new terminal window, activate the development
+environment from the root of your worktree with:
.. code-block:: bash
- $ source ~/miniforge3/etc/profile.d/conda.sh
- $ conda activate mpas_analysis_dev
-
-or for ``csh``:
-
-.. code-block:: csh
-
- > source ~/miniforge3/etc/profile.d/conda.csh
- > conda activate mpas_analysis_dev
-
-You can skip the ``source`` command if you chose to initialize Miniforge3 or
-Miniconda3 so it loads automatically. You can also use the ``init_conda``
-alias for this step if you defined one.
+ $ pixi shell
-4.5 Switching worktrees
+4.4 Switching worktrees
~~~~~~~~~~~~~~~~~~~~~~~
-If you switch to a different worktree, it is safest to rerun the whole
-process for creating the ``mpas_analysis_dev`` conda environment. If you know that
-the dependencies are the same as the worktree used to create ``mpas_analysis_dev``,
-You can just reinstall ``mpas_analysis`` itself by rerunning
-
-.. code-block:: bash
-
- python -m pip install --no-deps --no-build-isolation -e .
-
-in the new worktree. If you forget this step, you will find that changes you
-make in the worktree don't affect the ``mpas_analysis_dev`` conda environment you are
-using.
+Because ``mpas-analysis`` is installed from the current worktree in editable
+mode, you should run ``pixi shell`` from the worktree you want to develop in.
+If you switch to a different worktree, leave the existing shell and start a
+new one from the new worktree.
5. Editing code
---------------
@@ -348,8 +226,9 @@ need to follow steps 2-6 of the :ref:`tutorial_getting_started` tutorial.
Run ``mpas_analysis`` on a compute node, not on an HPC login nodes (front
ends), because it uses too many resources to be safely run on a login node.
- When using a compute node interactively, activate the ``mpas_analysis_dev``
- environment, even if it was activated on the login node. Be sure to
+ When using a compute node interactively, activate the development
+ environment with ``pixi shell``, even if it was activated on the login
+ node. Be sure to
7.1 Configuring MPAS-Analysis
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -421,7 +300,7 @@ but leave off the date of the simulation to keep it a little shorter.
The ``[execute]`` section contains options related to serial or parallel
execution of the individual "tasks" that make up an MPAS-Analysis run. For
the most part, you can let MPAS-Analysis take care of this on supported
-machines. The exception is that, in a development conda environment, you will
+machines. The exception is that, in a local development environment, you will
be using a version of ESMF that cannot run in parallel so you will need the
following:
@@ -688,8 +567,8 @@ also be displayed over the full 5 years.)
The hard work is done. Now that we have a config file, we are ready to run.
To run MPAS-Analysis, you should either create a job script or log into
-an interactive session on a compute node. Then, activate the ``mpas_analysis_dev``
-conda environment as in :ref:`tutorial_dev_get_started_activ_env`.
+an interactive session on a compute node. Then, activate the development
+environment as in :ref:`tutorial_dev_get_started_activ_env`.
On many file systems, MPAS-Analysis and other python-based software that used
NetCDF files based on the HDF5 file structure can experience file access errors
diff --git a/docs/users_guide/config/preprocessed.rst b/docs/users_guide/config/preprocessed.rst
index 41fa989b8..eb0b92a5a 100644
--- a/docs/users_guide/config/preprocessed.rst
+++ b/docs/users_guide/config/preprocessed.rst
@@ -5,11 +5,11 @@ Preprocessed Reference Runs
The ``[oceanPreprocessedReference]`` and ``[seaIcePreprocessedReference]``
sections of a configuration file contain options used to point to preprocessed
-data from E3SM v0 reference runs::
+data from legacy E3SM reference runs::
[oceanPreprocessedReference]
- ## options related to preprocessed ocean reference run with which the results
- ## will be compared (e.g. a POP, CESM or ACME v0 run)
+ ## options related to a preprocessed ocean reference run with which the
+ ## results will be compared
# directory where ocean reference simulation results are stored
baseDirectory = /dir/to/ocean/reference
@@ -17,8 +17,8 @@ data from E3SM v0 reference runs::
...
[seaIcePreprocessedReference]
- ## options related to preprocessed sea ice reference run with which the results
- ## will be compared (e.g. a CICE, CESM or ACME v0 run)
+ ## options related to a preprocessed sea ice reference run with which the
+ ## results will be compared
# directory where ocean reference simulation results are stored
baseDirectory = /dir/to/seaice/reference
diff --git a/docs/users_guide/config/runs.rst b/docs/users_guide/config/runs.rst
index 6ffd07e42..118571319 100644
--- a/docs/users_guide/config/runs.rst
+++ b/docs/users_guide/config/runs.rst
@@ -4,8 +4,8 @@ Runs
====
The ``[runs]`` section of a configuration file contains options used to name
-the "main" run, a preprocessed E3SM v0 run (if any) and to point to analysis
-of a control E3SM v1 or standalone MPAS run (if any)::
+the "main" run, an optional preprocessed legacy E3SM reference run, and a
+control E3SM or standalone MPAS run (if any)::
[runs]
## options related to the run to be analyzed and control runs to be
@@ -43,9 +43,8 @@ as specified in E3SM::
mainRunName = runName
A few of the time series plots in MPAS-Analysis can be compared against a
-preprocessed control run from E3SM v0 (which was similar to the CESM, the
-Community Earth System Model). If these data are available and the comparison
-to these runs is desired, the name of the control run should be specified
+preprocessed legacy E3SM reference run. If these data are available and the
+comparison to these runs is desired, the name of the control run should be specified
here and the paths to the data set should be specified (see
:ref:`config_preprocessed`). If not this name should be left as ``None``::
@@ -103,4 +102,3 @@ config file::
mainRunConfigFile = main_run.cfg
-
diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py
index cab22ea39..1cd665b8d 100644
--- a/mpas_analysis/__main__.py
+++ b/mpas_analysis/__main__.py
@@ -33,7 +33,7 @@
import time
import json
from importlib.metadata import Distribution
-from importlib.resources import contents
+from importlib.resources import files
from mache import discover_machine, MachineInfo
@@ -943,6 +943,9 @@ def get_editable_install_dir(package_name):
direct_url = Distribution.from_name(package_name).read_text(
'direct_url.json')
+ if direct_url is None:
+ return None
+
contents = json.loads(direct_url)
pkg_is_editable = contents.get("dir_info", {}).get("editable", False)
if pkg_is_editable and 'url' in contents:
@@ -1075,10 +1078,11 @@ def main():
except FileNotFoundError:
possible_machines = []
- machine_configs = contents('mache.machines')
- for config in machine_configs:
- if config.endswith('.cfg'):
- possible_machines.append(os.path.splitext(config)[0])
+ machine_configs = files('mache.machines').iterdir()
+ for config_file in machine_configs:
+ if config_file.name.endswith('.cfg'):
+ possible_machines.append(
+ os.path.splitext(config_file.name)[0])
possible_machines = '\n '.join(sorted(possible_machines))
raise ValueError(
diff --git a/mpas_analysis/ocean/climatology_map_fluxes.py b/mpas_analysis/ocean/climatology_map_fluxes.py
index 5e503d8e1..668b33db0 100644
--- a/mpas_analysis/ocean/climatology_map_fluxes.py
+++ b/mpas_analysis/ocean/climatology_map_fluxes.py
@@ -142,7 +142,7 @@ def __init__(self, config, mpasClimatologyTask, controlConfig=None,
unitsLabel = r'W m$^{-2}$'
else:
groupSubtitle = 'Mass fluxes'
- unitsLabel = r'kg m$^{-2}$ s^${-1}$'
+ unitsLabel = r'kg m$^{-2}$ s$^{-1}$'
subtask.set_plot_info(
outFileLabel=outFileName,
diff --git a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py
index e0a39b1c2..8806e52d2 100644
--- a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py
+++ b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py
@@ -182,6 +182,12 @@ class RemapMpasOHCClimatology(RemapMpasClimatologySubtask):
min_depth, max_depth : float
The minimum and maximum depths for integration
+
+ cp : float
+ Specific heat of seawater [J/(kg*degC)]
+
+ rho : float
+ Reference density of seawater [kg/m3]
"""
def __init__(self, mpas_climatology_task, ref_year_climatology_task,
@@ -239,6 +245,8 @@ def __init__(self, mpas_climatology_task, ref_year_climatology_task,
self.run_after(ref_year_climatology_task)
self.min_depth = min_depth
self.max_depth = max_depth
+ self.cp = None
+ self.rho = None
def setup_and_check(self):
"""
@@ -255,6 +263,9 @@ def setup_and_check(self):
self.ref_year_climatology_task.add_variables(self.variableList,
self.seasons)
+ self.cp = self.namelist.getfloat('config_specific_heat_sea_water')
+ self.rho = self.namelist.getfloat('config_density0')
+
def customize_masked_climatology(self, climatology, season):
"""
Compute the ocean heat content (OHC) anomaly from the temperature
@@ -298,10 +309,10 @@ def _compute_ohc(self, climatology):
ds_mesh = xr.open_dataset(self.meshFilename)
ds_mesh = ds_mesh.isel(Time=0)
- # specific heat [J/(kg*degC)]
- cp = self.namelist.getfloat('config_specific_heat_sea_water')
- # [kg/m3]
- rho = self.namelist.getfloat('config_density0')
+ cp = self.cp
+ assert cp is not None, "Specific heat 'cp' has not been set"
+ rho = self.rho
+ assert rho is not None, "Reference density 'rho' has not been set"
units_scale_factor = 1e-9
diff --git a/mpas_analysis/ocean/compute_transects_subtask.py b/mpas_analysis/ocean/compute_transects_subtask.py
index b4b8ab0db..e224dc104 100644
--- a/mpas_analysis/ocean/compute_transects_subtask.py
+++ b/mpas_analysis/ocean/compute_transects_subtask.py
@@ -586,7 +586,7 @@ def _compute_mpas_transects(self, dsMesh):
# reads them back because of _FillValue
dsMpasTransect.to_netcdf(transectInfoFileName)
- dsTransectOnMpas = xr.Dataset(dsMpasTransect)
+ dsTransectOnMpas = dsMpasTransect.copy()
dsTransectOnMpas['x'] = dsMpasTransect.dNode
dsTransectOnMpas['z'] = dsMpasTransect.zTransectNode
@@ -608,7 +608,7 @@ def _compute_mpas_transects(self, dsMesh):
for season in self.seasons:
maskedFileName = self.get_masked_file_name(season)
with xr.open_dataset(maskedFileName) as dsMask:
- dsOnMpas = xr.Dataset(dsMpasTransect)
+ dsOnMpas = dsMpasTransect.copy()
for var in dsMask.data_vars:
dims = dsMask[var].dims
if 'nCells' in dims and (
diff --git a/mpas_analysis/ocean/index_nino34.py b/mpas_analysis/ocean/index_nino34.py
index b3cd49136..772fb0d4f 100644
--- a/mpas_analysis/ocean/index_nino34.py
+++ b/mpas_analysis/ocean/index_nino34.py
@@ -221,6 +221,9 @@ def run_task(self):
ninoIndexNumber))
varName = self.variableList[0]
regionSST = ds[varName]
+ self.logger.debug('Main run SST dims=%s shape=%s',
+ getattr(regionSST, 'dims', None),
+ getattr(regionSST, 'shape', None))
nino34Main = self._compute_nino34_index(regionSST, calendar)
# Compute the observational index over the entire time range
@@ -270,7 +273,14 @@ def run_task(self):
dsRef = add_standard_regions_and_subset(
dsRef, self.controlConfig, regionShortNames=[regionToPlot])
+ # we want to collapse the nOceanRegions dimension (same as main)
+ if 'nOceanRegions' in dsRef.dims:
+ dsRef = dsRef.isel(nOceanRegions=0)
+
regionSSTRef = dsRef[varName]
+ self.logger.debug('Control run SST dims=%s shape=%s',
+ getattr(regionSSTRef, 'dims', None),
+ getattr(regionSSTRef, 'shape', None))
nino34Ref = self._compute_nino34_index(regionSSTRef, calendar)
nino34s = [nino34Subset, nino34Main[2:-3], nino34Ref[2:-3]]
@@ -499,7 +509,19 @@ def _running_mean(self, inputData, wgts):
sp = (len(wgts) - 1) // 2
runningMean = inputData.copy()
for k in range(sp, nt - (sp + 1)):
- runningMean[k] = sum(wgts * inputData[k - sp:k + sp + 1].values)
+ windowValues = np.asarray(inputData[k - sp:k + sp + 1].values)
+ if windowValues.shape[0] != len(wgts):
+ raise ValueError(
+ 'Unexpected running-mean window shape. '
+ f'Expected first dimension {len(wgts)} but got {windowValues.shape}. '
+ f'inputData dims={getattr(inputData, "dims", None)} '
+ f'shape={getattr(inputData, "shape", None)}')
+
+ if windowValues.ndim == 1:
+ runningMean[k] = np.sum(wgts * windowValues)
+ else:
+ # weighted sum over the first axis (the running-mean window)
+ runningMean[k] = np.tensordot(wgts, windowValues, axes=(0, 0))
return runningMean
@@ -779,6 +801,8 @@ def _plot_size_y_axis(self, x, ys, xmin, xmax):
# find maximum value of three curves plotted
maxY = -1E20
+ if len(mask) == 0:
+ return maxY
for y in ys:
maxY = max(y[mask].max(), maxY)
# check the function interpolated to the max/min as well
diff --git a/mpas_analysis/ocean/sose_transects.py b/mpas_analysis/ocean/sose_transects.py
index d9f2bee57..9850e4737 100644
--- a/mpas_analysis/ocean/sose_transects.py
+++ b/mpas_analysis/ocean/sose_transects.py
@@ -379,7 +379,7 @@ def combine_observations(self):
# make a copy of the top set of data at z=0
dsObs = xr.concat((dsObs.isel(z=0), dsObs), dim='z')
- z = dsObs.z.values
+ z = dsObs.z.values.copy()
z[0] = 0.
dsObs['z'] = ('z', z)
write_netcdf_with_fill(dsObs, combinedFileName)
diff --git a/mpas_analysis/ocean/streamfunction_moc.py b/mpas_analysis/ocean/streamfunction_moc.py
index 26ef1a32e..5580d364e 100644
--- a/mpas_analysis/ocean/streamfunction_moc.py
+++ b/mpas_analysis/ocean/streamfunction_moc.py
@@ -1007,15 +1007,16 @@ def _compute_moc_time_series_analysismember(self):
self.historyStreams,
'timeSeriesStatsMonthlyOutput')
- mocRegion = np.zeros(len(inputFiles))
+ ntimes = int(12 * (self.endYear - self.startYear + 1))
+ mocRegion = np.zeros(ntimes)
moc = None
refTopDepth = None
- times = np.zeros(len(inputFiles))
- computed = np.zeros(len(inputFiles), bool)
+ times = np.zeros(ntimes)
+ computed = np.zeros(ntimes, bool)
continueOutput = os.path.exists(outputFileName)
if continueOutput:
- self.logger.info(' Read in previously computed MOC time series')
+ self.logger.info(f' Read in previously computed MOC time series {outputFileName}')
with open_mpas_dataset(fileName=outputFileName,
calendar=self.calendar,
timeVariableNames=None,
@@ -1028,32 +1029,30 @@ def _compute_moc_time_series_analysismember(self):
if moc is None:
sizes = dsMOCIn.sizes
- moc = np.zeros((len(inputFiles), sizes['depth'],
- sizes['lat']))
+ moc = np.zeros((ntimes, sizes['depth'],
+ sizes['lat']))
refTopDepth = dsMOCIn.depth.values
# first, copy all computed data
- for inIndex in range(dsMOCIn.sizes['Time']):
-
- mask = np.logical_and(
- dsMOCIn.year[inIndex].values == years,
- dsMOCIn.month[inIndex].values == months)
-
- outIndex = np.where(mask)[0][0]
-
- mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex]
- moc[outIndex, :, :] = dsMOCIn.mocAtlantic[inIndex, :, :]
- times[outIndex] = dsMOCIn.Time[inIndex]
- computed[outIndex] = True
+ outIndex = 0
+ for load_year in np.arange(self.startYear, self.endYear + 1):
+ for load_month in np.arange(1, 13):
+ mask = np.logical_and(dsMOCIn.year.values == load_year,
+ dsMOCIn.month.values == load_month)
+ if np.sum(mask) >= 1:
+ inIndex = np.where(mask)[0][0]
+ mocRegion[outIndex] = dsMOCIn.mocAtlantic26[inIndex]
+ moc[outIndex, :, :] = dsMOCIn.mocAtlantic[inIndex, :, :]
+ times[outIndex] = dsMOCIn.Time[inIndex]
+ computed[outIndex] = True
+
+ outIndex += 1
if np.all(computed):
# no need to waste time writing out the data set again
return dsMOCIn
for timeIndex, fileName in enumerate(inputFiles):
- if computed[timeIndex]:
- continue
-
dsLocal = open_mpas_dataset(
fileName=fileName,
calendar=self.calendar,
@@ -1067,12 +1066,15 @@ def _compute_moc_time_series_analysismember(self):
self.logger.info(' date: {:04d}-{:02d}'.format(date.year,
date.month))
+ computedIndex = 12 * (date.year - self.startYear) + date.month - 1
+ if computed[computedIndex]:
+ continue
# hard-wire region=0 (Atlantic) for now
indRegion = 0
mocVar = dsLocal.timeMonthly_avg_mocStreamvalLatAndDepthRegion
mocTop = mocVar[indRegion, :, :].values
- mocRegion[timeIndex] = np.amax(mocTop[:, indlat26])
+ mocRegion[computedIndex] = np.amax(mocTop[:, indlat26])
if moc is None:
sizes = dsLocal.sizes
@@ -1087,7 +1089,8 @@ def _compute_moc_time_series_analysismember(self):
refTopDepth = np.zeros(nVertLevels + 1)
refTopDepth[1:nVertLevels + 1] = refBottomDepth[0:nVertLevels]
- moc[timeIndex, 0:-1, :] = mocTop
+ moc[computedIndex, 0:-1, :] = mocTop
+
description = 'Max MOC Atlantic streamfunction nearest to RAPID ' \
'Array latitude (26.5N)'
diff --git a/mpas_analysis/ocean/time_series_ohc_anomaly.py b/mpas_analysis/ocean/time_series_ohc_anomaly.py
index b3dbf21d0..a81d56d80 100644
--- a/mpas_analysis/ocean/time_series_ohc_anomaly.py
+++ b/mpas_analysis/ocean/time_series_ohc_anomaly.py
@@ -31,6 +31,17 @@
class TimeSeriesOHCAnomaly(AnalysisTask):
"""
Performs analysis of ocean heat content (OHC) from time-series output.
+
+ Attributes
+ ----------
+ cp : float
+ Specific heat of seawater [J/(kg*degC)]
+
+ rho : float
+ Reference density of seawater [kg/m3]
+
+ meshFilename : str
+ The path to the MPAS mesh file
"""
# Authors
# -------
@@ -132,6 +143,21 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig=None):
plotTask.run_after(anomalyTask)
self.add_subtask(plotTask)
+ self.cp = None
+ self.rho = None
+ self.meshFilename = None
+
+ def setup_and_check(self):
+ """
+ Store the specific heat and reference density of seawater for use
+ in OHC calculations.
+ """
+ super().setup_and_check()
+
+ self.cp = self.namelist.getfloat('config_specific_heat_sea_water')
+ self.rho = self.namelist.getfloat('config_density0')
+ self.meshFilename = self.get_mesh_filename()
+
def _compute_ohc(self, ds):
"""
Compute the OHC time series.
@@ -139,10 +165,15 @@ def _compute_ohc(self, ds):
# for convenience, rename the variables to simpler, shorter names
ds = ds.rename(self.variableDict)
- # specific heat [J/(kg*degC)]
- cp = self.namelist.getfloat('config_specific_heat_sea_water')
- # [kg/m3]
- rho = self.namelist.getfloat('config_density0')
+        # these need to be set at setup time, not at runtime because pickling
+ # means the namelists and streams objects they come from aren't
+ # available at runtime
+ cp = self.cp
+ assert cp is not None, "Specific heat 'cp' has not been set"
+ rho = self.rho
+ assert rho is not None, "Reference density 'rho' has not been set"
+ meshFile = self.meshFilename
+ assert meshFile is not None, "Mesh filename has not been set"
unitsScalefactor = 1e-22
@@ -152,8 +183,6 @@ def _compute_ohc(self, ds):
ds.ohc.attrs['units'] = '$10^{22}$ J'
ds.ohc.attrs['description'] = 'Ocean heat content in each region'
- meshFile = self.get_mesh_filename()
-
# Define/read in general variables
with xr.open_dataset(meshFile) as dsMesh:
# reference depth [m]
diff --git a/mpas_analysis/ocean/time_series_transport.py b/mpas_analysis/ocean/time_series_transport.py
index e23996fd0..080cf92c6 100644
--- a/mpas_analysis/ocean/time_series_transport.py
+++ b/mpas_analysis/ocean/time_series_transport.py
@@ -235,6 +235,29 @@ def run_task(self):
outFileName = f'{outputDirectory}/{self.groupSuffix}_{self.startYear:04d}-{self.endYear:04d}.nc'
+ outputExists = os.path.exists(outFileName)
+ outputValid = outputExists
+ if outputExists:
+ with open_mpas_dataset(fileName=outFileName,
+ calendar=self.calendar,
+ timeVariableNames=None,
+ variableList=None,
+ startDate=startDate,
+ endDate=endDate) as dsOut:
+
+ for load_year in numpy.arange(self.startYear, self.endYear + 1):
+ for load_month in numpy.arange(1, 13):
+ mask = numpy.logical_and(
+ dsOut.year.values == load_year,
+ dsOut.month.values == load_month)
+ if numpy.count_nonzero(mask) == 0:
+ outputValid = False
+ break
+
+ if outputValid:
+ self.logger.info(' Time series exists -- Done.')
+ return
+
inputFiles = sorted(self.historyStreams.readpath(
'timeSeriesStatsMonthlyOutput', startDate=startDate,
endDate=endDate, calendar=self.calendar))
@@ -259,29 +282,6 @@ def run_task(self):
'Using advection velocity.')
variableList.append('timeMonthly_avg_normalVelocity')
- outputExists = os.path.exists(outFileName)
- outputValid = outputExists
- if outputExists:
- with open_mpas_dataset(fileName=outFileName,
- calendar=self.calendar,
- timeVariableNames=None,
- variableList=None,
- startDate=startDate,
- endDate=endDate) as dsOut:
-
- for inIndex in range(dsOut.sizes['Time']):
-
- mask = numpy.logical_and(
- dsOut.year[inIndex].values == years,
- dsOut.month[inIndex].values == months)
- if numpy.count_nonzero(mask) == 0:
- outputValid = False
- break
-
- if outputValid:
- self.logger.info(' Time series exists -- Done.')
- return
-
transectMaskFileName = self.masksSubtask.maskFileName
dsTransectMask = xarray.open_dataset(transectMaskFileName)
diff --git a/mpas_analysis/sea_ice/time_series.py b/mpas_analysis/sea_ice/time_series.py
index b0e0fcca0..464953344 100644
--- a/mpas_analysis/sea_ice/time_series.py
+++ b/mpas_analysis/sea_ice/time_series.py
@@ -10,6 +10,7 @@
# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
import numpy as np
+import os
import xarray as xr
from mpas_analysis.shared import AnalysisTask
@@ -690,10 +691,19 @@ def _compute_area_vol(self):
mask = dsMesh.latCell < 0
if maxAllowedSeaIceThickness is not None:
- mask = np.logical_and(mask,
- ds.iceThick <= maxAllowedSeaIceThickness)
-
- dsAreaSum = (ds.where(mask) * dsMesh.areaCell).sum('nCells')
+ mask = np.logical_and(
+ mask, ds.iceThick <= maxAllowedSeaIceThickness
+ )
+ dsForHemisphere = ds
+ if os.path.exists(outFileNames[hemisphere]):
+ dsCache = xr.open_dataset(outFileNames[hemisphere])
+ timeMask = ds.startTime > dsCache.startTime.isel(Time=-1)
+ timeMask = timeMask.compute()
+ dsForHemisphere = ds.isel(Time=timeMask)
+
+ dsAreaSum = (
+ dsForHemisphere.where(mask) * dsMesh.areaCell
+ ).sum('nCells')
dsAreaSum = dsAreaSum.rename(
{'iceConc': 'iceArea',
'iceThick': 'iceVolume',
@@ -703,21 +713,25 @@ def _compute_area_vol(self):
dsAreaSum['snowDepth'] = (dsAreaSum.snowVolume /
dsMesh.areaCell.sum('nCells'))
- dsAreaSum['iceArea'].attrs['units'] = 'm$^2$'
- dsAreaSum['iceArea'].attrs['description'] = \
- f'Total {hemisphere} sea ice area'
- dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$'
- dsAreaSum['iceVolume'].attrs['description'] = \
- f'Total {hemisphere} sea ice volume'
- dsAreaSum['snowVolume'].attrs['units'] = 'm$^3$'
- dsAreaSum['snowVolume'].attrs['description'] = \
- f'Total {hemisphere} snow volume'
- dsAreaSum['iceThickness'].attrs['units'] = 'm'
- dsAreaSum['iceThickness'].attrs['description'] = \
- f'Mean {hemisphere} sea ice thickness'
- dsAreaSum['snowDepth'].attrs['units'] = 'm'
- dsAreaSum['snowDepth'].attrs['description'] = \
- f'Mean {hemisphere} snow depth'
+ if os.path.exists(outFileNames[hemisphere]):
+ dsAreaSum = xr.concat([dsCache, dsAreaSum], dim='Time')
+ dsCache.close()
+ else:
+ dsAreaSum['iceArea'].attrs['units'] = 'm$^2$'
+ dsAreaSum['iceArea'].attrs['description'] = \
+ f'Total {hemisphere} sea ice area'
+ dsAreaSum['iceVolume'].attrs['units'] = 'm$^3$'
+ dsAreaSum['iceVolume'].attrs['description'] = \
+ f'Total {hemisphere} sea ice volume'
+ dsAreaSum['snowVolume'].attrs['units'] = 'm$^3$'
+ dsAreaSum['snowVolume'].attrs['description'] = \
+ f'Total {hemisphere} snow volume'
+ dsAreaSum['iceThickness'].attrs['units'] = 'm'
+ dsAreaSum['iceThickness'].attrs['description'] = \
+ f'Mean {hemisphere} sea ice thickness'
+ dsAreaSum['snowDepth'].attrs['units'] = 'm'
+ dsAreaSum['snowDepth'].attrs['description'] = \
+ f'Mean {hemisphere} snow depth'
dsTimeSeries[hemisphere] = dsAreaSum
diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py
index f47660658..5507b7624 100644
--- a/mpas_analysis/shared/analysis_task.py
+++ b/mpas_analysis/shared/analysis_task.py
@@ -203,18 +203,7 @@ def setup_and_check(self):
self.plotsDirectory = build_config_full_path(self.config, 'output',
'plotsSubdirectory')
- namelistFileName = build_config_full_path(
- self.config, 'input',
- '{}NamelistFileName'.format(self.componentName))
- self.namelist = NameList(namelistFileName)
-
- streamsFileName = build_config_full_path(
- self.config, 'input',
- '{}StreamsFileName'.format(self.componentName))
- self.runStreams = StreamsFile(streamsFileName,
- streamsdir=self.runDirectory)
- self.historyStreams = StreamsFile(streamsFileName,
- streamsdir=self.historyDirectory)
+ self._load_namelists_and_streams()
self.calendar = self.namelist.get('config_calendar_type')
@@ -282,6 +271,19 @@ def add_subtask(self, subtask):
if subtask not in self.subtasks:
self.subtasks.append(subtask)
+ def start(self):
+ """
+ Clear unpicklable attributes and then start the analysis task as a new
+ process.
+ """
+ # Authors
+ # -------
+ # Xylar Asay-Davis
+ # clear unpicklable attributes before running the task
+ self._clear_namelists_and_streams()
+
+ super(AnalysisTask, self).start()
+
def run(self, writeLogFile=True):
"""
Sets up logging and then runs the analysis task.
@@ -320,6 +322,9 @@ def run(self, writeLogFile=True):
startTime = time.time()
try:
+ # reload namelists and streams, since they cannot be pickled
+ # as part of multiprocessing
+ self._load_namelists_and_streams()
self.run_task()
self._runStatus.value = AnalysisTask.SUCCESS
except (Exception, BaseException) as e:
@@ -525,6 +530,71 @@ def get_mesh_filename(self):
return meshFilename
+ def __getstate__(self):
+ """
+ Customize pickling to exclude unpicklable and unnecessary attributes.
+ This method is called during multiprocessing when the task is
+ serialized to be sent to a child process. We exclude task dependencies
+ and process internals that don't need to be transferred, such as logger
+ objects, process internals, and weakref-bearing attributes.
+
+ Returns
+ -------
+ state : dict
+ The object state with unpicklable and unnecessary attributes
+ removed.
+ """
+ state = self.__dict__.copy()
+
+ # Clear out attributes that should not be pickled
+ state['namelist'] = None
+ state['runStreams'] = None
+ state['historyStreams'] = None
+ state['runAfterTasks'] = []
+ state['subtasks'] = []
+ # Drop process internals and logger that can't/shouldn't be pickled
+ for key in ['_popen', 'logger', '_stackTrace']:
+ state.pop(key, None)
+
+ # Drop weakref-bearing Finalize, etc., by not pickling _popen at all
+ # _runStatus is a multiprocessing.Value; depending on your logic,
+ # you may also want to skip it and let child initialize its own.
+
+ return state
+
+ def _load_namelists_and_streams(self):
+ """
+ Load namelist and streams attributes.
+ """
+ # Authors
+ # -------
+ # Xylar Asay-Davis
+
+ namelistFileName = build_config_full_path(
+ self.config, 'input',
+ '{}NamelistFileName'.format(self.componentName))
+ self.namelist = NameList(namelistFileName)
+
+ streamsFileName = build_config_full_path(
+ self.config, 'input',
+ '{}StreamsFileName'.format(self.componentName))
+ self.runStreams = StreamsFile(streamsFileName,
+ streamsdir=self.runDirectory)
+ self.historyStreams = StreamsFile(streamsFileName,
+ streamsdir=self.historyDirectory)
+
+ def _clear_namelists_and_streams(self):
+ """
+ Clear namelist and streams attributes that cannot be pickled for
+ multiprocessing.
+ """
+ # Authors
+ # -------
+ # Xylar Asay-Davis
+
+ self.namelist = None
+ self.runStreams = None
+ self.historyStreams = None
# }}}
@@ -602,7 +672,8 @@ def flush(self):
pass
-def update_time_bounds_from_file_names(config, section, componentName):
+def update_time_bounds_from_file_names(config, section, componentName,
+ allow_cache=True):
"""
Update the start and end years and dates for time series, climatologies or
climate indices based on the years actually available in the list of files.
@@ -658,7 +729,7 @@ def update_time_bounds_from_file_names(config, section, componentName):
return
if len(inputFiles) == 0:
- raise ValueError('No input files found for stream {} in {} between '
+ print('Warning: No input files found for stream {} in {} between '
'{} and {}'.format(streamName, componentName,
requestedStartYear,
requestedEndYear))
@@ -680,12 +751,16 @@ def update_time_bounds_from_file_names(config, section, componentName):
endYear = years[lastIndex]
if startYear != requestedStartYear or endYear != requestedEndYear:
- raise ValueError(
- "{} start and/or end year different from requested\n"
- "requested: {:04d}-{:04d}\n"
- "actual: {:04d}-{:04d}\n".format(
- section, requestedStartYear, requestedEndYear, startYear,
- endYear))
+ message = ("{} start and/or end year different from requested\n"
+ "requested: {:04d}-{:04d}\n"
+ "actual: {:04d}-{:04d}\n".format(
+ section, requestedStartYear, requestedEndYear, startYear,
+ endYear)
+ )
+ if allow_cache:
+ print(f'Warning: {message}')
+ else:
+ raise ValueError(message)
startDate = '{:04d}-01-01_00:00:00'.format(startYear)
config.set(section, 'startDate', startDate)
diff --git a/mpas_analysis/shared/climatology/mpas_climatology_task.py b/mpas_analysis/shared/climatology/mpas_climatology_task.py
index c0414a287..30ef07a0c 100644
--- a/mpas_analysis/shared/climatology/mpas_climatology_task.py
+++ b/mpas_analysis/shared/climatology/mpas_climatology_task.py
@@ -414,8 +414,10 @@ def _create_symlinks(self):
climatologyOpDirectory = get_climatology_op_directory(config, self.op)
- symlinkDirectory = '{}/source_symlinks'.format(
- climatologyOpDirectory)
+ symlinkDirectory = (
+ f'{climatologyOpDirectory}/source_symlinks_'
+ f'{self.ncclimoModel}_{self.startYear:04d}-{self.endYear:04d}'
+ )
make_directories(symlinkDirectory)
@@ -425,6 +427,8 @@ def _create_symlinks(self):
f'timeSeriesStatsMonthly.{year:04d}-{month:02d}-01.nc'
try:
+ if os.path.lexists(outFileName):
+ os.remove(outFileName)
os.symlink(inFileName, outFileName)
except OSError:
pass
diff --git a/mpas_analysis/shared/plot/climatology_map.py b/mpas_analysis/shared/plot/climatology_map.py
index 0eb1e6092..8be601a78 100644
--- a/mpas_analysis/shared/plot/climatology_map.py
+++ b/mpas_analysis/shared/plot/climatology_map.py
@@ -414,6 +414,9 @@ def _plot_panel(ax, title, array, colormap, norm, levels, ticks, contours,
plottitle_font = {'size': config.get('plot',
'threePanelPlotTitleFontSize')}
+ multi_line_ref_title = (
+ refArray is not None and refTitle is not None and '\n' in refTitle)
+
if refArray is None:
subplots = [111]
else:
@@ -427,21 +430,43 @@ def _plot_panel(ax, title, array, colormap, norm, levels, ticks, contours,
dictDiff = setup_colormap(config, colorMapSectionName, suffix='Difference')
axes = []
- ax = plt.subplot(subplots[0], projection=projection)
- _plot_panel(ax, modelTitle, modelArray, **dictModelRef)
- axes.append(ax)
+ if refArray is not None and multi_line_ref_title:
+ # Use a GridSpec with unequal gaps but equal-sized panels
+ gs = fig.add_gridspec(
+ nrows=5,
+ ncols=1,
+ height_ratios=[1.0, 0.18, 1.0, 0.08, 1.0])
+
+ ax = fig.add_subplot(gs[0, 0], projection=projection)
+ _plot_panel(ax, modelTitle, modelArray, **dictModelRef)
+ axes.append(ax)
- if refArray is not None:
- ax = plt.subplot(subplots[1], projection=projection)
+ ax = fig.add_subplot(gs[2, 0], projection=projection)
_plot_panel(ax, refTitle, refArray, **dictModelRef)
axes.append(ax)
- ax = plt.subplot(subplots[2], projection=projection)
+ ax = fig.add_subplot(gs[4, 0], projection=projection)
_plot_panel(ax, diffTitle, diffArray, **dictDiff)
axes.append(ax)
+ else:
+ ax = plt.subplot(subplots[0], projection=projection)
+ _plot_panel(ax, modelTitle, modelArray, **dictModelRef)
+ axes.append(ax)
+
+ if refArray is not None:
+ ax = plt.subplot(subplots[1], projection=projection)
+ _plot_panel(ax, refTitle, refArray, **dictModelRef)
+ axes.append(ax)
+
+ ax = plt.subplot(subplots[2], projection=projection)
+ _plot_panel(ax, diffTitle, diffArray, **dictDiff)
+ axes.append(ax)
_add_stats(modelArray, refArray, diffArray, Lats, axes)
+ # Note: in the multi-line reference-title case, uneven spacing is handled
+ # via GridSpec so all three panels keep identical sizes.
+
if fileout is not None:
savefig(fileout, config, pad_inches=0.2)
@@ -775,12 +800,12 @@ def _add_stats(modelArray, refArray, diffArray, Lats, axes):
def _add_stats_text(names, values, ax, loc):
if loc == 'upper':
- text_ax = inset_axes(ax, width='17%', height='20%', loc='upper right',
- bbox_to_anchor=(0.2, 0.1, 1., 1.),
+ text_ax = inset_axes(ax, width='19%', height='20%', loc='upper right',
+ bbox_to_anchor=(0.22, 0.1, 1., 1.),
bbox_transform=ax.transAxes, borderpad=0)
else:
- text_ax = inset_axes(ax, width='17%', height='20%', loc='lower right',
- bbox_to_anchor=(0.2, 0.03, 1., 1.),
+ text_ax = inset_axes(ax, width='19%', height='20%', loc='lower right',
+ bbox_to_anchor=(0.22, 0.03, 1., 1.),
bbox_transform=ax.transAxes, borderpad=0)
text = '\n'.join(names)
diff --git a/mpas_analysis/test/test_main.py b/mpas_analysis/test/test_main.py
new file mode 100644
index 000000000..5270ec47c
--- /dev/null
+++ b/mpas_analysis/test/test_main.py
@@ -0,0 +1,36 @@
+# This software is open source software available under the BSD-3 license.
+#
+# Copyright (c) 2022 Triad National Security, LLC. All rights reserved.
+# Copyright (c) 2022 Lawrence Livermore National Security, LLC. All rights
+# reserved.
+# Copyright (c) 2022 UT-Battelle, LLC. All rights reserved.
+#
+# Additional copyright and license information can be found in the LICENSE file
+# distributed with this code, or at
+# https://raw.githubusercontent.com/MPAS-Dev/MPAS-Analysis/main/LICENSE
+"""
+Regression tests for helpers in ``mpas_analysis.__main__``.
+"""
+
+import os
+from unittest.mock import Mock, patch
+
+from mpas_analysis.test import TestCase
+
+
+# Importing mpas_analysis.__main__ triggers matplotlib imports in some test
+# environments, so use a writable cache directory.
+os.environ.setdefault('MPLCONFIGDIR', '/tmp/matplotlib')
+
+import mpas_analysis.__main__ as main
+
+
+class TestMain(TestCase):
+ def test_get_editable_install_dir_without_direct_url(self):
+ distribution = Mock()
+ distribution.read_text.return_value = None
+
+ with patch.object(main.Distribution, 'from_name',
+ return_value=distribution):
+ self.assertEqual(main.get_editable_install_dir('mpas_analysis'),
+ None)
diff --git a/mpas_analysis/test/test_mpas_climatology_task.py b/mpas_analysis/test/test_mpas_climatology_task.py
index d0e57d3f6..7604ed112 100644
--- a/mpas_analysis/test/test_mpas_climatology_task.py
+++ b/mpas_analysis/test/test_mpas_climatology_task.py
@@ -23,7 +23,7 @@
from mpas_analysis.test import TestCase, loaddatadir
from mpas_analysis.shared.climatology import MpasClimatologyTask, \
- RemapMpasClimatologySubtask
+ RefYearMpasClimatologyTask, RemapMpasClimatologySubtask
from mpas_analysis.shared import AnalysisTask
from mpas_analysis.shared.analysis_task import \
update_time_bounds_from_file_names
@@ -168,7 +168,38 @@ def test_update_climatology_bounds_and_create_symlinks(self):
with self.assertRaisesRegex(ValueError,
'climatology start and/or end year '
'different from requested'):
- update_time_bounds_from_file_names(config, 'climatology', 'ocean')
+ update_time_bounds_from_file_names(config, 'climatology', 'ocean',
+ allow_cache=False)
+
+ def test_create_symlinks_isolates_reference_year_files(self):
+ mpasClimatologyTask = self.setup_task()
+
+ refYearTask = RefYearMpasClimatologyTask(
+ config=mpasClimatologyTask.config, componentName='ocean')
+ refYearTask.historyStreams = mpasClimatologyTask.historyStreams
+ refYearTask.startYear = 1
+ refYearTask.endYear = 1
+ refYearTask.inputFiles = []
+
+ for month in range(1, 13):
+ fileName = os.path.join(
+ self.test_dir,
+ f'mpaso.hist.am.timeSeriesStatsMonthly.0001-{month:02d}-01.nc')
+ with open(fileName, 'w'):
+ pass
+ refYearTask.inputFiles.append(fileName)
+
+ refSymlinkDirectory = refYearTask._create_symlinks()
+ mainSymlinkDirectory = mpasClimatologyTask._create_symlinks()
+
+ assert(refSymlinkDirectory != mainSymlinkDirectory)
+
+ mainSymlinkFiles = sorted(os.listdir(mainSymlinkDirectory))
+
+ assert(len(mainSymlinkFiles) == 12)
+ for fileName in mainSymlinkFiles:
+ assert(fileName.startswith(
+ 'mpaso.hist.am.timeSeriesStatsMonthly.0002-'))
def test_subtask_run_analysis(self):
mpasClimatologyTask = self.setup_task()
diff --git a/mpas_analysis/version.py b/mpas_analysis/version.py
index ddbb9f041..a3a011e60 100644
--- a/mpas_analysis/version.py
+++ b/mpas_analysis/version.py
@@ -1,2 +1,2 @@
-__version_info__ = (1, 14, 0)
+__version_info__ = (1, 15, 0)
__version__ = '.'.join(str(vi) for vi in __version_info__)
diff --git a/pixi.toml b/pixi.toml
new file mode 100644
index 000000000..738e6fd0d
--- /dev/null
+++ b/pixi.toml
@@ -0,0 +1,75 @@
+[workspace]
+channels = ["conda-forge"]
+name = "mpas-analysis"
+platforms = ["linux-64"]
+
+[dependencies]
+python = ">=3.10"
+cartopy = ">=0.18.0"
+cartopy_offlinedata = "*"
+cmocean = "*"
+dask = "*"
+esmf = { version = ">=8.4.2,<9.0.0", build = "mpi_mpich_*" }
+f90nml = "*"
+geometric_features = ">=1.6.1"
+gsw = "*"
+lxml = "*"
+mache = ">=1.11.0"
+matplotlib-base = ">=3.9.0"
+mpas_tools = ">=1.3.0,<2.0.0"
+nco = ">=4.8.1,!=5.2.6,!=5.3.7"
+netcdf4 = "*"
+numpy = ">=2.0,<3.0"
+pandas = "*"
+pillow = ">=10.0.0,<13.0.0"
+progressbar2 = "*"
+pyproj = "*"
+pyremap = ">=2.0.0,<3.0.0"
+python-dateutil = "*"
+requests = "*"
+scipy = ">=1.7.0"
+shapely = ">=2.0,<3.0"
+tranche = ">=0.2.3"
+xarray = ">=0.14.1"
+
+[pypi-dependencies]
+mpas-analysis = { path = ".", editable = true }
+
+[feature.dev.dependencies]
+pip = "*"
+pytest = "*"
+setuptools = ">=60"
+
+[feature.docs.dependencies]
+m2r2 = ">=0.3.3"
+mistune = "<2"
+mock = "*"
+sphinx = "*"
+sphinx_rtd_theme = "*"
+tabulate = "*"
+
+[feature.build.dependencies]
+rattler-build = "*"
+
+[feature.py310.dependencies]
+python = "3.10.*"
+
+[feature.py311.dependencies]
+python = "3.11.*"
+
+[feature.py312.dependencies]
+python = "3.12.*"
+
+[feature.py313.dependencies]
+python = "3.13.*"
+
+[feature.py314.dependencies]
+python = "3.14.*"
+
+[environments]
+default = ["dev", "docs", "build"]
+py310 = ["py310", "dev", "docs", "build"]
+py311 = ["py311", "dev", "docs", "build"]
+py312 = ["py312", "dev", "docs", "build"]
+py313 = ["py313", "dev", "docs", "build"]
+py314 = ["py314", "dev", "docs", "build"]
diff --git a/pyproject.toml b/pyproject.toml
index cd7d33ae8..ffab97d4f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,6 +34,7 @@ classifiers = [
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
"Development Status :: 5 - Production/Stable",
@@ -53,7 +54,7 @@ dependencies = [
"netcdf4",
"numpy >=2.0,<3.0",
"pandas",
- "pillow >=10.0.0,<11.0.0",
+ "pillow >=10.0.0,<13.0.0",
"progressbar2",
"pyproj",
"python-dateutil",
diff --git a/suite/main.cfg b/suite/configs/main.cfg
similarity index 100%
rename from suite/main.cfg
rename to suite/configs/main.cfg
diff --git a/suite/main_vs_ctrl.cfg b/suite/configs/main_vs_ctrl.cfg
similarity index 52%
rename from suite/main_vs_ctrl.cfg
rename to suite/configs/main_vs_ctrl.cfg
index 2a3d28913..73a21990f 100644
--- a/suite/main_vs_ctrl.cfg
+++ b/suite/configs/main_vs_ctrl.cfg
@@ -7,10 +7,3 @@
# control run is desired.
controlRunConfigFile = ../ctrl.cfg
-# config file for a main run on which the analysis was already run to
-# completion. The relevant MPAS climatologies already exist and have been
-# remapped to the comparison grid and time series have been extracted.
-# Leave this option commented out if the analysis for the main run should be
-# performed.
-mainRunConfigFile = ../main.cfg
-
diff --git a/suite/moc_am.cfg b/suite/configs/moc_am.cfg
similarity index 100%
rename from suite/moc_am.cfg
rename to suite/configs/moc_am.cfg
diff --git a/suite/no_ncclimo.cfg b/suite/configs/no_ncclimo.cfg
similarity index 100%
rename from suite/no_ncclimo.cfg
rename to suite/configs/no_ncclimo.cfg
diff --git a/suite/wc_defaults.cfg b/suite/configs/wc_defaults.cfg
similarity index 100%
rename from suite/wc_defaults.cfg
rename to suite/configs/wc_defaults.cfg
diff --git a/suite/run_dev_suite.bash b/suite/run_dev_suite.bash
deleted file mode 100755
index f3db1df68..000000000
--- a/suite/run_dev_suite.bash
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-env_name=mpas_analysis_dev
-
-conda_base=$(dirname $(dirname $CONDA_EXE))
-source $conda_base/etc/profile.d/conda.sh
-
-export HDF5_USE_FILE_LOCKING=FALSE
-
-branch=$(git symbolic-ref --short HEAD)
-
-# test building the docs
-conda activate ${env_name}
-cd docs
-DOCS_VERSION=test make clean versioned-html
-cd ..
-
-machine=$(python -c "from mache import discover_machine; print(discover_machine())")
-
-py=3.13
-./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean -e ${env_name}
-./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -e ${env_name}
-./suite/setup.py -p ${py} -r moc_am -b ${branch} -e ${env_name}
-./suite/setup.py -p ${py} -r no_ncclimo -b ${branch} -e ${env_name}
-./suite/setup.py -p ${py} -r main -b ${branch} -e ${env_name}
-./suite/setup.py -p ${py} -r ctrl -b ${branch} -e ${env_name}
-./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch} -e ${env_name}
-./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions -e ${env_name}
-./suite/setup.py -p ${py} -r mesh_rename -b ${branch} -e ${env_name}
-
-# submit the jobs
-cd ${machine}_test_suite
-
-main_py=3.13
-cd main_py${main_py}
-echo main_py${main_py}
-RES=$(sbatch job_script.bash)
-cd ..
-
-cd main_vs_ctrl
-echo main_vs_ctrl
-sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes job_script.bash
-cd ..
-
-for run in wc_defaults moc_am no_ncclimo no_polar_regions \
- mesh_rename
-do
- cd ${run}
- echo ${run}
- sbatch job_script.bash
- cd ..
-done
-
-cd ..
-
diff --git a/suite/run_e3sm_unified_suite.bash b/suite/run_e3sm_unified_suite.bash
deleted file mode 100755
index b060ede13..000000000
--- a/suite/run_e3sm_unified_suite.bash
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-# placeholder that gets replaced
-branch=test_e3sm_unified
-
-# test building the docs
-py=3.13
-machine=${E3SMU_MACHINE}
-
-./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --clean
-./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions
-./suite/setup.py -p ${py} -r moc_am -b ${branch}
-./suite/setup.py -p ${py} -r no_ncclimo -b ${branch}
-./suite/setup.py -p ${py} -r main -b ${branch}
-./suite/setup.py -p ${py} -r ctrl -b ${branch}
-./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch}
-./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions
-./suite/setup.py -p ${py} -r mesh_rename -b ${branch}
-
-# submit the jobs
-cd ${machine}_test_suite
-
-cd main_py${py}
-echo main_py${py}
-RES=$(sbatch job_script.bash)
-cd ..
-
-cd main_vs_ctrl
-echo main_vs_ctrl
-sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes job_script.bash
-cd ..
-
-for run in wc_defaults moc_am no_ncclimo no_polar_regions mesh_rename
-do
- cd ${run}
- echo ${run}
- sbatch job_script.bash
- cd ..
-done
-
-cd ..
diff --git a/suite/run_suite.bash b/suite/run_suite.bash
index ee8dd0b16..4030c8259 100755
--- a/suite/run_suite.bash
+++ b/suite/run_suite.bash
@@ -1,88 +1,192 @@
#!/usr/bin/env bash
-set -e
-
-conda_base=$(dirname $(dirname $CONDA_EXE))
-source $conda_base/etc/profile.d/conda.sh
+set -euo pipefail
main_py=3.13
alt_py=3.12
+mode=package
+pixi_env=${PIXI_ENVIRONMENT_NAME:-default}
+
+usage() {
+    cat <<EOF
+Usage: $0 [--dev | --e3sm-unified] [--pixi-env ENV]
+
+Modes:
+  (default)        build the conda package and run the full test suite
+  --dev            set up the test suite using a pixi development environment
+  --e3sm-unified   set up the test suite for E3SM-Unified testing
+
+Options:
+  --pixi-env ENV   the pixi environment to use (default: ${pixi_env})
+  -h, --help       show this help message and exit
+EOF
+}
+
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        --dev)
+            mode=dev
+            ;;
+        --e3sm-unified)
+            mode=e3sm-unified
+            ;;
+        --pixi-env)
+            if [[ $# -lt 2 ]]; then
+                echo "--pixi-env requires an argument" >&2
+                exit 1
+            fi
+            shift
+            pixi_env=$1
+            ;;
+        -h|--help)
+            usage
+            exit 0
+            ;;
+        *)
+            echo "Unknown argument: $1" >&2
+            usage >&2
+            exit 1
+            ;;
+    esac
+    shift
+done
export HDF5_USE_FILE_LOCKING=FALSE
branch=$(git symbolic-ref --short HEAD)
+setup_run() {
+ local py="$1"
+ local run="$2"
+ shift 2
+ "${setup_cmd[@]}" -p "${py}" -r "${run}" -b "${branch}" "$@"
+}
+
+submit_jobs() {
+ local machine="$1"
+ local primary_py="$2"
+ shift 2
+
+ cd "${machine}_test_suite"
+
+ cd "main_py${primary_py}"
+ echo "main_py${primary_py}"
+ RES=$(sbatch job_script.bash)
+ cd ..
+
+ cd main_vs_ctrl
+ echo main_vs_ctrl
+ sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes \
+ job_script.bash
+ cd ..
+
+ for run in "$@"; do
+ cd "${run}"
+ echo "${run}"
+ sbatch job_script.bash
+ cd ..
+ done
+
+ cd ..
+}
+
+if [[ "${mode}" == "dev" ]]; then
+ if ! command -v pixi >/dev/null 2>&1; then
+ echo "pixi is required for --dev" >&2
+ exit 1
+ fi
+
+ docs_cmd=(pixi run -e "${pixi_env}" bash -lc \
+ "cd docs && DOCS_VERSION=test make clean versioned-html")
+ setup_cmd=(pixi run -e "${pixi_env}" python ./suite/setup.py \
+ --pixi-env "${pixi_env}")
+
+ "${docs_cmd[@]}"
+
+ machine=$(pixi run -e "${pixi_env}" python -c \
+ "from mache import discover_machine; print(discover_machine())")
+ py=$(pixi run -e "${pixi_env}" python -c \
+ 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")')
+
+ setup_run "${py}" "main_py${py}" --copy_docs --clean
+ setup_run "${py}" wc_defaults --no_polar_regions
+ setup_run "${py}" moc_am
+ setup_run "${py}" no_ncclimo
+ setup_run "${py}" ctrl
+ setup_run "${py}" main_vs_ctrl
+ setup_run "${py}" no_polar_regions --no_polar_regions
+ setup_run "${py}" mesh_rename
+
+ submit_jobs "${machine}" "${py}" \
+ wc_defaults moc_am no_ncclimo no_polar_regions mesh_rename
+ exit 0
+fi
+
+if [[ "${mode}" == "e3sm-unified" ]]; then
+ setup_cmd=(python ./suite/setup.py)
+ py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")')
+ machine=${E3SMU_MACHINE}
+ branch=test_e3sm_unified
+
+ setup_run "${py}" "main_py${py}" --clean
+ setup_run "${py}" wc_defaults --no_polar_regions
+ setup_run "${py}" moc_am
+ setup_run "${py}" no_ncclimo
+ setup_run "${py}" ctrl
+ setup_run "${py}" main_vs_ctrl
+ setup_run "${py}" no_polar_regions --no_polar_regions
+ setup_run "${py}" mesh_rename
+
+ submit_jobs "${machine}" "${py}" \
+ wc_defaults moc_am no_ncclimo no_polar_regions mesh_rename
+ exit 0
+fi
+
+conda_base=$(dirname "$(dirname "${CONDA_EXE}")")
+source "${conda_base}/etc/profile.d/conda.sh"
+
conda update -y conda conda-build
conda build ci/recipe
-# create the test conda envs
-for py in ${main_py} ${alt_py}
-do
- env=test_mpas_analysis_py${py}
- conda create -y -n ${env} --use-local python=${py} mpas-analysis sphinx \
- mock sphinx_rtd_theme "tabulate>=0.8.2" "m2r2>=0.3.3" "mistune<2" \
- pytest "mache>=1.11.0" "esmf=*=mpi_mpich_*" jinja2
- conda activate ${env}
+for py in "${main_py}" "${alt_py}"; do
+ env="test_mpas_analysis_py${py}"
+ conda create -y -n "${env}" --use-local python="${py}" mpas-analysis \
+ sphinx mock sphinx_rtd_theme "tabulate>=0.8.2" "m2r2>=0.3.3" \
+ "mistune<2" pytest "mache>=1.11.0" "esmf=*=mpi_mpich_*" jinja2
+ conda activate "${env}"
pytest
conda deactivate
done
-# create another env for testing xarray main branch
py=${main_py}
env=test_mpas_analysis_xarray_main
-conda create --yes --quiet --name ${env} --use-local python=${py} \
+conda create --yes --quiet --name "${env}" --use-local python="${py}" \
mpas-analysis pytest
-conda activate ${env}
+conda activate "${env}"
pip install git+https://github.com/pydata/xarray.git
pytest
conda deactivate
-# test building the docs
-py=${main_py}
-conda activate test_mpas_analysis_py${py}
-cd docs
-DOCS_VERSION=test make clean versioned-html
-cd ..
+conda activate "test_mpas_analysis_py${py}"
+(
+ cd docs
+ DOCS_VERSION=test make clean versioned-html
+)
machine=$(python -c "from mache import discover_machine; print(discover_machine())")
-
-./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean
-./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions
-./suite/setup.py -p ${py} -r moc_am -b ${branch}
-./suite/setup.py -p ${py} -r no_ncclimo -b ${branch}
-./suite/setup.py -p ${py} -r ctrl -b ${branch}
-./suite/setup.py -p ${py} -r main_vs_ctrl -b ${branch}
-./suite/setup.py -p ${py} -r no_polar_regions -b ${branch} --no_polar_regions
-./suite/setup.py -p ${py} -r mesh_rename -b ${branch}
-./suite/setup.py -p ${py} -r xarray_main -b ${branch} -e test_mpas_analysis_xarray_main
+setup_cmd=(./suite/setup.py)
+
+setup_run "${py}" "main_py${py}" --copy_docs --clean
+setup_run "${py}" wc_defaults --no_polar_regions
+setup_run "${py}" moc_am
+setup_run "${py}" no_ncclimo
+setup_run "${py}" ctrl
+setup_run "${py}" main_vs_ctrl
+setup_run "${py}" no_polar_regions --no_polar_regions
+setup_run "${py}" mesh_rename
+setup_run "${py}" xarray_main -e test_mpas_analysis_xarray_main
conda deactivate
py=${alt_py}
-conda activate test_mpas_analysis_py${py}
-./suite/setup.py -p ${py} -r main -b ${branch}
-./suite/setup.py -p ${py} -r main_py${py} -b ${branch}
+conda activate "test_mpas_analysis_py${py}"
+setup_run "${py}" "main_py${py}"
conda deactivate
-# submit the jobs
-cd ${machine}_test_suite
-
-cd main_py${main_py}
-echo main_py${main_py}
-RES=$(sbatch job_script.bash)
-cd ..
-
-cd main_vs_ctrl
-echo main_vs_ctrl
-sbatch --dependency=afterok:${RES##* } --kill-on-invalid-dep=yes job_script.bash
-cd ..
-
-for run in main_py${alt_py} wc_defaults moc_am no_ncclimo no_polar_regions \
+submit_jobs "${machine}" "${main_py}" \
+ "main_py${alt_py}" wc_defaults moc_am no_ncclimo no_polar_regions \
mesh_rename xarray_main
-do
- cd ${run}
- echo ${run}
- sbatch job_script.bash
- cd ..
-done
-
-cd ..
diff --git a/suite/setup.py b/suite/setup.py
index df41ce0e2..c99339edb 100755
--- a/suite/setup.py
+++ b/suite/setup.py
@@ -16,6 +16,8 @@ def main():
parser.add_argument('-b', dest='branch', required=True,
help='the branch name')
parser.add_argument('-e', dest='conda_env', help='the conda environment')
+ parser.add_argument('--pixi-env', dest='pixi_env',
+ help='the pixi environment used to run jobs')
parser.add_argument('--no_polar_regions', dest='polar_regions',
action='store_false',
help='whether to run mpas_analysis with '
@@ -34,9 +36,9 @@ def main():
account, partition, configuration, qos = \
machine_info.get_account_defaults()
- use_e3sm_unified = 'E3SMU_SCRIPT' in os.environ
+ use_e3sm_unified = 'E3SM_UNIFIED_LOAD_SCRIPT' in os.environ
if use_e3sm_unified:
- e3sm_unified_script = os.environ['E3SMU_SCRIPT']
+ e3sm_unified_script = os.environ['E3SM_UNIFIED_LOAD_SCRIPT']
args.branch = \
os.path.splitext(os.path.basename(e3sm_unified_script))[0]
else:
@@ -91,15 +93,23 @@ def main():
shutil.copytree(os.path.join('docs', '_build', 'html'), docs_path)
if mesh == 'oQU240wLI':
- generate = "['all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', " \
- "'no_waves']"
+ generate = [
+ 'all', 'no_BGC', 'no_icebergs', 'no_index', 'no_eke', 'no_waves'
+ ]
end_year = '10'
+ ctrl_end_year = '8'
else:
raise ValueError(f'Unexpected mesh: {mesh}')
if args.run == 'mesh_rename':
mesh = f'new_{mesh}'
+ if args.run == 'main_vs_ctrl':
+ end_year = ctrl_end_year
+ generate.append('no_hovmoller')
+
+ generate_string = f"['" + "', '".join(generate) + "']"
+
sbatch = list()
if account is not None:
sbatch.append(f'#SBATCH -A {account}')
@@ -112,8 +122,11 @@ def main():
sbatch = '\n'.join(sbatch)
- conda_base = os.path.abspath(
- os.path.join(os.environ['CONDA_EXE'], '..', '..'))
+ if 'CONDA_EXE' in os.environ:
+ conda_base = os.path.abspath(
+ os.path.join(os.environ['CONDA_EXE'], '..', '..'))
+ else:
+ conda_base = ''
if args.conda_env is not None:
conda_env = args.conda_env
@@ -129,14 +142,14 @@ def main():
out_subdir = os.path.join(machine, args.branch, args.run)
out_common_dir = os.path.join(machine, args.branch)
- with open(os.path.join('suite', 'template.cfg')) as template_file:
+ with open(os.path.join('suite', 'templates', 'base.cfg')) as template_file:
template_data = template_file.read()
template = Template(template_data)
config_text = template.render(
use_e3sm_unified=use_e3sm_unified, run_name=args.run,
input_base=input_base, simulation=simulation, mesh=mesh,
output_base=output_base, html_base=html_base, out_subdir=out_subdir,
- generate=generate, end_year=end_year)
+ generate=generate_string, end_year=end_year)
with open(config, 'w') as config_file:
config_file.write(config_text)
@@ -144,11 +157,12 @@ def main():
# add the run-specific config second
config_from_job = ' '.join(
[config_from_job,
- os.path.join('..', '..', 'suite', f'{args.run}.cfg')])
+ os.path.join('..', '..', 'suite', 'configs', f'{args.run}.cfg')])
if args.run.startswith('main_py'):
config_from_job = ' '.join(
- [config_from_job, os.path.join('..', '..', 'suite', 'main.cfg')])
+ [config_from_job,
+ os.path.join('..', '..', 'suite', 'configs', 'main.cfg')])
if args.run not in ['main', 'ctrl']:
try:
@@ -162,14 +176,16 @@ def main():
else:
flags = ''
- with open(os.path.join('suite', 'job_script.bash')) as template_file:
+ with open(os.path.join('suite', 'templates', 'job_script.bash')) \
+ as template_file:
template_data = template_file.read()
template = Template(template_data)
job_text = template.render(
sbatch=sbatch, conda_base=conda_base,
use_e3sm_unified=use_e3sm_unified,
e3sm_unified_script=e3sm_unified_script, conda_env=conda_env,
- machine=machine, flags=flags, config=config_from_job,
+ pixi_env=args.pixi_env, machine=machine, flags=flags,
+ config=config_from_job,
html_base=html_base, out_subdir=out_subdir,
out_common_dir=out_common_dir)
with open(job, 'w') as job_file:
diff --git a/suite/template.cfg b/suite/templates/base.cfg
similarity index 100%
rename from suite/template.cfg
rename to suite/templates/base.cfg
diff --git a/suite/job_script.bash b/suite/templates/job_script.bash
similarity index 75%
rename from suite/job_script.bash
rename to suite/templates/job_script.bash
index 4b822ff84..4eae8f8d3 100644
--- a/suite/job_script.bash
+++ b/suite/templates/job_script.bash
@@ -8,21 +8,28 @@
set -e
-{% if use_e3sm_unified %}
+{% if use_e3sm_unified -%}
source {{ e3sm_unified_script }}
echo E3SM-Unified: {{ e3sm_unified_script }}
-{% else %}
+{% elif pixi_env -%}
+export HDF5_USE_FILE_LOCKING=FALSE
+export E3SMU_MACHINE={{ machine }}
+
+eval "$(pixi shell-hook --manifest-path ../../pixi.toml -e {{ pixi_env }})"
+
+echo pixi env: {{ pixi_env }}
+{% else -%}
source {{ conda_base }}/etc/profile.d/conda.sh
conda activate {{ conda_env }}
export HDF5_USE_FILE_LOCKING=FALSE
export E3SMU_MACHINE={{ machine }}
+
echo env: {{ conda_env }}
-{% endif %}
+{% endif -%}
echo configs: {{ flags }} {{ config }}
-
mpas_analysis --list
mpas_analysis --plot_colormaps
mpas_analysis --setup_only {{ flags }} {{ config }}