diff --git a/.github/workflows/docker_build_tpls.yml b/.github/workflows/docker_build_tpls.yml index 2f4151e6..aa561125 100644 --- a/.github/workflows/docker_build_tpls.yml +++ b/.github/workflows/docker_build_tpls.yml @@ -7,85 +7,190 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true +# TPL images build on top of the base images produced by +# https://github.com/GEOS-DEV/docker_base_images. +env: + DOCKER_BASE_IMAGE_SHA: 1c3c049b3f629d9d44838656fd306b2a0c04c9e8 + jobs: build_images: name: ${{ matrix.name }} runs-on: ${{ matrix.RUNS_ON }} strategy: - # In-progress jobs will not be cancelled if there is a failure - fail-fast : false + fail-fast: false matrix: include: - - name: Ubuntu (20.04, gcc 9.4.0, open-mpi 4.0.3) - DOCKER_REPOSITORY: geosx/ubuntu20.04-gcc9 - TPL_DOCKERFILE: docker/tpl-ubuntu-gcc.Dockerfile - DOCKER_ROOT_IMAGE: ubuntu:20.04 - DOCKER_COMPILER_BUILD_ARG: "--build-arg GCC_MAJOR_VERSION=9" + # Minimal restart slice: only known-good Ubuntu host builds. 
+ - name: Ubuntu 24.04 - gcc 12 RUNS_ON: ubuntu-latest - - name: Ubuntu (20.04, gcc 10.5.0, open-mpi 4.0.3) - github codespaces - DOCKER_REPOSITORY: geosx/ubuntu20.04-gcc10 - TPL_DOCKERFILE: docker/tpl-ubuntu-gcc.Dockerfile - DOCKER_ROOT_IMAGE: mcr.microsoft.com/devcontainers/base:ubuntu-20.04 - DOCKER_COMPILER_BUILD_ARG: "--build-arg GCC_MAJOR_VERSION=10" + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-gcc12 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-gcc12 + GCC_VERSION: 12 + SPEC: "~pygeosx ~docs %gcc-12" + + - name: Ubuntu 24.04 - gcc 13 (docs) RUNS_ON: ubuntu-latest - - name: Ubuntu (22.04, gcc 11.4.0, open-mpi 4.1.2) - DOCKER_ROOT_IMAGE: ubuntu:22.04 - DOCKER_REPOSITORY: geosx/ubuntu22.04-gcc11 - TPL_DOCKERFILE: docker/tpl-ubuntu-gcc.Dockerfile - DOCKER_COMPILER_BUILD_ARG: "--build-arg GCC_MAJOR_VERSION=11" + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-gcc13 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-gcc13 + GCC_VERSION: 13 + SPEC: "~pygeosx +docs %gcc-13" + + - name: Ubuntu 24.04 - gcc 13 (+docs +hypredrive) RUNS_ON: ubuntu-latest - - name: Ubuntu (22.04, gcc 12.3.0, open-mpi 4.1.2) - DOCKER_ROOT_IMAGE: ubuntu:22.04 - DOCKER_REPOSITORY: geosx/ubuntu22.04-gcc12 - TPL_DOCKERFILE: docker/tpl-ubuntu-gcc.Dockerfile - DOCKER_COMPILER_BUILD_ARG: "--build-arg GCC_MAJOR_VERSION=12" + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-gcc13 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-gcc13-hypredrive + GCC_VERSION: 13 + SPEC: "~pygeosx +docs +hypredrive %gcc-13" + + - name: Ubuntu 24.04 - clang 19 RUNS_ON: ubuntu-latest - - name: Ubuntu (22.04, gcc 12.3.0, open-mpi 4.1.2, +hypredrive) - DOCKER_ROOT_IMAGE: ubuntu:22.04 - DOCKER_REPOSITORY: geosx/ubuntu22.04-gcc12-hypredrive - TPL_DOCKERFILE: docker/tpl-ubuntu-gcc.Dockerfile - DOCKER_COMPILER_BUILD_ARG: "--build-arg 
GCC_MAJOR_VERSION=12" - SPEC: "~pygeosx +docs +hypredrive %gcc-12" + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-clang19 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-clang19 + CLANG_VERSION: 19 + SPEC: "~pygeosx ~docs %clang-19" + + - name: Ubuntu 24.04 - clang 20 RUNS_ON: ubuntu-latest - - name: Ubuntu (22.04, clang 15.0.7 + gcc 11.4.0, open-mpi 4.1.2) - DOCKER_ROOT_IMAGE: ubuntu:22.04 - DOCKER_REPOSITORY: geosx/ubuntu22.04-clang15 - TPL_DOCKERFILE: docker/tpl-ubuntu-clang.Dockerfile - DOCKER_COMPILER_BUILD_ARG: "--build-arg CLANG_MAJOR_VERSION=15 --build-arg GCC_MAJOR_VERSION=11" + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-clang20 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-clang20 + CLANG_VERSION: 20 + SPEC: "~pygeosx ~docs %clang-20" + + # First Rocky non-CUDA row re-enabled for incremental validation. + - name: Rocky Linux 8 - gcc 12 RUNS_ON: ubuntu-latest - - name: Ubuntu (20.04, clang 10.0.0 + gcc 9.4.0, open-mpi 4.0.3, cuda-11.8.89) - DOCKER_REPOSITORY: geosx/ubuntu20.04-clang10.0.0-cuda11.8.89 - TPL_DOCKERFILE: docker/tpl-ubuntu-clang-cuda.Dockerfile + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 8-gcc12 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux8-gcc12 + SPEC: "~pygeosx ~docs %gcc-12" + + - name: Rocky Linux 8 - gcc 13 RUNS_ON: ubuntu-latest - - name: Rockylinux (8, gcc 13.3, cuda 12.9.1) - DOCKER_REPOSITORY: geosx/rockylinux8-gcc13-cuda12.9.1 - TPL_DOCKERFILE: docker/tpl-rockylinux-gcc-cuda-12.Dockerfile + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 8-gcc13 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux8-gcc13 + SPEC: "~pygeosx ~docs %gcc-13" + + - name: Rocky Linux 8 - gcc 13 (+hypredrive) + RUNS_ON: ubuntu-latest + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 
8-gcc13 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux8-gcc13-hypredrive + SPEC: "~pygeosx ~docs +hypredrive %gcc-13" + + - name: Rocky Linux 8 - clang 19 + RUNS_ON: ubuntu-latest + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 8-clang19 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux8-clang19 + SPEC: "~pygeosx ~docs %clang-19" + + - name: Rocky Linux 9 - clang 22 + RUNS_ON: ubuntu-latest + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 9-clang22 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux9-clang22 + SPEC: "~pygeosx ~docs %clang-22" + + # CUDA restart slice. + - name: Ubuntu 24.04 - gcc 13 + CUDA 12.9.1 RUNS_ON: streak2 - NPROC: 8 - DOCKER_RUN_ARGS: "--cpus=8 --memory=128g --runtime=nvidia -e NVIDIA_VISIBLE_DEVICES=all -v /etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro -v /etc/pki/tls/certs/ca-bundle.crt:/certs/ca-bundle.crt:ro" - - name: Rockylinux (8, clang 17.0.6, cuda 12.9.1) - DOCKER_REPOSITORY: geosx/rockylinux8-clang17-cuda12.9.1 - TPL_DOCKERFILE: docker/tpl-rockylinux-clang-cuda-12.Dockerfile + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-gcc13-cuda12.9.1 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-gcc13-cuda12.9.1 + GCC_VERSION: 13 + SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %gcc-13 ^cuda@12.9.1+allow-unsupported-compilers" + + - name: Ubuntu 24.04 - gcc 13 + CUDA 12.9.1 (+hypredrive) RUNS_ON: streak2 - NPROC: 8 - DOCKER_RUN_ARGS: "--cpus=8 --memory=128g --runtime=nvidia -e NVIDIA_VISIBLE_DEVICES=all -v /etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro -v /etc/pki/tls/certs/ca-bundle.crt:/certs/ca-bundle.crt:ro" - # - name: Sherlock CPU (centos 7.9.2009, gcc 10.1.0, open-mpi 4.1.2, openblas 0.3.10, zlib 1.2.11) - # DOCKER_REPOSITORY: 
geosx/sherlock-gcc10.1.0-openmpi4.1.2-openblas0.3.10-zlib1.2.11 - # TPL_DOCKERFILE: docker/Stanford/Dockerfile - # DOCKER_ROOT_IMAGE: matteofrigo5/sherlock-gcc10.1.0-openmpi4.1.2-cuda12.4.0-openblas0.3.10-zlib1.2.11-cmake3.31.4-no-geosx:0.0.1 - # SPEC: "~pygeosx~openmp %gcc@10.1.0" - # INSTALL_DIR_ROOT: /oak/stanford/groups/tchelepi/geos-sherlock/CPU - # RUNS_ON: ubuntu-latest - # - name: Sherlock GPU (centos 7.9.2009, gcc 10.1.0, open-mpi 4.1.2, cuda 12.4.0, openblas 0.3.10, zlib 1.2.11) - # DOCKER_REPOSITORY: geosx/sherlock-gcc10.1.0-openmpi4.1.2-cuda12.4.0-openblas0.3.10-zlib1.2.11 - # TPL_DOCKERFILE: docker/Stanford/Dockerfile - # DOCKER_ROOT_IMAGE: matteofrigo5/sherlock-gcc10.1.0-openmpi4.1.2-cuda12.4.0-openblas0.3.10-zlib1.2.11-cmake3.31.4-no-geosx:0.0.1 - # SPEC: "~pygeosx~openmp cuda_arch=86 %gcc@10.1.0 ^cuda@12.4.0+allow-unsupported-compilers" - # INSTALL_DIR_ROOT: /oak/stanford/groups/tchelepi/geos-sherlock/GPU - # RUNS_ON: ubuntu-latest + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-gcc13-cuda12.9.1 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + DOCKER_REPOSITORY: geosx/ubuntu24.04-gcc13-cuda12.9.1-hypredrive + GCC_VERSION: 13 + SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs +hypredrive %gcc-13 ^cuda@12.9.1+allow-unsupported-compilers" + + # CUDA 13 is blocked by the pinned RAJA package: + # raja: '^cuda@13:' conflicts with '+cuda' + # - name: Ubuntu 24.04 - gcc 14 + CUDA 13.2.1 + # RUNS_ON: streak2 + # TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + # DOCKER_BASE_IMAGE_TAG: 24.04-gcc14-cuda13.2.1 + # DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + # DOCKER_REPOSITORY: geosx/ubuntu24.04-gcc14-cuda13.2.1 + # GCC_VERSION: 14 + # SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %gcc-14 ^cuda@13.2.1+allow-unsupported-compilers" + + - name: Ubuntu 24.04 - clang 19 + CUDA 12.9.1 + RUNS_ON: streak2 + TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + DOCKER_BASE_IMAGE_TAG: 24.04-clang19-cuda12.9.1 + DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + 
DOCKER_REPOSITORY: geosx/ubuntu24.04-clang19-cuda12.9.1 + CLANG_VERSION: 19 + SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %clang-19 ^cuda@12.9.1+allow-unsupported-compilers" + + # - name: Ubuntu 24.04 - clang 20 + CUDA 13.2.1 + # RUNS_ON: streak2 + # TPL_DOCKERFILE: docker/tpl-ubuntu.Dockerfile + # DOCKER_BASE_IMAGE_TAG: 24.04-clang20-cuda13.2.1 + # DOCKER_BASE_IMAGE_REPO: geosx/ubuntu + # DOCKER_REPOSITORY: geosx/ubuntu24.04-clang20-cuda13.2.1 + # CLANG_VERSION: 20 + # SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %clang-20 ^cuda@13.2.1+allow-unsupported-compilers" + + - name: Rocky Linux 8 - gcc 13 + CUDA 12.9.1 + RUNS_ON: streak2 + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 8-gcc13-cuda12.9.1 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux8-gcc13-cuda12.9.1 + SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %gcc-13 ^cuda@12.9.1+allow-unsupported-compilers" + + - name: Rocky Linux 8 - gcc 13 + CUDA 12.9.1 (+hypredrive) + RUNS_ON: streak2 + TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + DOCKER_BASE_IMAGE_TAG: 8-gcc13-cuda12.9.1 + DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + DOCKER_REPOSITORY: geosx/rockylinux8-gcc13-cuda12.9.1-hypredrive + SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs +hypredrive %gcc-13 ^cuda@12.9.1+allow-unsupported-compilers" + + # Rocky8 clang19 CUDA 12.9.1 is blocked by SuperLU_DIST using + # C++11 with CUDA 12.9 + clang19 + GCC 14 libstdc++. 
+ # - name: Rocky Linux 8 - clang 19 + CUDA 12.9.1 + # RUNS_ON: streak2 + # TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + # DOCKER_BASE_IMAGE_TAG: 8-clang19-cuda12.9.1 + # DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + # DOCKER_REPOSITORY: geosx/rockylinux8-clang19-cuda12.9.1 + # SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %clang-19 ^cuda@12.9.1+allow-unsupported-compilers" + + # - name: Rocky Linux 9 - gcc 15 + CUDA 13.2.1 + # RUNS_ON: streak2 + # TPL_DOCKERFILE: docker/tpl-rockylinux.Dockerfile + # DOCKER_BASE_IMAGE_TAG: 9-gcc15-cuda13.2.1 + # DOCKER_BASE_IMAGE_REPO: geosx/rockylinux + # DOCKER_REPOSITORY: geosx/rockylinux9-gcc15-cuda13.2.1 + # SPEC: "+cuda cuda_arch=86,120 ~openmp ~pygeosx ~docs %gcc-15 ^cuda@13.2.1+allow-unsupported-compilers" steps: - name: Checkout @@ -95,37 +200,39 @@ jobs: lfs: true - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@v4 - name: Print environment run: printenv + - name: Compose DOCKER_BASE_IMAGE + id: base + run: | + echo "DOCKER_BASE_IMAGE=${{ matrix.DOCKER_BASE_IMAGE_REPO }}:${{ matrix.DOCKER_BASE_IMAGE_TAG }}-${DOCKER_BASE_IMAGE_SHA}" >> "$GITHUB_OUTPUT" + - name: Inject CA certificate into Docker build if: matrix.RUNS_ON == 'streak2' run: | - # 1. Copy the host's CA bundle into the Docker build context cp /etc/pki/tls/certs/ca-bundle.crt ./ca-bundle.crt - - # 2. 
Inject the COPY and update-ca-trust commands right before dnf runs sed -i '/RUN dnf clean all/i COPY ca-bundle.crt /etc/pki/ca-trust/source/anchors/ca-bundle.crt\nRUN update-ca-trust extract' ${{ matrix.TPL_DOCKERFILE }} - name: Run the docker build docker script env: TPL_DOCKERFILE: ${{ matrix.TPL_DOCKERFILE }} DOCKER_REPOSITORY: ${{ matrix.DOCKER_REPOSITORY }} - DOCKER_COMPILER_BUILD_ARG: ${{ matrix.DOCKER_COMPILER_BUILD_ARG }} + DOCKER_BASE_IMAGE: ${{ steps.base.outputs.DOCKER_BASE_IMAGE }} + GCC_VERSION: ${{ matrix.GCC_VERSION }} + CLANG_VERSION: ${{ matrix.CLANG_VERSION }} INSTALL_DIR_ROOT: ${{ matrix.INSTALL_DIR_ROOT || '/opt/GEOS' }} - HOST_CONFIG: ${{ matrix.HOST_CONFIG || 'host-configs/environment.cmake'}} + HOST_CONFIG: ${{ matrix.HOST_CONFIG || 'host-configs/environment.cmake' }} SPEC: ${{ matrix.SPEC || 'undefined' }} - DOCKER_ROOT_IMAGE: ${{ matrix.DOCKER_ROOT_IMAGE || 'undefined' }} COMMIT: ${{ github.sha }} BUILD_DIR: ${{ github.workspace }} DOCKER_TAG: ${{ github.event.number }}-${{ github.run_number }} - run: bash -x ./scripts/docker-build.sh - name: Login to DockerHub - uses: docker/login-action@v3 + uses: docker/login-action@v4 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} @@ -135,7 +242,6 @@ jobs: env: DOCKER_REPOSITORY: ${{ matrix.DOCKER_REPOSITORY }} DOCKER_TAG: ${{ github.event.number }}-${{ github.run_number }} - run: docker push ${DOCKER_REPOSITORY}:${DOCKER_TAG} # Convenience job - passes when all docker images are built. 
diff --git a/docker/rocky-spack.yaml b/docker/rocky-spack.yaml index b8f4d457..16edb2f0 100644 --- a/docker/rocky-spack.yaml +++ b/docker/rocky-spack.yaml @@ -18,6 +18,17 @@ spack: - ../versions.yaml toolchains: + gcc-12: + - spec: cxxflags='-fPIC -pthread' + - spec: cflags='-fPIC -pthread' + - spec: '%c=gcc@12.2.1' + when: '%c' + - spec: '%cxx=gcc@12.2.1' + when: '%cxx' + - spec: '%fortran=gcc@12.2.1' + when: '%fortran' + - spec: '%openmpi@4.1.1' + when: '%mpi' gcc-13: - spec: cxxflags='-fPIC -pthread' - spec: cflags='-fPIC -pthread' @@ -29,6 +40,17 @@ spack: when: '%fortran' - spec: '%openmpi@4.1.1' when: '%mpi' + gcc-15: + - spec: cxxflags='-fPIC -pthread' + - spec: cflags='-fPIC -pthread' + - spec: '%c=gcc@15.2.1' + when: '%c' + - spec: '%cxx=gcc@15.2.1' + when: '%cxx' + - spec: '%fortran=gcc@15.2.1' + when: '%fortran' + - spec: '%openmpi@4.1.1' + when: '%mpi' clang-17: - spec: cxxflags='-fPIC -pthread' - spec: cflags='-fPIC -pthread' @@ -40,6 +62,28 @@ spack: when: '%fortran' - spec: '%openmpi@4.1.1' when: '%mpi' + clang-19: + - spec: cxxflags='-fPIC -pthread' + - spec: cflags='-fPIC -pthread' + - spec: '%[virtuals=c]llvm@19+clang~flang~lld~lldb' + when: '%c' + - spec: '%[virtuals=cxx]llvm@19+clang~flang~lld~lldb' + when: '%cxx' + - spec: '%[virtuals=fortran]gcc@14.2.1' + when: '%fortran' + - spec: '%openmpi@4.1.1' + when: '%mpi' + clang-22: + - spec: cxxflags='-fPIC -pthread' + - spec: cflags='-fPIC -pthread' + - spec: '%[virtuals=c]llvm@22+clang~flang~lld~lldb' + when: '%c' + - spec: '%[virtuals=cxx]llvm@22+clang~flang~lld~lldb' + when: '%cxx' + - spec: '%[virtuals=fortran]gcc@15.2.1' + when: '%fortran' + - spec: '%openmpi@4.1.1' + when: '%mpi' packages: all: @@ -59,6 +103,7 @@ spack: - "netlib-lapack" llvm: + buildable: false externals: - spec: llvm@17.0.6+clang~flang~lld~lldb prefix: /usr @@ -68,8 +113,42 @@ spack: # doesn't accidentally pick GCC 8 headers when compiling CUDA sources. 
c: /usr/local/bin/clang-gcc13 cxx: /usr/local/bin/clang++-gcc13 + - spec: llvm@19+clang~flang~lld~lldb + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/clang + cxx: /usr/bin/clang++ + - spec: llvm@22+clang~flang~lld~lldb + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/clang + cxx: /usr/bin/clang++ gcc: + buildable: false externals: + - spec: gcc@8 languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc + cxx: /usr/bin/g++ + fortran: /usr/bin/gfortran + - spec: gcc@11 languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc + cxx: /usr/bin/g++ + fortran: /usr/bin/gfortran + - spec: gcc@12.2.1 languages:='c,c++,fortran' + prefix: /opt/rh/gcc-toolset-12/root/usr + extra_attributes: + compilers: + c: /opt/rh/gcc-toolset-12/root/usr/bin/gcc + cxx: /opt/rh/gcc-toolset-12/root/usr/bin/g++ + fortran: /opt/rh/gcc-toolset-12/root/usr/bin/gfortran - spec: gcc@13.3.1 languages:='c,c++,fortran' prefix: /opt/rh/gcc-toolset-13/root/usr extra_attributes: @@ -77,6 +156,20 @@ spack: c: /opt/rh/gcc-toolset-13/root/usr/bin/gcc cxx: /opt/rh/gcc-toolset-13/root/usr/bin/g++ fortran: /opt/rh/gcc-toolset-13/root/usr/bin/gfortran + - spec: gcc@14.2.1 languages:='c,c++,fortran' + prefix: /opt/rh/gcc-toolset-14/root/usr + extra_attributes: + compilers: + c: /opt/rh/gcc-toolset-14/root/usr/bin/gcc + cxx: /opt/rh/gcc-toolset-14/root/usr/bin/g++ + fortran: /opt/rh/gcc-toolset-14/root/usr/bin/gfortran + - spec: gcc@15.2.1 languages:='c,c++,fortran' + prefix: /opt/rh/gcc-toolset-15/root/usr + extra_attributes: + compilers: + c: /opt/rh/gcc-toolset-15/root/usr/bin/gcc + cxx: /opt/rh/gcc-toolset-15/root/usr/bin/g++ + fortran: /opt/rh/gcc-toolset-15/root/usr/bin/gfortran autoconf: version: [2.71] @@ -101,6 +194,8 @@ spack: externals: - spec: cuda@12.9.1 +allow-unsupported-compilers prefix: /usr/local/cuda + - spec: cuda@13.2.1 +allow-unsupported-compilers + prefix: /usr/local/cuda m4: buildable: false 
externals: @@ -135,7 +230,7 @@ spack: python: buildable: false externals: - - spec: python@3.6.8 + - spec: python@3.12 prefix: /usr tar: buildable: false diff --git a/docker/spack.yaml b/docker/spack.yaml deleted file mode 100644 index 50c71756..00000000 --- a/docker/spack.yaml +++ /dev/null @@ -1,262 +0,0 @@ -spack: - config: - install_tree: - root: $spack/.. - projections: - all: '{compiler.name}-{compiler.version}/{name}-{version}-{hash}' - misc_cache: $spack/../misc_cache - test_stage: $spack/../test_stage - build_stage:: - - $spack/../build_stage - - # Regular TPLs do not need views - view: false - - # Include shared variants and versions - include: - - ../defaults.yaml - - ../versions.yaml - - toolchains: - clang-15: - - spec: '%[virtuals=c]llvm@15.0.7+clang~flang~lld~lldb' - when: '%c' - - spec: '%[virtuals=cxx]llvm@15.0.7+clang~flang~lld~lldb' - when: '%cxx' - - spec: '%[virtuals=fortran]gcc@11.4.0' - when: '%fortran' - - spec: '%openmpi' - when: '%mpi' - clang-10: - - spec: '%[virtuals=c]llvm@10.0.0+clang~flang~lld~lldb openmp=project' - when: '%c' - - spec: '%[virtuals=cxx]llvm@10.0.0+clang~flang~lld~lldb openmp=project' - when: '%cxx' - - spec: '%[virtuals=fortran]gcc@9.4.0' - when: '%fortran' - - spec: '%openmpi' - when: '%mpi' - gcc-9: - - spec: cxxflags='-pthread' - - spec: cflags='-pthread' - - spec: '%c=gcc@9.4.0' - when: '%c' - - spec: '%cxx=gcc@9.4.0' - when: '%cxx' - - spec: '%fortran=gcc@9.4.0' - when: '%fortran' - - spec: '%openmpi %gcc@9.4.0' - when: '%mpi' - gcc-10: - - spec: cxxflags='-pthread' - - spec: cflags='-pthread' - - spec: '%c=gcc@10.5.0' - when: '%c' - - spec: '%cxx=gcc@10.5.0' - when: '%cxx' - - spec: '%fortran=gcc@10.5.0' - when: '%fortran' - - spec: '%openmpi %gcc@10.5.0' - when: '%mpi' - gcc-11: - - spec: cxxflags='-pthread' - - spec: cflags='-pthread' - - spec: '%c=gcc@11.4.0' - when: '%c' - - spec: '%cxx=gcc@11.4.0' - when: '%cxx' - - spec: '%fortran=gcc@11.4.0' - when: '%fortran' - - spec: '%openmpi %gcc@11.4.0' - when: 
'%mpi' - gcc-12: - - spec: cxxflags='-pthread' - - spec: cflags='-pthread' - - spec: '%c=gcc@12.3.0' - when: '%c' - - spec: '%cxx=gcc@12.3.0' - when: '%cxx' - - spec: '%fortran=gcc@12.3.0' - when: '%fortran' - - spec: '%openmpi %gcc@12.3.0' - when: '%mpi' - - packages: - all: - target: [x86_64] - - mpi: - require: - - openmpi@4.1.2 - - zlib-api: - require: - - zlib - - blas: - require: - - "netlib-lapack" - lapack: - require: - - "netlib-lapack" - - llvm: - externals: - - spec: llvm@10.0.0+clang~flang~lld~lldb openmp=project - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/clang - cxx: /usr/bin/clang++ - - spec: llvm@15.0.7+clang~flang~lld~lldb - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/clang-15 - cxx: /usr/bin/clang++-15 - - gcc: - externals: - - spec: gcc@9.4.0 languages:='c,c++,fortran' - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/gcc-9 - cxx: /usr/bin/g++-9 - fortran: /usr/bin/gfortran-9 - - spec: gcc@10.5.0 languages:='c,c++,fortran' - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/gcc-10 - cxx: /usr/bin/g++-10 - fortran: /usr/bin/gfortran-10 - - spec: gcc@11.4.0 languages:='c,c++,fortran' - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/gcc-11 - cxx: /usr/bin/g++-11 - fortran: /usr/bin/gfortran-11 - - spec: gcc@12.3.0 languages:='c,c++,fortran' - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/gcc-12 - cxx: /usr/bin/g++-12 - fortran: /usr/bin/gfortran-12 - - autoconf: - version: [2.71] - buildable: false - externals: - - spec: autoconf@2.71 - prefix: /usr - automake: - version: [1.16.5] - buildable: false - externals: - - spec: automake@1.16.5 - prefix: /usr - cmake: - version: [3.28.3] - buildable: false - externals: - - spec: cmake@3.28.3 - prefix: /usr/local - cuda: - buildable: False - externals: - - spec: cuda@11.8.0 +allow-unsupported-compilers - prefix: /usr/local/cuda - findutils: - version: [4.7.0] - buildable: false - externals: - - spec: findutils@4.7.0 - prefix: 
/usr - m4: - buildable: false - externals: - - spec: m4@1.4.18 - prefix: /usr - mpfr: - buildable: false - externals: - - spec: mpfr@6.0.2 - prefix: /usr - - netlib-lapack: - buildable: false - externals: - - spec: netlib-lapack@3.10.0 - prefix: /usr - netlib-blas: - buildable: false - externals: - - spec: netlib-blas@3.10.0 - prefix: /usr - openmpi: - buildable: false - externals: - # Ubuntu 22.04 (apt) provides OpenMPI 4.1.2. We treat it as external for all toolchains - # to prevent Spack from building OpenMPI (which may otherwise drift to v5). - - spec: openmpi@4.1.2 %clang@15.0.7 - prefix: /usr - modules: [mpi] - - spec: openmpi@4.1.2 %clang@10.0.0 - prefix: /usr - modules: [mpi] - - spec: openmpi@4.1.2 %gcc@9.4.0 - prefix: /usr - - spec: openmpi@4.1.2 %gcc@10.5.0 - prefix: /usr - - spec: openmpi@4.1.2 %gcc@11.4.0 - prefix: /usr - - spec: openmpi@4.1.2 %gcc@12.3.0 - prefix: /usr - perl: - buildable: false - externals: - - spec: perl@5.34.0 - prefix: /usr - pkg-config: - buildable: false - externals: - - spec: pkg-config@0.29.2 - prefix: /usr - py-sphinx: - buildable: false - externals: - - spec: py-sphinx@4.3.2 - prefix: /usr - python: - buildable: false - externals: - - spec: python@3.10.12 - prefix: /usr - readline: - buildable: false - externals: - - spec: readline@8.0 - prefix: /usr - tar: - buildable: false - externals: - - spec: tar@1.34 - prefix: /usr - unzip: - buildable: false - externals: - - spec: unzip@6.0 - prefix: /usr - xz: - buildable: false - externals: - - spec: xz@5.2.5 - prefix: /usr - zlib: - buildable: false - externals: - - spec: zlib@1.2.11 - prefix: /usr diff --git a/docker/tpl-centos-gcc-cuda.Dockerfile b/docker/tpl-centos-gcc-cuda.Dockerfile deleted file mode 100644 index dbea5b7b..00000000 --- a/docker/tpl-centos-gcc-cuda.Dockerfile +++ /dev/null @@ -1,120 +0,0 @@ -# NOTE: see docker/tpl-ubuntu-gcc.Dockerfile for detailed comments -ARG TMP_DIR=/tmp -ARG SRC_DIR=$TMP_DIR/thirdPartyLibs -ARG BLD_DIR=$TMP_DIR/build - -FROM 
nvidia/cuda:11.8.0-devel-centos7 AS tpl_toolchain_intersect_geosx_toolchain -ARG SRC_DIR - -ARG INSTALL_DIR -ENV GEOSX_TPL_DIR=$INSTALL_DIR - -RUN sed -i s/mirror.centos.org/vault.centos.org/g /etc/yum.repos.d/*.repo && \ - sed -i s/^#.*baseurl=http/baseurl=https/g /etc/yum.repos.d/*.repo && \ - sed -i s/^mirrorlist=http/#mirrorlist=https/g /etc/yum.repos.d/*.repo - -# Using gcc 8.3.1 provided by the Software Collections (SCL). -RUN yum install -y \ - centos-release-scl - -# Modify the SCLo repository configuration -RUN sed -i 's|^mirrorlist=|#mirrorlist=|g' /etc/yum.repos.d/CentOS-SCLo-scl.repo && \ - sed -i 's|^baseurl=http://mirror.centos.org/centos/\$releasever/sclo/\$basearch/rh|baseurl=http://vault.centos.org/7.9.2009/sclo/x86_64/rh|g' /etc/yum.repos.d/CentOS-SCLo-scl.repo && \ - sed -i 's|^mirrorlist=|#mirrorlist=|g' /etc/yum.repos.d/CentOS-SCLo-scl-rh.repo && \ - sed -i 's|^baseurl=http://mirror.centos.org/centos/\$releasever/sclo/\$basearch/rh|baseurl=http://vault.centos.org/7.9.2009/sclo/x86_64/rh|g' /etc/yum.repos.d/CentOS-SCLo-scl-rh.repo - -# Install necessary tools and update the system -RUN yum -y update && \ - yum -y install yum-utils - -RUN yum install -y \ - devtoolset-8-gcc \ - devtoolset-8-gcc-c++ \ - devtoolset-8-gcc-gfortran - -# Installing dependencies -RUN yum -y install \ - ca-certificates \ - curl \ - tbb \ - blas-devel \ - lapack-devel \ - zlib-devel \ - openmpi-devel \ - python3 \ -# Additional spack dependencies - python3-pip \ - # pkgconfig \ - # xz \ - unzip \ - bzip2 \ - gnupg \ - && pip3 install virtualenv - -# Install clingo for Spack -RUN python3 -m pip install --upgrade pip && \ - python3 -m pip install clingo - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-cmake.sh - -# Installing TPL's -FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain -ARG SRC_DIR -ARG BLD_DIR - -RUN yum install -y \ - tbb-devel \ - make \ - bc \ - file \ - patch \ - ca-certificates \ - autoconf \ - automake \ - git - -# Run uberenv -# 
Have to create install directory first for uberenv -# -k flag is to ignore SSL errors -RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ - mkdir -p ${GEOSX_TPL_DIR} && \ -# Create symlink to openmpi include directory - ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include && \ - ./scripts/uberenv/uberenv.py \ - --spec "%gcc@8+cuda~uncrustify~openmp~pygeosx cuda_arch=86 ^cuda@11.8.0+allow-unsupported-compilers ^caliper~gotcha~sampler~libunwind~libdw~papi" \ - --spack-env-file=${SRC_DIR}/docker/spack.yaml \ - --project-json=.uberenv_config.json \ - --prefix ${GEOSX_TPL_DIR} \ - -k && \ -# Remove host-config generated for LvArray - rm lvarray* && \ -# Rename and copy spack-generated host-config to root directory - cp *.cmake /spack-generated.cmake && \ -# Remove extraneous spack files - cd ${GEOSX_TPL_DIR} && \ - rm -rf bin/ build_stage/ misc_cache/ spack/ spack_env/ .spack-db/ - -# Extract only TPL's from previous stage -FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain -ARG SRC_DIR - -COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR - -# Extract the generated host-config -COPY --from=tpl_toolchain /spack-generated.cmake / - -RUN yum install -y \ - openssh-client \ - ca-certificates \ - curl \ - python3 \ - texlive \ - graphviz \ - git && \ -# Regenerate symlink to openmpi include directory - ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-ninja.sh - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh -ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-rockylinux-clang-cuda-12.Dockerfile b/docker/tpl-rockylinux-clang-cuda-12.Dockerfile deleted file mode 100644 index f5dc00b4..00000000 --- a/docker/tpl-rockylinux-clang-cuda-12.Dockerfile +++ /dev/null @@ -1,132 +0,0 @@ -# NOTE: see docker/tpl-ubuntu-gcc.Dockerfile for detailed comments -ARG TMP_DIR=/tmp -ARG SRC_DIR=$TMP_DIR/thirdPartyLibs -ARG BLD_DIR=$TMP_DIR/build - -FROM 
nvidia/cuda:12.9.1-devel-rockylinux8 AS tpl_toolchain_intersect_geosx_toolchain -ARG SRC_DIR -ARG INSTALL_DIR -ENV GEOSX_TPL_DIR=$INSTALL_DIR - -# Installing dependencies -RUN dnf clean all && \ - dnf -y update && \ - dnf -y install dnf-plugins-core && \ - dnf config-manager --set-enabled powertools || dnf config-manager --set-enabled devel && \ - dnf -y install \ - which \ - clang-17.0.6 \ - gcc-toolset-13 \ - python3 \ - zlib-devel \ - tbb \ - blas \ - lapack \ - openmpi \ - openmpi-devel \ - python3-pip \ - unzip \ - mpfr-devel \ - bzip2 \ - gnupg \ - xz \ - python3-virtualenv - -# Install clingo for Spack -RUN python3 -m pip install --upgrade pip && \ - python3 -m pip install clingo - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-cmake.sh - -# Installing TPL's -FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain -ARG SRC_DIR -ARG BLD_DIR -ARG SPEC - -RUN dnf -y install \ - tbb-devel \ - bc \ - file \ - patch \ - ca-certificates \ - autoconf \ - automake \ - m4 \ - git - -# Create clang wrappers that always use gcc-toolset-13 for libstdc++ headers/libs. -# This is critical for CUDA builds where NVCC invokes the host compiler via -ccbin. -RUN printf '%s\n' '#!/usr/bin/env bash' \ - 'exec /usr/bin/clang --gcc-toolchain=/opt/rh/gcc-toolset-13/root/usr "$@"' \ - > /usr/local/bin/clang-gcc13 && \ - chmod +x /usr/local/bin/clang-gcc13 && \ - printf '%s\n' '#!/usr/bin/env bash' \ - 'exec /usr/bin/clang++ --gcc-toolchain=/opt/rh/gcc-toolset-13/root/usr "$@"' \ - > /usr/local/bin/clang++-gcc13 && \ - chmod +x /usr/local/bin/clang++-gcc13 && \ - /usr/local/bin/clang-gcc13 --version && \ - /usr/local/bin/clang++-gcc13 --version - -# RUN uberenv -# Have to create install directory first for uberenv -# -k flag is to ignore SSL errors -# 1. We wrap this in 'scl enable gcc-toolset-13' so the build finds GCC 13 headers. -# 2. gcc-toolchain selection is handled by the clang wrapper scripts + Spack llvm external compiler paths. 
-RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ - mkdir -p ${GEOSX_TPL_DIR} && \ -# Create symlink to openmpi include directory - ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include && \ -# Create symlinks to blas/lapack libraries - ln -s /usr/lib64/libblas.so.3 /usr/lib64/libblas.so && \ - ln -s /usr/lib64/liblapack.so.3 /usr/lib64/liblapack.so && \ - GEOSX_SPEC="${SPEC}" && \ - if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then GEOSX_SPEC="+cuda~uncrustify~openmp~pygeosx cuda_arch=86 %clang-17 ^cuda@12.9.1+allow-unsupported-compilers ^caliper~gotcha~sampler~libunwind~libdw~papi"; fi && \ - scl enable gcc-toolset-13 ' \ - ./scripts/uberenv/uberenv.py \ - --spec "'"${GEOSX_SPEC}"'" \ - --spack-env-file=${SRC_DIR}/docker/rocky-spack.yaml \ - --project-json=.uberenv_config.json \ - --prefix ${GEOSX_TPL_DIR} \ - -k ' && \ - rm -f lvarray* && \ - cp *.cmake /spack-generated.cmake && \ -# Remove extraneous spack files - cd ${GEOSX_TPL_DIR} && \ - rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ - -# Extract only TPL's from the previous stage -FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain -ARG SRC_DIR - -COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR - -# The generated host-config may reference these wrappers as compilers, so they -# must exist in the final image (not just the build stage). 
-COPY --from=tpl_toolchain /usr/local/bin/clang-gcc13 /usr/local/bin/clang-gcc13 -COPY --from=tpl_toolchain /usr/local/bin/clang++-gcc13 /usr/local/bin/clang++-gcc13 - -# Extract the generated host-config -COPY --from=tpl_toolchain /spack-generated.cmake / - -# Install required packages using dnf -RUN dnf clean all && \ - rm -rf /var/cache/dnf && \ - dnf -y install \ - openssh-clients \ - ca-certificates \ - curl \ - python3 \ - texlive \ - graphviz \ - ninja-build \ - git && \ -# Regenerate symlink to openmpi include directory - ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include && \ -# Regenerate symlinks to blas/lapack libraries - ln -s /usr/lib64/libblas.so.3 /usr/lib64/libblas.so && \ - ln -s /usr/lib64/liblapack.so.3 /usr/lib64/liblapack.so - -# Run the installation script -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh -ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-rockylinux-gcc-cuda-12.Dockerfile b/docker/tpl-rockylinux-gcc-cuda-12.Dockerfile deleted file mode 100644 index ac0ede2c..00000000 --- a/docker/tpl-rockylinux-gcc-cuda-12.Dockerfile +++ /dev/null @@ -1,118 +0,0 @@ -ARG TMP_DIR=/tmp -ARG SRC_DIR=$TMP_DIR/thirdPartyLibs -ARG BLD_DIR=$TMP_DIR/build - -FROM nvidia/cuda:12.9.1-devel-rockylinux8 AS tpl_toolchain_intersect_geosx_toolchain -ARG SRC_DIR - -ARG INSTALL_DIR -ENV GEOSX_TPL_DIR=$INSTALL_DIR - -# Installing dependencies -RUN dnf clean all && \ - dnf -y update && \ - dnf -y install \ - which \ - gcc-toolset-13 \ - python3 \ - zlib-devel \ - tbb \ - blas \ - lapack \ - openmpi \ - openmpi-devel \ - # Additional spack dependencies - python3-pip \ - unzip \ - mpfr-devel \ - bzip2 \ - gnupg \ - xz \ - python3-virtualenv - -# Install clingo for Spack -RUN python3 -m pip install --upgrade pip && \ - python3 -m pip install clingo - -# Custom install script for CMake or other tools -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-cmake.sh - -# Installing TPL's -FROM 
tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain -ARG SRC_DIR -ARG BLD_DIR -ARG SPEC - -# Install additional required packages -RUN dnf clean all && \ - dnf -y update && \ - dnf -y install \ - tbb-devel \ - bc \ - file \ - patch \ - ca-certificates \ - autoconf \ - automake \ - m4 \ - git - -# Run uberenv -# Have to create install directory first for uberenv -# -k flag is to ignore SSL errors -RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ - mkdir -p ${GEOSX_TPL_DIR} && \ -# Create symlink to openmpi include directory - ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include && \ -# Create symlinks to blas/lapack libraries - ln -s /usr/lib64/libblas.so.3 /usr/lib64/libblas.so && \ - ln -s /usr/lib64/liblapack.so.3 /usr/lib64/liblapack.so && \ - GEOSX_SPEC="${SPEC}" && \ - if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then GEOSX_SPEC="+cuda~uncrustify~openmp~pygeosx lai=hypre cuda_arch=86 %gcc-13 ^cuda@12.9.1+allow-unsupported-compilers ^caliper~gotcha~sampler~libunwind~libdw~papi"; fi && \ - ./scripts/uberenv/uberenv.py \ - --spec "${GEOSX_SPEC}" \ - --spack-env-file=${SRC_DIR}/docker/rocky-spack.yaml \ - --project-json=.uberenv_config.json \ - --prefix ${GEOSX_TPL_DIR} \ - -k && \ -# Remove host-config generated for LvArray - rm lvarray* && \ -# Rename and copy spack-generated host-config to root directory - cp *.cmake /spack-generated.cmake && \ -# Remove extraneous spack files - cd ${GEOSX_TPL_DIR} && \ - rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ - -# Extract only TPL's from the previous stage -FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain -ARG SRC_DIR - -COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR - -# Extract the generated host-config -COPY --from=tpl_toolchain /spack-generated.cmake / - -# Final installation of packages and tools -RUN dnf clean all && \ - rm -rf /var/cache/dnf && \ - dnf -y install dnf-plugins-core && \ - dnf 
config-manager --set-enabled devel && \ - dnf -y update && \ - dnf -y install \ - openssh-clients \ - ca-certificates \ - curl \ - python3 \ - texlive \ - graphviz \ - ninja-build \ - git && \ -# Regenerate symlink to openmpi include directory - ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include && \ -# Regenerate symlinks to blas/lapack libraries - ln -s /usr/lib64/libblas.so.3 /usr/lib64/libblas.so && \ - ln -s /usr/lib64/liblapack.so.3 /usr/lib64/liblapack.so - -# Install sccache -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh -ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-rockylinux.Dockerfile b/docker/tpl-rockylinux.Dockerfile new file mode 100644 index 00000000..27cbe2bb --- /dev/null +++ b/docker/tpl-rockylinux.Dockerfile @@ -0,0 +1,184 @@ +# TPL build Dockerfile for Rocky Linux-based images. +# +# This Dockerfile expects DOCKER_BASE_IMAGE to point at one of the +# geosx/rockylinux:* images produced by +# https://github.com/GEOS-DEV/docker_base_images. Those images already provide: +# * the toolchain (gcc-toolset-N or clang) under /opt/compiler/bin/, with +# CC/CXX/FC set +# * cmake (under /usr/local) +# * the upstream NVIDIA CUDA toolkit when DOCKER_BASE_IMAGE is a CUDA variant +# +# Temporary local variables dedicated to the TPL build +ARG TMP_DIR=/tmp +ARG SRC_DIR=$TMP_DIR/thirdPartyLibs +ARG BLD_DIR=$TMP_DIR/build + +ARG DOCKER_BASE_IMAGE=rockylinux:8 +FROM ${DOCKER_BASE_IMAGE} AS tpl_toolchain_intersect_geosx_toolchain +ARG SRC_DIR + +ARG INSTALL_DIR +ENV GEOSX_TPL_DIR=$INSTALL_DIR + +# Packages needed both for the TPL build and for the downstream GEOS build. +# Some Rocky 8 vs 9 differences are handled by the base image already +# (curl vs curl-minimal, etc.); here we only add things the base images +# don't preinstall. 
+RUN dnf clean all && \ + dnf -y install dnf-plugins-core || true && \ + (dnf config-manager --set-enabled powertools 2>/dev/null || \ + dnf config-manager --set-enabled crb 2>/dev/null || \ + dnf config-manager --set-enabled devel 2>/dev/null || true) && \ + dnf -y install \ + which \ + zlib-devel \ + tbb \ + openmpi \ + openmpi-devel \ + python3-pip \ + unzip \ + mpfr-devel \ + bzip2 \ + gnupg2 \ + perl \ + xz && \ + (dnf -y install python3-virtualenv || \ + /usr/bin/python3 -m pip install --no-cache-dir virtualenv) && \ + dnf clean all && rm -rf /var/cache/dnf /var/lib/dnf + +# Install clingo for Spack +RUN (/usr/bin/python3 -m pip --version >/dev/null 2>&1 || \ + /usr/bin/python3 -m ensurepip --upgrade || \ + (dnf -y install python3.12-pip || dnf -y install python3-pip)) && \ + /usr/bin/python3 -m pip install --upgrade pip && \ + /usr/bin/python3 -m pip install clingo + +# Make `mpicc`/`mpicxx` resolve without a `module load` step. +ENV PATH="/usr/lib64/openmpi/bin:${PATH}" \ + MPICC=/usr/lib64/openmpi/bin/mpicc \ + MPICXX=/usr/lib64/openmpi/bin/mpicxx \ + MPIEXEC=/usr/lib64/openmpi/bin/mpirun +ENV OMPI_CC=${CC} \ + OMPI_CXX=${CXX} + +# Some downstream builds expect /usr/lib64/openmpi/include to point at the +# headers; on Rocky those live under /usr/include/openmpi-x86_64. +RUN if [ -d /usr/include/openmpi-x86_64 ] && [ ! -e /usr/lib64/openmpi/include ]; then \ + mkdir -p /usr/lib64/openmpi && \ + ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include ; \ + fi && \ + if [ -e /usr/lib64/libblas.so.3 ] && [ ! -e /usr/lib64/libblas.so ]; then ln -s /usr/lib64/libblas.so.3 /usr/lib64/libblas.so ; fi && \ + if [ -e /usr/lib64/liblapack.so.3 ] && [ ! -e /usr/lib64/liblapack.so ]; then ln -s /usr/lib64/liblapack.so.3 /usr/lib64/liblapack.so ; fi + +# Rocky OpenMPI defaults wrappers to gcc/g++. For clang-based base images we +# retarget the wrappers to clang/clang++ so mpi wrapper compilers are aligned +# with the image toolchain contract. 
+RUN if echo "${CC:-}" | grep -q "clang"; then \ + for f in /usr/share/openmpi/mpicc-wrapper-data.txt /usr/share/openmpi/mpicc.openmpi-wrapper-data.txt; do \ + if [ -f "${f}" ]; then sed -i "s|^compiler=.*$|compiler=${CC}|" "${f}" ; fi ; \ + done && \ + for f in /usr/share/openmpi/mpic++-wrapper-data.txt /usr/share/openmpi/mpic++.openmpi-wrapper-data.txt /usr/share/openmpi/mpicxx-wrapper-data.txt /usr/share/openmpi/mpicxx.openmpi-wrapper-data.txt /usr/share/openmpi/mpiCC-wrapper-data.txt /usr/share/openmpi/mpiCC.openmpi-wrapper-data.txt; do \ + if [ -f "${f}" ]; then sed -i "s|^compiler=.*$|compiler=${CXX}|" "${f}" ; fi ; \ + done && \ + mpicc --showme:command && \ + mpic++ --showme:command ; \ + fi + +# ----- TPL build stage ----- +FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain +ARG SRC_DIR +ARG BLD_DIR +ARG SPEC + +RUN dnf -y install \ + tbb-devel \ + bc \ + file \ + patch \ + ca-certificates \ + autoconf \ + automake \ + make \ + m4 \ + git && \ + dnf clean all && rm -rf /var/cache/dnf /var/lib/dnf + +# Run uberenv. The SPEC is supplied by the matrix because the spack toolchain +# tag depends on the compiler+version baked into the base image. +# +# The matrix selects exactly one compiler toolchain through SPEC. Validate the +# expected external compiler paths before Spack starts so missing compilers fail +# directly instead of being discovered or built by Spack. +RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ + mkdir -p ${GEOSX_TPL_DIR} && \ + GEOSX_SPEC="${SPEC}" && \ + if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then \ + echo "ERROR: SPEC build-arg must be supplied" >&2 ; \ + exit 1 ; \ + fi && \ + GEOSX_SPACK_ENV_FILE=${SRC_DIR}/docker/rocky-spack.yaml && \ + require_exe() { for exe in "$@"; do if [ ! 
-x "${exe}" ]; then echo "ERROR: required compiler path is missing or not executable: ${exe}" >&2 ; exit 1 ; fi ; done ; } && \ + ROCKY_GCC_TOOLSET="" && \ + case "${GEOSX_SPEC}" in \ + *"%gcc-12"*) \ + ROCKY_GCC_TOOLSET=12 ; \ + require_exe /opt/rh/gcc-toolset-12/root/usr/bin/gcc /opt/rh/gcc-toolset-12/root/usr/bin/g++ /opt/rh/gcc-toolset-12/root/usr/bin/gfortran ;; \ + *"%gcc-13"*) \ + ROCKY_GCC_TOOLSET=13 ; \ + require_exe /opt/rh/gcc-toolset-13/root/usr/bin/gcc /opt/rh/gcc-toolset-13/root/usr/bin/g++ /opt/rh/gcc-toolset-13/root/usr/bin/gfortran ;; \ + *"%gcc-15"*) \ + ROCKY_GCC_TOOLSET=15 ; \ + require_exe /opt/rh/gcc-toolset-15/root/usr/bin/gcc /opt/rh/gcc-toolset-15/root/usr/bin/g++ /opt/rh/gcc-toolset-15/root/usr/bin/gfortran ;; \ + *"%clang-17"*) \ + require_exe /usr/local/bin/clang-gcc13 /usr/local/bin/clang++-gcc13 /opt/rh/gcc-toolset-13/root/usr/bin/gcc /opt/rh/gcc-toolset-13/root/usr/bin/g++ /opt/rh/gcc-toolset-13/root/usr/bin/gfortran ;; \ + *"%clang-19"*) \ + require_exe /usr/bin/clang /usr/bin/clang++ /opt/rh/gcc-toolset-14/root/usr/bin/gcc /opt/rh/gcc-toolset-14/root/usr/bin/g++ /opt/rh/gcc-toolset-14/root/usr/bin/gfortran ;; \ + *"%clang-22"*) \ + require_exe /usr/bin/clang /usr/bin/clang++ /opt/rh/gcc-toolset-15/root/usr/bin/gcc /opt/rh/gcc-toolset-15/root/usr/bin/g++ /opt/rh/gcc-toolset-15/root/usr/bin/gfortran ;; \ + *) \ + echo "ERROR: unsupported Rocky compiler selector in SPEC: ${GEOSX_SPEC}" >&2 ; \ + exit 1 ;; \ + esac && \ + if [ -n "${ROCKY_GCC_TOOLSET}" ]; then \ + scl enable "gcc-toolset-${ROCKY_GCC_TOOLSET}" " \ + ./scripts/uberenv/uberenv.py \ + --spec '${GEOSX_SPEC}' \ + --spack-env-file=${GEOSX_SPACK_ENV_FILE} \ + --project-json=${SRC_DIR}/.uberenv_config.json \ + --prefix ${GEOSX_TPL_DIR} \ + -k " ; \ + else \ + ./scripts/uberenv/uberenv.py \ + --spec "${GEOSX_SPEC}" \ + --spack-env-file=${GEOSX_SPACK_ENV_FILE} \ + --project-json=${SRC_DIR}/.uberenv_config.json \ + --prefix ${GEOSX_TPL_DIR} \ + -k ; \ + fi && \ + rm -f lvarray* && 
\ + cp *.cmake /spack-generated.cmake && \ + cd ${GEOSX_TPL_DIR} && \ + rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ + +# ----- Final GEOS-build image ----- +FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain +ARG SRC_DIR +COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR +COPY --from=tpl_toolchain /spack-generated.cmake / + +RUN dnf -y install \ + openssh-clients \ + ca-certificates \ + graphviz \ + ninja-build && \ + dnf clean all && rm -rf /var/cache/dnf /var/lib/dnf && \ + if [ -d /usr/include/openmpi-x86_64 ] && [ ! -e /usr/lib64/openmpi/include ]; then \ + mkdir -p /usr/lib64/openmpi && \ + ln -s /usr/include/openmpi-x86_64 /usr/lib64/openmpi/include ; \ + fi && \ + if [ -e /usr/lib64/libblas.so.3 ] && [ ! -e /usr/lib64/libblas.so ]; then ln -s /usr/lib64/libblas.so.3 /usr/lib64/libblas.so ; fi && \ + if [ -e /usr/lib64/liblapack.so.3 ] && [ ! -e /usr/lib64/liblapack.so ]; then ln -s /usr/lib64/liblapack.so.3 /usr/lib64/liblapack.so ; fi + +# Install sccache to speed up downstream GEOS builds +RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh +ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-ubuntu-clang-cuda.Dockerfile b/docker/tpl-ubuntu-clang-cuda.Dockerfile deleted file mode 100644 index 4a915408..00000000 --- a/docker/tpl-ubuntu-clang-cuda.Dockerfile +++ /dev/null @@ -1,101 +0,0 @@ -# NOTE: see docker/tpl-ubuntu-gcc.Dockerfile for detailed comments -ARG TMP_DIR=/tmp -ARG SRC_DIR=$TMP_DIR/thirdPartyLibs -ARG BLD_DIR=$TMP_DIR/build - -FROM nvidia/cuda:11.8.0-devel-ubuntu20.04 AS tpl_toolchain_intersect_geosx_toolchain -ARG SRC_DIR - -ARG INSTALL_DIR -ENV GEOSX_TPL_DIR=$INSTALL_DIR - -# Installing dependencies -RUN ln -fs /usr/share/zoneinfo/America/Los_Angeles /etc/localtime && \ - rm /etc/apt/sources.list.d/*.list && \ - apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - ca-certificates \ - curl \ - gfortran \ - 
libtbb2 \ - libblas-dev \ - liblapack-dev \ - zlib1g-dev \ - openmpi-bin \ - libopenmpi-dev \ - python3 \ - python3-dev \ - clang \ -# Additional spack dependencies - python3-pip \ - pkg-config \ - xz-utils \ - unzip \ - libmpfr-dev \ - lbzip2 \ - bzip2 \ - gnupg \ - virtualenv - -# Install clingo for Spack -RUN python3 -m pip install --upgrade pip && \ - python3 -m pip install clingo - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-cmake.sh - -# Installing TPL's -FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain -ARG SRC_DIR -ARG BLD_DIR -ARG SPEC - -RUN apt-get install -y --no-install-recommends \ - libtbb-dev \ - bc \ - file \ - patch \ - ca-certificates \ - git - -# Run uberenv -# Have to create install directory first for uberenv -# -k flag is to ignore SSL errors -RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ - mkdir -p ${GEOSX_TPL_DIR} && \ - GEOSX_SPEC="${SPEC}" && \ - if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then GEOSX_SPEC="+cuda~uncrustify~openmp~pygeosx cuda_arch=86 %clang-10 ^cuda@11.8.0+allow-unsupported-compilers ^caliper~gotcha~sampler~libunwind~libdw~papi"; fi && \ - ./scripts/uberenv/uberenv.py \ - --spec "${GEOSX_SPEC}" \ - --spack-env-file=${SRC_DIR}/docker/ubuntu20-clang-cuda-spack.yaml \ - --project-json=.uberenv_config.json \ - --prefix ${GEOSX_TPL_DIR} \ - -k && \ -# Remove host-config generated for LvArray - rm lvarray* && \ -# Rename and copy spack-generated host-config to root directory - cp *.cmake /spack-generated.cmake && \ -# Remove extraneous spack files - cd ${GEOSX_TPL_DIR} && \ - rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ - - -# Extract only TPL's from previous stage -FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain -ARG SRC_DIR - -COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR - -# Extract the generated host-config -COPY --from=tpl_toolchain /spack-generated.cmake / - -RUN apt-get install -y 
--no-install-recommends \ - openssh-client \ - ca-certificates \ - curl \ - python3 \ - texlive \ - graphviz \ - ninja-build - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh -ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-ubuntu-clang.Dockerfile b/docker/tpl-ubuntu-clang.Dockerfile deleted file mode 100644 index fe75f418..00000000 --- a/docker/tpl-ubuntu-clang.Dockerfile +++ /dev/null @@ -1,140 +0,0 @@ -# NOTE: see docker/tpl-ubuntu-gcc.Dockerfile for detailed comments -ARG TMP_DIR=/tmp -ARG SRC_DIR=$TMP_DIR/thirdPartyLibs -ARG BLD_DIR=$TMP_DIR/build - -ARG DOCKER_ROOT_IMAGE - -FROM $DOCKER_ROOT_IMAGE AS tpl_toolchain_intersect_geosx_toolchain -ARG SRC_DIR - -ARG INSTALL_DIR -ENV GEOSX_TPL_DIR=$INSTALL_DIR - -ARG CLANG_MAJOR_VERSION - -RUN apt-get update - -# Installing dependencies -RUN DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ - apt-get install -y --no-install-recommends \ - clang-$CLANG_MAJOR_VERSION \ - libomp-$CLANG_MAJOR_VERSION-dev \ - ca-certificates \ - curl \ - libtbb2 \ - libblas-dev \ - liblapack-dev \ - zlib1g-dev \ - openmpi-bin \ - libopenmpi-dev \ - python3 \ - python3-dev \ - python3-pip \ - python3-sphinx \ - doxygen \ - pkg-config \ - xz-utils \ - unzip \ - libmpfr-dev \ - lbzip2 \ - bzip2 \ - gnupg \ - virtualenv - -# Install clingo for Spack -RUN python3 -m pip install --upgrade pip && \ - python3 -m pip install clingo - -# Install CMake -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-cmake.sh - -# Installing TPLs -FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain -ARG SRC_DIR -ARG BLD_DIR -ARG SPEC - -ARG GCC_MAJOR_VERSION - -RUN apt-get install -y --no-install-recommends \ - gfortran-$GCC_MAJOR_VERSION \ - g++-$GCC_MAJOR_VERSION \ - libtbb-dev \ - make \ - bc \ - file \ -# GEOS patches some tpl. Remove when it's not the case anymore. - patch \ -# `ca-certificates` needed by `git` to download spack repo. 
- ca-certificates \ - git - -# Add MPI environment path info -ENV CC=/usr/bin/gcc-$GCC_MAJOR_VERSION \ - CXX=/usr/bin/g++-$GCC_MAJOR_VERSION \ - MPICC=/usr/bin/mpicc \ - MPICXX=/usr/bin/mpicxx \ - MPIEXEC=/usr/bin/mpirun -# The multi-line definition of arguments does not seem happy -# when a variable uses the value of another variable previously defined on the same line. -ENV OMPI_CC=$CC \ - OMPI_CXX=$CXX - -# Run uberenv -# Have to create install directory first for uberenv -# -k flag is to ignore SSL errors -RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ - mkdir -p ${GEOSX_TPL_DIR} && \ -# Create symlinks to g++ libraries - ln -s /usr/bin/g++-${GCC_MAJOR_VERSION} /usr/bin/g++ && \ - GEOSX_SPEC="${SPEC}" && \ - if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then GEOSX_SPEC="~shared~openmp+docs %clang-${CLANG_MAJOR_VERSION} ^caliper~gotcha~sampler~libunwind~libdw~papi"; fi && \ - ./scripts/uberenv/uberenv.py \ - --spec "${GEOSX_SPEC}" \ - --spack-env-file=${SRC_DIR}/docker/spack.yaml \ - --project-json=.uberenv_config.json \ - --prefix ${GEOSX_TPL_DIR} \ - -k && \ -# Remove host-config generated for LvArray - rm lvarray* && \ -# Rename and copy spack-generated host-config to root directory - cp *.cmake /spack-generated.cmake && \ -# Remove extraneous spack files - cd ${GEOSX_TPL_DIR} && \ - rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ - -# Extract only TPLs from previous stage -FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain -ARG SRC_DIR - -COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR - -# Extract the generated host-config -COPY --from=tpl_toolchain /spack-generated.cmake / - -RUN DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ - apt-get install -y --no-install-recommends \ - openssh-client \ - ca-certificates \ - curl \ - python3 \ - texlive \ - texlive-latex-extra \ - graphviz \ - libxml2-utils \ - git \ - ghostscript \ - ninja-build \ -## Necessary 
dependencies for pygeosx unit tests - python3-dev \ - python3-sphinx \ - python3-mpi4py \ - python3-scipy \ - python3-virtualenv \ - python3-matplotlib \ - python3-venv \ - python3-pytest - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh -ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-ubuntu-gcc.Dockerfile b/docker/tpl-ubuntu-gcc.Dockerfile deleted file mode 100644 index 70341fb5..00000000 --- a/docker/tpl-ubuntu-gcc.Dockerfile +++ /dev/null @@ -1,158 +0,0 @@ -# Temporary local variables dedicated to the TPL build -ARG TMP_DIR=/tmp -ARG SRC_DIR=$TMP_DIR/thirdPartyLibs -ARG BLD_DIR=$TMP_DIR/build - -# Defining the building toolchain that are common to both GEOSX and its TPLs. -# The docker base image could be any version of ubuntu/debian (as long as package names are unchanged). -ARG DOCKER_ROOT_IMAGE - - -FROM $DOCKER_ROOT_IMAGE AS tpl_toolchain_intersect_geosx_toolchain -ARG SRC_DIR - -# All the environment variables defined in this Dockerfile -# (GEOSX_TPL_DIR but also compiler information like CC, CXX...) -# are part of the image contract (otherwise ARG is used). -# GEOSX use them so consider modifying their names with care. -# -# The installation directory is provided as a docker build argument. -# We forward it using an environment variable. -ARG INSTALL_DIR -ENV GEOSX_TPL_DIR=$INSTALL_DIR - -# The same distribution and Dockerfile can be used for the 8, 9 and 10 version of the GNU compilers. -# The GCC_MAJOR_VERSION argument is here to parametrise (--build-arg) the build from the `docker build` command line. -# Note that docker seems to forget about the ARGs after each FROM statement. -# This is why we repeat it below. -ARG GCC_MAJOR_VERSION - -# Do not apt-get upgrade (ask the maintainer if you really think something should be upgraded) -RUN apt-get update - -# tzdata blocks the installation by interactively asking for the time zone. -# DEBIAN_FRONTEND and TZ variables fix this. 
-RUN DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ - apt-get install -y --no-install-recommends \ -# gfortran 8, 9 and 10 depend on libgfortran5. - gcc-$GCC_MAJOR_VERSION \ - g++-$GCC_MAJOR_VERSION \ - gfortran-$GCC_MAJOR_VERSION \ - libgfortran5 \ -# Several scientific (or close) libraries. -# Note the difference between runtime and development packages. - ca-certificates \ - curl \ - libtbb2 \ - libblas-dev \ - liblapack-dev \ - zlib1g-dev \ - openmpi-bin \ - libopenmpi-dev \ -# Some of the TPL's make "extensive" use of python in their build. -# And we want to test GEOSX's python configuration script. -# Unfortunately argparse (standard library's package used by GEOSX) -# is not in the python-minimal package so we install the whole std lib. - python3 \ - python3-pip \ - python3-sphinx \ - python3-dev \ - doxygen \ - pkg-config \ - xz-utils \ - unzip \ - libmpfr-dev \ - lbzip2 \ - bzip2 \ - gnupg \ - virtualenv - -# Install clingo for Spack -RUN python3 -m pip install --upgrade pip && \ - python3 -m pip install clingo - -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-cmake.sh - -ENV CC=/usr/bin/gcc-$GCC_MAJOR_VERSION \ - CXX=/usr/bin/g++-$GCC_MAJOR_VERSION \ - MPICC=/usr/bin/mpicc \ - MPICXX=/usr/bin/mpicxx \ - MPIEXEC=/usr/bin/mpirun -# The multi-line definition of arguments does not seem happy -# when a variable uses the value of another variable previously defined on the same line. -ENV OMPI_CC=$CC \ - OMPI_CXX=$CXX - -# This stage is dedicated to TPLs uniquely. -# A multi-stage build patern will allow to extract what we need for the GEOSX build. -FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain -ARG SRC_DIR -ARG BLD_DIR -ARG SPEC - -# This is the version from the `docker build` command line. -# It is repeated because docker forgets about the ARGs after FROM statements. -ARG GCC_MAJOR_VERSION - -RUN apt-get install -y --no-install-recommends \ - libtbb-dev \ - make \ - bc \ - file \ -# GEOS patches some tpl. 
Remove when it's not the case anymore. - patch \ -# `ca-certificates` needed by `git` to download spack repo. - ca-certificates \ - git - - -# Run uberenv -# Have to create install directory first for uberenv -# -k flag is to ignore SSL errors -RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ - mkdir -p ${GEOSX_TPL_DIR} && \ - GEOSX_SPEC="${SPEC}" && \ - if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then GEOSX_SPEC="~pygeosx +docs %gcc-${GCC_MAJOR_VERSION}"; fi && \ - ./scripts/uberenv/uberenv.py \ - --spec "${GEOSX_SPEC}" \ - --spack-env-file=${SRC_DIR}/docker/spack.yaml \ - --project-json=${SRC_DIR}/.uberenv_config.json \ - --prefix ${GEOSX_TPL_DIR} \ - -k && \ -# Remove host-config generated for LvArray - rm lvarray* && \ -# Rename and copy spack-generated host-config to root directory - cp *.cmake /spack-generated.cmake && \ -# Remove extraneous spack files - cd ${GEOSX_TPL_DIR} && \ - rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ - -# Last step is setting everything for a complete slave that will build GEOSX. -FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain -ARG SRC_DIR - -# I extract the deployed TPLs from the TPL building stqge. -COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR - -# Extract the generated host-config -COPY --from=tpl_toolchain /spack-generated.cmake / - -# Any tool specific to building GEOSX shall be installed in this stage. -RUN DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ - apt-get install -y --no-install-recommends \ - openssh-client \ -# `ca-certificates` is needed by `sccache` to download the cached compilations. 
- ca-certificates \ - curl \ - python3 \ - texlive \ - texlive-latex-extra \ - graphviz \ - libxml2-utils \ - git \ - ghostscript \ - ninja-build - -# Install `sccache` binaries to speed up the build of `geos` -RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh -ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/tpl-ubuntu.Dockerfile b/docker/tpl-ubuntu.Dockerfile new file mode 100644 index 00000000..367092a3 --- /dev/null +++ b/docker/tpl-ubuntu.Dockerfile @@ -0,0 +1,169 @@ +# TPL build Dockerfile for Ubuntu-based images. +# +# This Dockerfile expects DOCKER_BASE_IMAGE to point at one of the geosx/ubuntu:* +# images produced by https://github.com/GEOS-DEV/docker_base_images. Those images +# already provide: +# * the toolchain (gcc or clang) under /opt/compiler/bin/, with CC/CXX/FC set +# * cmake (under /usr/local) +# * the upstream NVIDIA CUDA toolkit when DOCKER_BASE_IMAGE is a CUDA variant +# +# This file is intentionally agnostic of compiler vendor and CUDA-or-not: those +# choices are baked into DOCKER_BASE_IMAGE. The matrix in +# .github/workflows/docker_build_tpls.yml selects the right base image for each +# build. + +# Temporary local variables dedicated to the TPL build +ARG TMP_DIR=/tmp +ARG SRC_DIR=$TMP_DIR/thirdPartyLibs +ARG BLD_DIR=$TMP_DIR/build + +ARG DOCKER_BASE_IMAGE=ubuntu:24.04 +FROM ${DOCKER_BASE_IMAGE} AS tpl_toolchain_intersect_geosx_toolchain +ARG SRC_DIR +ARG CLANG_VERSION + +# Install directory provided as a docker build argument; forwarded via ENV +# (GEOSX_TPL_DIR is part of the image contract consumed by GEOS). +ARG INSTALL_DIR +ENV GEOSX_TPL_DIR=$INSTALL_DIR + +# Packages needed both for the TPL build and for the downstream GEOS build. +# We avoid reinstalling anything already present in the base image (compiler, +# cmake, doxygen, blas/lapack-dev when included by base PACKAGES, etc.). 
+RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ + apt-get install -y --no-install-recommends \ + ca-certificates \ + libtbb12 \ + libgfortran5 \ + zlib1g-dev \ + openmpi-bin \ + libopenmpi-dev \ + python3-pip \ + python3-sphinx \ + python3-dev \ + python3-venv \ + python3-virtualenv \ + pkg-config \ + xz-utils \ + unzip \ + libmpfr-dev \ + lbzip2 \ + bzip2 \ + gnupg && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +# Install clingo for Spack. Do not upgrade Ubuntu's Debian-managed pip in +# place; Ubuntu 24.04's pip package cannot be uninstalled by pip. +RUN python3 -m pip install --break-system-packages clingo + +# MPI environment. CC/CXX/FC come from the base image. +ENV MPICC=/usr/bin/mpicc \ + MPICXX=/usr/bin/mpicxx \ + MPIEXEC=/usr/bin/mpirun +ENV OMPI_CC=${CC} \ + OMPI_CXX=${CXX} + +# For clang-based base images: +# 1) install a matching OpenMP runtime (libomp) +# 2) retarget OpenMPI wrappers to clang/clang++ +RUN if echo "${CC}" | grep -q "clang"; then \ + CLANG_MAJOR="${CLANG_VERSION:-}" ; \ + if [ -z "${CLANG_MAJOR}" ]; then \ + CLANG_MAJOR="$(echo "${CC:-}" | sed -nE 's|.*clang-([0-9]+).*|\\1|p')" ; \ + fi ; \ + if [ -z "${CLANG_MAJOR}" ] && command -v clang >/dev/null 2>&1; then \ + CLANG_MAJOR="$(clang --version | sed -nE '1s/.*version ([0-9]+).*/\\1/p')" ; \ + fi ; \ + apt-get update ; \ + if [ -n "${CLANG_MAJOR}" ]; then \ + DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ + apt-get install -y --no-install-recommends "libomp-${CLANG_MAJOR}-dev" || \ + (apt-get update && DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ + apt-get install -y --no-install-recommends libomp-dev) ; \ + else \ + DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ + apt-get install -y --no-install-recommends libomp-dev ; \ + fi ; \ + apt-get clean && rm -rf /var/lib/apt/lists/* && \ + for f in /usr/share/openmpi/mpicc-wrapper-data.txt /usr/share/openmpi/mpicc.openmpi-wrapper-data.txt; do \ + if [ -f "${f}" ]; then sed 
-i "s|^compiler=.*$|compiler=${CC}|" "${f}" ; fi ; \ + done && \ + for f in /usr/share/openmpi/mpic++-wrapper-data.txt /usr/share/openmpi/mpic++.openmpi-wrapper-data.txt /usr/share/openmpi/mpicxx-wrapper-data.txt /usr/share/openmpi/mpicxx.openmpi-wrapper-data.txt /usr/share/openmpi/mpiCC-wrapper-data.txt /usr/share/openmpi/mpiCC.openmpi-wrapper-data.txt; do \ + if [ -f "${f}" ]; then sed -i "s|^compiler=.*$|compiler=${CXX}|" "${f}" ; fi ; \ + done && \ + mpicc --showme:command && \ + mpic++ --showme:command ; \ + fi + +# ----- TPL build stage ----- +FROM tpl_toolchain_intersect_geosx_toolchain AS tpl_toolchain +ARG SRC_DIR +ARG BLD_DIR +ARG SPEC + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ + apt-get install -y --no-install-recommends \ + libtbb-dev \ + bc \ + file \ + patch \ + git \ + autoconf \ + automake \ + libtool \ + libtool-bin \ + m4 && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +# Run uberenv. The SPEC is supplied by the matrix because the spack toolchain +# tag depends on the compiler+version baked into the base image. 
+RUN --mount=src=.,dst=$SRC_DIR,readwrite cd ${SRC_DIR} && \ + mkdir -p ${GEOSX_TPL_DIR} && \ + GEOSX_SPEC="${SPEC}" && \ + if [ -z "${GEOSX_SPEC}" ] || [ "${GEOSX_SPEC}" = "undefined" ]; then \ + echo "ERROR: SPEC build-arg must be supplied" >&2 ; \ + exit 1 ; \ + fi && \ + GEOSX_SPACK_ENV_FILE=${SRC_DIR}/docker/ubuntu-spack.yaml && \ + if echo "${CC:-}" | grep -q "clang"; then \ + GEOSX_SPACK_ENV_FILE=/tmp/geosx-spack.yaml && \ + cp ${SRC_DIR}/docker/ubuntu-spack.yaml ${GEOSX_SPACK_ENV_FILE} && \ + sed -i -E "s/gcc@([0-9]+) languages:='c,c\\+\\+,fortran'/gcc@\\1 languages:='fortran'/g" ${GEOSX_SPACK_ENV_FILE} && \ + sed -i -E '/c: \/usr\/bin\/gcc-[0-9]+/d; /cxx: \/usr\/bin\/g\+\+-[0-9]+/d' ${GEOSX_SPACK_ENV_FILE} ; \ + fi && \ + ./scripts/uberenv/uberenv.py \ + --spec "${GEOSX_SPEC}" \ + --spack-env-file=${GEOSX_SPACK_ENV_FILE} \ + --project-json=${SRC_DIR}/.uberenv_config.json \ + --prefix ${GEOSX_TPL_DIR} \ + -k && \ + rm -f lvarray* && \ + cp *.cmake /spack-generated.cmake && \ + cd ${GEOSX_TPL_DIR} && \ + rm -rf bin/ build_stage/ builtin_spack_packages_repo/ misc_cache/ spack/ spack_env/ .spack-db/ + +# ----- Final GEOS-build image ----- +FROM tpl_toolchain_intersect_geosx_toolchain AS geosx_toolchain +ARG SRC_DIR +COPY --from=tpl_toolchain $GEOSX_TPL_DIR $GEOSX_TPL_DIR +COPY --from=tpl_toolchain /spack-generated.cmake / + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive TZ=America/Los_Angeles \ + apt-get install -y --no-install-recommends \ + openssh-client \ + git \ + graphviz \ + libxml2-utils \ + ninja-build \ + python3-mpi4py \ + python3-scipy \ + python3-matplotlib \ + python3-pytest && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +# Install sccache to speed up downstream GEOS builds +RUN --mount=src=.,dst=$SRC_DIR $SRC_DIR/docker/install-sccache.sh +ENV SCCACHE=/opt/sccache/bin/sccache diff --git a/docker/ubuntu-spack.yaml b/docker/ubuntu-spack.yaml new file mode 100644 index 00000000..e27514bf --- /dev/null +++ 
b/docker/ubuntu-spack.yaml @@ -0,0 +1,307 @@ +spack: + config: + install_tree: + root: $spack/.. + projections: + all: '{compiler.name}-{compiler.version}/{name}-{version}-{hash}' + misc_cache: $spack/../misc_cache + test_stage: $spack/../test_stage + build_stage:: + - $spack/../build_stage + + # Regular TPLs do not need views + view: false + + # Include shared variants and versions + include: + - ../defaults.yaml + - ../versions.yaml + + # --------------------------------------------------------------------------- + # Toolchains + # + # Aligned with the geosx/ubuntu:* base images produced by + # https://github.com/GEOS-DEV/docker_base_images. Each base image installs + # exactly one of these toolchains; the matrix in + # .github/workflows/docker_build_tpls.yml picks the matching '%' + # selector via the SPEC build-arg. + # --------------------------------------------------------------------------- + toolchains: + clang-19: + - spec: '%[virtuals=c]llvm@19+clang~flang~lld~lldb' + when: '%c' + - spec: '%[virtuals=cxx]llvm@19+clang~flang~lld~lldb' + when: '%cxx' + - spec: '%[virtuals=fortran]gcc@13' + when: '%fortran' + - spec: '%openmpi' + when: '%mpi' + clang-20: + - spec: cxxflags='-pthread' + - spec: cflags='-pthread' + - spec: '%[virtuals=c]llvm@20+clang~flang~lld~lldb' + when: '%c' + - spec: '%[virtuals=cxx]llvm@20+clang~flang~lld~lldb' + when: '%cxx' + - spec: '%[virtuals=fortran]gcc@13' + when: '%fortran' + - spec: '%openmpi' + when: '%mpi' + clang-22: + - spec: cxxflags='-pthread' + - spec: cflags='-pthread' + - spec: '%[virtuals=c]llvm@22+clang~flang~lld~lldb' + when: '%c' + - spec: '%[virtuals=cxx]llvm@22+clang~flang~lld~lldb' + when: '%cxx' + - spec: '%[virtuals=fortran]gcc@13' + when: '%fortran' + - spec: '%openmpi' + when: '%mpi' + gcc-12: + - spec: cxxflags='-pthread' + - spec: cflags='-pthread' + - spec: '%c=gcc@12' + when: '%c' + - spec: '%cxx=gcc@12' + when: '%cxx' + - spec: '%fortran=gcc@12' + when: '%fortran' + - spec: '%openmpi %gcc@12' + when: 
'%mpi' + gcc-13: + - spec: cxxflags='-pthread' + - spec: cflags='-pthread' + - spec: '%c=gcc@13' + when: '%c' + - spec: '%cxx=gcc@13' + when: '%cxx' + - spec: '%fortran=gcc@13' + when: '%fortran' + - spec: '%openmpi %gcc@13' + when: '%mpi' + gcc-14: + - spec: cxxflags='-pthread' + - spec: cflags='-pthread' + - spec: '%c=gcc@14' + when: '%c' + - spec: '%cxx=gcc@14' + when: '%cxx' + - spec: '%fortran=gcc@14' + when: '%fortran' + - spec: '%openmpi %gcc@14' + when: '%mpi' + gcc-15: + - spec: cxxflags='-pthread' + - spec: cflags='-pthread' + - spec: '%c=gcc@15' + when: '%c' + - spec: '%cxx=gcc@15' + when: '%cxx' + - spec: '%fortran=gcc@15' + when: '%fortran' + - spec: '%openmpi %gcc@15' + when: '%mpi' + + packages: + all: + target: [x86_64] + + mpi: + require: + - openmpi@4.1.6 + + zlib-api: + require: + - zlib + + blas: + require: + - "netlib-lapack" + lapack: + require: + - "netlib-lapack" + + # ---- Compilers (point at the actual binaries in /usr/bin) ---- + llvm: + buildable: false + externals: + - spec: llvm@19+clang~flang~lld~lldb + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/clang-19 + cxx: /usr/bin/clang++-19 + - spec: llvm@20+clang~flang~lld~lldb + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/clang-20 + cxx: /usr/bin/clang++-20 + - spec: llvm@22+clang~flang~lld~lldb + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/clang-22 + cxx: /usr/bin/clang++-22 + + gcc: + buildable: false + externals: + - spec: gcc@12 languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc-12 + cxx: /usr/bin/g++-12 + fortran: /usr/bin/gfortran-12 + - spec: gcc@13 languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc-13 + cxx: /usr/bin/g++-13 + fortran: /usr/bin/gfortran-13 + - spec: gcc@14 languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc-14 + cxx: /usr/bin/g++-14 + fortran: /usr/bin/gfortran-14 + - spec: gcc@15 
languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc-15 + cxx: /usr/bin/g++-15 + fortran: /usr/bin/gfortran-15 + + + doxygen: + buildable: false + externals: + - spec: doxygen@1.8.20 + prefix: /usr + + autoconf: + version: [2.71] + buildable: false + externals: + - spec: autoconf@2.71 + prefix: /usr + automake: + version: [1.16.5] + buildable: false + externals: + - spec: automake@1.16.5 + prefix: /usr + libtool: + version: [2.4.7] + buildable: false + externals: + - spec: libtool@2.4.7 + prefix: /usr + cmake: + version: [3.31.9] + buildable: false + externals: + - spec: cmake@3.31.9 + prefix: /usr/local + cuda: + buildable: false + externals: + - spec: cuda@12.9.1 +allow-unsupported-compilers + prefix: /usr/local/cuda + - spec: cuda@13.2.1 +allow-unsupported-compilers + prefix: /usr/local/cuda + + findutils: + version: [4.9.0] + buildable: false + externals: + - spec: findutils@4.9.0 + prefix: /usr + m4: + buildable: false + externals: + - spec: m4@1.4.18 + prefix: /usr + mpfr: + buildable: false + externals: + - spec: mpfr@4.2.0 + prefix: /usr + netlib-lapack: + buildable: false + externals: + - spec: netlib-lapack@3.12.0 + prefix: /usr + netlib-blas: + buildable: false + externals: + - spec: netlib-blas@3.12.0 + prefix: /usr + # ---- OpenMPI: same prefix for every compiler binding (Ubuntu 24.04 ships one) + openmpi: + buildable: false + externals: + - spec: openmpi@4.1.6 %clang@19 + prefix: /usr + - spec: openmpi@4.1.6 %clang@20 + prefix: /usr + - spec: openmpi@4.1.6 %clang@22 + prefix: /usr + - spec: openmpi@4.1.6 %gcc@12 + prefix: /usr + - spec: openmpi@4.1.6 %gcc@13 + prefix: /usr + - spec: openmpi@4.1.6 %gcc@14 + prefix: /usr + - spec: openmpi@4.1.6 %gcc@15 + prefix: /usr + perl: + buildable: false + externals: + - spec: perl@5.38.2 + prefix: /usr + pkg-config: + buildable: false + externals: + - spec: pkg-config@0.29.2 + prefix: /usr + py-sphinx: + buildable: false + externals: + - spec: py-sphinx@7.4.7 + prefix: /usr 
+ python: + buildable: false + externals: + - spec: python@3.12 + prefix: /usr + readline: + buildable: false + externals: + - spec: readline@8.2 + prefix: /usr + tar: + buildable: false + externals: + - spec: tar@1.35 + prefix: /usr + unzip: + buildable: false + externals: + - spec: unzip@6.0 + prefix: /usr + xz: + buildable: false + externals: + - spec: xz@5.6.1 + prefix: /usr + zlib: + buildable: false + externals: + - spec: zlib@1.3 + prefix: /usr diff --git a/docker/ubuntu20-clang-cuda-spack.yaml b/docker/ubuntu20-clang-cuda-spack.yaml deleted file mode 100644 index 29d26056..00000000 --- a/docker/ubuntu20-clang-cuda-spack.yaml +++ /dev/null @@ -1,167 +0,0 @@ -spack: - config: - install_tree: - root: $spack/.. - projections: - all: '{compiler.name}-{compiler.version}/{name}-{version}-{hash}' - misc_cache: $spack/../misc_cache - test_stage: $spack/../test_stage - build_stage:: - - $spack/../build_stage - - # Regular TPLs do not need views - view: false - - # Include shared variants and versions - include: - - ../defaults.yaml - - ../versions.yaml - - # Ubuntu 20.04 clang/cuda image only has clang/llvm 10.x available. - # Keep this environment llvm10-only to avoid concretizer conflicts with llvm@15. 
- toolchains: - clang-10: - - spec: '%[virtuals=c]llvm@10.0.0+clang~flang~lld~lldb openmp=project' - when: '%c' - - spec: '%[virtuals=cxx]llvm@10.0.0+clang~flang~lld~lldb openmp=project' - when: '%cxx' - - spec: '%[virtuals=fortran]gcc@9.4.0' - when: '%fortran' - - spec: '%openmpi' - when: '%mpi' - - packages: - all: - target: [x86_64] - - mpi: - require: - - openmpi@4.0.3 - - zlib-api: - require: - - zlib - - blas: - require: - - "netlib-lapack" - lapack: - require: - - "netlib-lapack" - - llvm: - externals: - - spec: llvm@10.0.0+clang~flang~lld~lldb openmp=project - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/clang - cxx: /usr/bin/clang++ - - gcc: - externals: - - spec: gcc@9.4.0 languages:='c,c++,fortran' - prefix: /usr - extra_attributes: - compilers: - c: /usr/bin/gcc-9 - cxx: /usr/bin/g++-9 - fortran: /usr/bin/gfortran-9 - - # CUDA comes from the nvidia base image. - cuda: - buildable: False - externals: - - spec: cuda@11.8.0 +allow-unsupported-compilers - prefix: /usr/local/cuda - - # Use system OpenMPI, but tag it as llvm@10-built so it can satisfy %clang-10 DAGs. - # (Spack does not validate the actual compiler used for system packages.) 
- openmpi: - buildable: false - externals: - - spec: openmpi@4.0.3 %llvm@10.0.0 - prefix: /usr - - autoconf: - version: [2.71] - buildable: false - externals: - - spec: autoconf@2.71 - prefix: /usr - automake: - version: [1.16.5] - buildable: false - externals: - - spec: automake@1.16.5 - prefix: /usr - cmake: - version: [3.28.3] - buildable: false - externals: - - spec: cmake@3.28.3 - prefix: /usr/local - - findutils: - version: [4.7.0] - buildable: false - externals: - - spec: findutils@4.7.0 - prefix: /usr - m4: - buildable: false - externals: - - spec: m4@1.4.18 - prefix: /usr - mpfr: - buildable: false - externals: - - spec: mpfr@6.0.2 - prefix: /usr - - netlib-lapack: - buildable: false - externals: - - spec: netlib-lapack@3.10.0 - prefix: /usr - netlib-blas: - buildable: false - externals: - - spec: netlib-blas@3.10.0 - prefix: /usr - - perl: - buildable: false - externals: - - spec: perl@5.30.0 - prefix: /usr - pkg-config: - buildable: false - externals: - - spec: pkg-config@0.29.1 - prefix: /usr - python: - buildable: false - externals: - - spec: python@3.8.10 - prefix: /usr - tar: - buildable: false - externals: - - spec: tar@1.30 - prefix: /usr - unzip: - buildable: false - externals: - - spec: unzip@6.0 - prefix: /usr - xz: - buildable: false - externals: - - spec: xz@5.2.4 - prefix: /usr - zlib: - buildable: false - externals: - - spec: zlib@1.2.11 - prefix: /usr diff --git a/scripts/docker-build.sh b/scripts/docker-build.sh index 8cd56c39..9ab6a3f5 100644 --- a/scripts/docker-build.sh +++ b/scripts/docker-build.sh @@ -8,27 +8,40 @@ echo .git > .dockerignore git submodule update --init scripts/uberenv -# This script will build an image from TPL_DOCKERFILE -# with (optional) DOCKER_COMPILER_BUILD_ARG build arguments. -# This image will be tagged with the DOCKER_REPOSITORY:DOCKER_TAG tag +# This script will build an image from TPL_DOCKERFILE. 
+# The new TPL Dockerfiles (docker/tpl-ubuntu.Dockerfile, +# docker/tpl-rockylinux.Dockerfile) layer on top of one of the geosx/* +# base images produced by https://github.com/GEOS-DEV/docker_base_images. The +# matrix in .github/workflows/docker_build_tpls.yml selects which base image +# (DOCKER_BASE_IMAGE) and which spack toolchain (SPEC) to use. +# +# This image will be tagged with the DOCKER_REPOSITORY:DOCKER_TAG tag. # A specific host-config file can be defined through variable HOST_CONFIG. -# For the case of Total cluster only, DOCKER_ROOT_IMAGE is used to define docker base image. -# Where the TPL are installed in the docker can be specified by parameter INSTALL_DIR. -# These variables shall be defined by the "yaml derived classes" in a stage prior to `script` stage. +# Where the TPLs are installed in the docker can be specified by parameter +# INSTALL_DIR. echo "Docker tag is ${DOCKER_REPOSITORY}:${DOCKER_TAG}" INSTALL_DIR=${INSTALL_DIR_ROOT}/GEOS_TPL-${DOCKER_TAG}-${COMMIT:0:7} echo "Installation directory is ${INSTALL_DIR}" +echo "Docker base image is ${DOCKER_BASE_IMAGE}" -docker build --progress=plain ${DOCKER_COMPILER_BUILD_ARG} \ ---build-arg HOST_CONFIG=${HOST_CONFIG} \ ---build-arg DOCKER_ROOT_IMAGE=${DOCKER_ROOT_IMAGE} \ ---build-arg INSTALL_DIR=${INSTALL_DIR} \ ---build-arg SPEC="${SPEC}" \ ---tag ${DOCKER_REPOSITORY}:${DOCKER_TAG} \ ---file ${TPL_DOCKERFILE} \ ---label "org.opencontainers.image.created=$(date --rfc-3339=seconds)" \ ---label "org.opencontainers.image.source=https://github.com/GEOS-DEV/thirdPartyLibs" \ ---label "org.opencontainers.image.revision=${COMMIT}" \ ---label "org.opencontainers.image.title=Building environment for GEOS" \ -. +# Optional build-args are only forwarded when set, so the Dockerfiles can rely +# on `[ -z "${ARG}" ]` checks. 
+EXTRA_BUILD_ARGS=() +if [ -n "${GCC_VERSION}" ]; then EXTRA_BUILD_ARGS+=(--build-arg "GCC_VERSION=${GCC_VERSION}"); fi +if [ -n "${CLANG_VERSION}" ]; then EXTRA_BUILD_ARGS+=(--build-arg "CLANG_VERSION=${CLANG_VERSION}"); fi + +docker build --progress=plain \ + --build-arg HOST_CONFIG=${HOST_CONFIG} \ + --build-arg DOCKER_BASE_IMAGE=${DOCKER_BASE_IMAGE} \ + --build-arg INSTALL_DIR=${INSTALL_DIR} \ + --build-arg SPEC="${SPEC}" \ + "${EXTRA_BUILD_ARGS[@]}" \ + --tag ${DOCKER_REPOSITORY}:${DOCKER_TAG} \ + --file ${TPL_DOCKERFILE} \ + --label "org.opencontainers.image.created=$(date --rfc-3339=seconds)" \ + --label "org.opencontainers.image.source=https://github.com/GEOS-DEV/thirdPartyLibs" \ + --label "org.opencontainers.image.revision=${COMMIT}" \ + --label "org.opencontainers.image.base.name=${DOCKER_BASE_IMAGE}" \ + --label "org.opencontainers.image.title=Building environment for GEOS" \ + . diff --git a/scripts/setupLC-TPL-uberenv.bash b/scripts/setupLC-TPL-uberenv.bash index 454a10cc..e5f064ae 100755 --- a/scripts/setupLC-TPL-uberenv.bash +++ b/scripts/setupLC-TPL-uberenv.bash @@ -91,22 +91,25 @@ function launch_jobs() { ALLOC_CMD="srun -N 1 --exclusive -t 60 -A vortex" "${UBERENV_HELPER}" "$INSTALL_DIR" dane gcc-12 "+docs %%gcc-12 ${COMMON}" "${ALLOC_CMD}" "$@" & "${UBERENV_HELPER}" "$INSTALL_DIR" dane gcc-13 "+docs %%gcc-13 ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" dane gcc-13-hypredrive "+docs +hypredrive %%gcc-13 ${COMMON}" "${ALLOC_CMD}" "$@" & "${UBERENV_HELPER}" "$INSTALL_DIR" dane llvm-14 "+docs %%clang-14 ${COMMON}" "${ALLOC_CMD}" "$@" & "${UBERENV_HELPER}" "$INSTALL_DIR" dane llvm-19 "+docs %%clang-19 ${COMMON}" "${ALLOC_CMD}" "$@" & ;; matrix) ALLOC_CMD="srun -N 1 --exclusive -t 60 -A vortex" - "${UBERENV_HELPER}" "$INSTALL_DIR" matrix gcc-12-cuda-12.6 "+cuda~uncrustify cuda_arch=90 %%gcc-12 ^cuda@12.6.0+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & - "${UBERENV_HELPER}" "$INSTALL_DIR" matrix gcc-13-cuda-12.9 
"+cuda~uncrustify cuda_arch=90 %%gcc-13 ^cuda@12.9.1+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & - "${UBERENV_HELPER}" "$INSTALL_DIR" matrix llvm-14-cuda-12.6 "+cuda~uncrustify cuda_arch=90 %%clang-14 ^cuda@12.6.0+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & - "${UBERENV_HELPER}" "$INSTALL_DIR" matrix llvm-19-cuda-12.9 "+cuda~uncrustify cuda_arch=90 %%clang-19 ^cuda@12.9.1+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" matrix gcc-12-cuda-12.6 "+cuda ~uncrustify cuda_arch=90 %%gcc-12 ^cuda@12.6.0+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" matrix gcc-13-cuda-12.9 "+cuda ~uncrustify cuda_arch=90 %%gcc-13 ^cuda@12.9.1+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" matrix gcc-13-cuda-12.9-hypredrive "+cuda ~uncrustify +hypredrive cuda_arch=90 %%gcc-13 ^cuda@12.9.1+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" matrix llvm-14-cuda-12.6 "+cuda ~uncrustify cuda_arch=90 %%clang-14 ^cuda@12.6.0+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" matrix llvm-19-cuda-12.9 "+cuda ~uncrustify cuda_arch=90 %%clang-19 ^cuda@12.9.1+allow-unsupported-compilers ${COMMON}" "${ALLOC_CMD}" "$@" & ;; tuo|tuolumne) ALLOC_CMD="salloc -N 1 --exclusive -p pdebug -t 60 -A vortex" "${UBERENV_HELPER}" "$INSTALL_DIR" tuolumne cce-20-rocm-6.4.3 "+rocm~pygeosx~trilinos~petsc~docs amdgpu_target=gfx942 %%cce-20 ${COMMON}" "${ALLOC_CMD}" "$@" & "${UBERENV_HELPER}" "$INSTALL_DIR" tuolumne llvm-amdgpu-6.4.3 "+rocm~pygeosx~trilinos~petsc~docs amdgpu_target=gfx942 %%llvm-amdgpu_6_4_3 ${COMMON}" "${ALLOC_CMD}" "$@" & + "${UBERENV_HELPER}" "$INSTALL_DIR" tuolumne llvm-amdgpu-6.4.3-hypredrive "+rocm~pygeosx~trilinos~petsc~docs+hypredrive amdgpu_target=gfx942 %%llvm-amdgpu_6_4_3 ${COMMON}" "${ALLOC_CMD}" "$@" & ;; *) diff 
--git a/scripts/spack_packages/packages/geosx/package.py b/scripts/spack_packages/packages/geosx/package.py index 538b6087..c7997d6a 100644 --- a/scripts/spack_packages/packages/geosx/package.py +++ b/scripts/spack_packages/packages/geosx/package.py @@ -464,9 +464,12 @@ def geos_hostconfig(self, spec, prefix, py_site_pkgs_dir=None): cmake_cuda_flags += ' -Xcompiler ' + compilerArg if not spec.satisfies('cuda_arch=none'): - cuda_arch = spec.variants['cuda_arch'].value - cmake_cuda_flags += ' -arch sm_{0}'.format(cuda_arch[0]) - cfg.write(cmake_cache_string('CMAKE_CUDA_ARCHITECTURES', cuda_arch[0])) + cuda_arches = [str(arch) for arch in spec.variants['cuda_arch'].value] + for cuda_arch in cuda_arches: + cmake_cuda_flags += ( + ' -gencode arch=compute_{0},code=sm_{0}'.format(cuda_arch) + ) + cfg.write(cmake_cache_string('CMAKE_CUDA_ARCHITECTURES', ';'.join(cuda_arches))) cfg.write(cmake_cache_string('CMAKE_CUDA_FLAGS', cmake_cuda_flags)) @@ -795,9 +798,12 @@ def lvarray_hostconfig(self, spec, prefix, py_site_pkgs_dir=None): cmake_cuda_flags += ' -Xcompiler ' + compilerArg if not spec.satisfies('cuda_arch=none'): - cuda_arch = spec.variants['cuda_arch'].value - cmake_cuda_flags += ' -arch sm_{0}'.format(cuda_arch[0]) - cfg.write(cmake_cache_string('CMAKE_CUDA_ARCHITECTURES', cuda_arch[0])) + cuda_arches = [str(arch) for arch in spec.variants['cuda_arch'].value] + for cuda_arch in cuda_arches: + cmake_cuda_flags += ( + ' -gencode arch=compute_{0},code=sm_{0}'.format(cuda_arch) + ) + cfg.write(cmake_cache_string('CMAKE_CUDA_ARCHITECTURES', ';'.join(cuda_arches))) cfg.write(cmake_cache_string('CMAKE_CUDA_FLAGS', cmake_cuda_flags))