diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..f644e0e --- /dev/null +++ b/.editorconfig @@ -0,0 +1,15 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[{*.json,*.yaml,*.yml}] +indent_size = 2 + +[{README.rst,docs/*.rst}] +indent_size = unset diff --git a/.github/workflows/_prep-release.yaml b/.github/workflows/_prep-release.yaml new file mode 100644 index 0000000..4248bb8 --- /dev/null +++ b/.github/workflows/_prep-release.yaml @@ -0,0 +1,31 @@ +name: "✨ Prep release" + +on: + workflow_dispatch: + inputs: + version: + description: "The new version" + type: "string" + required: true + +jobs: + prep-release: + name: "Prep release v${{ inputs.version }}" + + permissions: + contents: "write" + pull-requests: "write" + + strategy: + matrix: + include: + - tox-label-create-changes: "prep-release" + branch-name: "release/$VERSION" + commit-title: "Update project metadata" + pr-base: "releases" + pr-title: "Release v$VERSION" + + uses: "./.github/workflows/create-pr.yaml" + with: + config: "${{ toJSON(matrix) }}" + version: "${{ inputs.version }}" diff --git a/.github/workflows/_test.yaml b/.github/workflows/_test.yaml new file mode 100644 index 0000000..bf8fb3c --- /dev/null +++ b/.github/workflows/_test.yaml @@ -0,0 +1,40 @@ +name: "🧪 Test" + +on: + pull_request: + types: + # These are the default on:pull_request types. + - "opened" + - "reopened" + - "synchronize" + # Release automation opens PRs as drafts without triggering CI; + # clicking "Ready for review" in the UI will trigger test runs. 
+ - "ready_for_review" + push: + branches: + - "main" + - "releases" + +permissions: + contents: "read" + +jobs: + test: + name: "${{ matrix.name }}" + strategy: + matrix: + include: + - name: "Linux" + runner: "ubuntu-24.04" + cpythons: + - "3.13" + cache-key-hash-files: + - "requirements/*/*.txt" + cache-paths: + - ".mypy_cache/" + tox-skip-environments: + - "coverage-html" + + uses: "./.github/workflows/tox.yaml" + with: + config: "${{ toJSON(matrix) }}" diff --git a/.github/workflows/build-python-package.jinja.yaml b/.github/workflows/build-python-package.jinja.yaml new file mode 100644 index 0000000..bd95793 --- /dev/null +++ b/.github/workflows/build-python-package.jinja.yaml @@ -0,0 +1,99 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + outputs: + artifact-id: + description: "The artifact ID that can be subsequently downloaded" + value: "${{ jobs.build.outputs.artifact-id }}" + packages-path: + description: "The path to the Python packages" + value: "${{ jobs.build.outputs.packages-path }}" + +env: + PYTHON_VERSION: "[[ PYTHON_VERSION ]]" + UV_VERSION: "[[ UV_VERSION ]]" + ARTIFACT_NAME: "build-python-package-${{ github.run_id }}" + PACKAGES_PATH: "./dist" + BUILD_REQUIREMENTS: | + [[ include_requirements("build") | indent(4) ]] + +jobs: + #[#- + # Halt execution if an attempt is made to run the template directly. + # This block is enclosed in a Jinja comment and will not be rendered. + halt: + name: "Halt" + runs-on: "ubuntu-slim" + steps: + - name: "Halt" + run: | + echo "::error::⚠️ Do not run the workflow template directly." + exit 1 + #]# + build: + #[#- + # The `needs` key is in a Jinja comment and will not be rendered. 
+ needs: ["halt"] + #]# + name: "Build" + runs-on: "ubuntu-24.04" + permissions: + contents: "read" + outputs: + artifact-id: "${{ steps.upload-packages.outputs.artifact-id }}" + packages-path: "${{ steps.packages-path.outputs.packages-path }}" + steps: + - name: "Setup Python" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + ref: "${{ github.sha }}" + persist-credentials: "false" + + - name: "Build the package" + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${BUILD_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + --module build --installer=uv --outdir="${PACKAGES_PATH}" + + - name: "Show checksums" + run: | + sha256sum "${PACKAGES_PATH}"/* + + - name: "Upload the built packages" + id: "upload-packages" + uses: "actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f" # v7.0.0 + with: + name: "${{ env.ARTIFACT_NAME }}" + path: "${{ env.PACKAGES_PATH }}" + if-no-files-found: "error" + retention-days: "1" + overwrite: "false" + + - name: "Create packages-path output" + id: "packages-path" + run: | + echo "packages-path=${PACKAGES_PATH}" >> "${GITHUB_OUTPUT}" diff --git a/.github/workflows/build-python-package.yaml b/.github/workflows/build-python-package.yaml new file mode 100644 index 0000000..3ccb553 --- /dev/null +++ b/.github/workflows/build-python-package.yaml @@ -0,0 +1,88 @@ +# DO NOT EDIT THIS FILE! EDIT 'build-python-package.jinja.yaml'. + +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + outputs: + artifact-id: + description: "The artifact ID that can be subsequently downloaded" + value: "${{ jobs.build.outputs.artifact-id }}" + packages-path: + description: "The path to the Python packages" + value: "${{ jobs.build.outputs.packages-path }}" + +env: + PYTHON_VERSION: "3.13" + UV_VERSION: "0.11.2" + ARTIFACT_NAME: "build-python-package-${{ github.run_id }}" + PACKAGES_PATH: "./dist" + BUILD_REQUIREMENTS: | + build==1.4.2 ; python_version == "3.13" + colorama==0.4.6 ; python_version == "3.13" and os_name == "nt" + packaging==26.0 ; python_version == "3.13" + pyproject-hooks==1.2.0 ; python_version == "3.13" + +jobs: + build: + name: "Build" + runs-on: "ubuntu-24.04" + permissions: + contents: "read" + outputs: + artifact-id: "${{ steps.upload-packages.outputs.artifact-id }}" + packages-path: "${{ steps.packages-path.outputs.packages-path }}" + steps: + - name: "Setup Python" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + ref: "${{ github.sha }}" + persist-credentials: "false" + + - name: "Build the package" + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${BUILD_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + --module build --installer=uv --outdir="${PACKAGES_PATH}" + + - name: "Show checksums" + run: | + sha256sum "${PACKAGES_PATH}"/* + + - name: 
"Upload the built packages" + id: "upload-packages" + uses: "actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f" # v7.0.0 + with: + name: "${{ env.ARTIFACT_NAME }}" + path: "${{ env.PACKAGES_PATH }}" + if-no-files-found: "error" + retention-days: "1" + overwrite: "false" + + - name: "Create packages-path output" + id: "packages-path" + run: | + echo "packages-path=${PACKAGES_PATH}" >> "${GITHUB_OUTPUT}" diff --git a/.github/workflows/create-pr.jinja.yaml b/.github/workflows/create-pr.jinja.yaml new file mode 100644 index 0000000..8904360 --- /dev/null +++ b/.github/workflows/create-pr.jinja.yaml @@ -0,0 +1,288 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + inputs: + config: + type: "string" + required: true + description: | + The configuration object. + version: + type: "string" + required: false + description: | + The version to use when creating the release. + + This must be a valid version specifier + (see PEP 440 -- https://peps.python.org/pep-0440/), + but the workflow itself doesn't currently validate the version. + + The version can be referenced in several configuration variables + by using the name `$VERSION`: + + * `branch-name` (example: `release/$VERSION`) + * `commit-title` (example: `Update metadata for v$VERSION`) + * `pr-title` (example: `Release v$VERSION`) + + The version will also be available as an environment variable when tox is run: + + ``` + poetry version "$VERSION" + ``` + + Remember to set the tox `pass_env` configuration value to pass `VERSION`. 
+ + +env: + PYTHON_VERSION: "[[ PYTHON_VERSION ]]" + UV_VERSION: "[[ UV_VERSION ]]" + PANDOC_VERSION: "3.8.3" + CHECK_JSONSCHEMA_REQUIREMENTS: | + [[ include_requirements("check_jsonschema") | indent(4) ]] + TOX_REQUIREMENTS: | + [[ include_requirements("tox") | indent(4) ]] + + # These values are used when a config value is not specified. + default-branch-from: "main" + default-branch-name: "updates" + default-commit-title: "Updates" + default-pr-base: "main" + default-pr-title: "Updates" + default-pr-body-input-format: "rst" + +jobs: + #[#- + # Halt execution if an attempt is made to run the template directly. + # This job is enclosed in a Jinja comment and will not be rendered. + halt: + name: "Halt" + runs-on: "ubuntu-slim" + steps: + - name: "Halt" + run: | + echo "::error::⚠️ Do not run the workflow template directly." + exit 1 + #]# + create-pr: + #[#- + # The `needs` key is in a Jinja comment and will not be rendered. + needs: ["halt"] + #]# + name: "create-pr" + runs-on: "ubuntu-24.04" + permissions: + contents: "write" + pull-requests: "write" + steps: + - name: "Export config" + id: "config-exporter" + shell: "bash" + # Loading the input from an environment variable avoids injection attacks. + env: + inputs_config: "${{ inputs.config }}" + run: | + echo "$inputs_config" > ".create-pr-config.raw.json" + + - name: "Setup Python for base requirements" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + # If a previous workflow run successfully validated an identical config object, + # a cache hit is sufficient to demonstrate that no further validation is required. 
+ - name: "Check if raw config is already validated" + id: "lookup-config-cache" + uses: "actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # v5.0.3 + with: + lookup-only: true + path: ".create-pr-config.raw.json" + key: "create-pr-config-${{ hashFiles('.create-pr-config.raw.json') }}" + + - name: "Write config schema" + if: "${{ steps.lookup-config-cache.outputs.cache-hit == false }}" + shell: "bash" + env: + CONFIG_SCHEMA: | + [[ include_file("config-schema.json") | indent(12) ]] + run: | + echo "${CONFIG_SCHEMA}" > "${RUNNER_TEMP}/config-schema.json" + + - name: "Validate the raw config against the schema" + if: "${{ steps.lookup-config-cache.outputs.cache-hit == false }}" + shell: "bash" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${CHECK_JSONSCHEMA_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + check-jsonschema --schemafile "${RUNNER_TEMP}/config-schema.json" ".create-pr-config.raw.json" + + - name: "Create a 'config-is-validated' cache key" + if: "${{ steps.lookup-config-cache.outputs.cache-hit == false }}" + uses: "actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # v5.0.3 + with: + path: ".create-pr-config.raw.json" + key: "${{ steps.lookup-config-cache.outputs.cache-primary-key }}" + + - name: "Install pandoc" + env: + PANDOC_DEB_URL: "https://github.com/jgm/pandoc/releases/download/${{ env.PANDOC_VERSION }}/pandoc-${{ env.PANDOC_VERSION }}-1-amd64.deb" + run: | + wget -nv -O pandoc.deb "${PANDOC_DEB_URL}" + sudo dpkg -i ./pandoc.deb + rm ./pandoc.deb + + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + persist-credentials: "false" + ref: "${{ fromJSON(inputs.config).branch-from || env.default-branch-from }}" + fetch-depth: 0 + + - name: "Create a virtual environment" + shell: "bash" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${TOX_REQUIREMENTS}" > 
"${REQUIREMENTS_PATH}" + + uv venv --no-project --no-managed-python .venv + echo "*" > ".venv/.gitignore" + uv pip install --no-managed-python --directory=.venv --requirements="${REQUIREMENTS_PATH}" + + - name: "Setup the requested Python version" + if: "${{ fromJSON(inputs.config).python-version != '' && fromJSON(inputs.config).python-version != env.PYTHON_VERSION }}" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ fromJSON(inputs.config).python-version }}" + allow-prereleases: true + + - name: "Generate changes" + env: + PR_BODY: "${{ fromJSON(inputs.config).pr-body }}" + PR_BODY_OUTPUT_PATH: "${{ runner.temp }}/pr-body-fragment.txt" + VERSION: "${{ inputs.version }}" + TOX_LABEL: "${{ fromJSON(inputs.config).tox-label-create-changes }}" + run: | + .venv/bin/tox run --colored yes -m "${TOX_LABEL}" + + - name: "Setup Python for commit generation" + if: "${{ fromJSON(inputs.config).python-version != '' && fromJSON(inputs.config).python-version != env.PYTHON_VERSION }}" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ env.PYTHON_VERSION }}" + allow-prereleases: true + + - name: "Generate commit request body" + shell: "python" + env: + VERSION: "${{ inputs.version }}" + BRANCH_NAME: "${{ fromJSON(inputs.config).branch-name || env.default-branch-name }}" + COMMIT_TITLE: "${{ fromJSON(inputs.config).commit-title || env.default-commit-title }}" + OUTPUT_FILE: "${{ runner.temp }}/graphql-input.json" + run: | + [[ include_file("create_commit_request_body.py") | indent(10) ]] + + - name: "Compute versioned variables" + shell: "python" + env: + VERSION: "${{ inputs.version }}" + DEFAULT_BRANCH_NAME: "${{ env.default-branch-name }}" + BRANCH_NAME: "${{ fromJSON(inputs.config).branch-name }}" + DEFAULT_PR_TITLE: "${{ env.default-pr-title }}" + PR_TITLE: "${{ 
fromJSON(inputs.config).pr-title }}" + # Creates new environment variables: + # + # * COMPUTED_BRANCH_NAME + # * COMPUTED_PR_TITLE + # + run: | + [[ include_file("compute_versioned_variables.py") | indent(10) ]] + + - name: "Push a new branch" + env: + GH_TOKEN: "${{ github.token }}" + COMPUTED_BRANCH_NAME: "${{ env.COMPUTED_BRANCH_NAME }}" + run: | + # `gh api` is required because `git` has no permissions. + + # Create the branch on the server. + gh api \ + --method POST \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2026-03-10" \ + "/repos/${GITHUB_REPOSITORY}/git/refs" \ + --field "ref=refs/heads/${COMPUTED_BRANCH_NAME}" \ + --field "sha=${GITHUB_SHA}" + + # Push a new commit to the branch. + gh api graphql --input "${RUNNER_TEMP}/graphql-input.json" + + - name: "Generate the PR body" + env: + PR_BODY: "${{ fromJSON(inputs.config).pr-body }}" + PR_BODY_PATH: "${{ runner.temp }}/pr-body-fragment.txt" + PR_BODY_INPUT_FORMAT: "${{ fromJSON(inputs.config).pr-body-input-format || env.default-pr-body-input-format }}" + VERSION: "${{ inputs.version }}" + run: | + # If a static PR body was provided, use it. + # This unconditionally overwrites any PR body + # that might have been created when tox ran above. + if [ ! -z "${PR_BODY}" ]; then + echo "${PR_BODY}" > "${PR_BODY_PATH}" + fi + + # If a PR body file was not yet generated, create a blank one. + if [ ! -f "${PR_BODY_PATH}" ]; then + touch "${PR_BODY_PATH}" + fi + + # Convert the content to GitHub-formatted Markdown. + pandoc \ + --from "${PR_BODY_INPUT_FORMAT}" \ + --to gfm \ + --wrap preserve \ + --shift-heading-level-by 1 \ + "${PR_BODY_PATH}" \ + --output "${RUNNER_TEMP}/pr-body-fragment.gfm" + + # If a version was provided, augment the PR body with the version as its header. + if [ ! 
-z "${VERSION}" ]; then + echo "# ${VERSION}" > "${RUNNER_TEMP}/pr-body.gfm" + fi + + cat "${RUNNER_TEMP}/pr-body-fragment.gfm" >> "${RUNNER_TEMP}/pr-body.gfm" + + - name: "Create a PR" + env: + GH_TOKEN: "${{ github.token }}" + COMPUTED_BRANCH_NAME: "${{ env.COMPUTED_BRANCH_NAME }}" + COMPUTED_PR_TITLE: "${{ env.COMPUTED_PR_TITLE }}" + PR_BASE: "${{ fromJSON(inputs.config).pr-base || env.default-pr-base }}" + run: | + gh pr create \ + --draft \ + --head "${COMPUTED_BRANCH_NAME}" \ + --base "${PR_BASE}" \ + --title "${COMPUTED_PR_TITLE}" \ + --body-file "${RUNNER_TEMP}/pr-body.gfm" diff --git a/.github/workflows/create-pr.yaml b/.github/workflows/create-pr.yaml new file mode 100644 index 0000000..73ac048 --- /dev/null +++ b/.github/workflows/create-pr.yaml @@ -0,0 +1,539 @@ +# DO NOT EDIT THIS FILE! EDIT 'create-pr.jinja.yaml'. + +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + inputs: + config: + type: "string" + required: true + description: | + The configuration object. + version: + type: "string" + required: false + description: | + The version to use when creating the release. + + This must be a valid version specifier + (see PEP 440 -- https://peps.python.org/pep-0440/), + but the workflow itself doesn't currently validate the version. + + The version can be referenced in several configuration variables + by using the name `$VERSION`: + + * `branch-name` (example: `release/$VERSION`) + * `commit-title` (example: `Update metadata for v$VERSION`) + * `pr-title` (example: `Release v$VERSION`) + + The version will also be available as an environment variable when tox is run: + + ``` + poetry version "$VERSION" + ``` + + Remember to set the tox `pass_env` configuration value to pass `VERSION`. 
+ + +env: + PYTHON_VERSION: "3.13" + UV_VERSION: "0.11.2" + PANDOC_VERSION: "3.8.3" + CHECK_JSONSCHEMA_REQUIREMENTS: | + attrs==26.1.0 ; python_version == "3.13" + certifi==2026.2.25 ; python_version == "3.13" + charset-normalizer==3.4.6 ; python_version == "3.13" + check-jsonschema==0.37.1 ; python_version == "3.13" + click==8.3.1 ; python_version == "3.13" + colorama==0.4.6 ; python_version == "3.13" and platform_system == "Windows" + idna==3.11 ; python_version == "3.13" + jsonschema-specifications==2025.9.1 ; python_version == "3.13" + jsonschema==4.26.0 ; python_version == "3.13" + referencing==0.37.0 ; python_version == "3.13" + regress==2025.10.1 ; python_version == "3.13" + requests==2.33.0 ; python_version == "3.13" + rpds-py==0.30.0 ; python_version == "3.13" + ruamel-yaml==0.19.1 ; python_version == "3.13" + urllib3==2.6.3 ; python_version == "3.13" + TOX_REQUIREMENTS: | + cachetools==7.0.5 ; python_version == "3.13" + colorama==0.4.6 ; python_version == "3.13" + distlib==0.4.0 ; python_version == "3.13" + filelock==3.25.2 ; python_version == "3.13" + packaging==26.0 ; python_version == "3.13" + platformdirs==4.9.4 ; python_version == "3.13" + pluggy==1.6.0 ; python_version == "3.13" + pyproject-api==1.10.0 ; python_version == "3.13" + python-discovery==1.2.1 ; python_version == "3.13" + tomli-w==1.2.0 ; python_version == "3.13" + tox-gh==1.7.1 ; python_version == "3.13" + tox-uv-bare==1.33.4 ; python_version == "3.13" + tox-uv==1.33.4 ; python_version == "3.13" + tox==4.51.0 ; python_version == "3.13" + uv==0.11.2 ; python_version == "3.13" + virtualenv==21.2.0 ; python_version == "3.13" + + # These values are used when a config value is not specified. 
+ default-branch-from: "main" + default-branch-name: "updates" + default-commit-title: "Updates" + default-pr-base: "main" + default-pr-title: "Updates" + default-pr-body-input-format: "rst" + +jobs: + create-pr: + name: "create-pr" + runs-on: "ubuntu-24.04" + permissions: + contents: "write" + pull-requests: "write" + steps: + - name: "Export config" + id: "config-exporter" + shell: "bash" + # Loading the input from an environment variable avoids injection attacks. + env: + inputs_config: "${{ inputs.config }}" + run: | + echo "$inputs_config" > ".create-pr-config.raw.json" + + - name: "Setup Python for base requirements" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + # If a previous workflow run successfully validated an identical config object, + # a cache hit is sufficient to demonstrate that no further validation is required. 
+ - name: "Check if raw config is already validated" + id: "lookup-config-cache" + uses: "actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # v5.0.3 + with: + lookup-only: true + path: ".create-pr-config.raw.json" + key: "create-pr-config-${{ hashFiles('.create-pr-config.raw.json') }}" + + - name: "Write config schema" + if: "${{ steps.lookup-config-cache.outputs.cache-hit == false }}" + shell: "bash" + env: + CONFIG_SCHEMA: | + { + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "required": [ + "tox-label-create-changes" + ], + "properties": { + "python-version": { + "description": "The Python version to use when running tox.", + "type": "string", + "default": "3.13" + }, + "tox-label-create-changes": { + "description": "The tox label (passed using the `-m` argument) to run before creating the commit. Two environment variables will be set: `VERSION` and `PR_BODY_OUTPUT_PATH`. After running tox, all file changes shown in `git status` will be included in the commit.", + "type": "string", + "minLength": 1, + "examples": [ + "update", + "prep-release" + ] + }, + "checkout-branch": { + "description": "The name of the branch to initially checkout.", + "type": "string", + "minLength": 1, + "default": "main" + }, + "branch-name": { + "description": "The name of the branch to create. If a `version` input is passed to the workflow, it can be referenced as `$VERSION`.", + "type": "string", + "minLength": 1, + "default": "updates", + "examples": [ + "release/$VERSION" + ] + }, + "commit-title": { + "description": "The one-line commit message to use.", + "type": "string", + "minLength": 1, + "default": "Updates" + }, + "pr-base": { + "description": "The name of the branch to merge to in the PR. This appears as the 'base' in the GitHub UI.", + "type": "string", + "minLength": 1, + "default": "main" + }, + "pr-title": { + "description": "The title of the PR. 
If a `version` input is passed to the workflow, it can be referenced as `$VERSION`.", + "type": "string", + "minLength": 1, + "default": "Updates", + "examples": [ + "Release v$VERSION" + ] + }, + "pr-body": { + "description": "The body of the PR. If a `version` input is passed to the workflow, it can be referenced as `$VERSION`.", + "type": "string", + "minLength": 1, + "default": "", + "examples": [ + "Scheduled updates." + ] + }, + "pr-body-input-format": { + "description": "The format of the PR body fragment generated by the tox label that prepares releases. The value must exactly match the `--from` argument values that pandoc supports, but currently only 'gfm' and 'rst' are allowed.", + "type": "string", + "enum": [ + "gfm", + "rst" + ], + "default": "rst" + } + } + } + run: | + echo "${CONFIG_SCHEMA}" > "${RUNNER_TEMP}/config-schema.json" + + - name: "Validate the raw config against the schema" + if: "${{ steps.lookup-config-cache.outputs.cache-hit == false }}" + shell: "bash" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${CHECK_JSONSCHEMA_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + check-jsonschema --schemafile "${RUNNER_TEMP}/config-schema.json" ".create-pr-config.raw.json" + + - name: "Create a 'config-is-validated' cache key" + if: "${{ steps.lookup-config-cache.outputs.cache-hit == false }}" + uses: "actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # v5.0.3 + with: + path: ".create-pr-config.raw.json" + key: "${{ steps.lookup-config-cache.outputs.cache-primary-key }}" + + - name: "Install pandoc" + env: + PANDOC_DEB_URL: "https://github.com/jgm/pandoc/releases/download/${{ env.PANDOC_VERSION }}/pandoc-${{ env.PANDOC_VERSION }}-1-amd64.deb" + run: | + wget -nv -O pandoc.deb "${PANDOC_DEB_URL}" + sudo dpkg -i ./pandoc.deb + rm ./pandoc.deb + + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" 
# v6.0.2 + with: + persist-credentials: "false" + ref: "${{ fromJSON(inputs.config).branch-from || env.default-branch-from }}" + fetch-depth: 0 + + - name: "Create a virtual environment" + shell: "bash" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${TOX_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv venv --no-project --no-managed-python .venv + echo "*" > ".venv/.gitignore" + uv pip install --no-managed-python --directory=.venv --requirements="${REQUIREMENTS_PATH}" + + - name: "Setup the requested Python version" + if: "${{ fromJSON(inputs.config).python-version != '' && fromJSON(inputs.config).python-version != env.PYTHON_VERSION }}" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ fromJSON(inputs.config).python-version }}" + allow-prereleases: true + + - name: "Generate changes" + env: + PR_BODY: "${{ fromJSON(inputs.config).pr-body }}" + PR_BODY_OUTPUT_PATH: "${{ runner.temp }}/pr-body-fragment.txt" + VERSION: "${{ inputs.version }}" + TOX_LABEL: "${{ fromJSON(inputs.config).tox-label-create-changes }}" + run: | + .venv/bin/tox run --colored yes -m "${TOX_LABEL}" + + - name: "Setup Python for commit generation" + if: "${{ fromJSON(inputs.config).python-version != '' && fromJSON(inputs.config).python-version != env.PYTHON_VERSION }}" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ env.PYTHON_VERSION }}" + allow-prereleases: true + + - name: "Generate commit request body" + shell: "python" + env: + VERSION: "${{ inputs.version }}" + BRANCH_NAME: "${{ fromJSON(inputs.config).branch-name || env.default-branch-name }}" + COMMIT_TITLE: "${{ fromJSON(inputs.config).commit-title || env.default-commit-title }}" + OUTPUT_FILE: "${{ runner.temp }}/graphql-input.json" + run: | + # This file is a part of the Globus GitHub Workflows project. 
+ # https://github.com/globus/workflows + # Copyright 2021-2026 Globus + # Copyright 2024-2026 Kurt McKee + # SPDX-License-Identifier: MIT + + import base64 + import json + import os + import pathlib + import re + import subprocess + import sys + import typing + + RC_SUCCESS = 0 + RC_FAILURE = 1 + + mandatory_environment_variables = { + "BRANCH_NAME", + "COMMIT_TITLE", + "GITHUB_REPOSITORY", + "GITHUB_SHA", + "OUTPUT_FILE", + } + + + def main() -> int: + # Ensure mandatory environment variables are present. + if missing_keys := (mandatory_environment_variables - os.environ.keys()): + for missing_key in missing_keys: + print(f"`{missing_key}` is a mandatory environment variable.") + return RC_FAILURE + + # Calculate file changes (and exit if there are none). + file_changes = calculate_file_changes() + if not file_changes: + print("No file changes detected.") + return RC_FAILURE + + request_body = generate_request_body(file_changes) + + output_file = os.environ["OUTPUT_FILE"] + if output_file == "-": + print(json.dumps(request_body, indent=2)) + else: + with open(output_file, "w") as file: + file.write(json.dumps(request_body)) + + return RC_SUCCESS + + + def generate_request_body(file_changes: dict[str, typing.Any]) -> dict[str, typing.Any]: + query = """ + mutation ($input:CreateCommitOnBranchInput!) 
{ + createCommitOnBranch(input: $input) { + commit { oid } + } + } + """ + + return { + "query": " ".join(query.split()), + "variables": { + "input": { + "branch": { + "branchName": inject_version(os.environ["BRANCH_NAME"]), + "repositoryNameWithOwner": os.environ["GITHUB_REPOSITORY"], + }, + "expectedHeadOid": os.environ["GITHUB_SHA"], + "fileChanges": file_changes, + "message": { + "headline": inject_version(os.environ["COMMIT_TITLE"]), + }, + }, + }, + } + + + def inject_version(text: str) -> str: + version = os.getenv("VERSION") or "VERSION_NOT_FOUND" + return re.sub(r"\$version", version, text, flags=re.I) + + + def calculate_file_changes() -> dict[str, list[dict[str, str]]]: + cmd = "git status --no-renames --porcelain" + + additions: list[dict[str, str]] = [] + deletions: list[dict[str, str]] = [] + + for line in subprocess.check_output(cmd.split()).decode().splitlines(): + path = pathlib.Path(line[3:]) + + target = deletions + info = {"path": path.as_posix()} + if path.is_file(): + target = additions + info["contents"] = base64.b64encode(path.read_bytes()).decode() + target.append(info) + + file_changes = {} + if additions: + file_changes["additions"] = additions + if deletions: + file_changes["deletions"] = deletions + return file_changes + + + if __name__ == "__main__": + sys.exit(main()) + + - name: "Compute versioned variables" + shell: "python" + env: + VERSION: "${{ inputs.version }}" + DEFAULT_BRANCH_NAME: "${{ env.default-branch-name }}" + BRANCH_NAME: "${{ fromJSON(inputs.config).branch-name }}" + DEFAULT_PR_TITLE: "${{ env.default-pr-title }}" + PR_TITLE: "${{ fromJSON(inputs.config).pr-title }}" + # Creates new environment variables: + # + # * COMPUTED_BRANCH_NAME + # * COMPUTED_PR_TITLE + # + run: | + # This file is a part of the Globus GitHub Workflows project. 
+ # https://github.com/globus/workflows + # Copyright 2021-2026 Globus + # Copyright 2024-2026 Kurt McKee + # SPDX-License-Identifier: MIT + + import os + import sys + + RC_SUCCESS = 0 + RC_FAILURE = 1 + + mandatory_environment_variables = { + "BRANCH_NAME", + "DEFAULT_BRANCH_NAME", + "GITHUB_ENV", + "VERSION", + } + + + def main() -> int: + # Ensure mandatory environment variables are present. + if missing_keys := (mandatory_environment_variables - os.environ.keys()): + for missing_key in missing_keys: + print(f"`{missing_key}` is a mandatory environment variable.") + return RC_FAILURE + + # Branch name + branch_name = os.environ["BRANCH_NAME"] + if branch_name: + version = os.environ["VERSION"] + computed_branch_name = branch_name.replace("$VERSION", version) + else: + computed_branch_name = os.environ["DEFAULT_BRANCH_NAME"] + with open(os.environ["GITHUB_ENV"], "a") as file: + file.write(f"COMPUTED_BRANCH_NAME={computed_branch_name}\n") + + # PR title + pr_title = os.environ["PR_TITLE"] + if pr_title: + version = os.environ["VERSION"] + computed_pr_title = pr_title.replace("$VERSION", version) + else: + computed_pr_title = os.environ["DEFAULT_PR_TITLE"] + with open(os.environ["GITHUB_ENV"], "a") as file: + file.write(f"COMPUTED_PR_TITLE={computed_pr_title}\n") + + return RC_SUCCESS + + + if __name__ == "__main__": + sys.exit(main()) + + - name: "Push a new branch" + env: + GH_TOKEN: "${{ github.token }}" + COMPUTED_BRANCH_NAME: "${{ env.COMPUTED_BRANCH_NAME }}" + run: | + # `gh api` is required because `git` has no permissions. + + # Create the branch on the server. + gh api \ + --method POST \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2026-03-10" \ + "/repos/${GITHUB_REPOSITORY}/git/refs" \ + --field "ref=refs/heads/${COMPUTED_BRANCH_NAME}" \ + --field "sha=${GITHUB_SHA}" + + # Push a new commit to the branch. 
+ gh api graphql --input "${RUNNER_TEMP}/graphql-input.json" + + - name: "Generate the PR body" + env: + PR_BODY: "${{ fromJSON(inputs.config).pr-body }}" + PR_BODY_PATH: "${{ runner.temp }}/pr-body-fragment.txt" + PR_BODY_INPUT_FORMAT: "${{ fromJSON(inputs.config).pr-body-input-format || env.default-pr-body-input-format }}" + VERSION: "${{ inputs.version }}" + run: | + # If a static PR body was provided, use it. + # This unconditionally overwrites any PR body + # that might have been created when tox ran above. + if [ ! -z "${PR_BODY}" ]; then + echo "${PR_BODY}" > "${PR_BODY_PATH}" + fi + + # If a PR body file was not yet generated, create a blank one. + if [ ! -f "${PR_BODY_PATH}" ]; then + touch "${PR_BODY_PATH}" + fi + + # Convert the content to GitHub-formatted Markdown. + pandoc \ + --from "${PR_BODY_INPUT_FORMAT}" \ + --to gfm \ + --wrap preserve \ + --shift-heading-level-by 1 \ + "${PR_BODY_PATH}" \ + --output "${RUNNER_TEMP}/pr-body-fragment.gfm" + + # If a version was provided, augment the PR body with the version as its header. + if [ ! -z "${VERSION}" ]; then + echo "# ${VERSION}" > "${RUNNER_TEMP}/pr-body.gfm" + fi + + cat "${RUNNER_TEMP}/pr-body-fragment.gfm" >> "${RUNNER_TEMP}/pr-body.gfm" + + - name: "Create a PR" + env: + GH_TOKEN: "${{ github.token }}" + COMPUTED_BRANCH_NAME: "${{ env.COMPUTED_BRANCH_NAME }}" + COMPUTED_PR_TITLE: "${{ env.COMPUTED_PR_TITLE }}" + PR_BASE: "${{ fromJSON(inputs.config).pr-base || env.default-pr-base }}" + run: | + gh pr create \ + --draft \ + --head "${COMPUTED_BRANCH_NAME}" \ + --base "${PR_BASE}" \ + --title "${COMPUTED_PR_TITLE}" \ + --body-file "${RUNNER_TEMP}/pr-body.gfm" diff --git a/.github/workflows/create-tag-and-release.jinja.yaml b/.github/workflows/create-tag-and-release.jinja.yaml new file mode 100644 index 0000000..ca50280 --- /dev/null +++ b/.github/workflows/create-tag-and-release.jinja.yaml @@ -0,0 +1,199 @@ +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + outputs: + project-version: + description: "The detected project version" + value: "${{ jobs.tag.outputs.project-version }}" + tag-name: + description: "The name of the created tag" + value: "${{ jobs.tag.outputs.tag-name }}" + +env: + PYTHON_VERSION: "[[ PYTHON_VERSION ]]" + UV_VERSION: "[[ UV_VERSION ]]" + PANDOC_VERSION: "3.8.3" + SCRIV_REQUIREMENTS: | + [[ include_requirements("scriv") | indent(4) ]] + +jobs: + #[#- + # Halt execution if an attempt is made to run the template directly. + # This block is enclosed in a Jinja comment and will not be rendered. + halt: + name: "Halt" + runs-on: "ubuntu-slim" + steps: + - name: "Halt" + run: | + echo "::error::⚠️ Do not run the workflow template directly." + exit 1 + #]# + tag: + #[#- + # The `needs` key is in a Jinja comment and will not be rendered. + needs: ["halt"] + #]# + name: "Tag" + runs-on: "ubuntu-24.04" + permissions: + contents: "write" + outputs: + project-version: "${{ steps.get-tag-name.outputs.project-version }}" + tag-name: "${{ steps.get-tag-name.outputs.tag-name }}" + steps: + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + ref: "${{ github.sha }}" + fetch-depth: "0" + fetch-tags: "true" + persist-credentials: "false" + + - name: "Setup Python" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + - name: "Get tag name" + id: "get-tag-name" + # Creates new environment variables: + # + # * TAG_NAME (version with a "v" prefix) + # + # Creates outputs: + # + # * project-version (exact 
string from `pyproject.toml`) + # * tag-name (project-version with a "v" prefix) + # + shell: "python" + run: | + [[ include_file("get_tag_name.py") | indent(10) ]] + + - name: "Verify version has no git tag conflicts" + id: "verify-repo-state" + # Creates outputs: + # + # * tag-exists + # + env: + GH_TOKEN: ${{ github.token }} + shell: "python" + run: | + [[ include_file("validate_repo_state.py") | indent(10) ]] + + - name: "Get GitHub Actions bot information" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + env: + GH_TOKEN: ${{ github.token }} + # Creates new environment variables: + # + # * BOT_NAME + # * BOT_EMAIL + # + run: | + # Get the Github Actions bot's email address in this environment. + # The email address on github.com is well-known, + # but the bot ID may differ on GHES instances. + BOT_NAME='github-actions[bot]' + + BOT_ID="$(gh api "/users/${BOT_NAME}" | jq --raw-output .id)" + BOT_EMAIL="${BOT_ID}+${BOT_NAME}@users.noreply.github.com" + + echo "BOT_NAME=${BOT_NAME}" >> "${GITHUB_ENV}" + echo "BOT_EMAIL=${BOT_EMAIL}" >> "${GITHUB_ENV}" + + - name: "Install pandoc" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + env: + PANDOC_DEB_URL: "https://github.com/jgm/pandoc/releases/download/${{ env.PANDOC_VERSION }}/pandoc-${{ env.PANDOC_VERSION }}-1-amd64.deb" + run: | + wget -nv -O pandoc.deb "${PANDOC_DEB_URL}" + sudo dpkg -i ./pandoc.deb + rm ./pandoc.deb + + - name: "Generate the annotated git tag content" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + # Creates new environment variables: + # + # * TAG_BODY_PATH + # + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${SCRIV_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + scriv print --version "${TAG_NAME}" >> "${RUNNER_TEMP}/changelog-fragment.rst" + + export TAG_BODY_PATH="${RUNNER_TEMP}/tag-body.gfm" + echo "TAG_BODY_PATH=${TAG_BODY_PATH}" >> "${GITHUB_ENV}" + + 
pandoc \ + --from=rst \ + --to=gfm \ + --shift-heading-level-by=1 \ + --wrap=preserve \ + --output="${TAG_BODY_PATH}" \ + "${RUNNER_TEMP}/changelog-fragment.rst" + + - name: "Create a git tag" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + env: + GH_TOKEN: "${{ github.token }}" + run: | + TAG_BODY="$(cat "${TAG_BODY_PATH}")" + export TAG_BODY + + TAG_OBJECT_SHA="$(gh api \ + --method POST \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "/repos/${GITHUB_REPOSITORY}/git/tags" \ + -f "type=commit" \ + -f "tag=${TAG_NAME}" \ + -f "message=${TAG_BODY}" \ + -f "object=${GITHUB_SHA}" \ + -f "tagger[name]=${BOT_NAME}" \ + -f "tagger[email]=${BOT_EMAIL}" \ + | jq -r '.sha' \ + )" + export TAG_OBJECT_SHA + + # Use `gh api` because `git` isn't configured with permissions. + gh api \ + --method POST \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "/repos/${GITHUB_REPOSITORY}/git/refs" \ + -f "ref=refs/tags/${TAG_NAME}" \ + -f "sha=${TAG_OBJECT_SHA}" + + git fetch --tags + + - name: "Create a GitHub release" + env: + GH_TOKEN: "${{ github.token }}" + run: | + if gh release view "${TAG_NAME}" 1>/dev/null 2>/dev/null; then + echo "Release ${TAG_NAME} exists." + else + gh release create "${TAG_NAME}" \ + --notes-from-tag \ + --target "${GITHUB_SHA}" \ + --title "${TAG_NAME}" + fi diff --git a/.github/workflows/create-tag-and-release.yaml b/.github/workflows/create-tag-and-release.yaml new file mode 100644 index 0000000..ca242a9 --- /dev/null +++ b/.github/workflows/create-tag-and-release.yaml @@ -0,0 +1,322 @@ +# DO NOT EDIT THIS FILE! EDIT 'create-tag-and-release.jinja.yaml'. + +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + outputs: + project-version: + description: "The detected project version" + value: "${{ jobs.tag.outputs.project-version }}" + tag-name: + description: "The name of the created tag" + value: "${{ jobs.tag.outputs.tag-name }}" + +env: + PYTHON_VERSION: "3.13" + UV_VERSION: "0.11.2" + PANDOC_VERSION: "3.8.3" + SCRIV_REQUIREMENTS: | + attrs==26.1.0 ; python_version == "3.13" + certifi==2026.2.25 ; python_version == "3.13" + charset-normalizer==3.4.6 ; python_version == "3.13" + click-log==0.4.0 ; python_version == "3.13" + click==8.3.1 ; python_version == "3.13" + colorama==0.4.6 ; python_version == "3.13" and platform_system == "Windows" + idna==3.11 ; python_version == "3.13" + jinja2==3.1.6 ; python_version == "3.13" + markdown-it-py==4.0.0 ; python_version == "3.13" + markupsafe==3.0.3 ; python_version == "3.13" + mdurl==0.1.2 ; python_version == "3.13" + requests==2.33.0 ; python_version == "3.13" + scriv==1.8.0 ; python_version == "3.13" + urllib3==2.6.3 ; python_version == "3.13" + +jobs: + tag: + name: "Tag" + runs-on: "ubuntu-24.04" + permissions: + contents: "write" + outputs: + project-version: "${{ steps.get-tag-name.outputs.project-version }}" + tag-name: "${{ steps.get-tag-name.outputs.tag-name }}" + steps: + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + ref: "${{ github.sha }}" + fetch-depth: "0" + fetch-tags: "true" + persist-credentials: "false" + + - name: "Setup Python" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + - name: "Get 
tag name" + id: "get-tag-name" + # Creates new environment variables: + # + # * TAG_NAME (version with a "v" prefix) + # + # Creates outputs: + # + # * project-version (exact string from `pyproject.toml`) + # * tag-name (project-version with a "v" prefix) + # + shell: "python" + run: | + # This file is a part of the Globus GitHub Workflows project. + # https://github.com/globus/workflows + # Copyright 2021-2026 Globus + # Copyright 2024-2026 Kurt McKee + # SPDX-License-Identifier: MIT + + import os + import pathlib + import tomllib + + + def main() -> None: + toml = tomllib.loads(pathlib.Path("pyproject.toml").read_text()) + version = toml["project"]["version"] + with open(os.environ["GITHUB_ENV"], "a") as file: + file.write(f"TAG_NAME=v{version}\n") + with open(os.environ["GITHUB_OUTPUT"], "a") as file: + file.write(f"project-version={version}\n") + file.write(f"tag-name=v{version}\n") + + + if __name__ == "__main__": + main() + + - name: "Verify version has no git tag conflicts" + id: "verify-repo-state" + # Creates outputs: + # + # * tag-exists + # + env: + GH_TOKEN: ${{ github.token }} + shell: "python" + run: | + # This file is a part of the Globus GitHub Workflows project. + # https://github.com/globus/workflows + # Copyright 2021-2026 Globus + # Copyright 2024-2026 Kurt McKee + # SPDX-License-Identifier: MIT + + import os + import subprocess + import sys + import typing as t + + + def main() -> None: + tag_name = os.environ["TAG_NAME"] + head_sha = _get_head_sha() + existing_tag_sha = _get_existing_tag_sha(tag_name) + if existing_tag_sha is None: + tag_exists = False + elif existing_tag_sha == head_sha: + tag_exists = True + else: + msg = ( + f"The {tag_name} tag commit SHA ({existing_tag_sha})" + f" doesn't match HEAD ({head_sha})." 
+ ) + exit_with_error(msg) + + with open(os.environ["GITHUB_OUTPUT"], "a") as file: + file.write(f"tag-exists={str(tag_exists).lower()}\n") + + + def _get_head_sha() -> str: + """Get the SHA of HEAD.""" + + _, stdout, _ = _run_command("git", "rev-parse", "HEAD") + return stdout.strip() + + + def _get_existing_tag_sha(tag_name: str) -> str | None: + """Validate the project version and git repo state are compatible. + + "Compatibility" is defined as one of: + + * The project version has no corresponding git tag ref. + * A git tag ref exists for the project version, + and its commit SHA matches the SHA currently checked out in HEAD. + """ + + # Check if a tag exists. + cmd = ("git", "rev-list", "-n", "1", f"tags/{tag_name}", "--") + rc, stdout, stderr = _run_command(*cmd) + if rc == 128 and "bad revision" in stderr: + # The tag doesn't exist locally. This is the expected case. + return None + + # The output must be a commit SHA. + tag_commit_sha = stdout.strip() + try: + int(tag_commit_sha, base=16) + except ValueError: + msg = "Something unexpected happened." + exit_with_error(msg, rc, stdout, stderr) + + # A git tag already exists. 
+ return tag_commit_sha + + + def _run_command(*args: str, timeout: int = 10) -> tuple[int, str, str]: + """Run a command.""" + + process = subprocess.Popen( + args=args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf-8", + ) + try: + stdout, stderr = process.communicate(timeout=timeout) + except subprocess.TimeoutExpired: + process.kill() + stdout, stderr = process.communicate() + + return process.returncode, stdout, stderr + + + def exit_with_error( + msg: str, + rc: int | None = None, + stdout: str | None = None, + stderr: str | None = None, + ) -> t.NoReturn: + print(f"::error::{msg}", file=sys.stderr) + if rc is not None: + print(f"Return code:\n{rc}", file=sys.stderr) + if stdout is not None: + print(f"STDOUT:\n{stdout}", file=sys.stderr) + if stderr is not None: + print(f"STDERR:\n{stderr}", file=sys.stderr) + raise SystemExit(1) + + + if __name__ == "__main__": + main() + + - name: "Get GitHub Actions bot information" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + env: + GH_TOKEN: ${{ github.token }} + # Creates new environment variables: + # + # * BOT_NAME + # * BOT_EMAIL + # + run: | + # Get the Github Actions bot's email address in this environment. + # The email address on github.com is well-known, + # but the bot ID may differ on GHES instances. 
+ BOT_NAME='github-actions[bot]' + + BOT_ID="$(gh api "/users/${BOT_NAME}" | jq --raw-output .id)" + BOT_EMAIL="${BOT_ID}+${BOT_NAME}@users.noreply.github.com" + + echo "BOT_NAME=${BOT_NAME}" >> "${GITHUB_ENV}" + echo "BOT_EMAIL=${BOT_EMAIL}" >> "${GITHUB_ENV}" + + - name: "Install pandoc" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + env: + PANDOC_DEB_URL: "https://github.com/jgm/pandoc/releases/download/${{ env.PANDOC_VERSION }}/pandoc-${{ env.PANDOC_VERSION }}-1-amd64.deb" + run: | + wget -nv -O pandoc.deb "${PANDOC_DEB_URL}" + sudo dpkg -i ./pandoc.deb + rm ./pandoc.deb + + - name: "Generate the annotated git tag content" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + # Creates new environment variables: + # + # * TAG_BODY_PATH + # + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${SCRIV_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + scriv print --version "${TAG_NAME}" >> "${RUNNER_TEMP}/changelog-fragment.rst" + + export TAG_BODY_PATH="${RUNNER_TEMP}/tag-body.gfm" + echo "TAG_BODY_PATH=${TAG_BODY_PATH}" >> "${GITHUB_ENV}" + + pandoc \ + --from=rst \ + --to=gfm \ + --shift-heading-level-by=1 \ + --wrap=preserve \ + --output="${TAG_BODY_PATH}" \ + "${RUNNER_TEMP}/changelog-fragment.rst" + + - name: "Create a git tag" + if: "steps.verify-repo-state.outputs.tag-exists == 'false'" + env: + GH_TOKEN: "${{ github.token }}" + run: | + TAG_BODY="$(cat "${TAG_BODY_PATH}")" + export TAG_BODY + + TAG_OBJECT_SHA="$(gh api \ + --method POST \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "/repos/${GITHUB_REPOSITORY}/git/tags" \ + -f "type=commit" \ + -f "tag=${TAG_NAME}" \ + -f "message=${TAG_BODY}" \ + -f "object=${GITHUB_SHA}" \ + -f "tagger[name]=${BOT_NAME}" \ + -f "tagger[email]=${BOT_EMAIL}" \ + | jq -r '.sha' \ + )" + export TAG_OBJECT_SHA + + # Use `gh api` because `git` isn't configured with 
permissions. + gh api \ + --method POST \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "/repos/${GITHUB_REPOSITORY}/git/refs" \ + -f "ref=refs/tags/${TAG_NAME}" \ + -f "sha=${TAG_OBJECT_SHA}" + + git fetch --tags + + - name: "Create a GitHub release" + env: + GH_TOKEN: "${{ github.token }}" + run: | + if gh release view "${TAG_NAME}" 1>/dev/null 2>/dev/null; then + echo "Release ${TAG_NAME} exists." + else + gh release create "${TAG_NAME}" \ + --notes-from-tag \ + --target "${GITHUB_SHA}" \ + --title "${TAG_NAME}" + fi diff --git a/.github/workflows/pr_has_changelog.yaml b/.github/workflows/pr_has_changelog.yaml index 5335363..26b7d71 100644 --- a/.github/workflows/pr_has_changelog.yaml +++ b/.github/workflows/pr_has_changelog.yaml @@ -1,3 +1,8 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# SPDX-License-Identifier: MIT + name: pr_has_changelog on: workflow_call: @@ -44,6 +49,7 @@ jobs: - uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 with: ref: ${{ github.event.pull_request.head.sha }} + persist-credentials: "false" fetch-depth: 0 # if the skip label is set, the job should succeed but this step should be skipped @@ -55,8 +61,11 @@ jobs: ) # TODO: this will currently accept edits to a README.md in the changelog.d/ dir # we should ideally exclude README.* or even a configurable pattern + env: + BASE_BRANCH: "${{ inputs.base-branch }}" + CHANGELOG_TYPE: "${{ inputs.changelog-type }}" run: | - news_files="$(git diff --name-only "$(git merge-base origin/${{ inputs.base-branch }} "$GITHUB_SHA")" "$GITHUB_SHA" -- changelog.d/*.${{ inputs.changelog-type }})" + news_files="$(git diff --name-only "$(git merge-base "origin/${BASE_BRANCH}" "$GITHUB_SHA")" "$GITHUB_SHA" -- changelog.d/*."${CHANGELOG_TYPE}")" if [ -n "$news_files" ]; then echo "Saw new files. 
changelog.d:" echo "$news_files" diff --git a/.github/workflows/tox.jinja.yaml b/.github/workflows/tox.jinja.yaml new file mode 100644 index 0000000..3145bf7 --- /dev/null +++ b/.github/workflows/tox.jinja.yaml @@ -0,0 +1,267 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +on: + workflow_call: + inputs: + config: + description: | + The configuration object. + required: true + type: "string" + +env: + # These variables lock application versions for reproducibility. + PYTHON_VERSION: "[[ PYTHON_VERSION ]]" + UV_VERSION: "[[ UV_VERSION ]]" + CHECK_JSONSCHEMA_REQUIREMENTS: | + [[ include_requirements("check_jsonschema") | indent(4) ]] + TOX_REQUIREMENTS: | + [[ include_requirements("tox") | indent(4) ]] + +jobs: + #[#- + # Halt execution if an attempt is made to run the template directly. + # This block is enclosed in a Jinja comment and will not be rendered. + halt: + name: "Halt" + runs-on: "ubuntu-slim" + steps: + - name: "Halt" + run: | + echo "::error::⚠️ Do not run the workflow template directly." + exit 1 + #]# + tox: + #[#- + # The `needs` key is in a Jinja comment and will not be rendered. + needs: ["halt"] + #]# + name: "tox" + runs-on: "${{ fromJSON(inputs.config).runner }}" + timeout-minutes: ${{ fromJSON(inputs.config).timeout-minutes || 15 }} + steps: + - name: "Export config" + id: "config-exporter" + shell: "bash" + # Loading the input from an environment variable avoids injection attacks. 
+ env: + inputs_config: "${{ inputs.config }}" + run: | + echo "$inputs_config" > ".tox-config.raw.json" + + - name: "Setup Python for tox config validation/transformation" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Install uv" + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 + with: + version: "${{ env.UV_VERSION }}" + enable-cache: "false" + ignore-empty-workdir: "true" + + # If a previous workflow run successfully validated an identical config object, + # a cache hit is sufficient to demonstrate that no further validation is required. + - name: "Check if raw tox config is already validated" + id: "lookup-config-cache" + uses: "actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # v5.0.3 + with: + lookup-only: true + path: ".tox-config.raw.json" + key: "config-${{ hashFiles('.tox-config.raw.json') }}" + + - name: "Write tox config schema" + if: "steps.lookup-config-cache.outputs.cache-hit == false" + shell: "bash" + env: + CONFIG_SCHEMA: | + [[ include_file("config-schema.json") | indent(12) ]] + run: | + echo "${CONFIG_SCHEMA}" > "${RUNNER_TEMP}/tox-schema.json" + + - name: "Validate the raw tox config against the schema" + if: "steps.lookup-config-cache.outputs.cache-hit == false" + shell: "bash" + # Environment variables: + # + # * REQUIREMENTS_PATH + # + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${CHECK_JSONSCHEMA_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + echo "REQUIREMENTS_PATH=${REQUIREMENTS_PATH}" >> "$GITHUB_ENV" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + check-jsonschema --schemafile "${RUNNER_TEMP}/tox-schema.json" --regex-variant python ".tox-config.raw.json" + + - name: "Create a 'config-is-validated' cache key" + if: "steps.lookup-config-cache.outputs.cache-hit == false" + uses: "actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # 
v5.0.3 + with: + path: ".tox-config.raw.json" + key: "${{ steps.lookup-config-cache.outputs.cache-primary-key }}" + + - name: "Transform tox config" + id: "config-transformer" + shell: "python" + # Environment variables: + # + # * TOX_CONFIG + # + run: | + [[ include_file("config_transformer.py") | indent(10) ]] + + - name: "Checkout the repository" + uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + persist-credentials: "false" + + - name: "Save the tox config to a file for cache-busting" + shell: "bash" + run: | + echo "${TOX_CONFIG}" > .tox-config.json + + - name: "Calculate additional checksums" + if: "fromJSON(env.TOX_CONFIG).cache-key-hash-files" + shell: "bash" + env: + FILE_PATTERNS: "${{ join(fromJSON(env.TOX_CONFIG).cache-key-hash-files, ' ') }}" + run: | + # shellcheck disable=SC2086 + for pattern in $FILE_PATTERNS; do + if ! ${{ runner.os == 'macOS' && 'shasum -a 1' || 'sha1sum' }} $pattern >> '.hash-files.sha'; then + echo "The cache-key-hash-files pattern '$pattern' matched nothing" + exit 1 + fi + done + cat .hash-files.sha + + - name: "Setup Pythons (required)" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ fromJSON(env.TOX_CONFIG).python-versions-required }}" + allow-prereleases: true + + - name: "Detect Pythons" + uses: "kurtmckee/detect-pythons@4a7b361b5ee27eb35c8b5026ac757d02751d6688" # v1.1.1 + + - name: "Restore cache" + id: "restore-cache" + uses: "actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306" # v5.0.3 + with: + path: | + .tox/ + .venv/ + ${{ fromJSON(env.TOX_CONFIG).cache-paths && join(fromJSON(env.TOX_CONFIG).cache-paths, '\n') }} + key: "${{ + format( + '{0}-os={1}-hash={2}', + fromJSON(env.TOX_CONFIG).cache-key-prefix || 'tox', + fromJSON(env.TOX_CONFIG).runner, + hashFiles( + '.python-identifiers', + '.tox-config.json', + 'tox.ini', + env.REQUIREMENTS_PATH, + 
fromJSON(env.TOX_CONFIG).cache-key-hash-files && '.hash-files.sha' || '' + ) + ) + }}" + + - name: "Identify .venv path" + shell: "bash" + run: | + echo 'VENV_PATH=.venv/${{ runner.os == 'Windows' && 'Scripts' || 'bin' }}' >> "$GITHUB_ENV" + + - name: "Create a virtual environment (Windows)" + if: "steps.restore-cache.outputs.cache-hit == false && runner.os == 'Windows'" + shell: "pwsh" + run: | + $REQUIREMENTS_PATH=New-TemporaryFile + Out-File -InputObject $env:TOX_REQUIREMENTS -FilePath $REQUIREMENTS_PATH + + uv venv --no-project --no-managed-python .venv + Out-File -InputObject "*" -FilePath .venv/.gitignore + uv pip install --no-managed-python --directory=.venv --requirements=$REQUIREMENTS_PATH --link-mode=copy + + - name: "Create a virtual environment (non-Windows)" + if: "steps.restore-cache.outputs.cache-hit == false && runner.os != 'Windows'" + shell: "bash" + run: | + REQUIREMENTS_PATH="$(mktemp)" + echo "${TOX_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv venv --no-project --no-managed-python .venv + echo "*" > ".venv/.gitignore" + uv pip install --no-managed-python --directory=.venv --requirements="${REQUIREMENTS_PATH}" + + - name: "Setup Pythons (requested)" + if: "fromJSON(env.TOX_CONFIG).python-versions-required != fromJSON(env.TOX_CONFIG).python-versions-requested" + uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 + env: + PIP_DISABLE_PIP_VERSION_CHECK: "1" + with: + python-version: "${{ fromJSON(env.TOX_CONFIG).python-versions-requested }}" + allow-prereleases: true + + - name: "Create the tox environments (Linux/macOS)" + if: "runner.os != 'Windows'" + shell: "bash" + env: + TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}" + TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && format('-e "{0}"', join(fromJSON(env.TOX_CONFIG).tox-environments, ',')) }} + run: | + if [ "${TOX_ENVIRONMENTS}" ]; then + "${VENV_PATH}/tox" run --colored=yes --notest "${TOX_ENVIRONMENTS}" + else + 
"${VENV_PATH}/tox" run --colored=yes --notest + fi + + - name: "Run tox (Linux/macOS)" + if: "runner.os != 'Windows'" + shell: "bash" + env: + TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}" + TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && format('-e "{0}"', join(fromJSON(env.TOX_CONFIG).tox-environments, ',')) }} + run: | + if [ "${TOX_ENVIRONMENT}" ]; then + "${VENV_PATH}/tox" run --colored=yes --no-provision --skip-pkg-install "${TOX_ENVIRONMENTS}" + else + "${VENV_PATH}/tox" run --colored=yes --no-provision --skip-pkg-install + fi + + - name: "Create the tox environments (Windows)" + if: "runner.os == 'Windows'" + shell: "pwsh" + env: + TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}" + TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && format('-e {0}', join(fromJSON(env.TOX_CONFIG).tox-environments, ',')) }} + run: | + if ($Env:TOX_ENVIRONMENTS) { + & "$Env:VENV_PATH/tox" run --colored=yes --notest "$Env:TOX_ENVIRONMENTS" + } else { + & "$Env:VENV_PATH/tox" run --colored=yes --notest + } + + - name: "Run tox (Windows)" + if: "runner.os == 'Windows'" + shell: "pwsh" + env: + TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}" + TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && format('-e {0}', join(fromJSON(env.TOX_CONFIG).tox-environments, ',')) }} + run: | + if ($Env:TOX_ENVIRONMENTS) { + & "$Env:VENV_PATH/tox" run --colored=yes --no-provision --skip-pkg-install "$Env:TOX_ENVIRONMENTS" + } else { + & "$Env:VENV_PATH/tox" run --colored=yes --no-provision --skip-pkg-install + } diff --git a/.github/workflows/tox.yaml b/.github/workflows/tox.yaml index 7f1ea01..974d36d 100644 --- a/.github/workflows/tox.yaml +++ b/.github/workflows/tox.yaml @@ -1,6 +1,9 @@ -# This file is a part of Kurt McKee's GitHub Workflows project. -# https://github.com/kurtmckee/github-workflows -# Copyright 2024-2025 Kurt McKee +# DO NOT EDIT THIS FILE! 
EDIT 'tox.jinja.yaml'. + +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee # SPDX-License-Identifier: MIT on: @@ -14,14 +17,47 @@ on: env: # These variables lock application versions for reproducibility. - CHECK_JSONSCHEMA_VERSION: "0.35.0" PYTHON_VERSION: "3.13" - UV_VERSION: "0.9.11" + UV_VERSION: "0.11.2" + CHECK_JSONSCHEMA_REQUIREMENTS: | + attrs==26.1.0 ; python_version == "3.13" + certifi==2026.2.25 ; python_version == "3.13" + charset-normalizer==3.4.6 ; python_version == "3.13" + check-jsonschema==0.37.1 ; python_version == "3.13" + click==8.3.1 ; python_version == "3.13" + colorama==0.4.6 ; python_version == "3.13" and platform_system == "Windows" + idna==3.11 ; python_version == "3.13" + jsonschema-specifications==2025.9.1 ; python_version == "3.13" + jsonschema==4.26.0 ; python_version == "3.13" + referencing==0.37.0 ; python_version == "3.13" + regress==2025.10.1 ; python_version == "3.13" + requests==2.33.0 ; python_version == "3.13" + rpds-py==0.30.0 ; python_version == "3.13" + ruamel-yaml==0.19.1 ; python_version == "3.13" + urllib3==2.6.3 ; python_version == "3.13" + TOX_REQUIREMENTS: | + cachetools==7.0.5 ; python_version == "3.13" + colorama==0.4.6 ; python_version == "3.13" + distlib==0.4.0 ; python_version == "3.13" + filelock==3.25.2 ; python_version == "3.13" + packaging==26.0 ; python_version == "3.13" + platformdirs==4.9.4 ; python_version == "3.13" + pluggy==1.6.0 ; python_version == "3.13" + pyproject-api==1.10.0 ; python_version == "3.13" + python-discovery==1.2.1 ; python_version == "3.13" + tomli-w==1.2.0 ; python_version == "3.13" + tox-gh==1.7.1 ; python_version == "3.13" + tox-uv-bare==1.33.4 ; python_version == "3.13" + tox-uv==1.33.4 ; python_version == "3.13" + tox==4.51.0 ; python_version == "3.13" + uv==0.11.2 ; python_version == "3.13" + virtualenv==21.2.0 ; python_version == "3.13" jobs: tox: name: "tox" 
runs-on: "${{ fromJSON(inputs.config).runner }}" + timeout-minutes: ${{ fromJSON(inputs.config).timeout-minutes || 15 }} steps: - name: "Export config" id: "config-exporter" @@ -38,10 +74,11 @@ jobs: python-version: "${{ env.PYTHON_VERSION }}" - name: "Install uv" - uses: "astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41" # v7.2.1 + uses: "astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867" # v7.1.6 with: version: "${{ env.UV_VERSION }}" - enable-cache: false + enable-cache: "false" + ignore-empty-workdir: "true" # If a previous workflow run successfully validated an identical config object, # a cache hit is sufficient to demonstrate that no further validation is required. @@ -57,11 +94,9 @@ jobs: if: "steps.lookup-config-cache.outputs.cache-hit == false" shell: "bash" env: - tox_schema: | - # START: tox-schema.json + CONFIG_SCHEMA: | { "$schema": "https://json-schema.org/draft-07/schema", - "description": "This file is a part of Kurt McKee's GitHub Workflows project.\nhttps://github.com/kurtmckee/github-workflows\nCopyright 2024-2025 Kurt McKee .\nSPDX-License-Identifier: MIT", "type": "object", "required": [ "runner" @@ -71,6 +106,10 @@ jobs: "type": "string", "minLength": 1 }, + "timeout-minutes": { + "type": "integer", + "minimum": 1 + }, "tox-environments": { "description": "A list of tox environments to run.", "type": "array", @@ -218,20 +257,26 @@ jobs: } ] } - # END: tox-schema.json run: | - # Due to the architecture of the source code synchronization code, - # the START and END lines in the JSON schema above must be removed. 
- echo "${tox_schema}" | grep -ve '^#' > "${RUNNER_TEMP}/tox-schema.json" + echo "${CONFIG_SCHEMA}" > "${RUNNER_TEMP}/tox-schema.json" - name: "Validate the raw tox config against the schema" if: "steps.lookup-config-cache.outputs.cache-hit == false" shell: "bash" + # Environment variables: + # + # * REQUIREMENTS_PATH + # run: | - uv tool run --from="check-jsonschema==${CHECK_JSONSCHEMA_VERSION}" check-jsonschema \ - --schemafile "${RUNNER_TEMP}/tox-schema.json" \ - --regex-variant python \ - ".tox-config.raw.json" + REQUIREMENTS_PATH="$(mktemp)" + echo "${CHECK_JSONSCHEMA_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + echo "REQUIREMENTS_PATH=${REQUIREMENTS_PATH}" >> "$GITHUB_ENV" + + uv run \ + --no-managed-python \ + --no-project \ + --with-requirements="${REQUIREMENTS_PATH}" \ + check-jsonschema --schemafile "${RUNNER_TEMP}/tox-schema.json" --regex-variant python ".tox-config.raw.json" - name: "Create a 'config-is-validated' cache key" if: "steps.lookup-config-cache.outputs.cache-hit == false" @@ -243,8 +288,17 @@ jobs: - name: "Transform tox config" id: "config-transformer" shell: "python" + # Environment variables: + # + # * TOX_CONFIG + # run: | - # START: tox_config_transformer.py + # This file is a part of the Globus GitHub Workflows project. 
+ # https://github.com/globus/workflows + # Copyright 2021-2026 Globus + # Copyright 2024-2026 Kurt McKee + # SPDX-License-Identifier: MIT + import json import os import pathlib @@ -312,28 +366,27 @@ jobs: # Write output = json.dumps(config, sort_keys=True, separators=(",", ":")) with open(os.environ["GITHUB_ENV"], "a") as file: - file.write(f"tox-config={output}") + file.write(f"TOX_CONFIG={output}") if __name__ == "__main__": main() - # END: tox_config_transformer.py - name: "Checkout the repository" uses: "actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd" # v6.0.2 + with: + persist-credentials: "false" - name: "Save the tox config to a file for cache-busting" shell: "bash" run: | - cat << EOF > .tox-config.json - ${{ env.tox-config }} - EOF + echo "${TOX_CONFIG}" > .tox-config.json - name: "Calculate additional checksums" - if: "fromJSON(env.tox-config).cache-key-hash-files" + if: "fromJSON(env.TOX_CONFIG).cache-key-hash-files" shell: "bash" env: - FILE_PATTERNS: "${{ join(fromJSON(env.tox-config).cache-key-hash-files, ' ') }}" + FILE_PATTERNS: "${{ join(fromJSON(env.TOX_CONFIG).cache-key-hash-files, ' ') }}" run: | # shellcheck disable=SC2086 for pattern in $FILE_PATTERNS; do @@ -349,7 +402,7 @@ jobs: env: PIP_DISABLE_PIP_VERSION_CHECK: "1" with: - python-version: "${{ fromJSON(env.tox-config).python-versions-required }}" + python-version: "${{ fromJSON(env.TOX_CONFIG).python-versions-required }}" allow-prereleases: true - name: "Detect Pythons" @@ -362,49 +415,106 @@ jobs: path: | .tox/ .venv/ - ${{ fromJSON(env.tox-config).cache-paths && join(fromJSON(env.tox-config).cache-paths, '\n') }} + ${{ fromJSON(env.TOX_CONFIG).cache-paths && join(fromJSON(env.TOX_CONFIG).cache-paths, '\n') }} key: "${{ format( '{0}-os={1}-hash={2}', - fromJSON(env.tox-config).cache-key-prefix || 'tox', - fromJSON(env.tox-config).runner, + fromJSON(env.TOX_CONFIG).cache-key-prefix || 'tox', + fromJSON(env.TOX_CONFIG).runner, hashFiles( '.python-identifiers', 
'.tox-config.json', 'tox.ini', - fromJSON(env.tox-config).cache-key-hash-files && '.hash-files.sha' || '' + env.REQUIREMENTS_PATH, + fromJSON(env.TOX_CONFIG).cache-key-hash-files && '.hash-files.sha' || '' ) ) - }}" + }}" - name: "Identify .venv path" shell: "bash" run: | - echo 'venv-path=.venv/${{ runner.os == 'Windows' && 'Scripts' || 'bin' }}' >> "$GITHUB_ENV" + echo 'VENV_PATH=.venv/${{ runner.os == 'Windows' && 'Scripts' || 'bin' }}' >> "$GITHUB_ENV" - - name: "Create a virtual environment" - if: "steps.restore-cache.outputs.cache-hit == false" + - name: "Create a virtual environment (Windows)" + if: "steps.restore-cache.outputs.cache-hit == false && runner.os == 'Windows'" + shell: "pwsh" + run: | + $REQUIREMENTS_PATH=New-TemporaryFile + Out-File -InputObject $env:TOX_REQUIREMENTS -FilePath $REQUIREMENTS_PATH + + uv venv --no-project --no-managed-python .venv + Out-File -InputObject "*" -FilePath .venv/.gitignore + uv pip install --no-managed-python --directory=.venv --requirements=$REQUIREMENTS_PATH --link-mode=copy + + - name: "Create a virtual environment (non-Windows)" + if: "steps.restore-cache.outputs.cache-hit == false && runner.os != 'Windows'" + shell: "bash" run: | - uv venv .venv - uv pip install --directory .venv tox tox-gh tox-uv + REQUIREMENTS_PATH="$(mktemp)" + echo "${TOX_REQUIREMENTS}" > "${REQUIREMENTS_PATH}" + + uv venv --no-project --no-managed-python .venv + echo "*" > ".venv/.gitignore" + uv pip install --no-managed-python --directory=.venv --requirements="${REQUIREMENTS_PATH}" - name: "Setup Pythons (requested)" - if: "fromJSON(env.tox-config).python-versions-required != fromJSON(env.tox-config).python-versions-requested" + if: "fromJSON(env.TOX_CONFIG).python-versions-required != fromJSON(env.TOX_CONFIG).python-versions-requested" uses: "actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405" # v6.2.0 env: PIP_DISABLE_PIP_VERSION_CHECK: "1" with: - python-version: "${{ fromJSON(env.tox-config).python-versions-requested }}" + 
python-version: "${{ fromJSON(env.TOX_CONFIG).python-versions-requested }}"
           allow-prereleases: true

-      - name: "Create the tox environments"
+      - name: "Create the tox environments (Linux/macOS)"
+        if: "runner.os != 'Windows'"
+        shell: "bash"
         env:
-          TOX_SKIP_ENV: "${{ fromJSON(env.tox-config).tox-skip-environments-regex }}"
+          TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}"
+          # A bare comma-separated list (no `-e` flag baked in); the shell adds the
+          # flag below so no literal quote characters leak into the argument.
+          TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && join(fromJSON(env.TOX_CONFIG).tox-environments, ',') }}
         run: |
-          ${{ env.venv-path }}/tox run --colored=yes --notest ${{ fromJSON(env.tox-config).tox-environments && format('-e "{0}"', join(fromJSON(env.tox-config).tox-environments, ',')) }}
-
-      - name: "Run the test suite"
+          if [ -n "${TOX_ENVIRONMENTS}" ]; then
+            "${VENV_PATH}/tox" run --colored=yes --notest -e "${TOX_ENVIRONMENTS}"
+          else
+            "${VENV_PATH}/tox" run --colored=yes --notest
+          fi
+
+      - name: "Run tox (Linux/macOS)"
+        if: "runner.os != 'Windows'"
+        shell: "bash"
+        env:
+          TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}"
+          TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && join(fromJSON(env.TOX_CONFIG).tox-environments, ',') }}
+        run: |
+          if [ -n "${TOX_ENVIRONMENTS}" ]; then
+            "${VENV_PATH}/tox" run --colored=yes --no-provision --skip-pkg-install -e "${TOX_ENVIRONMENTS}"
+          else
+            "${VENV_PATH}/tox" run --colored=yes --no-provision --skip-pkg-install
+          fi
+
+      - name: "Create the tox environments (Windows)"
+        if: "runner.os == 'Windows'"
+        shell: "pwsh"
+        env:
+          TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}"
+          TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && join(fromJSON(env.TOX_CONFIG).tox-environments, ',') }}
+        run: |
+          if ($Env:TOX_ENVIRONMENTS) {
+            & "$Env:VENV_PATH/tox" run --colored=yes --notest -e "$Env:TOX_ENVIRONMENTS"
+          } else {
+            & "$Env:VENV_PATH/tox" run --colored=yes --notest
+          }
+
+      - name: "Run 
tox (Windows)"
+        if: "runner.os == 'Windows'"
+        shell: "pwsh"
         env:
-          TOX_SKIP_ENV: "${{ fromJSON(env.tox-config).tox-skip-environments-regex }}"
+          TOX_SKIP_ENV: "${{ fromJSON(env.TOX_CONFIG).tox-skip-environments-regex }}"
+          # A bare comma-separated list; the `-e` flag is added explicitly below so
+          # tox receives the flag and its value as separate arguments.
+          TOX_ENVIRONMENTS: ${{ fromJSON(env.TOX_CONFIG).tox-environments && join(fromJSON(env.TOX_CONFIG).tox-environments, ',') }}
         run: |
-          ${{ env.venv-path }}/tox run --colored=yes --no-provision --skip-pkg-install ${{ fromJSON(env.tox-config).tox-environments && format('-e "{0}"', join(fromJSON(env.tox-config).tox-environments, ',')) }}
+          if ($Env:TOX_ENVIRONMENTS) {
+            & "$Env:VENV_PATH/tox" run --colored=yes --no-provision --skip-pkg-install -e "$Env:TOX_ENVIRONMENTS"
+          } else {
+            & "$Env:VENV_PATH/tox" run --colored=yes --no-provision --skip-pkg-install
+          }
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b153bf1
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,5 @@
+/.tox/
+__pycache__/
+*.egg-info/
+.coverage*
+poetry.lock
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6843465..174eebf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,31 +2,96 @@ ci:
   autoupdate_schedule: "quarterly"

 default_language_version:
-  python: "python3.12"
+  python: "python3.13"

 repos:
+  - repo: "meta"
+    hooks:
+      - id: "check-hooks-apply"
+      - id: "check-useless-excludes"
+
   - repo: "https://github.com/pre-commit/pre-commit-hooks"
-    rev: "v6.0.0"
+    rev: "3e8a8703264a2f4a69428a0aa4dcb512790b2c8c" # frozen: v6.0.0
     hooks:
+      - id: "check-added-large-files"
       - id: "check-merge-conflict"
+      - id: "check-json"
+      - id: "check-yaml"
+      - id: "end-of-file-fixer"
+      - id: "mixed-line-ending"
+        args:
+          - "--fix=lf"
       - id: "trailing-whitespace"

-  - repo: "https://github.com/sirosen/check-jsonschema"
-    rev: "0.36.0"
+  - repo: "https://github.com/asottile/pyupgrade"
+    rev: "75992aaa40730136014f34227e0135f63fc951b4" # frozen: v3.21.2
+    hooks:
+      - id: "pyupgrade"
+        name: "Enforce Python 3.13+ idioms"
+        args:
+          - 
"--py313-plus" + + - repo: "https://github.com/psf/black-pre-commit-mirror" + rev: "fa505ab9c3e0fedafe1709fd7ac2b5f8996c670d" # frozen: 26.3.1 + hooks: + - id: "black" + + - repo: "https://github.com/pycqa/isort" + rev: "a333737ed43df02b18e6c95477ea1b285b3de15a" # frozen: 8.0.1 + hooks: + - id: "isort" + + - repo: "https://github.com/pycqa/flake8" + rev: "c48217e1fc006c2dddd14df54e83b67da15de5cd" # frozen: 7.3.0 hooks: + - id: "flake8" + additional_dependencies: + - "flake8-toml-config==1.0.0" + - "flake8-bugbear==25.11.29" + + - repo: "https://github.com/editorconfig-checker/editorconfig-checker" + rev: "aa06cea887b26f372f9368aa53665266ec6dc364" # frozen: v3.6.1 + hooks: + - id: "editorconfig-checker" + + - repo: "https://github.com/python-jsonschema/check-jsonschema" + rev: "13614ab716a3113145f1294ed259d9fbe5678ff3" # frozen: 0.37.1 + hooks: + - id: "check-dependabot" - id: "check-github-workflows" + - id: "check-metaschema" + files: "src/.+-schema.json" + + - repo: "https://github.com/adhtruong/mirrors-typos" + rev: "cf074ce7ed10a99b0147ee84edc05a6b5732a122" # frozen: v1.44.0 + hooks: + - id: "typos" + + - repo: "local" + hooks: + - id: "render-workflow-templates" + name: "Render workflow templates" + language: "python" + entry: "python src/render-templates.py" + always_run: true + pass_filenames: false + additional_dependencies: + # The dependencies here are injected from a lock file + # when `tox run -e update-requirements` is run. + # DO NOT edit these dependencies manually. 
+ # + # additional_dependencies source: requirements/templating/requirements.txt + - 'jinja2==3.1.6 ; python_version == "3.13"' + - 'markupsafe==3.0.3 ; python_version == "3.13"' - - repo: "https://github.com/sirosen/texthooks" - rev: "0.7.1" + - repo: "https://github.com/zizmorcore/zizmor-pre-commit" + rev: "ea2eb407b4cbce87cf0d502f36578950494f5ac9" # frozen: v1.23.1 hooks: - - id: "alphabetize-codeowners" - - id: "fix-smartquotes" - args: ["--show-changes"] - - id: "fix-ligatures" - args: ["--show-changes"] - - id: "forbid-bidi-controls" + - id: "zizmor" - repo: "https://github.com/rhysd/actionlint" - rev: "v1.7.10" + rev: "393031adb9afb225ee52ae2ccd7a5af5525e03e8" # frozen: v1.7.11 hooks: - id: "actionlint" + additional_dependencies: + - "github.com/wasilibs/go-shellcheck/cmd/shellcheck@4e7020840c303923eb1ab846fc446d77be892570" diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 583a0f7..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,38 +0,0 @@ -# CHANGELOG - -The changelog is continuous. All changes are made to the current version branch. - -## v1.4 - -### `tox.yaml` - -- Separate tox environment creation from execution. -- Add the tox-gh plugin to group tox environment output. -- Update check-jsonschema to v0.35.0. -- Use uv for venv creation and check-jsonschema execution. - -## v1.3 - -### `pr_has_changelog.yaml` - -- Add `skip-users`, `changelog-type`, and `base-branch` workflow call arguments. -- Update action versions. - -### `tox.yaml` - -- Add `tox-skip-environments` and `tox-skip-environments-regex` config inputs. -- Update action versions. - -## v1.2 - -- Update `actions/cache` versions to resolve test suite failures. - -## v1.1 - -### `tox.yaml` - -- Initial version. - -## v1.0 - -- Rename repository from `globus/reusable-workflows` to `globus/workflows`. diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 0000000..911075f --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,72 @@ +.. 
+ This file is a part of the Globus GitHub Workflows project. + https://github.com/globus/workflows + Copyright 2021-2026 Globus + SPDX-License-Identifier: MIT + + +Globus GitHub Workflows +####################### + +Unreleased changes +================== + +Unreleased changes to the code are documented in +`changelog fragments `_ +in the ``changelog.d/`` directory on GitHub. + +.. scriv-insert-here + +.. _changelog-1.4: + +1.4 — 2026-01-05 +================ + +``tox.yaml`` +------------ + +- Separate tox environment creation from execution. +- Add the tox-gh plugin to group tox environment output. +- Update check-jsonschema to v0.35.0. +- Use uv for venv creation and check-jsonschema execution. + +.. _changelog-1.3: + +1.3 — 2025-10-03 +================ + +``pr_has_changelog.yaml`` +------------------------- + +- Add ``skip-users``, ``changelog-type``, and ``base-branch`` workflow call arguments. +- Update action versions. + +``tox.yaml`` +------------ + +- Add ``tox-skip-environments`` and ``tox-skip-environments-regex`` config inputs. +- Update action versions. + +.. _changelog-1.2: + +1.2 — 2025-03-04 +================ + +- Update ``actions/cache`` versions to resolve test suite failures. + +.. _changelog-1.1: + +1.1 — 2024-11-05 +================ + +``tox.yaml`` +------------ + +- Initial version. + +.. _changelog-1.0: + +1.0 — 2021-12-10 +================ + +- Rename repository from ``globus/reusable-workflows`` to ``globus/workflows``. 
diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..48c2313 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,22 @@ +MIT License + +Copyright 2021-2026 Globus +Copyright 2024-2026 Kurt McKee + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md deleted file mode 100644 index f557255..0000000 --- a/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Reusable Workflows - -This repository defines reusable workflows for use within Globus. - -## Workflows - -### pr_has_changelog - -Check if a PR has changelog fragments in `changelog.d/`, identified as new -files with a desired suffix (`.md`). - -Usage example: - -```yaml -name: Validate main PR - -on: - pull_request: - branches: - - main - -jobs: - check_changelog: - uses: globus/workflows/.github/workflows/pr_has_changelog.yaml@v1 -``` diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..2995c13 --- /dev/null +++ b/README.rst @@ -0,0 +1,209 @@ +.. 
+ This file is a part of the Globus GitHub Workflows project. + https://github.com/globus/workflows + Copyright 2021-2026 Globus + Copyright 2024-2026 Kurt McKee + SPDX-License-Identifier: MIT + + +Globus GitHub Workflows +####################### + +*Reusable workflows that reduce maintenance effort.* + +--------------------------------------------------------------------------- + +This repo centralizes Globus' public GitHub workflows. + +In many cases, workflows in other Globus repositories can be minimized +to a set of configuration values and a reference to the workflows here. + + +Table of contents +================= + +* `pr_has_changelog`_ +* `tox`_ +* `create-pr`_ +* `create-tag-and-release`_ +* `build-python-package`_ + + +pr_has_changelog +================ + +Check if a PR has changelog fragments in ``changelog.d/``, identified as new +files with a desired suffix (``.md``). + +Usage example: + +.. code-block:: yaml + + name: Validate main PR + + on: + pull_request: + branches: + - main + + jobs: + check_changelog: + uses: globus/workflows/.github/workflows/pr_has_changelog.yaml@v2 + + +tox +=== + +`Workflow documentation `__ + +The ``tox.yaml`` workflow captures hard-earned lessons for running tox in CI +to optimize test suite execution, including tools, plugins, and caching. + +.. code-block:: yaml + + name: "🧪 Test" + + on: + pull_request: + push: + branches: + - "main" + + jobs: + test: + permissions: + contents: "read" + strategy: + matrix: + runner: + - "ubuntu-latest" + - "macos-latest" + - "windows-latest" + + # The single value in this `include` section will be added to each runner. + include: + - cpythons: + - "3.10" + - "3.11" + - "3.12" + - "3.13" + - "3.14" + cpython-beta: "3.15" + + uses: "globus/workflows/.github/workflows/tox.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + + +create-pr +========= + +`Workflow documentation `__ + +The ``create-pr.yaml`` workflow cuts release PRs. +It helps kick off an automated release process. + +.. 
code-block:: yaml + + name: "✨ Prep release" + on: + workflow_dispatch: + inputs: + version: + description: "The version to release" + type: "string" + required: true + + jobs: + prep-release: + name: "Prep release v${{ inputs.version }}" + + permissions: + contents: "write" + pull-requests: "write" + + strategy: + matrix: + include: + - branch-name: "release/$VERSION" + commit-title: "Update project metadata for v$VERSION" + pr-title: "Release v$VERSION" + tox-label-create-changes: "prep-release" + + uses: "globus/workflows/.github/workflows/create-pr.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + version: "${{ inputs.version }}" + + +create-tag-and-release +====================== + +`Workflow documentation `__ + +The ``create-tag-and-release.yaml`` workflow creates a git tag and a GitHub release. +It adds the version's changelog fragment as the release body. + +.. code-block:: yaml + + name: "🏷️ Tag and release" + on: + push: + branches: + - "releases" + + jobs: + tag: + name: "Tag and release" + + permissions: + contents: "write" + + uses: "globus/workflows/.github/workflows/create-tag-and-release.yaml@..." + + +build-python-package +==================== + +`Workflow documentation `__ + +The ``build-python-package.yaml`` workflow builds a Python sdist and wheel, +and uploads a GitHub artifact containing these. +It helps make automated releases to PyPI trivial. + +.. code-block:: yaml + + name: "📦 Publish" + on: + push: + branches: + - "releases" + + jobs: + build: + name: "Build" + + permissions: + contents: "read" + + uses: "globus/workflows/.github/workflows/build-python-package.yaml@..." + + publish: + name: "Publish" + needs: + - "build" + runs-on: "ubuntu-24.04" + environment: "PyPI" + permissions: + id-token: "write" + steps: + - name: "Download artifact" + uses: "actions/download-artifact@..." 
+ with: + artifact-ids: "${{ needs.build.outputs.artifact-id }}" + path: "${{ needs.build.outputs.packages-path }}" + + - name: "Publish package distributions to PyPI" + uses: "pypa/gh-action-pypi-publish@???" + with: + packages-dir: "${{ needs.build.outputs.packages-path }}" diff --git a/changelog.d/20260225_170218_kurtmckee_add_release_workflows.rst b/changelog.d/20260225_170218_kurtmckee_add_release_workflows.rst new file mode 100644 index 0000000..252cf36 --- /dev/null +++ b/changelog.d/20260225_170218_kurtmckee_add_release_workflows.rst @@ -0,0 +1,50 @@ +Breaking changes +---------------- + +* ``tox``: Add a ``timeout-minutes`` key for configuring job timeouts. + + Jobs now timeout after 15 minutes by default. + +Added +----- + +* Introduce a ``create-pr`` workflow. + + This workflow runs a tox label, commits the changes, and opens a PR. + +* Introduce a ``create-tag-and-release`` workflow. + + This workflow creates an annotated git tag and a GitHub release. + +* Introduce a ``build-python-package`` workflow. + + This workflow builds a Python package and uploads an artifact. + +Changed +------- + +* Lock almost all software dependencies. +* Update all software dependencies. +* ``tox``: Bust the cache if the workflow's locked dependencies change. + +Documentation +------------- + +* Add extensive documentation for almost all of the workflows. +* Document how to create new templated workflows. + +Development +----------- + +* Use templates to generate standalone reusable workflow files. + + Now, instead of disallowing edits to portions of the workflows, + the underlying templates are fully editable. + +* Use prek to update pre-commit hook versions. + +* Wholesale copy all of the infrastructure from ``kurtmckee/github-workflows@v2.1``. + + This ensures that this repo can evolve independently as needed. + +* Introduce zizmor and address its warnings. 
diff --git a/changelog.d/README.rst b/changelog.d/README.rst new file mode 100644 index 0000000..64c905b --- /dev/null +++ b/changelog.d/README.rst @@ -0,0 +1,8 @@ +``changelog.d/`` +################ + +Use scriv to generate a new changelog fragment in this directory: + +.. code-block:: + + scriv create diff --git a/docs/adding-new-workflows.rst b/docs/adding-new-workflows.rst new file mode 100644 index 0000000..e480094 --- /dev/null +++ b/docs/adding-new-workflows.rst @@ -0,0 +1,183 @@ +.. + This file is a part of the Globus GitHub Workflows project. + https://github.com/globus/workflows + Copyright 2021-2026 Globus + Copyright 2024-2026 Kurt McKee + SPDX-License-Identifier: MIT + + +Adding new workflows +#################### + +Adding a new workflow requires some specific knowledge. + + +Workflow filename +================= + +If the workflow has no need for templating, use a memorable name. + +If templating is needed, use a filename that ends with ``.jinja.yaml``. + + +Permissions +=========== + +Permissions must be explicitly set, even if only defaults are needed. + +These are the GitHub defaults: + +.. code-block:: yaml + + permissions: + contents: "read" + + +Jinja settings +============== + +The default Jinja settings conflict with GitHub's ``${{ }}`` workflow syntax. 
+ +Therefore, these are the settings used when rendering workflow templates: + +=============================== =========================== +Setting Value +=============================== =========================== +``block_start_string`` ``[%`` +``block_end_string`` ``%]`` +``variable_start_string`` ``[[`` +``variable_end_string`` ``]]`` +``comment_start_string`` ``#[#`` +``comment_end_string`` ``#]#`` +=============================== =========================== + + +Template variables +================== + +Several template variables are available: + +* ``PYTHON_VERSION`` (read from ``pyproject.toml``) +* ``UV_VERSION`` (read from ``requirements/uv/requirements.txt``) + +These can be used to help lock dependencies and increase predictability. +For example, these can be used when using actions: + +.. code-block:: yaml + + - name: "Setup Python" + uses: "actions/setup-python@" # + with: + python-version: "[[ PYTHON_VERSION ]]" + + - name: "Install uv" + uses: "astral-sh/setup-uv@" # + with: + version: "[[ UV_VERSION ]]" + + +Template functions +================== + +Several template functions are available. + + +``include_requirements(directory: str)`` +---------------------------------------- + +The ``directory`` given must exist in this repo's ``requirements/`` directory. +The constructed path to the ``requirements.txt`` file will be: + +.. code-block:: text + + requirements/$DIRECTORY/requirements.txt + +This is useful for locking dependencies to ensure consistent runtime behavior: + +.. code-block:: yaml + + env: + REQUIREMENTS: | + [[ include_requirements("my_cool_package") | indent(4) ]] + + jobs: + example: + steps: + + # ... 
+
+          - run: |
+              REQUIREMENTS_PATH="$(mktemp)"
+              echo "${REQUIREMENTS}" > "${REQUIREMENTS_PATH}"
+
+              uv run \
+                --no-managed-python \
+                --no-project \
+                --with-requirements="${REQUIREMENTS_PATH}" \
+                my_cool_package_cli
+
+
+``include_file(file: str)``
+---------------------------
+
+The ``file`` must exist in a subdirectory in ``src/workflow_assets/``
+that matches the normalized name of the workflow name.
+
+For example, if the workflow filename is ``do-something.jinja.yaml``,
+then the corresponding directory in ``src/workflow_assets/``
+must be named ``do_something/``.
+
+This is useful for keeping a file outside of the workflow itself
+so that it can be linted and checked by standard tools.
+
+Examples of JSON schemas and Python code exist in the ``tox.jinja.yaml``.
+
+
+Block templates as runnable workflows
+=====================================
+
+Templates must be stored in the ``.github/workflows/`` directory
+so that Dependabot can update the action versions authoritatively in the templates.
+
+However, it's not desirable for the template workflows to be runnable,
+so use boilerplate like this to prevent execution of the workflows:
+
+.. code-block:: yaml
+
+    jobs:
+      #[#-
+      # Halt execution if an attempt is made to run the template directly.
+      # This block is enclosed in a Jinja comment and will not be rendered.
+      halt:
+        name: "Halt"
+        runs-on: "ubuntu-slim"
+        steps:
+          - name: "Halt"
+            run: |
+              echo "::error::⚠️ Do not run the workflow template directly."
+              exit 1
+      #]#
+
+Then, add the boilerplate just after the key of the real job,
+which will force the real job *in the template* to depend on the ``halt`` job:
+
+.. code-block:: yaml
+
+    real_job:
+      #[#-
+      # The `needs` key is in a Jinja comment and will not be rendered.
+      needs: ["halt"]
+      #]#
+
+This ensures that the template workflow cannot be used by a calling workflow.
+
+
+Disable template workflows
+==========================
+
+After the new workflow merges, disable the workflow in this repository.
+This helps keep focus on runnable actions in the Actions sidebar.
+
+Go to the Actions tab in the repo, click on the workflow,
+and then in the ``...`` menu on the right of the page,
+select "Disable workflow".
diff --git a/docs/build-python-package.rst b/docs/build-python-package.rst
new file mode 100644
index 0000000..5d9eb13
--- /dev/null
+++ b/docs/build-python-package.rst
@@ -0,0 +1,106 @@
+..
+    This file is a part of the Globus GitHub Workflows project.
+    https://github.com/globus/workflows
+    Copyright 2021-2026 Globus
+    Copyright 2024-2026 Kurt McKee
+    SPDX-License-Identifier: MIT
+
+
+``build-python-package.yaml``
+#############################
+
+This reusable workflow builds a Python package and uploads an artifact.
+
+The repository is checked out at the git commit SHA that triggered the run.
+
+It currently takes no inputs.
+
+
+Table of contents
+=================
+
+* `Requirements`_
+* `Permissions`_
+* `Inputs`_
+* `Outputs`_
+* `Workflow example`_
+
+
+Requirements
+============
+
+* The project must be buildable solely using the Python `build`_ module.
+
+  No additional dependencies are pre-installed for building.
+
+
+Permissions
+===========
+
+The workflow requires the GitHub token to have read permissions for ``contents``.
+
+This is the default, but it is recommended that permissions be explicitly set.
+
+.. code-block:: yaml
+
+    permissions:
+      contents: "read"
+
+
+Inputs
+======
+
+None.
+
+
+Outputs
+=======
+
+* ``artifact-id``
+
+  The ID of the artifact that was uploaded.
+
+  This can be downloaded by a publishing workflow.
+
+* ``packages-path``
+
+  The directory that the packages were built in.
+
+
+Workflow example
+================
+
+.. 
code-block:: yaml + + name: "Build and publish" + on: + push: + branches: + - "releases" + + jobs: + build: + name: "Build" + + permissions: + contents: "read" + + uses: "globus/workflows/.github/workflows/build-python-package.yaml@..." + + publish: + name: "Publish" + needs: + - "build" + + steps: + - name: "Download artifact" + uses: "actions/download-artifact@..." + with: + artifact-ids: "${{ needs.build.outputs.artifact-id }}" + path: "${{ needs.build.outputs.packages-path }}" + + +.. Links +.. ----- +.. +.. _build: https://pypi.org/project/build/ diff --git a/docs/create-pr.rst b/docs/create-pr.rst new file mode 100644 index 0000000..a09222f --- /dev/null +++ b/docs/create-pr.rst @@ -0,0 +1,311 @@ +.. + This file is a part of the Globus GitHub Workflows project. + https://github.com/globus/workflows + Copyright 2021-2026 Globus + Copyright 2024-2026 Kurt McKee + SPDX-License-Identifier: MIT + + +``create-pr.yaml`` +################## + +This reusable workflow runs a defined tox label to create changes to files, +then commits all of the changes and creates a draft PR. + +The reusable workflow takes two inputs: + +* ``config``, which must be a serialized JSON object with keys described below. +* ``version``, which is an optional string that can be referenced by ``config`` values + and by tox environments. + + +Table of contents +================= + +* `Requirements`_ +* `Permissions`_ +* `Inputs`_ + + * `version`_ + * `config`_ + +* `Required config keys`_ +* `Optional config keys`_ +* `Outputs`_ +* `Workflow examples`_ + + +Requirements +============ + +By default, GitHub disallows workflows from opening PRs. +A checkbox in the repository settings must be ticked: + +.. 
code-block:: text + + Settings > Actions > General > Allow GitHub Actions to create and approve pull requests + + +Permissions +=========== + +The workflow requires the GitHub token to have two write permissions: + +* ``contents`` +* ``pull-requests`` + +These must be set on the calling workflow: + +.. code-block:: yaml + + permissions: + contents: "write" + pull-requests: "write" + + +Inputs +====== + +.. _config: + +``config`` +---------- + +The workflow requires a JSON-serialized input named ``"config"``. + +The best way to accomplish this is by using a matrix configuration, +and using the ``toJSON()`` function to serialize it as a workflow input: + +.. code-block:: yaml + + strategy: + matrix: + include: + - tox-label-create-changes: "update" + + # ... + + uses: "globus/workflows/.github/workflows/create-pr.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + + +For more information about the supported ``config`` object keys, see: + +* `Required config keys`_ +* `Optional config keys`_ + + +.. _version: + +``version`` +----------- + +A ``version`` input may be passed to the workflow, separate from the ``config`` input. +It can then be referenced in several places, including these config keys: + +* ``branch-name`` (example: ``releases/$VERSION``) +* ``commit-title`` (example: ``Update metadata for v$VERSION``) +* ``pr-title`` (example: ``Release v$VERSION``) + +It will also be available as an environment variable named ``VERSION`` when tox is run. +Tox must be configured to pass ``VERSION`` into the test environment: + +.. code-block:: ini + + [testenv:prep-release] + pass_env = + VERSION + deps = + poetry + commands = + poetry version {env:VERSION} + + +Required config keys +==================== + +* ``tox-label-create-changes``: + The tox label to run to generate changes that will be committed. Must be a string. + + Example: + + .. 
code-block:: yaml + + tox-label-create-changes: "prep-release" + + When tox is run, two environment variables will be available: + + * ``PR_BODY_OUTPUT_PATH``, which a tox environment can write a PR body to + * ``VERSION``, which will contain the optional ``version`` input value + + These should be passed to the tox environments using the tox ``pass_env`` config. + + Example: + + .. code-block:: ini + + [testenv:prep-release] + description = Make the changes needed to create a new release PR + skip_install = true + deps = + poetry + scriv + pass_env = + PR_BODY_OUTPUT_PATH + VERSION + commands = + # Fail if $VERSION is not set. + python -Ec 'import os; assert (v := os.getenv("VERSION")) is not None, v' + poetry version "{env:VERSION}" + scriv collect + scriv print --version "{env:VERSION}" --output "{env:PR_BODY_OUTPUT_PATH:{env:VERSION}.rst}" + + +Optional config keys +==================== + + +* ``python-version``: + The CPython interpreter version to install. Must be a string. + + .. code-block:: yaml + + python-version: "3.13" + +* ``commit-title``: + The first line of the commit message to use. Must be a string. + + This supports a ``$VERSION`` substitution. + + Examples: + + .. code-block:: yaml + + commit-title: "Update tool versions" + + .. code-block:: yaml + + commit-title: "Update project metadata for v$VERSION" + +* ``pr-base``: + The name of the branch that the PR will be configured to merge to. + Must be a string. + + The default is ``main``. + + Example: + + .. code-block:: yaml + + pr-base: "releases" + +* ``pr-title``: + The title of the PR to open. Must be a string. + + This supports a ``$VERSION`` substitution. + + Examples: + + .. code-block:: yaml + + pr-title: "Update pre-commit hooks and additional dependencies" + + .. code-block:: yaml + + pr-title: "Release v$VERSION" + +* ``pr-body``: + The body of the PR to open. Must be a string. + + This supports a ``$VERSION`` substitution. + + Example: + + .. 
code-block:: yaml + + pr-body: "Exactly what it says on the tin." + +* ``pr-body-input-format``: + The format of the PR body. Must be a string. + + Currently only ``gfm`` and ``rst`` are allowed values. + The default is ``rst``. + + +Outputs +======= + +None. + + +Workflow examples +================= + + +Trivial example +--------------- + +.. code-block:: yaml + + name: "Updates" + on: + workflow_dispatch: + + jobs: + updates: + name: "${{ 'Updates' || matrix.ignore }}" + + permissions: + contents: "write" + pull-requests: "write" + + strategy: + matrix: + include: + - tox-label-create-changes: "update" + + uses: "globus/workflows/.github/workflows/create-pr.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + + +Note that referencing ``matrix`` in the calling workflow name -- which is a no-op here -- +tricks GitHub and prevents it from injecting matrix values into the name of each run. +Without this trick, the workflow run would have the generated name "Updates (update)". + + +Prepare a new release +--------------------- + +.. code-block:: yaml + + name: "Prep release" + on: + workflow_dispatch: + inputs: + version: + description: "The version to release" + type: "string" + required: true + + jobs: + prep-release: + name: "Prep release v${{ inputs.version }}" + + permissions: + contents: "write" + pull-requests: "write" + + strategy: + matrix: + include: + - branch-name: "release/$VERSION" + commit-title: "Update project metadata for v$VERSION" + pr-title: "Release v$VERSION" + tox-label-create-changes: "prep-release" + + uses: "globus/workflows/.github/workflows/create-pr.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + version: "${{ inputs.version }}" diff --git a/docs/create-tag-and-release.rst b/docs/create-tag-and-release.rst new file mode 100644 index 0000000..5e69be1 --- /dev/null +++ b/docs/create-tag-and-release.rst @@ -0,0 +1,92 @@ +.. + This file is a part of the Globus GitHub Workflows project. 
+ https://github.com/globus/workflows + Copyright 2021-2026 Globus + Copyright 2024-2026 Kurt McKee + SPDX-License-Identifier: MIT + + +``create-tag-and-release.yaml`` +############################### + +This reusable workflow creates an annotated git tag and a GitHub release. + +The git tag will contain the current version's CHANGELOG fragment +in GitHub-formatted Markdown. + +It currently takes no inputs. + + +Table of contents +================= + +* `Requirements`_ +* `Permissions`_ +* `Inputs`_ +* `Outputs`_ +* `Workflow example`_ + + +Requirements +============ + +* The project must have ``project.version`` set in ``pyproject.toml``. + The version cannot be a "dynamic" value. +* The project must use scriv to manage its CHANGELOG. +* The CHANGELOG must be in Restructured Text format. + + +Permissions +=========== + +The workflow requires the GitHub token to have write permissions for ``contents``. + +These must be set on the calling workflow: + +.. code-block:: yaml + + permissions: + contents: "write" + + +Inputs +====== + +None. + + +Outputs +======= + +* ``project-version`` + + The version of the project extracted from ``project.version`` in ``pyproject.toml``. + For example, ``1.2.3``. + +* ``tag-name`` + + The name of the git tag that was created. + + This is always the project version string prepended with the letter ``v``. + For example, ``v1.2.3``. + + +Workflow example +================ + +.. code-block:: yaml + + name: "Tag and release" + on: + push: + branches: + - "releases" + + jobs: + tag: + name: "Tag and release" + + permissions: + contents: "write" + + uses: "globus/workflows/.github/workflows/create-tag-and-release.yaml@..." diff --git a/docs/tox.rst b/docs/tox.rst new file mode 100644 index 0000000..91a8224 --- /dev/null +++ b/docs/tox.rst @@ -0,0 +1,647 @@ +.. + This file is a part of the Globus GitHub Workflows project. 
+ https://github.com/globus/workflows + Copyright 2021-2026 Globus + Copyright 2024-2026 Kurt McKee + SPDX-License-Identifier: MIT + + +``tox.yaml`` +############ + +This reusable workflow puts a lot of engineering effort into this one task: +running tox. + +It captures years of experience for speedy test suite execution in a CI environment, +and is configured via a single workflow input named ``config``, +which must be a JSON object serialized via GitHub's ``toJSON()`` workflow function. + + +Table of contents +================= + +* `Requirements`_ +* `Permissions`_ +* `Inputs`_ + + * `config`_ + +* `Config keys`_ + + * `Runners`_ + * `Python interpreters`_ + * `Tox environments`_ + * `Caching`_ + +* `Outputs`_ +* `Passing the config to the workflow`_ +* `Workflow examples`_ +* `Controlling the job name`_ + + +Requirements +============ + +Tox must be runnable without additional software dependencies. + + +Permissions +=========== + +The workflow requires the GitHub token to have read permissions for ``contents``. + +This is the default, but it is recommended that permissions be explicitly set. + +.. code-block:: yaml + + permissions: + contents: "read" + + +Inputs +====== + +.. _config: + +``config`` +---------- + +The workflow requires a JSON-serialized input named ``"config"``. + +The best way to accomplish this is by using a matrix configuration, +and using the ``toJSON()`` function to serialize it as a workflow input: + +.. code-block:: yaml + + strategy: + matrix: + include: + - tox-label-create-changes: "update" + + # ... + + uses: "globus/workflows/.github/workflows/tox.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + + +For more information about the supported ``config`` object keys, +see the next sections. + + +Config keys +=========== + + +Runners +------- + +* ``runner``: + The runner to use. + + .. code-block:: yaml + + runner: "ubuntu-latest" + +* ``timeout-minutes``: + The maximum amount of time that the job is allowed to run. 
+ + The default value is 15 minutes. + + .. code-block:: yaml + + timeout-minutes: 15 + + +Python interpreters +------------------- + +* ``cpythons``: + An array of CPython interpreter versions to install. Items must be strings. + + .. code-block:: yaml + + cpythons: + - "3.12" + - "3.13" + +* ``cpython-beta``: + A CPython interpreter beta to install. Must be a string. + + Tox will *never* be installed using a beta CPython interpreter. + The workflow will install a non-beta CPython interpreter if necessary + to avoid installing and executing tox on a beta CPython interpreter, + so it may be necessary to specify which tox environments to run + if the goal is to exclusively run the test suite with the beta interpreter. + + Example: + + .. code-block:: yaml + + cpython-beta: "3.99" + +* ``pypys``: + An array of PyPy interpreter versions to install. Items must be strings. + + Tox will *never* be installed using a PyPy interpreter. + The workflow will install a CPython interpreter if necessary + to avoid installing and executing tox on a PyPy interpreter, + so it may be necessary to specify which tox environments to run + if the goal is to exclusively run the test suite with the PyPy interpreters. + + Example: + + .. code-block:: yaml + + pypys: + - "3.10" + - "3.11" + + +Tox environments +---------------- + +* ``tox-environments``: + An array of tox environments to run. Items must be strings. + + If provided, only the given environment names will be run. + + Mutually-exclusive with: + + * ``tox-environments-from-pythons`` + * ``tox-factors`` + * ``tox-pre-environments`` + * ``tox-post-environments`` + * ``tox-skip-environments`` + * ``tox-skip-environments-regex`` + + Example: + + .. code-block:: yaml + + tox-environments: + - "docs" + - "mypy" + + Resulting tox command: + + .. 
code-block:: + + tox run -e "docs,mypy" + ^^^^ ^^^^ + +* ``tox-environments-from-pythons``: + A boolean flag that controls whether the configured Python interpreters + will be converted to a list of specific tox environments to execute. + + If configured, the only allowed value is ``true``. + + Mutually-exclusive with ``tox-environments``. + + Example: + + .. code-block:: yaml + + cpythons: + - "3.12" + - "3.13" + cpython-beta: "3.14" + pypys: + - "3.11" + tox-environments-from-pythons: true + + Resulting tox command: + + .. code-block:: + + tox run -e "py3.12,py3.13,py3.14,pypy3.11" + ^^^^^^ ^^^^^^ ^^^^^^ ^^^^^^^^ + +* ``tox-factors``: + An array of factors to add to the ends of generated tox environment names. + + Configuring this key automatically enables ``tox-environments-from-pythons``. + + Mutually-exclusive with ``tox-environments``. + + Example: + + .. code-block:: yaml + + cpythons: + - "3.12" + - "3.13" + tox-factors: + - "ci" + + Resulting tox command: + + .. code-block:: + + tox run -e "py3.12-ci,py3.13-ci" + ^^^ ^^^ + +* ``tox-pre-environments``: + An array of tox environments to run + before a generated list of all configured Python interpreters as tox environments. + + Configuring this key automatically enables ``tox-environments-from-pythons``. + + Mutually-exclusive with ``tox-environments``. + + Example: + + .. code-block:: yaml + + cpythons: + - "3.13" + pypys: + - "3.11" + tox-pre-environments: + - "flake8" + + Resulting tox command: + + .. code-block:: + + tox run -e "flake8,py3.13,pypy3.11" + ^^^^^^ + +* ``tox-post-environments``: + An array of tox environments to run + after a generated list of all configured Python interpreters as tox environments. + + Configuring this key automatically enables ``tox-environments-from-pythons``. + + Mutually-exclusive with ``tox-environments``. + + Example: + + .. code-block:: yaml + + cpythons: + - "3.12" + pypys: + - "3.11" + tox-post-environments: + - "coverage" + + Resulting tox command: + + .. 
code-block:: + + tox run -e "py3.12,pypy3.11,coverage" + ^^^^^^^^ + +* ``tox-skip-environments``: + An array of tox environment names to skip. + + The names will be sorted, escaped, and combined into a regular expression. + Current tox behavior is to *match* -- not *search* -- names against the pattern, + so if this option is used, the names must exactly match tox environment names. + + For true regular expression matching, see ``tox-skip-environments-regex`` below. + + Mutually-exclusive with ``tox-environments``. + + Example: + + .. code-block:: yaml + + cpythons: + - "3.13" + tox-skip-environments: + - "coverage-html" + - "docs" + + Resulting tox command: + + .. code-block:: + + export TOX_SKIP_ENV='coverage-html|docs' + ^^^^^^^^^^^^^ ^^^^ + tox + +* ``tox-skip-environments-regex``: + A regular expression of tox environment names to skip. + + If used with ``tox-skip-environments``, the patterns will be combined. + + Mutually-exclusive with ``tox-environments``. + + Example: + + .. code-block:: yaml + + cpythons: + - "3.13" + tox-skip-environments: + - "coverage-html" + - "docs" + tox-skip-environments-regex: "mypy-.*" + + Resulting tox command: + + .. code-block:: + + export TOX_SKIP_ENV='coverage-html|docs|mypy-.*' + ^^^^^^^^^^^^^ ^^^^ ^^^^^^^ + tox + + +Caching +------- + +* ``cache-paths``: + An array of additional paths to cache. + + By default, a virtual environment is created in ``.venv/`` with tox installed, + and tox virtual environments are created when tox runs in ``.tox/``. + These two directories are always cached and can be augmented by ``cache-paths``. + + Example: + + .. code-block:: yaml + + cache-paths: + - ".mypy_cache/" + + Resulting ``actions/cache`` configuration: + + .. code-block:: yaml + + uses: "actions/cache@???" + with: + path: | + .tox/ + .venv/ + .mypy_cache/ + +* ``cache-key-prefix``: + The string prefix to use with the cache. Defaults to ``"tox"``. + + Example: + + .. 
code-block:: yaml + + cache-key-prefix: "docs" + + Resulting ``actions/cache`` configuration: + + .. code-block:: yaml + + uses: "actions/cache@???" + with: + key: "docs-..." + +* ``cache-key-hash-files``: + An array of paths (or glob patterns) to hash and include in the cache key + for cache-busting. + + Note that the existence of the path or glob patterns is validated; + if paths do not exist, or the glob patterns match nothing, the workflow will fail. + + Example: + + .. code-block:: yaml + + cache-key-hash-files: + - "pyproject.toml" + - "requirements/*/*.txt" + + A file named ``.hash-files.sha`` will be generated containing SHA-1 checksums. + The resulting ``actions/cache`` configuration will be: + + .. code-block:: yaml + + uses: "actions/cache@???" + with: + key: "...${{ hashFiles('.python-identifiers', '.workflow-config.json', 'tox.ini', '.hash-files.sha') }}" + + +Outputs +======= + +None. + + +Passing the config to the workflow +================================== + +The workflow requires a JSON-serialized input named ``"config"``. + +The easiest way to accomplish this is by using a matrix configuration, +and using the ``toJSON()`` function to serialize it as a workflow input: + +.. code-block:: yaml + + strategy: + matrix: + runner: + - "ubuntu-latest" + cpythons: + - ["3.13"] + + uses: "globus/workflows/.github/workflows/tox.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + +There is one ``runner`` value (the string ``"ubuntu-latest"``) +and one ``cpythons`` value (the list ``["3.13"]``), +so this matrix will result in only one JSON config: + +.. code-block:: json + + { + "runner": "ubuntu-latest", + "cpythons": ["3.13"] + } + + +Workflow examples +================= + +Test all Python versions on each operating system +------------------------------------------------- + +.. 
code-block:: yaml + + jobs: + test: + permissions: + contents: "read" + strategy: + matrix: + runner: + - "ubuntu-latest" + - "macos-latest" + - "windows-latest" + + # The single value in this `include` section will be added to each runner. + include: + - cpythons: + - "3.10" + - "3.11" + - "3.12" + - "3.13" + cpython-beta: "3.14" + pypys: + - "3.10" + - "3.11" + + uses: "globus/workflows/.github/workflows/tox.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + +There are three ``runner`` values in the matrix +and the single ``include`` object does not have a ``runner`` value, +so this results in three JSON configurations, one for each given ``runner``. +An example of the ``"ubuntu-latest"`` runner's JSON config is shown below: + +.. code-block:: json + + { + "runner": "ubuntu-latest", + "cpythons": ["3.10", "3.11", "3.12", "3.13"], + "cpython-beta": "3.14", + "pypys": ["3.10", "3.11"] + } + + +Run individual configurations +----------------------------- + +.. code-block:: yaml + + jobs: + test: + permissions: + contents: "read" + strategy: + matrix: + include: + # Test all Python versions on Ubuntu. + - runner: "ubuntu-latest" + cpythons: + - "3.10" + - "3.11" + - "3.12" + - "3.13" + + # Test only the highest and lowest Pythons on Windows. + - runner: "windows-latest" + cpythons: + - "3.10" + - "3.13" + + uses: "globus/workflows/.github/workflows/tox.yaml@???" + with: + config: "${{ toJSON(matrix) }}" + + +Controlling the job name +======================== + +When using a ``matrix``, GitHub automatically appends matrix values +to the job name to help differentiate the matrix configuration from each other. + +Consider a matrix like the following: + +.. code-block:: yaml + + name: "🧪 Test" + jobs: + test: + name: "Linux" + strategy: + matrix: + include: + - runner: "ubuntu-latest" + cpythons: ["3.13"] + + +GitHub will combine the name of the workflow (``"🧪 Test"``), +the name of the job (``"Linux"``), and the name of the tox workflow. 
+However, it will also append matrix values to the job name in parentheses, +resulting in this check name: + +.. code-block:: + + 🧪 Test / Linux (ubuntu-latest, 3.13) / tox + + +As the number of matrix values grow, so too will the length of the job name. + +This behavior can be suppressed by referencing a ``matrix`` value in the job name. + +#. The name can be hard-coded in the job name, + and a bogus matrix value can be referenced. + + .. code-block:: yaml + + jobs: + test: + name: "${{ 'Linux' || matrix.bogus }}" + strategy: + matrix: + include: + - name: "Linux" + runner: "ubuntu-latest" + cpythons: ["3.13"] + + This results in the following check name: + + .. code-block:: + + 🧪 Test / Linux / tox + + +#. The name can be hard-coded into the matrix and referenced. + + .. code-block:: yaml + + jobs: + test: + name: "${{ matrix.name }}" + strategy: + matrix: + include: + - name: "Linux" + runner: "ubuntu-latest" + cpythons: ["3.13"] + + This results in the following check name: + + .. code-block:: + + 🧪 Test / Linux / tox + +#. For a more complicated workflow, + the name can be calculated based on matrix values. + + .. code-block:: yaml + + jobs: + test: + name: + "${{ + (startswith(matrix.runner, 'ubuntu') && 'Linux') + || (startswith(matrix.runner, 'macos') && 'macOS') + || (startswith(matrix.runner, 'windows') && 'Windows') + }}" + strategy: + matrix: + runner: + - "ubuntu-latest" + - "macos-latest" + - "windows-latest" + include: + - cpythons: ["3.13"] + + This results in the following check names: + + .. 
code-block:: + + 🧪 Test / Linux / tox + 🧪 Test / macOS / tox + 🧪 Test / Windows / tox diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..c1e221f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,59 @@ +[project] +name = "github-workflows" +version = "1.4" +classifiers = ["Private :: Do not Upload"] +requires-python = ">=3.13,<3.14" + + +# coverage +# -------- + +[tool.coverage.report] +exclude_also = [ + 'if __name__ == "__main__":', +] + + +# flake8 +# ------ + +[tool.flake8] +max-line-length = 80 +extend-select = ["B950"] +extend-ignore = ["E203", "E501", "E701"] + + +# mypy +# ---- + +[tool.mypy] +files = "src/" +strict = true +sqlite_cache = true + + +# pytest +# ------ + +[tool.pytest.ini_options] +addopts = "--color=yes" +filterwarnings = [ + "error", +] + + +# scriv +# ----- + +[tool.scriv] +version = "literal: pyproject.toml: project.version" +categories = [ + "Breaking changes", + "Python support", + "Added", + "Removed", + "Changed", + "Fixed", + "Documentation", + "Development", +] diff --git a/requirements/README.rst b/requirements/README.rst new file mode 100644 index 0000000..694075b --- /dev/null +++ b/requirements/README.rst @@ -0,0 +1,57 @@ +.. + This file is a part of the Globus GitHub Workflows project. + https://github.com/globus/workflows + Copyright 2021-2026 Globus + Copyright 2024-2026 Kurt McKee + SPDX-License-Identifier: MIT + + +``requirements/`` +################# + +This directory contains the files that manage dependencies for the project. + +At the time of writing, Poetry supports discrete dependency groups +but always resolves dependencies coherently across all groups. +However, in some cases, dependencies do not need to be coherently resolved; +for example, mypy's dependencies do not need to be resolved +together with Sphinx's dependencies. + +Each subdirectory in this directory contains a ``pyproject.toml`` file +with purpose-specific dependencies listed. 
+ + +How it's used +============= + +Tox is configured to use the exported ``requirements.txt`` files as needed. + + +How it's updated +================ + +A tox label, ``update``, ensures that dependencies can be easily updated, +and that ``requirements.txt`` files are consistently re-exported. + +This can be invoked by running: + +.. code-block:: + + tox run -m update + + +How to add dependencies +======================= + +New dependencies can be added to a given subdirectory's ``pyproject.toml`` +by either manually modifying the file, or by running a command like: + +.. code-block:: + + poetry add --lock --directory "requirements/$DIR" $DEPENDENCY_NAME + +Either way, the dependencies must be re-exported: + +.. code-block:: + + tox run -m update diff --git a/requirements/build/pyproject.toml b/requirements/build/pyproject.toml new file mode 100644 index 0000000..0888715 --- /dev/null +++ b/requirements/build/pyproject.toml @@ -0,0 +1,9 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "build", +] diff --git a/requirements/build/requirements.txt b/requirements/build/requirements.txt new file mode 100644 index 0000000..8fc0341 --- /dev/null +++ b/requirements/build/requirements.txt @@ -0,0 +1,4 @@ +build==1.4.2 ; python_version == "3.13" +colorama==0.4.6 ; python_version == "3.13" and os_name == "nt" +packaging==26.0 ; python_version == "3.13" +pyproject-hooks==1.2.0 ; python_version == "3.13" diff --git a/requirements/check_jsonschema/pyproject.toml b/requirements/check_jsonschema/pyproject.toml new file mode 100644 index 0000000..d53bc0c --- /dev/null +++ b/requirements/check_jsonschema/pyproject.toml @@ -0,0 +1,9 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "check-jsonschema", +] diff --git a/requirements/check_jsonschema/requirements.txt b/requirements/check_jsonschema/requirements.txt 
new file mode 100644 index 0000000..fbbcf49 --- /dev/null +++ b/requirements/check_jsonschema/requirements.txt @@ -0,0 +1,15 @@ +attrs==26.1.0 ; python_version == "3.13" +certifi==2026.2.25 ; python_version == "3.13" +charset-normalizer==3.4.6 ; python_version == "3.13" +check-jsonschema==0.37.1 ; python_version == "3.13" +click==8.3.1 ; python_version == "3.13" +colorama==0.4.6 ; python_version == "3.13" and platform_system == "Windows" +idna==3.11 ; python_version == "3.13" +jsonschema-specifications==2025.9.1 ; python_version == "3.13" +jsonschema==4.26.0 ; python_version == "3.13" +referencing==0.37.0 ; python_version == "3.13" +regress==2025.10.1 ; python_version == "3.13" +requests==2.33.0 ; python_version == "3.13" +rpds-py==0.30.0 ; python_version == "3.13" +ruamel-yaml==0.19.1 ; python_version == "3.13" +urllib3==2.6.3 ; python_version == "3.13" diff --git a/requirements/mypy/pyproject.toml b/requirements/mypy/pyproject.toml new file mode 100644 index 0000000..0e46fe5 --- /dev/null +++ b/requirements/mypy/pyproject.toml @@ -0,0 +1,10 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "jinja2", + "mypy", +] diff --git a/requirements/mypy/requirements.txt b/requirements/mypy/requirements.txt new file mode 100644 index 0000000..8d4a0d2 --- /dev/null +++ b/requirements/mypy/requirements.txt @@ -0,0 +1,7 @@ +jinja2==3.1.6 ; python_version == "3.13" +librt==0.8.1 ; python_version == "3.13" and platform_python_implementation != "PyPy" +markupsafe==3.0.3 ; python_version == "3.13" +mypy-extensions==1.1.0 ; python_version == "3.13" +mypy==1.19.1 ; python_version == "3.13" +pathspec==1.0.4 ; python_version == "3.13" +typing-extensions==4.15.0 ; python_version == "3.13" diff --git a/requirements/scriv/pyproject.toml b/requirements/scriv/pyproject.toml new file mode 100644 index 0000000..e672d5a --- /dev/null +++ b/requirements/scriv/pyproject.toml @@ -0,0 +1,9 @@ +[tool.poetry] 
+package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "scriv", +] diff --git a/requirements/scriv/requirements.txt b/requirements/scriv/requirements.txt new file mode 100644 index 0000000..b5d1286 --- /dev/null +++ b/requirements/scriv/requirements.txt @@ -0,0 +1,14 @@ +attrs==26.1.0 ; python_version == "3.13" +certifi==2026.2.25 ; python_version == "3.13" +charset-normalizer==3.4.6 ; python_version == "3.13" +click-log==0.4.0 ; python_version == "3.13" +click==8.3.1 ; python_version == "3.13" +colorama==0.4.6 ; python_version == "3.13" and platform_system == "Windows" +idna==3.11 ; python_version == "3.13" +jinja2==3.1.6 ; python_version == "3.13" +markdown-it-py==4.0.0 ; python_version == "3.13" +markupsafe==3.0.3 ; python_version == "3.13" +mdurl==0.1.2 ; python_version == "3.13" +requests==2.33.0 ; python_version == "3.13" +scriv==1.8.0 ; python_version == "3.13" +urllib3==2.6.3 ; python_version == "3.13" diff --git a/requirements/templating/pyproject.toml b/requirements/templating/pyproject.toml new file mode 100644 index 0000000..1817ad8 --- /dev/null +++ b/requirements/templating/pyproject.toml @@ -0,0 +1,9 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "jinja2", +] diff --git a/requirements/templating/requirements.txt b/requirements/templating/requirements.txt new file mode 100644 index 0000000..003afe1 --- /dev/null +++ b/requirements/templating/requirements.txt @@ -0,0 +1,2 @@ +jinja2==3.1.6 ; python_version == "3.13" +markupsafe==3.0.3 ; python_version == "3.13" diff --git a/requirements/test/pyproject.toml b/requirements/test/pyproject.toml new file mode 100644 index 0000000..2eb52bc --- /dev/null +++ b/requirements/test/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "coverage[toml]", + "jsonschema", + 
"pyfakefs", + "pytest", + "pytest-randomly", +] + +[dependency-groups] +coverage = [ + "coverage[toml]", +] diff --git a/requirements/test/requirements-coverage.txt b/requirements/test/requirements-coverage.txt new file mode 100644 index 0000000..b70f3ea --- /dev/null +++ b/requirements/test/requirements-coverage.txt @@ -0,0 +1 @@ +coverage==7.13.5 ; python_version == "3.13" diff --git a/requirements/test/requirements.txt b/requirements/test/requirements.txt new file mode 100644 index 0000000..1a52854 --- /dev/null +++ b/requirements/test/requirements.txt @@ -0,0 +1,14 @@ +attrs==26.1.0 ; python_version == "3.13" +colorama==0.4.6 ; python_version == "3.13" and sys_platform == "win32" +coverage==7.13.5 ; python_version == "3.13" +iniconfig==2.3.0 ; python_version == "3.13" +jsonschema-specifications==2025.9.1 ; python_version == "3.13" +jsonschema==4.26.0 ; python_version == "3.13" +packaging==26.0 ; python_version == "3.13" +pluggy==1.6.0 ; python_version == "3.13" +pyfakefs==6.1.6 ; python_version == "3.13" +pygments==2.19.2 ; python_version == "3.13" +pytest-randomly==4.0.1 ; python_version == "3.13" +pytest==9.0.2 ; python_version == "3.13" +referencing==0.37.0 ; python_version == "3.13" +rpds-py==0.30.0 ; python_version == "3.13" diff --git a/requirements/tox/pyproject.toml b/requirements/tox/pyproject.toml new file mode 100644 index 0000000..d138f6c --- /dev/null +++ b/requirements/tox/pyproject.toml @@ -0,0 +1,11 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "tox", + "tox-gh", + "tox-uv", +] diff --git a/requirements/tox/requirements.txt b/requirements/tox/requirements.txt new file mode 100644 index 0000000..3ce2d9b --- /dev/null +++ b/requirements/tox/requirements.txt @@ -0,0 +1,16 @@ +cachetools==7.0.5 ; python_version == "3.13" +colorama==0.4.6 ; python_version == "3.13" +distlib==0.4.0 ; python_version == "3.13" +filelock==3.25.2 ; python_version == "3.13" +packaging==26.0 
; python_version == "3.13" +platformdirs==4.9.4 ; python_version == "3.13" +pluggy==1.6.0 ; python_version == "3.13" +pyproject-api==1.10.0 ; python_version == "3.13" +python-discovery==1.2.1 ; python_version == "3.13" +tomli-w==1.2.0 ; python_version == "3.13" +tox-gh==1.7.1 ; python_version == "3.13" +tox-uv-bare==1.33.4 ; python_version == "3.13" +tox-uv==1.33.4 ; python_version == "3.13" +tox==4.51.0 ; python_version == "3.13" +uv==0.11.2 ; python_version == "3.13" +virtualenv==21.2.0 ; python_version == "3.13" diff --git a/requirements/uv/pyproject.toml b/requirements/uv/pyproject.toml new file mode 100644 index 0000000..1288d00 --- /dev/null +++ b/requirements/uv/pyproject.toml @@ -0,0 +1,9 @@ +[tool.poetry] +package-mode = false + +[project] +name = "dependencies" +requires-python = ">=3.13,<3.14" +dependencies = [ + "uv", +] diff --git a/requirements/uv/requirements.txt b/requirements/uv/requirements.txt new file mode 100644 index 0000000..7e17156 --- /dev/null +++ b/requirements/uv/requirements.txt @@ -0,0 +1 @@ +uv==0.11.2 ; python_version == "3.13" diff --git a/src/render-templates.py b/src/render-templates.py new file mode 100644 index 0000000..54e4a26 --- /dev/null +++ b/src/render-templates.py @@ -0,0 +1,93 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import pathlib +import sys +import tomllib +import typing + +import jinja2 + +ROOT = pathlib.Path(__file__).parent.parent +WORKFLOWS = ROOT / ".github/workflows/" + + +def main() -> int: + files_changed = False + + env = jinja2.Environment( + loader=jinja2.FileSystemLoader(WORKFLOWS), + keep_trailing_newline=True, + # The default Jinja variable syntax, "{{ ... }}", + # conflicts with the GitHub workflow syntax "${{ ... }}". + # To avoid conflict, braces in Jinja syntax are converted to square brackets. 
+ block_start_string="[%", + block_end_string="%]", + variable_start_string="[[", + variable_end_string="]]", + comment_start_string="#[#", + comment_end_string="#]#", + ) + for template_path in WORKFLOWS.glob("*.jinja.yaml"): + target = template_path.name[: -len(".jinja.yaml")] + include_file = create_file_includer(target) + template = env.get_template(template_path.name) + new_content = f"# DO NOT EDIT THIS FILE! EDIT '{template_path.name}'.\n\n" + new_content += template.render( + PYTHON_VERSION=get_python_version(), + UV_VERSION=get_uv_version(), + include_requirements=include_requirements, + include_file=include_file, + ) + + path = WORKFLOWS / f"{target}.yaml" + content = path.read_text() + if new_content != content: + path.write_text(new_content) + files_changed = True + + return int(files_changed) + + +def create_file_includer(name: str) -> typing.Callable[[str], str]: + name = name.replace("-", "_") + path = ROOT / f"src/workflow_assets/{name}" + + def include_file(file: str) -> str: + return (path / file).read_text().rstrip() + + return include_file + + +def include_requirements(name: str) -> str: + name = name.replace("-", "_") + path = ROOT / f"requirements/{name}/requirements.txt" + if not path.is_file(): + raise SystemExit(f"{path} does not exist") + return path.read_text().rstrip() + + +def get_python_version() -> str: + config = tomllib.loads((ROOT / "pyproject.toml").read_text()) + specifier = str(config["project"]["requires-python"]) + version = next( + piece.strip("<>=") + for piece in specifier.partition(",") + if piece.startswith(">=") + ) + return version.strip() + + +def get_uv_version() -> str: + text = (ROOT / "requirements/uv/requirements.txt").read_text() + line = next(line_ for line_ in text.splitlines() if line_.startswith("uv")) + specifier, _, _ = line.partition(";") + _, _, version = specifier.partition("==") + return version.strip() + + +if __name__ == "__main__": + sys.exit(main()) diff --git 
a/src/update-pre-commit-additional-dependencies.py b/src/update-pre-commit-additional-dependencies.py new file mode 100644 index 0000000..eb2643d --- /dev/null +++ b/src/update-pre-commit-additional-dependencies.py @@ -0,0 +1,59 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import functools +import pathlib + +ROOT = pathlib.Path(__file__).parent.parent +PRE_COMMIT_CONFIG = ROOT / ".pre-commit-config.yaml" + + +def main() -> None: + """Rewrite "additional_dependencies" in the pre-commit config.""" + + config = PRE_COMMIT_CONFIG.read_text() + + lines = [] + iterable = iter(config.splitlines()) + found = False + for line in iterable: + lines.append(line) + if not line.lstrip().startswith("# additional_dependencies source:"): + continue + + found = True + target_requirements_file = line.partition(":")[2].strip() + indent = len(line) - len(line.lstrip()) + + # Consume all list lines under the comment. + # The last line that isn't a list line (if any) is kept for later use. 
+ try: + while (next_line := next(iterable)).startswith(f"{' ' * indent}-"): + pass + except StopIteration: + next_line = None + + for requirement in get_contents(target_requirements_file).splitlines(): + lines.append(f"{' ' * indent}- '{requirement}'") + + if next_line is not None: + lines.append(next_line) + + if not found: + raise OSError("No 'additional_dependencies source' comment found") + + new_config = "\n".join(lines) + "\n" + if new_config != config: + PRE_COMMIT_CONFIG.write_text(new_config) + + +@functools.cache +def get_contents(path: str) -> str: + return (ROOT / path).read_text() + + +if __name__ == "__main__": + main() diff --git a/src/workflow_assets/create_pr/compute_versioned_variables.py b/src/workflow_assets/create_pr/compute_versioned_variables.py new file mode 100644 index 0000000..2c78d7b --- /dev/null +++ b/src/workflow_assets/create_pr/compute_versioned_variables.py @@ -0,0 +1,52 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import os +import sys + +RC_SUCCESS = 0 +RC_FAILURE = 1 + +mandatory_environment_variables = { + "BRANCH_NAME", + "DEFAULT_BRANCH_NAME", + "DEFAULT_PR_TITLE", + "GITHUB_ENV", + "PR_TITLE", + "VERSION", +} + + +def main() -> int: + # Ensure mandatory environment variables are present. 
+    # `PR_TITLE` and `DEFAULT_PR_TITLE` are read unconditionally below, so
+    # they must be validated here as well; otherwise a missing variable
+    # would raise a KeyError *after* this validation step has passed.
+    required = mandatory_environment_variables | {"DEFAULT_PR_TITLE", "PR_TITLE"}
+    if missing_keys := (required - os.environ.keys()):
+        for missing_key in missing_keys:
+            print(f"`{missing_key}` is a mandatory environment variable.")
+        return RC_FAILURE
+
+    # Branch name: substitute $VERSION, or fall back to the default name.
+    branch_name = os.environ["BRANCH_NAME"]
+    if branch_name:
+        version = os.environ["VERSION"]
+        computed_branch_name = branch_name.replace("$VERSION", version)
+    else:
+        computed_branch_name = os.environ["DEFAULT_BRANCH_NAME"]
+    with open(os.environ["GITHUB_ENV"], "a") as file:
+        file.write(f"COMPUTED_BRANCH_NAME={computed_branch_name}\n")
+
+    # PR title: substitute $VERSION, or fall back to the default title.
+    pr_title = os.environ["PR_TITLE"]
+    if pr_title:
+        version = os.environ["VERSION"]
+        computed_pr_title = pr_title.replace("$VERSION", version)
+    else:
+        computed_pr_title = os.environ["DEFAULT_PR_TITLE"]
+    with open(os.environ["GITHUB_ENV"], "a") as file:
+        file.write(f"COMPUTED_PR_TITLE={computed_pr_title}\n")
+
+    return RC_SUCCESS
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/src/workflow_assets/create_pr/config-schema.json b/src/workflow_assets/create_pr/config-schema.json
new file mode 100644
index 0000000..a2b5d72
--- /dev/null
+++ b/src/workflow_assets/create_pr/config-schema.json
@@ -0,0 +1,77 @@
+{
+  "$schema": "https://json-schema.org/draft-07/schema",
+  "type": "object",
+  "required": [
+    "tox-label-create-changes"
+  ],
+  "properties": {
+    "python-version": {
+      "description": "The Python version to use when running tox.",
+      "type": "string",
+      "default": "3.13"
+    },
+    "tox-label-create-changes": {
+      "description": "The tox label (passed using the `-m` argument) to run before creating the commit. Two environment variables will be set: `VERSION` and `PR_BODY_OUTPUT_PATH`. 
After running tox, all file changes shown in `git status` will be included in the commit.", + "type": "string", + "minLength": 1, + "examples": [ + "update", + "prep-release" + ] + }, + "checkout-branch": { + "description": "The name of the branch to initially checkout.", + "type": "string", + "minLength": 1, + "default": "main" + }, + "branch-name": { + "description": "The name of the branch to create. If a `version` input is passed to the workflow, it can be referenced as `$VERSION`.", + "type": "string", + "minLength": 1, + "default": "updates", + "examples": [ + "release/$VERSION" + ] + }, + "commit-title": { + "description": "The one-line commit message to use.", + "type": "string", + "minLength": 1, + "default": "Updates" + }, + "pr-base": { + "description": "The name of the branch to merge to in the PR. This appears as the 'base' in the GitHub UI.", + "type": "string", + "minLength": 1, + "default": "main" + }, + "pr-title": { + "description": "The title of the PR. If a `version` input is passed to the workflow, it can be referenced as `$VERSION`.", + "type": "string", + "minLength": 1, + "default": "Updates", + "examples": [ + "Release v$VERSION" + ] + }, + "pr-body": { + "description": "The body of the PR. If a `version` input is passed to the workflow, it can be referenced as `$VERSION`.", + "type": "string", + "minLength": 1, + "default": "", + "examples": [ + "Scheduled updates." + ] + }, + "pr-body-input-format": { + "description": "The format of the PR body fragment generated by the tox label that prepares releases. 
The value must exactly match the `--from` argument values that pandoc supports, but currently only 'gfm' and 'rst' are allowed.", + "type": "string", + "enum": [ + "gfm", + "rst" + ], + "default": "rst" + } + } +} diff --git a/src/workflow_assets/create_pr/create_commit_request_body.py b/src/workflow_assets/create_pr/create_commit_request_body.py new file mode 100644 index 0000000..f4e3db3 --- /dev/null +++ b/src/workflow_assets/create_pr/create_commit_request_body.py @@ -0,0 +1,110 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import base64 +import json +import os +import pathlib +import re +import subprocess +import sys +import typing + +RC_SUCCESS = 0 +RC_FAILURE = 1 + +mandatory_environment_variables = { + "BRANCH_NAME", + "COMMIT_TITLE", + "GITHUB_REPOSITORY", + "GITHUB_SHA", + "OUTPUT_FILE", +} + + +def main() -> int: + # Ensure mandatory environment variables are present. + if missing_keys := (mandatory_environment_variables - os.environ.keys()): + for missing_key in missing_keys: + print(f"`{missing_key}` is a mandatory environment variable.") + return RC_FAILURE + + # Calculate file changes (and exit if there are none). + file_changes = calculate_file_changes() + if not file_changes: + print("No file changes detected.") + return RC_FAILURE + + request_body = generate_request_body(file_changes) + + output_file = os.environ["OUTPUT_FILE"] + if output_file == "-": + print(json.dumps(request_body, indent=2)) + else: + with open(output_file, "w") as file: + file.write(json.dumps(request_body)) + + return RC_SUCCESS + + +def generate_request_body(file_changes: dict[str, typing.Any]) -> dict[str, typing.Any]: + query = """ + mutation ($input:CreateCommitOnBranchInput!) 
{ + createCommitOnBranch(input: $input) { + commit { oid } + } + } + """ + + return { + "query": " ".join(query.split()), + "variables": { + "input": { + "branch": { + "branchName": inject_version(os.environ["BRANCH_NAME"]), + "repositoryNameWithOwner": os.environ["GITHUB_REPOSITORY"], + }, + "expectedHeadOid": os.environ["GITHUB_SHA"], + "fileChanges": file_changes, + "message": { + "headline": inject_version(os.environ["COMMIT_TITLE"]), + }, + }, + }, + } + + +def inject_version(text: str) -> str: + version = os.getenv("VERSION") or "VERSION_NOT_FOUND" + return re.sub(r"\$version", version, text, flags=re.I) + + +def calculate_file_changes() -> dict[str, list[dict[str, str]]]: + cmd = "git status --no-renames --porcelain" + + additions: list[dict[str, str]] = [] + deletions: list[dict[str, str]] = [] + + for line in subprocess.check_output(cmd.split()).decode().splitlines(): + path = pathlib.Path(line[3:]) + + target = deletions + info = {"path": path.as_posix()} + if path.is_file(): + target = additions + info["contents"] = base64.b64encode(path.read_bytes()).decode() + target.append(info) + + file_changes = {} + if additions: + file_changes["additions"] = additions + if deletions: + file_changes["deletions"] = deletions + return file_changes + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/src/workflow_assets/create_tag_and_release/get_tag_name.py b/src/workflow_assets/create_tag_and_release/get_tag_name.py new file mode 100644 index 0000000..4389919 --- /dev/null +++ b/src/workflow_assets/create_tag_and_release/get_tag_name.py @@ -0,0 +1,23 @@ +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import os +import pathlib +import tomllib + + +def main() -> None: + toml = tomllib.loads(pathlib.Path("pyproject.toml").read_text()) + version = toml["project"]["version"] + with open(os.environ["GITHUB_ENV"], "a") as file: + file.write(f"TAG_NAME=v{version}\n") + with open(os.environ["GITHUB_OUTPUT"], "a") as file: + file.write(f"project-version={version}\n") + file.write(f"tag-name=v{version}\n") + + +if __name__ == "__main__": + main() diff --git a/src/workflow_assets/create_tag_and_release/validate_repo_state.py b/src/workflow_assets/create_tag_and_release/validate_repo_state.py new file mode 100644 index 0000000..b104d25 --- /dev/null +++ b/src/workflow_assets/create_tag_and_release/validate_repo_state.py @@ -0,0 +1,103 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import os +import subprocess +import sys +import typing as t + + +def main() -> None: + tag_name = os.environ["TAG_NAME"] + head_sha = _get_head_sha() + existing_tag_sha = _get_existing_tag_sha(tag_name) + if existing_tag_sha is None: + tag_exists = False + elif existing_tag_sha == head_sha: + tag_exists = True + else: + msg = ( + f"The {tag_name} tag commit SHA ({existing_tag_sha})" + f" doesn't match HEAD ({head_sha})." + ) + exit_with_error(msg) + + with open(os.environ["GITHUB_OUTPUT"], "a") as file: + file.write(f"tag-exists={str(tag_exists).lower()}\n") + + +def _get_head_sha() -> str: + """Get the SHA of HEAD.""" + + _, stdout, _ = _run_command("git", "rev-parse", "HEAD") + return stdout.strip() + + +def _get_existing_tag_sha(tag_name: str) -> str | None: + """Validate the project version and git repo state are compatible. 
+ + "Compatibility" is defined as one of: + + * The project version has no corresponding git tag ref. + * A git tag ref exists for the project version, + and its commit SHA matches the SHA currently checked out in HEAD. + """ + + # Check if a tag exists. + cmd = ("git", "rev-list", "-n", "1", f"tags/{tag_name}", "--") + rc, stdout, stderr = _run_command(*cmd) + if rc == 128 and "bad revision" in stderr: + # The tag doesn't exist locally. This is the expected case. + return None + + # The output must be a commit SHA. + tag_commit_sha = stdout.strip() + try: + int(tag_commit_sha, base=16) + except ValueError: + msg = "Something unexpected happened." + exit_with_error(msg, rc, stdout, stderr) + + # A git tag already exists. + return tag_commit_sha + + +def _run_command(*args: str, timeout: int = 10) -> tuple[int, str, str]: + """Run a command.""" + + process = subprocess.Popen( + args=args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf-8", + ) + try: + stdout, stderr = process.communicate(timeout=timeout) + except subprocess.TimeoutExpired: + process.kill() + stdout, stderr = process.communicate() + + return process.returncode, stdout, stderr + + +def exit_with_error( + msg: str, + rc: int | None = None, + stdout: str | None = None, + stderr: str | None = None, +) -> t.NoReturn: + print(f"::error::{msg}", file=sys.stderr) + if rc is not None: + print(f"Return code:\n{rc}", file=sys.stderr) + if stdout is not None: + print(f"STDOUT:\n{stdout}", file=sys.stderr) + if stderr is not None: + print(f"STDERR:\n{stderr}", file=sys.stderr) + raise SystemExit(1) + + +if __name__ == "__main__": + main() diff --git a/src/workflow_assets/tox/config-schema.json b/src/workflow_assets/tox/config-schema.json new file mode 100644 index 0000000..8489cac --- /dev/null +++ b/src/workflow_assets/tox/config-schema.json @@ -0,0 +1,162 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "required": [ + "runner" + ], + "properties": { + 
"runner": { + "type": "string", + "minLength": 1 + }, + "timeout-minutes": { + "type": "integer", + "minimum": 1 + }, + "tox-environments": { + "description": "A list of tox environments to run.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "tox-skip-environments-regex": { + "description": "A regular expression matching tox environments to skip.", + "type": "string", + "format": "regex", + "minLength": 1 + }, + "tox-skip-environments": { + "description": "A list of tox environments to skip.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "tox-environments-from-pythons": { + "description": "Generate a list of tox environments from the list of all configured Python interpreters.", + "type": "boolean", + "enum": [true] + }, + "tox-factors": { + "description": "A list of factors to append to the generated names of tox environments.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "tox-pre-environments": { + "description": "A list of tox environments to run before all installed Python interpreter versions.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "tox-post-environments": { + "description": "A list of tox environments to run after all installed Python interpreter versions.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "cpythons": { + "description": "A list of CPython interpreter versions. The *last version* listed will be the default Python interpreter when 'python' is invoked, and will be the version used when installing and executing tox.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "cpython-beta": { + "description": "A CPython version to install as a beta. 
A beta CPython interpreter will never be the default Python interpreter.", + "type": "string", + "minLength": 3 + }, + "pypys": { + "description": "A list of PyPy interpreter versions. PyPy interpreters will never be the default Python interpreter.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "cache-key-prefix": { + "description": "A prefix to use with the cached environment key.", + "type": "string", + "minLength": 1, + "default": "tox" + }, + "cache-key-hash-files": { + "description": "An additional path pattern that will be added to the list of paths to include when hashing files for cache-busting.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + }, + "cache-paths": { + "description": "Additional paths to cache. Any paths specified here will be added to the default list: '.venv/' and '.tox/'.", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "minLength": 1 + } + } + }, + "allOf": [ + { + "description": "At least one Python interpreter must be specified.", + "anyOf": [ + {"required": ["cpythons"]}, + {"required": ["cpython-beta"]}, + {"required": ["pypys"]} + ] + }, + { + "description": "If tox-environments is specified, many other keys must not be specified.", + "if": {"required": ["tox-environments"]}, + "then": { + "allOf": [ + { + "description": "tox-environments is mutually exclusive with tox-environments-from-pythons.", + "not": {"required": ["tox-environments-from-pythons"]} + }, + { + "description": "tox-environments is mutually exclusive with tox-factors.", + "not": {"required": ["tox-factors"]} + }, + { + "description": "tox-environments is mutually exclusive with tox-pre-environments.", + "not": {"required": ["tox-pre-environments"]} + }, + { + "description": "tox-environments is mutually exclusive with tox-post-environments.", + "not": {"required": ["tox-post-environments"]} + }, + { + "description": "tox-environments is mutually 
exclusive with tox-skip-environments.", + "not": {"required": ["tox-skip-environments"]} + }, + { + "description": "tox-environments is mutually exclusive with tox-skip-environments-regex.", + "not": {"required": ["tox-skip-environments-regex"]} + } + ] + } + } + ] +} diff --git a/src/workflow_assets/tox/config_transformer.py b/src/workflow_assets/tox/config_transformer.py new file mode 100644 index 0000000..c22809b --- /dev/null +++ b/src/workflow_assets/tox/config_transformer.py @@ -0,0 +1,78 @@ +# This file is a part of the Globus GitHub Workflows project. +# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import json +import os +import pathlib +import re +import typing + + +def transform_config(config: dict[str, typing.Any]) -> None: + # Transform the tox environments for convenience. + # pre- and post-environments will be assembled into "tox-environments", + # together with a full list of CPython and PyPy interpreter versions. + # Since these keys are mutually-exclusive with "tox-environments", + # no config data are lost in this transformation. 
+ tox_factors = config.pop("tox-factors", []) + factors = f"-{'-'.join(tox_factors)}" if tox_factors else "" + cpythons = config.get("cpythons", []) + cpython_beta = config.get("cpython-beta") + pypys = config.get("pypys", []) + + if ( + factors + or config.pop("tox-environments-from-pythons", False) + or {"tox-pre-environments", "tox-post-environments"} & config.keys() + ): + environments = config.pop("tox-pre-environments", []) + environments.extend(f"py{version}{factors}" for version in cpythons) + if cpython_beta is not None: + environments.append(f"py{cpython_beta}{factors}") + environments.extend(f"pypy{version}{factors}" for version in pypys) + environments.extend(config.pop("tox-post-environments", [])) + config["tox-environments"] = environments + + python_versions_requested = [f"pypy{version}" for version in pypys] + if cpython_beta is not None: + python_versions_requested.append(cpython_beta) + python_versions_requested.extend(cpythons) + + # Because tox only offers "best effort" PyPy support, + # and because tox may not support CPython alphas or betas, + # a stable CPython version must be included during initial Python setup. + python_versions_required = python_versions_requested.copy() + if not cpythons: + python_versions_required.append("3.13") + config["python-versions-requested"] = "\n".join(python_versions_requested) + config["python-versions-required"] = "\n".join(python_versions_required) + + # Prepare the environments to skip. 
+    skip_patterns: list[str] = []
+    for environment in config.pop("tox-skip-environments", []):
+        skip_patterns.append(re.escape(environment))
+    skip_patterns.sort()
+    if pattern := config.pop("tox-skip-environments-regex", ""):
+        skip_patterns.append(pattern)
+    config["tox-skip-environments-regex"] = "|".join(skip_patterns)
+
+
+def main() -> None:
+    # Load
+    raw_config_path = pathlib.Path(".tox-config.raw.json")
+    config = json.loads(raw_config_path.read_text())
+
+    # Transform in-place
+    transform_config(config)
+
+    # Write
+    output = json.dumps(config, sort_keys=True, separators=(",", ":"))
+    with open(os.environ["GITHUB_ENV"], "a") as file:
+        # GITHUB_ENV entries are newline-delimited; the trailing newline is
+        # required so that a later append does not merge into this entry.
+        file.write(f"TOX_CONFIG={output}\n")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..8e69a39
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,12 @@
+# This file is a part of the Globus GitHub Workflows project.
+# https://github.com/globus/workflows
+# Copyright 2021-2026 Globus
+# Copyright 2024-2026 Kurt McKee
+# SPDX-License-Identifier: MIT
+
+import pathlib
+import sys
+
+# Add the `src/` directory to the Python path.
+src_path = pathlib.Path(__file__).parent.parent / "src"
+sys.path.append(str(src_path))
diff --git a/tests/create_pr/test_create_commit_request_body.py b/tests/create_pr/test_create_commit_request_body.py
new file mode 100644
index 0000000..9a7c543
--- /dev/null
+++ b/tests/create_pr/test_create_commit_request_body.py
@@ -0,0 +1,50 @@
+# This file is a part of the Globus GitHub Workflows project.
+# https://github.com/globus/workflows
+# Copyright 2021-2026 Globus
+# Copyright 2024-2026 Kurt McKee
+# SPDX-License-Identifier: MIT
+
+import base64
+import textwrap
+import typing
+import unittest.mock
+
+import pytest
+
+from workflow_assets.create_pr import create_commit_request_body
+
+
+@pytest.fixture(autouse=True, scope="session")
+def git_status() -> typing.Iterator[None]:
+    stdout = textwrap.dedent("""\
+        ?? 
new_file + D subdir/deleted_file + M modified_file + """).strip() + with unittest.mock.patch("subprocess.check_output", lambda _: stdout.encode()): + yield + + +def test_generate_file_changes(fs): + fs.create_file("new_file", contents="?" * 1000) + fs.create_file("modified_file", contents=b"\xfe\xef\x00") + file_changes = create_commit_request_body.calculate_file_changes() + + # Ensure that file paths are POSIX-normalized. + assert file_changes["deletions"] == [{"path": "subdir/deleted_file"}] + + # new_file is long; it must be base64-encoded with no newlines. + assert file_changes["additions"][0]["path"] == "new_file" + assert "?" not in file_changes["additions"][0]["contents"] + assert "\n" not in file_changes["additions"][0]["contents"] + + # modified_file is binary; ensure it decodes as expected. + assert file_changes["additions"][1]["path"] == "modified_file" + assert base64.b64decode(file_changes["additions"][1]["contents"]) == b"\xfe\xef\x00" + + +def test_version_injection(monkeypatch): + monkeypatch.setenv("VERSION", "1.2.3") + assert create_commit_request_body.inject_version("v") == "v" + assert create_commit_request_body.inject_version("v$VERSION") == "v1.2.3" + assert create_commit_request_body.inject_version("v$version") == "v1.2.3" diff --git a/tests/create_tag_and_release/test_get_tag_name.py b/tests/create_tag_and_release/test_get_tag_name.py new file mode 100644 index 0000000..5a75bd1 --- /dev/null +++ b/tests/create_tag_and_release/test_get_tag_name.py @@ -0,0 +1,32 @@ +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows
+# Copyright 2021-2026 Globus
+# Copyright 2024-2026 Kurt McKee
+# SPDX-License-Identifier: MIT
+
+import pathlib
+import textwrap
+
+from workflow_assets.create_tag_and_release import get_tag_name
+
+
+def test_get_tag_name(fs, monkeypatch):
+    version = "1.2.3"
+    fs.create_file(
+        "pyproject.toml",
+        contents=textwrap.dedent(f"""
+            [project]
+            version = "{version}"
+        """),
+    )
+    monkeypatch.setenv("GITHUB_ENV", "github-env.txt")
+    monkeypatch.setenv("GITHUB_OUTPUT", "github-output.txt")
+
+    get_tag_name.main()
+
+    envvars = pathlib.Path("github-env.txt").read_text().strip().splitlines()
+    assert f"TAG_NAME=v{version}" in envvars  # with 'v' prefix
+
+    outputs = pathlib.Path("github-output.txt").read_text().strip().splitlines()
+    assert f"project-version={version}" in outputs  # without 'v' prefix
+    assert f"tag-name=v{version}" in outputs  # with 'v' prefix
diff --git a/tests/create_tag_and_release/test_validate_repo_state.py b/tests/create_tag_and_release/test_validate_repo_state.py
new file mode 100644
index 0000000..7bcff5e
--- /dev/null
+++ b/tests/create_tag_and_release/test_validate_repo_state.py
@@ -0,0 +1,153 @@
+# This file is a part of the Globus GitHub Workflows project.
+# https://github.com/globus/workflows
+# Copyright 2021-2026 Globus
+# Copyright 2024-2026 Kurt McKee
+# SPDX-License-Identifier: MIT
+
+import pathlib
+import typing as t
+
+import pytest
+
+from workflow_assets.create_tag_and_release import validate_repo_state
+
+
+def create_runner_mock(
+    config: dict[str, tuple[int, str, str]],
+) -> t.Callable[..., tuple[int, str, str]]:
+    # NOTE: `t.Callable[[str, ...], X]` is invalid -- Ellipsis may not appear
+    # inside the parameter list, and because this annotation is evaluated at
+    # definition time it raised TypeError at import. `Callable[..., X]` is the
+    # correct spelling for a variadic `(*args: str)` callable.
+    """Mock the CLI runner.
+
+    The config keys are arguments found in the CLI runner arguments.
+    For example, the config key "rev-parse" will match the CLI invocation
+    ["git", "rev-parse", ...].
+
+    The config values are tuples representing the exit code, STDOUT, and STDERR. 
+ """ + + def runner_mock(*args: str) -> tuple[int, str, str]: + for key, value in config.items(): + if key in args: + return value + + raise KeyError(f"No args matched the runner mock config: {args}") + + return runner_mock + + +def test_validate_repo_state_common_case(fs, monkeypatch): + """If no tag exists, `tag-exists` must be `false`.""" + + runner_mock = create_runner_mock( + { + "rev-parse": (0, "0000000000000000000000000000000000000000", ""), + "rev-list": ( + 128, + "", + "fatal: bad revision 'tags/v1.2.3'", + ), + } + ) + monkeypatch.setenv("TAG_NAME", "v1.2.3") + monkeypatch.setenv("GITHUB_OUTPUT", "outputs.txt") + monkeypatch.setattr(validate_repo_state, "_run_command", runner_mock) + + validate_repo_state.main() + + assert "tag-exists=false" in pathlib.Path("outputs.txt").read_text().splitlines() + + +def test_validate_repo_state_tag_exists(fs, monkeypatch): + """If a tag exists and matches HEAD, `tag-exists` must be `true`.""" + + runner_mock = create_runner_mock( + { + "rev-parse": (0, "0" * 40, ""), + "rev-list": (0, "0" * 40, ""), + } + ) + monkeypatch.setenv("TAG_NAME", "v1.2.3") + monkeypatch.setenv("GITHUB_OUTPUT", "outputs.txt") + monkeypatch.setattr(validate_repo_state, "_run_command", runner_mock) + + validate_repo_state.main() + + assert "tag-exists=true" in pathlib.Path("outputs.txt").read_text().splitlines() + + +def test_validate_repo_state_tag_does_not_match_head(fs, monkeypatch, capsys): + """If a tag exists and does not match HEAD, an error must be raised.""" + + runner_mock = create_runner_mock( + { + "rev-parse": (0, "0" * 40, ""), + "rev-list": (0, "1" * 40, ""), + } + ) + monkeypatch.setenv("TAG_NAME", "v1.2.3") + monkeypatch.setenv("GITHUB_OUTPUT", "outputs.txt") + monkeypatch.setattr(validate_repo_state, "_run_command", runner_mock) + + with pytest.raises(SystemExit): + validate_repo_state.main() + + assert not pathlib.Path("outputs.txt").is_file() + stdout, stderr = capsys.readouterr() + assert not stdout + msg = ( + 
f"::error::The v1.2.3 tag commit SHA ({'1' * 40})" + f" doesn't match HEAD ({'0' * 40})." + ) + assert msg in stderr + + +def test_unexpected_git_rev_list_behavior(fs, monkeypatch, capsys): + """Verify the unexpected git rev-list behavior. + + If `git` doesn't return exit code 128, but STDOUT doesn't contain a SHA, + then something very wrong has happened. + For example, the current directory might not be a git repository. + """ + + fatal = "fatal: not a git repository (or any of the parent directories): .git" + runner_mock = create_runner_mock( + { + "rev-parse": (128, "", fatal), + "rev-list": (128, "", fatal), + } + ) + monkeypatch.setenv("TAG_NAME", "v1.2.3") + monkeypatch.setenv("GITHUB_OUTPUT", "outputs.txt") + monkeypatch.setattr(validate_repo_state, "_run_command", runner_mock) + + with pytest.raises(SystemExit): + validate_repo_state.main() + + assert not pathlib.Path("outputs.txt").is_file() + + stdout, stderr = capsys.readouterr() + assert not stdout + assert "::error::Something unexpected happened.\n" in stderr + assert "Return code:\n128\n" in stderr + assert "STDOUT:\n\n" in stderr + assert f"STDERR:\n{fatal}" in stderr + + +def test_run_command_success(): + cmd = ("python", "-V") + rc, stdout, stderr = validate_repo_state._run_command(*cmd) + + assert rc == 0 + assert "Python" in stdout + assert stderr == "" + + +def test_run_command_timeout(): + cmd = ("python", "-c", "import time; time.sleep(1)") + rc, stdout, stderr = validate_repo_state._run_command( + *cmd, + timeout=0, + ) + + assert rc != 0 + assert stdout == "" + assert stderr == "" diff --git a/tests/tox/test_config_schema.py b/tests/tox/test_config_schema.py new file mode 100644 index 0000000..e64a99f --- /dev/null +++ b/tests/tox/test_config_schema.py @@ -0,0 +1,170 @@ +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import contextlib +import itertools +import json +import pathlib +import re + +import jsonschema +import pytest + + +@pytest.fixture(scope="session") +def tox_schema(): + root = pathlib.Path(__file__).parent.parent.parent + schema_path = root / "src/workflow_assets/tox/config-schema.json" + tox_schema = json.loads(schema_path.read_text()) + yield jsonschema.Draft7Validator( + schema=tox_schema, + format_checker=jsonschema.FormatChecker(), + ) + + +def test_require_a_python_interpreter(tox_schema): + msg = "At least one Python interpreter must be specified." + with pytest.raises(jsonschema.ValidationError, match=msg): + tox_schema.validate({"runner": "ubuntu-latest"}) + + +@pytest.mark.parametrize("pop_key_1", ("cpythons", "cpython-beta", "pypys", None)) +@pytest.mark.parametrize("pop_key_2", ("cpythons", "cpython-beta", "pypys", None)) +def test_allow_python_interpreter_combinations(tox_schema, pop_key_1, pop_key_2): + """Verify any combination of Python interpreter keys is valid. + + This tests combinations of between 1 and 3 total keys. 
+ """ + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "cpython-beta": "3.13", + "pypys": ["3.10"], + } + if pop_key_1 in config: + config.pop(pop_key_1) + if pop_key_2 in config: + config.pop(pop_key_2) + tox_schema.validate(config) + + +def test_tox_environments_not_required(tox_schema): + tox_schema.validate({"runner": "ubuntu-latest", "cpythons": ["3.12"]}) + + +def test_tox_environments(tox_schema): + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "tox-environments": ["py3.12"], + } + tox_schema.validate(config) + + +@pytest.mark.parametrize( + "pop_key", ("tox-pre-environments", "tox-post-environments", None) +) +def test_allow_tox_pre_post_environments(tox_schema, pop_key): + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "tox-pre-environments": ["pre"], + "tox-post-environments": ["post"], + } + if pop_key in config: + config.pop(pop_key) + tox_schema.validate(config) + + +mutex_keys = ( + "tox-environments-from-pythons", + "tox-factors", + "tox-pre-environments", + "tox-post-environments", + "tox-skip-environments", + "tox-skip-environments-regex", +) +all_mutex_combinations = itertools.chain( + *[itertools.combinations(mutex_keys, r=r) for r in range(len(mutex_keys))] +) + + +@pytest.mark.parametrize("pop_keys", all_mutex_combinations) +def test_tox_environments_mutex(tox_schema, pop_keys): + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "tox-environments": ["in"], + "tox-environments-from-pythons": True, + "tox-factors": ["factor"], + "tox-pre-environments": ["pre"], + "tox-post-environments": ["post"], + "tox-skip-environments": ["skip-literal"], + "tox-skip-environments-regex": "skip-pattern", + } + for pop_key in pop_keys: + config.pop(pop_key) + msg = "tox-environments is mutually exclusive" + with pytest.raises(jsonschema.ValidationError, match=msg): + tox_schema.validate(config) + + +def test_tox_environments_from_pythons_false(tox_schema): + config = { + "runner": 
"ubuntu-latest", + "cpythons": ["3.12"], + "tox-environments-from-pythons": False, + } + msg = re.escape("False is not one of [True]") + with pytest.raises(jsonschema.ValidationError, match=msg): + tox_schema.validate(config) + + +def test_full_config(tox_schema): + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "cpython-beta": "3.13", + "pypys": ["3.10"], + "tox-environments-from-pythons": True, + "tox-factors": ["ci"], + "tox-pre-environments": ["spin-up"], + "tox-post-environments": ["spin-down"], + "tox-skip-environments": ["skip-literal"], + "tox-skip-environments-regex": "skip-pattern", + "cache-key-prefix": "lint", + "cache-key-hash-files": ["mypy.ini", "requirements/*/requirements.txt"], + "cache-key-paths": [".mypy_cache"], + } + tox_schema.validate(config) + + +@pytest.mark.parametrize( + "pattern, context", + ( + ("valid", contextlib.nullcontext()), + ("([{", pytest.raises(jsonschema.ValidationError, match="not a 'regex'")), + ), +) +def test_tox_skip_environments_regex(tox_schema, pattern, context): + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.13"], + "tox-skip-environments-regex": pattern, + } + with context: + tox_schema.validate(config) + + +def test_timeout_minutes_less_than_1(tox_schema): + config = { + "runner": "ubuntu-latest", + "timeout-minutes": 0, + "cpythons": ["3.13"], + } + with pytest.raises(jsonschema.ValidationError, match="less than the minimum of 1"): + tox_schema.validate(config) diff --git a/tests/tox/test_config_transformer.py b/tests/tox/test_config_transformer.py new file mode 100644 index 0000000..08e21b8 --- /dev/null +++ b/tests/tox/test_config_transformer.py @@ -0,0 +1,175 @@ +# This file is a part of the Globus GitHub Workflows project. 
+# https://github.com/globus/workflows +# Copyright 2021-2026 Globus +# Copyright 2024-2026 Kurt McKee +# SPDX-License-Identifier: MIT + +import re + +import pytest + +import workflow_assets.tox.config_transformer + + +def test_tox_pre_post_environments(): + """Verify tox pre- and post- environment keys are transformed.""" + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "cpython-beta": "3.13", + "pypys": ["3.10"], + "tox-pre-environments": ["spin-up"], + "tox-post-environments": ["spin-down"], + "cache-key-prefix": "lint", + "cache-key-hash-files": ["mypy.ini", "requirements/*/requirements.txt"], + "cache-key-paths": [".mypy_cache"], + } + + workflow_assets.tox.config_transformer.transform_config(config) + assert "tox-environments-from-pythons" not in config + assert "tox-factors" not in config + assert "tox-pre-environments" not in config + assert "tox-post-environments" not in config + assert config["tox-environments"] == [ + "spin-up", + "py3.12", + "py3.13", + "pypy3.10", + "spin-down", + ] + + +def test_tox_environments(): + """Verify explicit tox environments are not transformed.""" + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.12"], + "cpython-beta": "3.13", + "pypys": ["3.10"], + "tox-environments": ["a", "c", "b"], + } + + workflow_assets.tox.config_transformer.transform_config(config) + assert "tox-environments-from-pythons" not in config + assert "tox-factors" not in config + assert "tox-pre-environments" not in config + assert "tox-post-environments" not in config + assert config["tox-environments"] == [ + "a", + "c", + "b", + ] + + +def test_tox_pythons_as_environments(): + """Verify Pythons are used to generate a list of tox environments.""" + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.13"], + "cpython-beta": "3.14", + "pypys": ["3.10"], + "tox-environments-from-pythons": True, + } + + workflow_assets.tox.config_transformer.transform_config(config) + assert "tox-environments-from-pythons" not in 
config + assert "tox-factors" not in config + assert "tox-pre-environments" not in config + assert "tox-post-environments" not in config + assert config["tox-environments"] == [ + "py3.13", + "py3.14", + "pypy3.10", + ] + + +def test_tox_factors(): + """Verify factors are only appended to generated tox environment names.""" + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.13"], + "cpython-beta": "3.14", + "pypys": ["3.10"], + "tox-factors": ["a", "b"], + "tox-pre-environments": ["pre"], + "tox-post-environments": ["post"], + } + + workflow_assets.tox.config_transformer.transform_config(config) + assert "tox-environments-from-pythons" not in config + assert "tox-factors" not in config + assert "tox-pre-environments" not in config + assert "tox-post-environments" not in config + assert config["tox-environments"] == [ + "pre", + "py3.13-a-b", + "py3.14-a-b", + "pypy3.10-a-b", + "post", + ] + + +@pytest.mark.parametrize( + "key, value, expected", + ( + ("cpython-beta", "3.14", "3.14"), + ("pypys", ["3.10"], "pypy3.10"), + ), +) +def test_tox_stable_cpython_injection(key, value, expected): + """Verify that a stable CPython version is injected.""" + + config = { + "runner": "ubuntu-latest", + key: value, + } + + workflow_assets.tox.config_transformer.transform_config(config) + assert config["python-versions-requested"] == expected + assert config["python-versions-required"] == expected + "\n3.13" + + +def test_tox_stable_cpython_injection_unnecessary(): + """Verify that no stable CPython is injected when stable CPythons are available.""" + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.13"], + } + + workflow_assets.tox.config_transformer.transform_config(config) + assert config["python-versions-requested"] == "3.13" + assert config["python-versions-required"] == "3.13" + + +@pytest.mark.parametrize( + "strings, pattern, expected", + ( + (["x.y.z", "abc"], None, r"abc|x\.y\.z"), + (None, "mypy-.*", "mypy-.*"), + (["x.y.z", "abc"], "mypy-.*", 
r"abc|x\.y\.z|mypy-.*"), + ), +) +def test_tox_skip_environments(strings, pattern, expected): + """Verify that skipped environments are sorted, escaped, and combined correctly. + + Note that it is expected that the explicit regex pattern will always be at the end; + for visibility it is not sorted in with the list of literal environments. + """ + + config = { + "runner": "ubuntu-latest", + "cpythons": ["3.13"], + } + if strings is not None: + config["tox-skip-environments"] = strings + if pattern is not None: + config["tox-skip-environments-regex"] = pattern + + workflow_assets.tox.config_transformer.transform_config(config) + assert config["tox-skip-environments-regex"] == expected + assert re.compile(config["tox-skip-environments-regex"]) diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..48c5481 --- /dev/null +++ b/tox.ini @@ -0,0 +1,128 @@ +[tox] +min_version = 4.3.5 +envlist = + coverage-erase + test + coverage-report + coverage-html + mypy +labels = + update=update-{pre-commit, requirements} + prep-release=prep-release + +[testenv:base] +description = Base config +base_python = py3.13 +skip_install = true + +[testenv:test] +description = Test the project code +base = base +depends = + coverage-erase +deps = + -r requirements/test/requirements.txt +commands = + coverage run -m pytest + +[testenv:coverage-base] +description = Base config for coverage-* environments +base = base +deps = + -r requirements/test/requirements-coverage.txt + +[testenv:coverage-erase] +description = Erase coverage data +base = coverage-base +commands = + coverage erase + +[testenv:coverage-report] +description = Print a coverage report +base = coverage-base +depends = + test +commands = + coverage report + +[testenv:coverage-html] +description = Generate an HTML coverage report +base = coverage-base +depends = + coverage-report +commands = + coverage html --fail-under=0 + +[testenv:mypy] +description = Test type annotations +base = base +deps = + -r 
requirements/mypy/requirements.txt +commands = + mypy {posargs} + +[testenv:update_base] +description = Base config for update-* environments +base = base +recreate = true +skip_install = true + +[testenv:update-pre-commit] +description = Update pre-commit hooks +base = update_base +deps = + prek + upadup +commands = + prek autoupdate --freeze + upadup + +[testenv:update-requirements] +description = Update requirements files +base = update_base +deps = + poetry + poetry-plugin-export + -r requirements/templating/requirements.txt +commands = + poetry update --directory="requirements/mypy" --lock + poetry export --directory="requirements/mypy" --output="requirements.txt" --without-hashes + poetry update --directory="requirements/test" --lock + poetry export --directory="requirements/test" --output="requirements.txt" --without-hashes + poetry export --directory="requirements/test" --output="requirements-coverage.txt" --without-hashes --only="coverage" + poetry update --directory="requirements/templating" --lock + poetry export --directory="requirements/templating" --output="requirements.txt" --without-hashes + + # Lock requirements files used in workflows. 
+ poetry update --directory="requirements/build" --lock + poetry export --directory="requirements/build" --output="requirements.txt" --without-hashes + poetry update --directory="requirements/check_jsonschema" --lock + poetry export --directory="requirements/check_jsonschema" --output="requirements.txt" --without-hashes + poetry update --directory="requirements/scriv" --lock + poetry export --directory="requirements/scriv" --output="requirements.txt" --without-hashes + poetry update --directory="requirements/tox" --lock + poetry export --directory="requirements/tox" --output="requirements.txt" --without-hashes + poetry update --directory="requirements/uv" --lock + poetry export --directory="requirements/uv" --output="requirements.txt" --without-hashes + + # Update the pre-commit render-* hooks' "additional_dependencies" fields. + python src/update-pre-commit-additional-dependencies.py + + # Re-render the workflow templates, and ignore the return code. + - python src/render-templates.py + +[testenv:prep-release] +description = Make the changes needed to create a new release PR +skip_install = true +deps = + poetry + scriv +pass_env = + VERSION + PR_BODY_OUTPUT_PATH +commands = + # Fail if $VERSION is not set. + python -Ec 'import os; assert (v := os.getenv("VERSION")) is not None, v' + poetry version "{env:VERSION}" + scriv collect + scriv print --version "{env:VERSION}" --output "{env:PR_BODY_OUTPUT_PATH:{env:VERSION}.rst}"