From e253642735e412ba5f2b158b6d2ddd0e793c1994 Mon Sep 17 00:00:00 2001 From: Leandro Lucarella Date: Mon, 31 Mar 2025 14:57:17 +0200 Subject: [PATCH 1/5] Update files to repo-config v0.12 Updates all the files using the migration script from repo-config v0.12. Signed-off-by: Leandro Lucarella --- docs/_scripts/macros.py | 78 ----------------------------------------- mkdocs.yml | 5 ++- pyproject.toml | 1 + 3 files changed, 3 insertions(+), 81 deletions(-) delete mode 100644 docs/_scripts/macros.py diff --git a/docs/_scripts/macros.py b/docs/_scripts/macros.py deleted file mode 100644 index 474c610..0000000 --- a/docs/_scripts/macros.py +++ /dev/null @@ -1,78 +0,0 @@ -# License: MIT -# Copyright © 2024 Frequenz Energy-as-a-Service GmbH - -"""This module defines macros for use in Markdown files.""" - -from typing import Any - -import markdown as md -from markdown.extensions import toc -from mkdocs_macros import plugin as macros - -_CODE_ANNOTATION_MARKER: str = ( - r'' - r'' - r'' - r"" - r"" -) - - -def _slugify(text: str) -> str: - """Slugify a text. - - Args: - text: The text to slugify. - - Returns: - The slugified text. - """ - return toc.slugify_unicode(text, "-") - - -def _hook_macros_plugin(env: macros.MacrosPlugin) -> None: - """Integrate the `mkdocs-macros` plugin into `mkdocstrings`. - - This is a temporary workaround to make `mkdocs-macros` work with - `mkdocstrings` until a proper `mkdocs-macros` *pluglet* is available. See - https://github.com/mkdocstrings/mkdocstrings/issues/615 for details. - - Args: - env: The environment to hook the plugin into. 
- """ - # get mkdocstrings' Python handler - python_handler = env.conf["plugins"]["mkdocstrings"].get_handler("python") - - # get the `update_env` method of the Python handler - update_env = python_handler.update_env - - # override the `update_env` method of the Python handler - def patched_update_env(markdown: md.Markdown, config: dict[str, Any]) -> None: - update_env(markdown, config) - - # get the `convert_markdown` filter of the env - convert_markdown = python_handler.env.filters["convert_markdown"] - - # build a chimera made of macros+mkdocstrings - def render_convert(markdown: str, *args: Any, **kwargs: Any) -> Any: - return convert_markdown(env.render(markdown), *args, **kwargs) - - # patch the filter - python_handler.env.filters["convert_markdown"] = render_convert - - # patch the method - python_handler.update_env = patched_update_env - - -def define_env(env: macros.MacrosPlugin) -> None: - """Define the hook to create macro functions for use in Markdown. - - Args: - env: The environment to define the macro functions in. - """ - # A variable to easily show an example code annotation from mkdocs-material. - # https://squidfunk.github.io/mkdocs-material/reference/code-blocks/#adding-annotations - env.variables["code_annotation_marker"] = _CODE_ANNOTATION_MARKER - - # This hook needs to be done at the end of the `define_env` function. - _hook_macros_plugin(env) diff --git a/mkdocs.yml b/mkdocs.yml index 716684f..584bd04 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -121,10 +121,9 @@ plugins: - https://grpc.github.io/grpc/python/objects.inv - https://typing-extensions.readthedocs.io/en/stable/objects.inv # Note this plugin must be loaded after mkdocstrings to be able to use macros - # inside docstrings. See the comment in `docs/_scripts/macros.py` for more - # details + # inside docstrings. 
- macros: - module_name: docs/_scripts/macros + modules: ["frequenz.repo.config.mkdocs.mkdocstrings_macros"] on_undefined: strict on_error_fail: true - search diff --git a/pyproject.toml b/pyproject.toml index 68a330c..2cfe403 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -164,6 +164,7 @@ disable = [ ] [tool.pytest.ini_options] +addopts = "-W=all -Werror -Wdefault::DeprecationWarning -Wdefault::PendingDeprecationWarning -vv" testpaths = ["tests", "src"] asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" From c5c1893404f24567b40c37f68be83a57cb2cea75 Mon Sep 17 00:00:00 2001 From: Leandro Lucarella Date: Mon, 31 Mar 2025 14:58:36 +0200 Subject: [PATCH 2/5] Fix warning about TestEnv being interpreted as a test The `TestEnv` class actually holds just stuff that is reused in different tests, but `pytest` interprets classes starting with `Test` as test suites. We just rename it to `_TestEnv` to avoid the confusion (and warnings). Signed-off-by: Leandro Lucarella --- tests/test_frequenz_dispatch.py | 28 ++++++++++++++-------------- tests/test_mananging_actor.py | 16 ++++++++-------- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/tests/test_frequenz_dispatch.py b/tests/test_frequenz_dispatch.py index 8a0368d..9c5bed7 100644 --- a/tests/test_frequenz_dispatch.py +++ b/tests/test_frequenz_dispatch.py @@ -55,7 +55,7 @@ def _now() -> datetime: @dataclass(frozen=True) -class TestEnv: +class _TestEnv: """Test environment for the service.""" service: DispatchScheduler @@ -71,7 +71,7 @@ class TestEnv: @fixture -async def test_env() -> AsyncIterator[TestEnv]: +async def test_env() -> AsyncIterator[_TestEnv]: """Return an actor test environment.""" microgrid_id = randint(1, 100) client = FakeClient() @@ -83,7 +83,7 @@ async def test_env() -> AsyncIterator[TestEnv]: service.start() try: - yield TestEnv( + yield _TestEnv( service=service, lifecycle_events=service.new_lifecycle_events_receiver("TEST_TYPE"), running_state_change=await 
service.new_running_state_event_receiver( @@ -103,7 +103,7 @@ def generator() -> DispatchGenerator: async def test_new_dispatch_created( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, ) -> None: """Test that a new dispatch is created.""" @@ -131,7 +131,7 @@ def update_dispatch(sample: BaseDispatch, dispatch: BaseDispatch) -> BaseDispatc async def _test_new_dispatch_created( - test_env: TestEnv, + test_env: _TestEnv, sample: BaseDispatch, ) -> Dispatch: """Test that a new dispatch is created. @@ -159,7 +159,7 @@ async def _test_new_dispatch_created( async def test_existing_dispatch_updated( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -197,7 +197,7 @@ async def test_existing_dispatch_updated( async def test_existing_dispatch_deleted( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -220,7 +220,7 @@ async def test_existing_dispatch_deleted( async def test_dispatch_inf_duration_deleted( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -255,7 +255,7 @@ async def test_dispatch_inf_duration_deleted( async def test_dispatch_inf_duration_updated_stopped_started( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -304,7 +304,7 @@ async def test_dispatch_inf_duration_updated_stopped_started( async def test_dispatch_inf_duration_updated_to_finite_and_stops( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -347,7 +347,7 @@ async def test_dispatch_inf_duration_updated_to_finite_and_stops( async def test_dispatch_schedule( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -385,7 +385,7 @@ async def 
test_dispatch_schedule( async def test_dispatch_inf_duration_updated_to_finite_and_continues( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -432,7 +432,7 @@ async def test_dispatch_inf_duration_updated_to_finite_and_continues( async def test_dispatch_new_but_finished( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: @@ -490,7 +490,7 @@ async def test_dispatch_new_but_finished( async def test_notification_on_actor_start( - test_env: TestEnv, + test_env: _TestEnv, generator: DispatchGenerator, fake_time: time_machine.Coordinates, ) -> None: diff --git a/tests/test_mananging_actor.py b/tests/test_mananging_actor.py index c59d12b..4ac2c3d 100644 --- a/tests/test_mananging_actor.py +++ b/tests/test_mananging_actor.py @@ -93,7 +93,7 @@ async def create_fail( @dataclass -class TestEnv: +class _TestEnv: """Test environment.""" actors_service: ActorDispatcher @@ -109,7 +109,7 @@ def actor(self, identity: int) -> MockActor: @fixture -async def test_env() -> AsyncIterator[TestEnv]: +async def test_env() -> AsyncIterator[_TestEnv]: """Create a test environment.""" channel = Broadcast[Dispatch](name="dispatch ready test channel") @@ -122,7 +122,7 @@ async def test_env() -> AsyncIterator[TestEnv]: actors_service.start() await asyncio.sleep(1) - yield TestEnv( + yield _TestEnv( actors_service=actors_service, running_status_sender=channel.new_sender(), ) @@ -131,7 +131,7 @@ async def test_env() -> AsyncIterator[TestEnv]: async def test_simple_start_stop( - test_env: TestEnv, + test_env: _TestEnv, fake_time: time_machine.Coordinates, ) -> None: """Test behavior when receiving start/stop messages.""" @@ -178,7 +178,7 @@ async def test_simple_start_stop( async def test_start_failed( - test_env: TestEnv, fake_time: time_machine.Coordinates + test_env: _TestEnv, fake_time: time_machine.Coordinates ) -> None: """Test auto-retry after 60 
seconds.""" # pylint: disable=protected-access @@ -218,7 +218,7 @@ async def test_start_failed( assert test_env.actor(1).is_running is True -def test_heapq_dispatch_compare(test_env: TestEnv) -> None: +def test_heapq_dispatch_compare(test_env: _TestEnv) -> None: """Test that the heapq compare function works.""" dispatch1 = test_env.generator.generate_dispatch() dispatch2 = test_env.generator.generate_dispatch() @@ -241,7 +241,7 @@ def test_heapq_dispatch_compare(test_env: TestEnv) -> None: ) -def test_heapq_dispatch_start_stop_compare(test_env: TestEnv) -> None: +def test_heapq_dispatch_start_stop_compare(test_env: _TestEnv) -> None: """Test that the heapq compare function works.""" dispatch1 = test_env.generator.generate_dispatch() dispatch2 = test_env.generator.generate_dispatch() @@ -267,7 +267,7 @@ def test_heapq_dispatch_start_stop_compare(test_env: TestEnv) -> None: assert scheduled_events[1].dispatch_id == dispatch2.id -async def test_dry_run(test_env: TestEnv, fake_time: time_machine.Coordinates) -> None: +async def test_dry_run(test_env: _TestEnv, fake_time: time_machine.Coordinates) -> None: """Test the dry run mode.""" dispatch = test_env.generator.generate_dispatch() dispatch = replace( From 6cb03f9050c002b08111c5d36f503141929ee6d7 Mon Sep 17 00:00:00 2001 From: Leandro Lucarella Date: Mon, 31 Mar 2025 15:04:50 +0200 Subject: [PATCH 3/5] Upgrade to repo-config 0.13.1 We update files using the migration script and we also exclude `frequenz-client-dispatch` from minor updates in dependabot as it is still at v0.x.x. 
Signed-off-by: Leandro Lucarella --- .github/dependabot.yml | 45 +++-- .github/workflows/ci-pr.yaml | 55 +++++++ .github/workflows/ci.yaml | 309 +++++++++++------------------------ pyproject.toml | 14 +- 4 files changed, 192 insertions(+), 231 deletions(-) create mode 100644 .github/workflows/ci-pr.yaml diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 72e4cca..8381a00 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -13,23 +13,44 @@ updates: versioning-strategy: auto # Allow up to 10 open pull requests for updates to dependency versions open-pull-requests-limit: 10 - # We group production and development ("optional" in the context of - # pyproject.toml) dependency updates when they are patch and minor updates, - # so we end up with less PRs being generated. - # Major updates are still managed, but they'll create one PR per - # dependency, as major updates are expected to be breaking, it is better to - # manage them individually. + # We group patch updates as they should always work. + # We also group minor updates, as it works too for most libraries, + # typically except libraries that don't have a stable release yet (v0.x.x + # branch), so we make some exceptions for them. + # Major updates and dependencies excluded by the above groups are still + # managed, but they'll create one PR per dependency, as breakage is + # expected, so it might need manual intervention. + # Finally, we group some dependencies that are related to each other, and + # usually need to be updated together. 
groups: - required: - dependency-type: "production" + patch: update-types: - - "minor" - "patch" - optional: - dependency-type: "development" + exclude-patterns: + # pydoclint has shipped breaking changes in patch updates often + - "pydoclint" + minor: update-types: - "minor" - - "patch" + exclude-patterns: + - "async-solipsism" + - "frequenz-client-dispatch" + - "frequenz-repo-config*" + - "markdown-callouts" + - "mkdocs-gen-files" + - "mkdocs-literate-nav" + - "mkdocstrings*" + - "pydoclint" + - "pytest-asyncio" + # We group repo-config updates as it uses optional dependencies that are + # considered different dependencies otherwise, and will create one PR for + # each if we don't group them. + repo-config: + patterns: + - "frequenz-repo-config*" + mkdocstrings: + patterns: + - "mkdocstrings*" - package-ecosystem: "github-actions" directory: "/" diff --git a/.github/workflows/ci-pr.yaml b/.github/workflows/ci-pr.yaml new file mode 100644 index 0000000..2c74ec0 --- /dev/null +++ b/.github/workflows/ci-pr.yaml @@ -0,0 +1,55 @@ +name: Test PR + +on: + pull_request: + +env: + # Please make sure this version is included in the `matrix`, as the + # `matrix` section can't use `env`, so it must be entered manually + DEFAULT_PYTHON_VERSION: '3.11' + # It would be nice to be able to also define a DEFAULT_UBUNTU_VERSION + # but sadly `env` can't be used either in `runs-on`. 
+ +jobs: + nox: + name: Test with nox + runs-on: ubuntu-24.04 + + steps: + - name: Run nox + uses: frequenz-floss/gh-action-nox@v1.0.0 + with: + python-version: "3.11" + nox-session: ci_checks_max + + test-docs: + name: Test documentation website generation + runs-on: ubuntu-24.04 + steps: + - name: Setup Git + uses: frequenz-floss/gh-action-setup-git@v1.0.0 + + - name: Fetch sources + uses: actions/checkout@v4 + with: + submodules: true + + - name: Setup Python + uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0 + with: + python-version: ${{ env.DEFAULT_PYTHON_VERSION }} + dependencies: .[dev-mkdocs] + + - name: Generate the documentation + env: + MIKE_VERSION: gh-${{ github.job }} + run: | + mike deploy $MIKE_VERSION + mike set-default $MIKE_VERSION + + - name: Upload site + uses: actions/upload-artifact@v4 + with: + name: docs-site + path: site/ + if-no-files-found: error diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b3874fa..55dddda 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,7 +2,6 @@ name: CI on: merge_group: - pull_request: push: # We need to explicitly include tags because otherwise when adding # `branches-ignore` it will only trigger on branches. 
@@ -29,56 +28,27 @@ jobs: strategy: fail-fast: false matrix: + arch: + - amd64 + - arm os: - - ubuntu-20.04 + - ubuntu-24.04 python: - "3.11" + - "3.12" nox-session: # To speed things up a bit we use the special ci_checks_max session # that uses the same venv to run multiple linting sessions - "ci_checks_max" - "pytest_min" - runs-on: ${{ matrix.os }} + runs-on: ${{ matrix.os }}${{ matrix.arch != 'amd64' && format('-{0}', matrix.arch) || '' }} steps: - - name: Print environment (debug) - run: env - - - name: Fetch sources - uses: actions/checkout@v4 - with: - submodules: true - - - name: Set up Python - uses: actions/setup-python@v5 + - name: Run nox + uses: frequenz-floss/gh-action-nox@v1.0.0 with: python-version: ${{ matrix.python }} - cache: 'pip' - - - name: Install required Python packages - run: | - python -m pip install --upgrade pip - python -m pip install -e .[dev-noxfile] - pip freeze - - - name: Create nox venv - env: - NOX_SESSION: ${{ matrix.nox-session }} - run: nox --install-only -e "$NOX_SESSION" - - - name: Print pip freeze for nox venv (debug) - env: - NOX_SESSION: ${{ matrix.nox-session }} - run: | - . ".nox/$NOX_SESSION/bin/activate" - pip freeze - deactivate - - - name: Run nox - env: - NOX_SESSION: ${{ matrix.nox-session }} - run: nox -R -e "$NOX_SESSION" - timeout-minutes: 2 + nox-session: ${{ matrix.nox-session }} # This job runs if all the `nox` matrix jobs ran and succeeded. 
# It is only used to have a single job that we can require in branch @@ -90,151 +60,34 @@ jobs: needs: ["nox"] # We skip this job only if nox was also skipped if: always() && needs.nox.result != 'skipped' - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 env: DEPS_RESULT: ${{ needs.nox.result }} steps: - name: Check matrix job result run: test "$DEPS_RESULT" = "success" - nox-cross-arch: - name: Cross-arch tests with nox - if: github.event_name != 'pull_request' - strategy: - fail-fast: false - # Before adding new items to this matrix, make sure that a dockerfile - # exists for the combination of items in the matrix. - # Refer to .github/containers/nox-cross-arch/README.md to learn how to - # add and name new dockerfiles. - matrix: - arch: - - arm64 - os: - - ubuntu-20.04 - python: - - "3.11" - nox-session: - - "pytest_min" - - "pytest_max" - runs-on: ${{ matrix.os }} - - steps: - - name: Fetch sources - uses: actions/checkout@v4 - with: - submodules: true - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - with: - platforms: linux/${{ matrix.arch }} - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - # This is a workaround to prevent the cache from growing indefinitely. 
- # https://docs.docker.com/build/ci/github-actions/cache/#local-cache - # https://github.com/docker/build-push-action/issues/252 - # https://github.com/moby/buildkit/issues/1896 - - name: Cache container layers - uses: actions/cache@v4 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-buildx-nox-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.python }} - - - name: Build image - uses: docker/build-push-action@v6 - with: - context: .github/containers/nox-cross-arch - file: .github/containers/nox-cross-arch/${{ matrix.arch }}-${{ matrix.os }}-python-${{ matrix.python }}.Dockerfile - platforms: linux/${{ matrix.arch }} - tags: localhost/nox-cross-arch:latest - push: false - load: true - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max - - # Refer to the workaround mentioned above - - name: Move cache - run: | - rm -rf /tmp/.buildx-cache - mv /tmp/.buildx-cache-new /tmp/.buildx-cache - - # Cache pip downloads - - name: Cache pip downloads - uses: actions/cache@v4 - with: - path: /tmp/pip-cache - key: nox-${{ matrix.nox-session }}-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.python }}-${{ hashFiles('pyproject.toml') }} - - # This ensures that the docker container has access to the pip cache. - # Changing the user in the docker-run step causes it to fail due to - # incorrect permissions. Setting the ownership of the pip cache to root - # before running is a workaround to this issue. 
- - name: Set pip cache owners to root for docker - run: if [[ -e /tmp/pip-cache ]]; then sudo chown -R root:root /tmp/pip-cache; fi - - - name: Run nox - run: | - docker run \ - --rm \ - -v $(pwd):/${{ github.workspace }} \ - -v /tmp/pip-cache:/root/.cache/pip \ - -w ${{ github.workspace }} \ - --net=host \ - --platform linux/${{ matrix.arch }} \ - localhost/nox-cross-arch:latest \ - bash -c "pip install -e .[dev-noxfile]; nox --install-only -e ${{ matrix.nox-session }}; pip freeze; nox -R -e ${{ matrix.nox-session }}" - timeout-minutes: 8 - - # This ensures that the runner has access to the pip cache. - - name: Reset pip cache ownership - if: always() - run: sudo chown -R $USER:$USER /tmp/pip-cache - - # This job runs if all the `nox-cross-arch` matrix jobs ran and succeeded. - # As the `nox-all` job, its main purpose is to provide a single point of - # reference in branch protection rules, similar to how `nox-all` operates. - # However, there's a crucial difference: the `nox-cross-arch` job is omitted - # in PRs. Without the `nox-cross-arch-all` job, the inner matrix wouldn't be - # expanded in such scenarios. This would lead to the CI indefinitely waiting - # for these jobs to complete due to the branch protection rules, essentially - # causing it to hang. This behavior is tied to a recognized GitHub matrices - # issue when certain jobs are skipped. For a deeper understanding, refer to: - # https://github.com/orgs/community/discussions/9141 - nox-cross-arch-all: - # The job name should match the name of the `nox-cross-arch` job. 
- name: Cross-arch tests with nox - needs: ["nox-cross-arch"] - # We skip this job only if nox-cross-arch was also skipped - if: always() && needs.nox-cross-arch.result != 'skipped' - runs-on: ubuntu-20.04 - env: - DEPS_RESULT: ${{ needs.nox-cross-arch.result }} - steps: - - name: Check matrix job result - run: test "$DEPS_RESULT" = "success" - build: name: Build distribution packages - runs-on: ubuntu-20.04 + # Since this is a pure Python package, we only need to build it once. If it + # had any architecture specific code, we would need to build it for each + # architecture. + runs-on: ubuntu-24.04 + steps: + - name: Setup Git + uses: frequenz-floss/gh-action-setup-git@v1.0.0 + - name: Fetch sources uses: actions/checkout@v4 with: submodules: true - - name: Set up Python - uses: actions/setup-python@v5 + - name: Setup Python + uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0 with: python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - cache: 'pip' - - - name: Install required Python packages - run: | - python -m pip install -U pip - python -m pip install -U build - pip freeze + dependencies: build - name: Build the source and binary distribution run: python -m build @@ -247,54 +100,92 @@ jobs: if-no-files-found: error test-installation: - name: Test package installation in different architectures + name: Test package installation needs: ["build"] - runs-on: ubuntu-20.04 + strategy: + fail-fast: false + matrix: + arch: + - amd64 + - arm + os: + - ubuntu-24.04 + python: + - "3.11" + - "3.12" + runs-on: ${{ matrix.os }}${{ matrix.arch != 'amd64' && format('-{0}', matrix.arch) || '' }} + steps: - - name: Fetch sources - uses: actions/checkout@v4 + - name: Setup Git + uses: frequenz-floss/gh-action-setup-git@v1.0.0 + + - name: Print environment (debug) + run: env + - name: Download package uses: actions/download-artifact@v4 with: name: dist-packages path: dist - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up docker-buildx - uses: 
docker/setup-buildx-action@v3 - - name: Test Installation - uses: docker/build-push-action@v6 + + # This is necessary for the `pip` caching in the setup-python action to work + - name: Fetch the pyproject.toml file for this action hash + env: + GH_TOKEN: ${{ github.token }} + REPO: ${{ github.repository }} + REF: ${{ github.sha }} + run: | + set -ux + gh api \ + -X GET \ + -H "Accept: application/vnd.github.raw" \ + "/repos/$REPO/contents/pyproject.toml?ref=$REF" \ + > pyproject.toml + + - name: Setup Python + uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0 with: - context: . - file: .github/containers/test-installation/Dockerfile - platforms: linux/amd64,linux/arm64 - tags: localhost/test-installation - push: false + python-version: ${{ matrix.python }} + dependencies: dist/*.whl + + - name: Print installed packages (debug) + run: python -m pip freeze + + # This job runs if all the `test-installation` matrix jobs ran and succeeded. + # It is only used to have a single job that we can require in branch + # protection rules, so we don't have to update the protection rules each time + # we add or remove a job from the matrix. + test-installation-all: + # The job name should match the name of the `test-installation` job. 
+ name: Test package installation + needs: ["test-installation"] + # We skip this job only if test-installation was also skipped + if: always() && needs.test-installation.result != 'skipped' + runs-on: ubuntu-24.04 + env: + DEPS_RESULT: ${{ needs.test-installation.result }} + steps: + - name: Check matrix job result + run: test "$DEPS_RESULT" = "success" test-docs: name: Test documentation website generation if: github.event_name != 'push' - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: + - name: Setup Git + uses: frequenz-floss/gh-action-setup-git@v1.0.0 + - name: Fetch sources uses: actions/checkout@v4 with: submodules: true - - name: Setup Git user and e-mail - uses: frequenz-floss/setup-git-user@v2 - - - name: Set up Python - uses: actions/setup-python@v5 + - name: Setup Python + uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0 with: python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - cache: 'pip' - - - name: Install build dependencies - run: | - python -m pip install -U pip - python -m pip install .[dev-mkdocs] - pip freeze + dependencies: .[dev-mkdocs] - name: Generate the documentation env: @@ -312,31 +203,25 @@ jobs: publish-docs: name: Publish documentation website to GitHub pages - needs: ["nox-all", "nox-cross-arch-all", "test-installation"] + needs: ["nox-all", "test-installation-all"] if: github.event_name == 'push' - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 permissions: contents: write steps: + - name: Setup Git + uses: frequenz-floss/gh-action-setup-git@v1.0.0 + - name: Fetch sources uses: actions/checkout@v4 with: submodules: true - - name: Setup Git user and e-mail - uses: frequenz-floss/setup-git-user@v2 - - - name: Set up Python - uses: actions/setup-python@v5 + - name: Setup Python + uses: frequenz-floss/gh-action-setup-python-with-deps@v1.0.0 with: python-version: ${{ env.DEFAULT_PYTHON_VERSION }} - cache: 'pip' - - - name: Install build dependencies - run: | - python -m pip install -U pip - python -m pip install 
.[dev-mkdocs] - pip freeze + dependencies: .[dev-mkdocs] - name: Calculate and check version id: mike-version @@ -391,7 +276,7 @@ jobs: # discussions to create the release announcement in the discussion forums contents: write discussions: write - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: Download distribution files uses: actions/download-artifact@v4 @@ -433,7 +318,7 @@ jobs: publish-to-pypi: name: Publish packages to PyPI needs: ["create-github-release"] - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 permissions: # For trusted publishing. See: # https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/ diff --git a/pyproject.toml b/pyproject.toml index 2cfe403..40dac1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ requires = [ "setuptools == 75.8.0", "setuptools_scm[toml] == 8.1.0", - "frequenz-repo-config[lib] == 0.11.0", + "frequenz-repo-config[lib] == 0.13.1", ] build-backend = "setuptools.build_meta" @@ -34,7 +34,7 @@ classifiers = [ ] requires-python = ">= 3.11, < 4" dependencies = [ - "typing-extensions >= 4.11.0, < 5.0.0", + "typing-extensions >= 4.13.0, < 5.0.0", # Make sure to update the version for cross-referencing also in the # mkdocs.yml file when changing the version here (look for the config key # plugins.mkdocstrings.handlers.python.import) @@ -65,9 +65,9 @@ dev-mkdocs = [ "mkdocs-literate-nav == 0.6.1", "mkdocs-macros-plugin == 1.3.7", "mkdocs-material == 9.6.1", - "mkdocstrings[python] == 0.27.0", - "mkdocstrings-python == 1.13.0", - "frequenz-repo-config[lib] == 0.11.0", + "mkdocstrings[python] == 0.29.0", + "mkdocstrings-python == 1.16.8", + "frequenz-repo-config[lib] == 0.13.1", ] dev-mypy = [ "mypy == 1.14.1", @@ -80,7 +80,7 @@ dev-mypy = [ dev-noxfile = [ "uv == 0.5.26", "nox == 2025.2.9", - "frequenz-repo-config[lib] == 0.11.0", + "frequenz-repo-config[lib] == 0.13.1", ] dev-pylint = [ "pylint == 3.3.4", @@ -89,7 +89,7 @@ dev-pylint = [ ] dev-pytest = [ "pytest == 8.3.4", - 
"frequenz-repo-config[extra-lint-examples] == 0.11.0", + "frequenz-repo-config[extra-lint-examples] == 0.13.1", "pytest-mock == 3.14.0", "pytest-asyncio == 0.25.3", "async-solipsism == 0.7", From 6d82d88e262131282028bdedfc632507800c1337 Mon Sep 17 00:00:00 2001 From: Leandro Lucarella Date: Mon, 31 Mar 2025 15:10:52 +0200 Subject: [PATCH 4/5] Update release notes Signed-off-by: Leandro Lucarella --- RELEASE_NOTES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index d4546f4..3c29f0f 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -6,7 +6,7 @@ ## Upgrading - +- Some minimal dependencies were updated, so you might need to adjust your project's dependencies too. ## New Features From 0aabe656fac842fa3ee56e90ac054bc9ab8e1ac5 Mon Sep 17 00:00:00 2001 From: Leandro Lucarella Date: Mon, 31 Mar 2025 15:21:20 +0200 Subject: [PATCH 5/5] Disable treating warnings as errors in pytest Ideally we should treat warnings as errors, but we have too many warnings coming from external dependencies that are hard to address here. Signed-off-by: Leandro Lucarella --- pyproject.toml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 40dac1e..12a173c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -164,7 +164,11 @@ disable = [ ] [tool.pytest.ini_options] -addopts = "-W=all -Werror -Wdefault::DeprecationWarning -Wdefault::PendingDeprecationWarning -vv" +# Ideally we should treat warnings as errors, but we have too many warnings +# coming from external dependencies that are hard to address here. Once this is +# fixed, we can add the following options: +# -Werror -Wdefault::DeprecationWarning -Wdefault::PendingDeprecationWarning +addopts = "-W=all -vv" testpaths = ["tests", "src"] asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function"