From 3f0a96392354dea6f8839cf4d68c49ac9e00a24c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 28 Oct 2025 16:54:45 -0500 Subject: [PATCH 01/93] Increment version to 3.13.3.dev0 Signed-off-by: J. Nick Koston --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 396d5024dd6..7200d24dd1c 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.13.2" +__version__ = "3.13.3.dev0" from typing import TYPE_CHECKING, Tuple From 881530d44f4939985258aa5541dc088c6ecbce6f Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 28 Oct 2025 23:41:40 +0100 Subject: [PATCH 02/93] [PR #11643 backport][3.14] Move dependency metadata from `setup.cfg` to `pyproject.toml` (#11736) This is a backport of PR https://github.com/aio-libs/aiohttp/pull/11643 as merged into master (https://github.com/aio-libs/aiohttp/commit/e1aec0ac94277a8b67092293aeac3c19e17fdd86). Modified the backport to include `attrs` again as present on the 3.14 branch. 
--- .github/workflows/ci-cd.yml | 2 +- CHANGES/11643.packaging.rst | 2 ++ Makefile | 2 +- pyproject.toml | 20 ++++++++++++++++++-- requirements/runtime-deps.in | 6 +++--- requirements/sync-direct-runtime-deps.py | 22 ++++++++++++++-------- setup.cfg | 18 ------------------ 7 files changed, 39 insertions(+), 33 deletions(-) create mode 100644 CHANGES/11643.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 83003b1eaa8..757cfab769f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -37,7 +37,7 @@ jobs: submodules: true - name: >- Verify that `requirements/runtime-deps.in` - is in sync with `setup.cfg` + is in sync with `pyproject.toml` run: | set -eEuo pipefail make sync-direct-runtime-deps diff --git a/CHANGES/11643.packaging.rst b/CHANGES/11643.packaging.rst new file mode 100644 index 00000000000..8ef91a18788 --- /dev/null +++ b/CHANGES/11643.packaging.rst @@ -0,0 +1,2 @@ +Moved dependency metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` +-- by :user:`cdce8p`. diff --git a/Makefile b/Makefile index af5f7146716..29dd75cd53c 100644 --- a/Makefile +++ b/Makefile @@ -179,5 +179,5 @@ install-dev: .develop .PHONY: sync-direct-runtime-deps sync-direct-runtime-deps: - @echo Updating 'requirements/runtime-deps.in' from 'setup.cfg'... >&2 + @echo Updating 'requirements/runtime-deps.in' from 'pyproject.toml'... 
>&2 @python requirements/sync-direct-runtime-deps.py diff --git a/pyproject.toml b/pyproject.toml index fa6da9c82dd..7856bf4b326 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,12 +32,28 @@ classifiers = [ "Topic :: Internet :: WWW/HTTP", ] requires-python = ">= 3.10" +dependencies = [ + "aiohappyeyeballs >= 2.5.0", + "aiosignal >= 1.4.0", + "async-timeout >= 4.0, < 6.0 ; python_version < '3.11'", + "attrs >= 17.3.0", + "frozenlist >= 1.1.1", + "multidict >=4.5, < 7.0", + "propcache >= 0.2.0", + "yarl >= 1.17.0, < 2.0", +] dynamic = [ - "dependencies", - "optional-dependencies", "version", ] +[project.optional-dependencies] +speedups = [ + "aiodns >= 3.3.0", + "Brotli; platform_python_implementation == 'CPython'", + "brotlicffi; platform_python_implementation != 'CPython'", + "backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14'", +] + [[project.maintainers]] name = "aiohttp team" email = "team@aiohttp.org" diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index ad8f28e750d..95db17e158d 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,11 +1,11 @@ -# Extracted from `setup.cfg` via `make sync-direct-runtime-deps` +# Extracted from `pyproject.toml` via `make sync-direct-runtime-deps` aiodns >= 3.3.0 aiohappyeyeballs >= 2.5.0 aiosignal >= 1.4.0 -async-timeout >= 4.0, < 6.0 ; python_version < "3.11" +async-timeout >= 4.0, < 6.0 ; python_version < '3.11' attrs >= 17.3.0 -backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" +backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14' Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 diff --git a/requirements/sync-direct-runtime-deps.py b/requirements/sync-direct-runtime-deps.py index adc28bdd287..dbe445383c1 100755 --- a/requirements/sync-direct-runtime-deps.py +++ 
b/requirements/sync-direct-runtime-deps.py @@ -1,16 +1,22 @@ #!/usr/bin/env python -"""Sync direct runtime dependencies from setup.cfg to runtime-deps.in.""" +"""Sync direct runtime dependencies from pyproject.toml to runtime-deps.in.""" -from configparser import ConfigParser +import sys from pathlib import Path -cfg = ConfigParser() -cfg.read(Path("setup.cfg")) -reqs = cfg["options"]["install_requires"] + cfg.items("options.extras_require")[0][1] -reqs = sorted(reqs.split("\n"), key=str.casefold) -reqs.remove("") +if sys.version_info >= (3, 11): + import tomllib +else: + raise RuntimeError("Use Python 3.11+ to run 'make sync-direct-runtime-deps'") + +data = tomllib.loads(Path("pyproject.toml").read_text()) +reqs = ( + data["project"]["dependencies"] + + data["project"]["optional-dependencies"]["speedups"] +) +reqs = sorted(reqs, key=str.casefold) with open(Path("requirements", "runtime-deps.in"), "w") as outfile: - header = "# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`\n\n" + header = "# Extracted from `pyproject.toml` via `make sync-direct-runtime-deps`\n\n" outfile.write(header) outfile.write("\n".join(reqs) + "\n") diff --git a/setup.cfg b/setup.cfg index 4e49e33f304..b1bf1464a11 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,21 +1,3 @@ -[options] -install_requires = - aiohappyeyeballs >= 2.5.0 - aiosignal >= 1.4.0 - async-timeout >= 4.0, < 6.0 ; python_version < "3.11" - attrs >= 17.3.0 - frozenlist >= 1.1.1 - multidict >=4.5, < 7.0 - propcache >= 0.2.0 - yarl >= 1.17.0, < 2.0 - -[options.extras_require] -speedups = - aiodns >= 3.3.0 - Brotli; platform_python_implementation == 'CPython' - brotlicffi; platform_python_implementation != 'CPython' - backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" - [pep8] max-line-length=79 From 821122025a47f5895303b47e74ce0e3d5511edfa Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 28 Oct 2025 23:47:11 +0100 Subject: [PATCH 
03/93] [PR #11643 backport][3.13] Move dependency metadata from `setup.cfg` to `pyproject.toml` (#11735) This is a backport of PR https://github.com/aio-libs/aiohttp/pull/11643 as merged into master (https://github.com/aio-libs/aiohttp/commit/e1aec0ac94277a8b67092293aeac3c19e17fdd86). Modified the backport to include `attrs` again as present on the 3.13 branch. --- .github/workflows/ci-cd.yml | 2 +- CHANGES/11643.packaging.rst | 2 ++ Makefile | 2 +- pyproject.toml | 20 ++++++++++++++++++-- requirements/runtime-deps.in | 6 +++--- requirements/sync-direct-runtime-deps.py | 22 ++++++++++++++-------- setup.cfg | 18 ------------------ 7 files changed, 39 insertions(+), 33 deletions(-) create mode 100644 CHANGES/11643.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e8f51219d03..104a80f26f1 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -37,7 +37,7 @@ jobs: submodules: true - name: >- Verify that `requirements/runtime-deps.in` - is in sync with `setup.cfg` + is in sync with `pyproject.toml` run: | set -eEuo pipefail make sync-direct-runtime-deps diff --git a/CHANGES/11643.packaging.rst b/CHANGES/11643.packaging.rst new file mode 100644 index 00000000000..8ef91a18788 --- /dev/null +++ b/CHANGES/11643.packaging.rst @@ -0,0 +1,2 @@ +Moved dependency metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` +-- by :user:`cdce8p`. diff --git a/Makefile b/Makefile index cf621705e2e..893565cc4e0 100644 --- a/Makefile +++ b/Makefile @@ -189,5 +189,5 @@ install-dev: .develop .PHONY: sync-direct-runtime-deps sync-direct-runtime-deps: - @echo Updating 'requirements/runtime-deps.in' from 'setup.cfg'... >&2 + @echo Updating 'requirements/runtime-deps.in' from 'pyproject.toml'... 
>&2 @python requirements/sync-direct-runtime-deps.py diff --git a/pyproject.toml b/pyproject.toml index 1cbfe81138e..8d5719fc871 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,12 +33,28 @@ classifiers = [ "Topic :: Internet :: WWW/HTTP", ] requires-python = ">= 3.9" +dependencies = [ + "aiohappyeyeballs >= 2.5.0", + "aiosignal >= 1.4.0", + "async-timeout >= 4.0, < 6.0 ; python_version < '3.11'", + "attrs >= 17.3.0", + "frozenlist >= 1.1.1", + "multidict >=4.5, < 7.0", + "propcache >= 0.2.0", + "yarl >= 1.17.0, < 2.0", +] dynamic = [ - "dependencies", - "optional-dependencies", "version", ] +[project.optional-dependencies] +speedups = [ + "aiodns >= 3.3.0", + "Brotli; platform_python_implementation == 'CPython'", + "brotlicffi; platform_python_implementation != 'CPython'", + "backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14'", +] + [[project.maintainers]] name = "aiohttp team" email = "team@aiohttp.org" diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index ad8f28e750d..95db17e158d 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,11 +1,11 @@ -# Extracted from `setup.cfg` via `make sync-direct-runtime-deps` +# Extracted from `pyproject.toml` via `make sync-direct-runtime-deps` aiodns >= 3.3.0 aiohappyeyeballs >= 2.5.0 aiosignal >= 1.4.0 -async-timeout >= 4.0, < 6.0 ; python_version < "3.11" +async-timeout >= 4.0, < 6.0 ; python_version < '3.11' attrs >= 17.3.0 -backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" +backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14' Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 diff --git a/requirements/sync-direct-runtime-deps.py b/requirements/sync-direct-runtime-deps.py index adc28bdd287..dbe445383c1 100755 --- a/requirements/sync-direct-runtime-deps.py +++ 
b/requirements/sync-direct-runtime-deps.py @@ -1,16 +1,22 @@ #!/usr/bin/env python -"""Sync direct runtime dependencies from setup.cfg to runtime-deps.in.""" +"""Sync direct runtime dependencies from pyproject.toml to runtime-deps.in.""" -from configparser import ConfigParser +import sys from pathlib import Path -cfg = ConfigParser() -cfg.read(Path("setup.cfg")) -reqs = cfg["options"]["install_requires"] + cfg.items("options.extras_require")[0][1] -reqs = sorted(reqs.split("\n"), key=str.casefold) -reqs.remove("") +if sys.version_info >= (3, 11): + import tomllib +else: + raise RuntimeError("Use Python 3.11+ to run 'make sync-direct-runtime-deps'") + +data = tomllib.loads(Path("pyproject.toml").read_text()) +reqs = ( + data["project"]["dependencies"] + + data["project"]["optional-dependencies"]["speedups"] +) +reqs = sorted(reqs, key=str.casefold) with open(Path("requirements", "runtime-deps.in"), "w") as outfile: - header = "# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`\n\n" + header = "# Extracted from `pyproject.toml` via `make sync-direct-runtime-deps`\n\n" outfile.write(header) outfile.write("\n".join(reqs) + "\n") diff --git a/setup.cfg b/setup.cfg index a78ae609f1b..eb3a36b9d23 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,21 +1,3 @@ -[options] -install_requires = - aiohappyeyeballs >= 2.5.0 - aiosignal >= 1.4.0 - async-timeout >= 4.0, < 6.0 ; python_version < "3.11" - attrs >= 17.3.0 - frozenlist >= 1.1.1 - multidict >=4.5, < 7.0 - propcache >= 0.2.0 - yarl >= 1.17.0, < 2.0 - -[options.extras_require] -speedups = - aiodns >= 3.3.0 - Brotli; platform_python_implementation == 'CPython' - brotlicffi; platform_python_implementation != 'CPython' - backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" - [pep8] max-line-length=79 From 88a57aee99dcac38700d3d17eeadf333508116b4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Oct 2025 10:49:37 +0000 
Subject: [PATCH 04/93] Bump virtualenv from 20.35.3 to 20.35.4 (#11739) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.35.3 to 20.35.4.
Release notes

Sourced from virtualenv's releases.

20.35.4

What's Changed

New Contributors

Full Changelog: https://github.com/pypa/virtualenv/compare/20.35.3...20.35.4

Changelog

Sourced from virtualenv's changelog.

v20.35.4 (2025-10-28)

Bugfixes - 20.35.4

- Fix race condition in ``_virtualenv.py`` when file is
overwritten during import, preventing ``NameError`` when
``_DISTUTILS_PATCH`` is accessed - by :user:`gracetyy`. (:issue:`2969`)
- Upgrade embedded wheels:
  • pip to 25.3 from 25.2 (:issue:2989)
Commits
  • 0381534 release 20.35.4
  • 25207be Merge pull request #2989 from gaborbernat/bump-pip
  • d593841 Merge pull request #2984 from pypa/pre-commit-ci-update-config
  • f742f8d [pre-commit.ci] pre-commit autoupdate
  • 0a644b0 Merge pull request #2982 from gracetyy/fix/distutils_patch
  • 2b125eb refactor: fix last commit according to review feedback
  • 71d5e09 [pre-commit.ci] auto fixes from pre-commit.com hooks
  • fd0a834 refactor: fix last commit according to review feedback
  • c5e51d4 refactor: use pytest tmp_path fixture instead of tempfile.TemporaryDirectory
  • 629f7b8 Merge branch 'main' into fix/distutils_patch
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.35.3&new-version=20.35.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 73197640bfe..0b0147326b5 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -275,7 +275,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.35.3 +virtualenv==20.35.4 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 89cca196547..a9aa6edae19 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -265,7 +265,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.35.3 +virtualenv==20.35.4 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 222601fadb4..e748420422b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,7 +119,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.35.3 +virtualenv==20.35.4 # via pre-commit zlib-ng==1.0.0 # via -r requirements/lint.in From fdd8a61cacf5871231505e4a8d0865789198bcfc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 21 Nov 2025 17:17:56 +0000 Subject: [PATCH 05/93] [PR #11771/72fadb8f backport][3.14] Bump pypy to supported version (#11772) **This is a backport of PR #11771 as merged into master (72fadb8f1a56e8bff0fef23ccf02e2067cd87e41).** None Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 757cfab769f..0e3c9c81c0f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -132,7 +132,7 @@ jobs: - os: windows no-extensions: 'Y' include: - - pyver: pypy-3.10 + - pyver: pypy-3.11 no-extensions: 'Y' os: ubuntu experimental: false From 1a6abb4760debab6b12caa94294dab0690bcd73e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 21 Nov 2025 18:00:53 +0000 Subject: [PATCH 06/93] [PR #11689/a091c738 backport][3.14] Remove unused update-pre-commit ci workflow (#11770) **This is a backport of PR #11689 as merged into master (a091c738132733766ef9d268fe2dd124c46c1975).** ## What do these changes do? update-pre-commit is broken after #5261 due to the incorrect `github.repository_owner` condition. (See ). pre-commit auto update prs are actually created by pre-commit-ci app. This workflow is not needed anyway. Co-authored-by: Rui Xi --- CHANGES/11689.contrib.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 CHANGES/11689.contrib.rst diff --git a/CHANGES/11689.contrib.rst b/CHANGES/11689.contrib.rst new file mode 100644 index 00000000000..1f6404aa4bd --- /dev/null +++ b/CHANGES/11689.contrib.rst @@ -0,0 +1 @@ +Removed unused ``update-pre-commit`` github action workflow -- by :user:`Cycloctane`. From 1e65581f0b66d190d218b05fc71d5e7d39924bd1 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 21 Nov 2025 18:01:12 +0000 Subject: [PATCH 07/93] [PR #11689/a091c738 backport][3.13] Remove unused update-pre-commit ci workflow (#11769) **This is a backport of PR #11689 as merged into master (a091c738132733766ef9d268fe2dd124c46c1975).** ## What do these changes do? update-pre-commit is broken after #5261 due to the incorrect `github.repository_owner` condition. (See ). 
pre-commit auto update prs are actually created by pre-commit-ci app. This workflow is not needed anyway. Co-authored-by: Rui Xi --- CHANGES/11689.contrib.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 CHANGES/11689.contrib.rst diff --git a/CHANGES/11689.contrib.rst b/CHANGES/11689.contrib.rst new file mode 100644 index 00000000000..1f6404aa4bd --- /dev/null +++ b/CHANGES/11689.contrib.rst @@ -0,0 +1 @@ +Removed unused ``update-pre-commit`` github action workflow -- by :user:`Cycloctane`. From 3dd962c56bc5e4fcd5f3236cb33ae1d58bbf1c47 Mon Sep 17 00:00:00 2001 From: gsoldatov <37042288+gsoldatov@users.noreply.github.com> Date: Fri, 28 Nov 2025 20:00:56 +0300 Subject: [PATCH 08/93] Request/Response storage typing (backport to v3.14) (#11775) (cherry picked from commit 30055104aeda525303e1eb93d3fa9e3eee0f6dfc) --- CHANGES/11766.feature.rst | 4 ++ CONTRIBUTORS.txt | 1 + aiohttp/client.py | 4 +- aiohttp/helpers.py | 27 +++++++--- aiohttp/web.py | 4 +- aiohttp/web_app.py | 2 +- aiohttp/web_protocol.py | 2 +- aiohttp/web_request.py | 43 ++++++++++++---- aiohttp/web_response.py | 46 ++++++++++++++--- aiohttp/web_runner.py | 2 +- docs/faq.rst | 11 +++- docs/spelling_wordlist.txt | 6 +++ docs/web_advanced.rst | 17 +++++-- docs/web_reference.rst | 17 +++++++ tests/test_web_exceptions.py | 1 + tests/test_web_request.py | 91 +++++++++++++++++++++++++++++++-- tests/test_web_response.py | 98 ++++++++++++++++++++++++++++++++++-- 17 files changed, 334 insertions(+), 42 deletions(-) create mode 100644 CHANGES/11766.feature.rst diff --git a/CHANGES/11766.feature.rst b/CHANGES/11766.feature.rst new file mode 100644 index 00000000000..de57ca44543 --- /dev/null +++ b/CHANGES/11766.feature.rst @@ -0,0 +1,4 @@ +Added ``RequestKey`` and ``ResponseKey`` classes, +which enable static type checking for request & response +context storages in the same way that ``AppKey`` does for ``Application`` +-- by :user:`gsoldatov`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 290be0205f1..e54cd6e8cc8 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -143,6 +143,7 @@ Gennady Andreyev Georges Dubus Greg Holt Gregory Haynes +Grigoriy Soldatov Gus Goulart Gustavo Carneiro Günther Jena diff --git a/aiohttp/client.py b/aiohttp/client.py index 8d2c3d67921..7b19ee154f6 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -417,7 +417,7 @@ def __init__( def __init_subclass__(cls: type["ClientSession"]) -> None: warnings.warn( - f"Inheritance class {cls.__name__} from ClientSession " "is discouraged", + f"Inheritance class {cls.__name__} from ClientSession is discouraged", DeprecationWarning, stacklevel=2, ) @@ -427,7 +427,7 @@ def __init_subclass__(cls: type["ClientSession"]) -> None: def __setattr__(self, name: str, val: Any) -> None: if name not in self.ATTRS: warnings.warn( - f"Setting custom ClientSession.{name} attribute " "is discouraged", + f"Setting custom ClientSession.{name} attribute is discouraged", DeprecationWarning, stacklevel=2, ) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 25d639028b0..a399c1c20ef 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -812,8 +812,11 @@ def set_exception( @functools.total_ordering -class AppKey(Generic[_T]): - """Keys for static typing support in Application.""" +class BaseKey(Generic[_T]): + """Base for concrete context storage key classes. + + Each storage is provided with its own sub-class for the sake of some additional type safety. + """ __slots__ = ("_name", "_t", "__orig_class__") @@ -835,9 +838,9 @@ def __init__(self, name: str, t: type[_T] | None = None): self._t = t def __lt__(self, other: object) -> bool: - if isinstance(other, AppKey): + if isinstance(other, BaseKey): return self._name < other._name - return True # Order AppKey above other types. + return True # Order BaseKey above other types. 
def __repr__(self) -> str: t = self._t @@ -855,7 +858,19 @@ def __repr__(self) -> str: t_repr = f"{t.__module__}.{t.__qualname__}" else: t_repr = repr(t) - return f"" + return f"<{self.__class__.__name__}({self._name}, type={t_repr})>" + + +class AppKey(BaseKey[_T]): + """Keys for static typing support in Application.""" + + +class RequestKey(BaseKey[_T]): + """Keys for static typing support in Request.""" + + +class ResponseKey(BaseKey[_T]): + """Keys for static typing support in Response.""" class ChainMapProxy(Mapping[str | AppKey[Any], Any]): @@ -866,7 +881,7 @@ def __init__(self, maps: Iterable[Mapping[str | AppKey[Any], Any]]) -> None: def __init_subclass__(cls) -> None: raise TypeError( - f"Inheritance class {cls.__name__} from ChainMapProxy " "is forbidden" + f"Inheritance class {cls.__name__} from ChainMapProxy is forbidden" ) @overload # type: ignore[override] diff --git a/aiohttp/web.py b/aiohttp/web.py index 241207ed709..c1ab12e84ed 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any, cast from .abc import AbstractAccessLogger -from .helpers import AppKey as AppKey +from .helpers import AppKey, RequestKey, ResponseKey from .log import access_logger from .typedefs import PathLike from .web_app import Application as Application, CleanupError as CleanupError @@ -223,11 +223,13 @@ "BaseRequest", "FileField", "Request", + "RequestKey", # web_response "ContentCoding", "Response", "StreamResponse", "json_response", + "ResponseKey", # web_routedef "AbstractRouteDef", "RouteDef", diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 432ee100804..1a9489ad500 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -166,7 +166,7 @@ def __init__( def __init_subclass__(cls: type["Application"]) -> None: warnings.warn( - f"Inheritance class {cls.__name__} from web.Application " "is discouraged", + f"Inheritance class {cls.__name__} from web.Application is discouraged", DeprecationWarning, stacklevel=3, ) 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 44f5055507b..c4c61ccc82d 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -690,7 +690,7 @@ async def finish_response( self.log_exception("Missing return statement on request handler") else: self.log_exception( - "Web-handler should return a response instance, " f"got {resp!r}" + f"Web-handler should return a response instance, got {resp!r}" ) exc = HTTPInternalServerError() resp = Response( diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 644d55631fe..dea5eb34e55 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -10,7 +10,7 @@ from collections.abc import Iterator, Mapping, MutableMapping from re import Pattern from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Final, Optional, cast +from typing import TYPE_CHECKING, Any, Final, Optional, TypeVar, cast, overload from urllib.parse import parse_qsl import attr @@ -34,6 +34,7 @@ ChainMapProxy, ETag, HeadersMixin, + RequestKey, parse_http_date, reify, sentinel, @@ -50,7 +51,7 @@ RawHeaders, StrOrURL, ) -from .web_exceptions import HTTPRequestEntityTooLarge +from .web_exceptions import HTTPRequestEntityTooLarge, NotAppKeyWarning from .web_response import StreamResponse __all__ = ("BaseRequest", "FileField", "Request") @@ -62,6 +63,9 @@ from .web_urldispatcher import UrlMappingMatchInfo +_T = TypeVar("_T") + + @attr.s(auto_attribs=True, frozen=True, slots=True) class FileField: name: str @@ -98,8 +102,7 @@ class FileField: ############################################################ -class BaseRequest(MutableMapping[str, Any], HeadersMixin): - +class BaseRequest(MutableMapping[str | RequestKey[Any], Any], HeadersMixin): POST_METHODS = { hdrs.METH_PATCH, hdrs.METH_POST, @@ -131,6 +134,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): ) _post: MultiDictProxy[str | bytes | FileField] | None = None _read_bytes: bytes | None = None + _seen_str_keys: set[str] = set() 
def __init__( self, @@ -142,7 +146,7 @@ def __init__( loop: asyncio.AbstractEventLoop, *, client_max_size: int = 1024**2, - state: dict[str, Any] | None = None, + state: dict[RequestKey[Any] | str, Any] | None = None, scheme: str | None = None, host: str | None = None, remote: str | None = None, @@ -286,19 +290,40 @@ def loop(self) -> asyncio.AbstractEventLoop: # MutableMapping API - def __getitem__(self, key: str) -> Any: + @overload # type: ignore[override] + def __getitem__(self, key: RequestKey[_T]) -> _T: ... + + @overload + def __getitem__(self, key: str) -> Any: ... + + def __getitem__(self, key: str | RequestKey[_T]) -> Any: return self._state[key] - def __setitem__(self, key: str, value: Any) -> None: + @overload # type: ignore[override] + def __setitem__(self, key: RequestKey[_T], value: _T) -> None: ... + + @overload + def __setitem__(self, key: str, value: Any) -> None: ... + + def __setitem__(self, key: str | RequestKey[_T], value: Any) -> None: + if not isinstance(key, RequestKey) and key not in BaseRequest._seen_str_keys: + BaseRequest._seen_str_keys.add(key) + warnings.warn( + "It is recommended to use web.RequestKey instances for keys.\n" + + "https://docs.aiohttp.org/en/stable/web_advanced.html" + + "#request-s-storage", + category=NotAppKeyWarning, + stacklevel=2, + ) self._state[key] = value - def __delitem__(self, key: str) -> None: + def __delitem__(self, key: str | RequestKey[_T]) -> None: del self._state[key] def __len__(self) -> int: return len(self._state) - def __iter__(self) -> Iterator[str]: + def __iter__(self) -> Iterator[str | RequestKey[Any]]: return iter(self._state) ######## diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 03404029cf3..4bf6afc32a3 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -10,7 +10,7 @@ from concurrent.futures import Executor from http import HTTPStatus from http.cookies import SimpleCookie -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import 
TYPE_CHECKING, Any, Optional, TypeVar, cast, overload from multidict import CIMultiDict, istr @@ -22,6 +22,7 @@ QUOTED_ETAG_RE, ETag, HeadersMixin, + ResponseKey, must_be_empty_body, parse_http_date, rfc822_formatted_time, @@ -47,6 +48,9 @@ BaseClass = collections.abc.MutableMapping +_T = TypeVar("_T") + + # TODO(py311): Convert to StrEnum for wider use class ContentCoding(enum.Enum): # The content codings that we have support for. @@ -65,7 +69,7 @@ class ContentCoding(enum.Enum): ############################################################ -class StreamResponse(BaseClass, HeadersMixin): +class StreamResponse(MutableMapping[str | ResponseKey[Any], Any], HeadersMixin): _body: None | bytes | bytearray | Payload _length_check = True @@ -82,6 +86,7 @@ class StreamResponse(BaseClass, HeadersMixin): _body_length = 0 _cookies: SimpleCookie | None = None _send_headers_immediately = True + _seen_str_keys: set[str] = set() def __init__( self, @@ -98,7 +103,7 @@ def __init__( the headers when creating a new response object. It is not intended to be used by external code. """ - self._state: dict[str, Any] = {} + self._state: dict[str | ResponseKey[Any], Any] = {} if _real_headers is not None: self._headers = _real_headers @@ -587,19 +592,46 @@ def __repr__(self) -> str: info = "not prepared" return f"<{self.__class__.__name__} {self.reason} {info}>" - def __getitem__(self, key: str) -> Any: + @overload # type: ignore[override] + def __getitem__(self, key: ResponseKey[_T]) -> _T: ... + + @overload + def __getitem__(self, key: str) -> Any: ... + + def __getitem__(self, key: str | ResponseKey[_T]) -> Any: return self._state[key] - def __setitem__(self, key: str, value: Any) -> None: + @overload # type: ignore[override] + def __setitem__(self, key: ResponseKey[_T], value: _T) -> None: ... + + @overload + def __setitem__(self, key: str, value: Any) -> None: ... 
+ + def __setitem__(self, key: str | ResponseKey[_T], value: Any) -> None: + if ( + not isinstance(key, ResponseKey) + and key not in StreamResponse._seen_str_keys + ): + # Import here to break circular dependency + from .web_exceptions import NotAppKeyWarning + + StreamResponse._seen_str_keys.add(key) + warnings.warn( + "It is recommended to use web.ResponseKey instances for keys.\n" + + "https://docs.aiohttp.org/en/stable/web_advanced.html" + + "#response-s-storage", + category=NotAppKeyWarning, + stacklevel=2, + ) self._state[key] = value - def __delitem__(self, key: str) -> None: + def __delitem__(self, key: str | ResponseKey[_T]) -> None: del self._state[key] def __len__(self) -> int: return len(self._state) - def __iter__(self) -> Iterator[str]: + def __iter__(self) -> Iterator[str | ResponseKey[Any]]: return iter(self._state) def __hash__(self) -> int: diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index c66c9359e27..6ea43c6237c 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -374,7 +374,7 @@ def __init__( super().__init__(handle_signals=handle_signals, **kwargs) if not isinstance(app, Application): raise TypeError( - "The first argument should be web.Application " f"instance, got {app!r}" + f"The first argument should be web.Application instance, got {app!r}" ) self._app = app diff --git a/docs/faq.rst b/docs/faq.rst index 30803da3576..b3d844c3636 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -87,8 +87,15 @@ support the :class:`dict` interface. Therefore, data may be stored inside a request object. :: - async def handler(request): - request['unique_key'] = data + request_id_key = web.RequestKey("request_id_key", str) + + @web.middleware + async def request_id_middleware(request, handler): + request[request_id_key] = "some_request_id" + return await handler(request) + + async def handler(request): + request_id = request[request_id_key] See https://github.com/aio-libs/aiohttp_session code for an example. 
The ``aiohttp_session.get_session(request)`` method uses ``SESSION_KEY`` diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 0912c312f6d..63626cb35e2 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -6,6 +6,7 @@ aiohttp aiohttpdemo aiohttp’s aiopg +al alives api api’s @@ -119,6 +120,7 @@ env environ eof epoll +et etag ETag expirations @@ -166,6 +168,7 @@ iterable iterables javascript Jinja +jitter json keepalive keepalived @@ -293,6 +296,7 @@ runtime runtimes sa Satisfiable +scalability schemas sendfile serializable @@ -305,6 +309,7 @@ ssl SSLContext startup stateful +storages subapplication subclassed subclasses @@ -350,6 +355,7 @@ unicode unittest Unittest unix +unobvious unsets unstripped untyped diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 0912327df6e..35286f8da6e 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -447,10 +447,13 @@ Request's storage ^^^^^^^^^^^^^^^^^ Variables that are only needed for the lifetime of a :class:`Request`, can be -stored in a :class:`Request`:: +stored in a :class:`Request`. Similarly to :class:`Application`, :class:`RequestKey` +instances or strings can be used as keys:: + + my_private_key = web.RequestKey("my_private_key", str) async def handler(request): - request['my_private_key'] = "data" + request[my_private_key] = "data" ... This is mostly useful for :ref:`aiohttp-web-middlewares` and @@ -465,9 +468,11 @@ also support :class:`collections.abc.MutableMapping` interface. 
This is useful when you want to share data with signals and middlewares once all the work in the handler is done:: + my_metric_key = web.ResponseKey("my_metric_key", int) + async def handler(request): [ do all the work ] - response['my_metric'] = 123 + response[my_metric_key] = 123 return response @@ -719,18 +724,20 @@ In contrast, when accessing the stream directly (not recommended in middleware): When working with raw stream data that needs to be shared between middleware and handlers:: + raw_body_key = web.RequestKey("raw_body_key", bytes) + async def stream_parsing_middleware( request: web.Request, handler: Callable[[web.Request], Awaitable[web.StreamResponse]] ) -> web.StreamResponse: # Read stream once and store the data raw_data = await request.content.read() - request['raw_body'] = raw_data + request[raw_body_key] = raw_data return await handler(request) async def handler(request: web.Request) -> web.Response: # Access the stored data instead of reading the stream again - raw_data = request.get('raw_body', b'') + raw_data = request.get(raw_body_key, b'') return web.Response(body=raw_data) Example diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 5ae15478b4f..6e466308533 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -554,6 +554,13 @@ and :ref:`aiohttp-web-signals` handlers. request copy with changed *path*, *method* etc. +.. class:: RequestKey(name, t) + + Keys for use in :class:`Request`. + + See :class:`AppKey` for more details. + + .. _aiohttp-web-response: @@ -1422,6 +1429,15 @@ the handler. :class:`Response` initializer. +.. class:: ResponseKey(name, t) + + Keys for use in :class:`Response`. + + See :class:`AppKey` for more details. + + + + .. _aiohttp-web-app-and-router: Application and Router @@ -1770,6 +1786,7 @@ Application and Router :param t: The type that should be used for the value in the dict (e.g. `str`, `Iterator[int]` etc.) + .. 
class:: Server A protocol factory compatible with diff --git a/tests/test_web_exceptions.py b/tests/test_web_exceptions.py index de294d188ff..40c6bd4e791 100644 --- a/tests/test_web_exceptions.py +++ b/tests/test_web_exceptions.py @@ -238,6 +238,7 @@ def test_HTTPException_retains_cause() -> None: assert "direct cause" in tb +@pytest.mark.filterwarnings(r"ignore:.*web\.RequestKey:UserWarning") async def test_HTTPException_retains_cookie(aiohttp_client: AiohttpClient) -> None: @web.middleware async def middleware(request, handler): diff --git a/tests/test_web_request.py b/tests/test_web_request.py index e706e18dee5..d98fb58170f 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -1,7 +1,8 @@ import asyncio import datetime import socket -from collections.abc import MutableMapping +import sys +from collections.abc import Iterator, MutableMapping from typing import Any from unittest import mock @@ -13,7 +14,7 @@ from aiohttp.http_parser import RawRequestMessage from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request -from aiohttp.web import BaseRequest, HTTPRequestEntityTooLarge +from aiohttp.web import BaseRequest, HTTPRequestEntityTooLarge, Request, RequestKey from aiohttp.web_request import ETag @@ -467,6 +468,7 @@ def test_match_info() -> None: assert req._match_info is req.match_info +@pytest.mark.filterwarnings(r"ignore:.*web\.RequestKey:UserWarning") def test_request_is_mutable_mapping() -> None: req = make_mocked_request("GET", "/") assert isinstance(req, MutableMapping) @@ -475,6 +477,7 @@ def test_request_is_mutable_mapping() -> None: assert "value" == req["key"] +@pytest.mark.filterwarnings(r"ignore:.*web\.RequestKey:UserWarning") def test_request_delitem() -> None: req = make_mocked_request("GET", "/") req["key"] = "value" @@ -483,6 +486,7 @@ def test_request_delitem() -> None: assert "key" not in req +@pytest.mark.filterwarnings(r"ignore:.*web\.RequestKey:UserWarning") def test_request_len() -> 
None: req = make_mocked_request("GET", "/") assert len(req) == 0 @@ -490,11 +494,91 @@ def test_request_len() -> None: assert len(req) == 1 +@pytest.mark.filterwarnings(r"ignore:.*web\.RequestKey:UserWarning") def test_request_iter() -> None: req = make_mocked_request("GET", "/") req["key"] = "value" req["key2"] = "value2" - assert set(req) == {"key", "key2"} + key3 = RequestKey("key3", str) + req[key3] = "value3" + assert set(req) == {"key", "key2", key3} + + +def test_requestkey() -> None: + req = make_mocked_request("GET", "/") + key = RequestKey("key", str) + req[key] = "value" + assert req[key] == "value" + assert len(req) == 1 + del req[key] + assert len(req) == 0 + + +def test_request_get_requestkey() -> None: + req = make_mocked_request("GET", "/") + key = RequestKey("key", int) + assert req.get(key, "foo") == "foo" + req[key] = 5 + assert req.get(key, "foo") == 5 + + +def test_requestkey_repr_concrete() -> None: + key = RequestKey("key", int) + assert repr(key) in ( + "", # pytest-xdist + "", + ) + key2 = RequestKey("key", Request) + assert repr(key2) in ( + # pytest-xdist: + "", + "", + ) + + +def test_requestkey_repr_nonconcrete() -> None: + key = RequestKey("key", Iterator[int]) + if sys.version_info < (3, 11): + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + else: + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + + +def test_requestkey_repr_annotated() -> None: + key = RequestKey[Iterator[int]]("key") + if sys.version_info < (3, 11): + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + else: + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + + +def test_str_key_warnings() -> None: + # Check if warnings are raised once per str key + req = make_mocked_request("GET", "/") + + with pytest.warns(UserWarning): + req["test_str_key_warnings_key_1"] = "value" + + with pytest.warns(UserWarning): + req["test_str_key_warnings_key_2"] = "value 2" + + req["test_str_key_warnings_key_1"] = "value" def test___repr__() -> None: @@ 
-859,6 +943,7 @@ def test_remote_peername_unix() -> None: assert req.remote == "/path/to/sock" +@pytest.mark.filterwarnings(r"ignore:.*web\.RequestKey:UserWarning") def test_save_state_on_clone() -> None: req = make_mocked_request("GET", "/") req["key"] = "val" diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 70a83143641..011e3ef1a0c 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -4,7 +4,7 @@ import io import json import sys -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Iterator from concurrent.futures import ThreadPoolExecutor from unittest import mock @@ -19,7 +19,14 @@ from aiohttp.multipart import BodyPartReader, MultipartWriter from aiohttp.payload import BytesPayload, StringPayload from aiohttp.test_utils import make_mocked_request -from aiohttp.web import ContentCoding, Response, StreamResponse, json_response +from aiohttp.web import ( + ContentCoding, + Request, + Response, + ResponseKey, + StreamResponse, + json_response, +) def make_request( @@ -103,6 +110,7 @@ def test_stream_response_eq() -> None: assert not resp1 == resp2 +@pytest.mark.filterwarnings(r"ignore:.*web\.ResponseKey:UserWarning") def test_stream_response_is_mutable_mapping() -> None: resp = StreamResponse() assert isinstance(resp, collections.abc.MutableMapping) @@ -111,6 +119,7 @@ def test_stream_response_is_mutable_mapping() -> None: assert "value" == resp["key"] +@pytest.mark.filterwarnings(r"ignore:.*web\.ResponseKey:UserWarning") def test_stream_response_delitem() -> None: resp = StreamResponse() resp["key"] = "value" @@ -118,6 +127,7 @@ def test_stream_response_delitem() -> None: assert "key" not in resp +@pytest.mark.filterwarnings(r"ignore:.*web\.ResponseKey:UserWarning") def test_stream_response_len() -> None: resp = StreamResponse() assert len(resp) == 0 @@ -125,11 +135,91 @@ def test_stream_response_len() -> None: assert len(resp) == 1 -def test_request_iter() -> None: 
+@pytest.mark.filterwarnings(r"ignore:.*web\.ResponseKey:UserWarning") +def test_response_iter() -> None: resp = StreamResponse() resp["key"] = "value" resp["key2"] = "value2" - assert set(resp) == {"key", "key2"} + key3 = ResponseKey("key3", str) + resp[key3] = "value3" + assert set(resp) == {"key", "key2", key3} + + +def test_responsekey() -> None: + resp = StreamResponse() + key = ResponseKey("key", str) + resp[key] = "value" + assert resp[key] == "value" + assert len(resp) == 1 + del resp[key] + assert len(resp) == 0 + + +def test_response_get_responsekey() -> None: + resp = StreamResponse() + key = ResponseKey("key", int) + assert resp.get(key, "foo") == "foo" + resp[key] = 5 + assert resp.get(key, "foo") == 5 + + +def test_responsekey_repr_concrete() -> None: + key = ResponseKey("key", int) + assert repr(key) in ( + "", # pytest-xdist + "", + ) + key2 = ResponseKey("key", Request) + assert repr(key2) in ( + # pytest-xdist: + "", + "", + ) + + +def test_responsekey_repr_nonconcrete() -> None: + key = ResponseKey("key", Iterator[int]) + if sys.version_info < (3, 11): + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + else: + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + + +def test_responsekey_repr_annotated() -> None: + key = ResponseKey[Iterator[int]]("key") + if sys.version_info < (3, 11): + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + else: + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + + +def test_str_key_warnings() -> None: + # Check if warnings are raised once per str key + resp = StreamResponse() + + with pytest.warns(UserWarning): + resp["test_str_key_warnings_key_1"] = "value" + + with pytest.warns(UserWarning): + resp["test_str_key_warnings_key_2"] = "value 2" + + resp["test_str_key_warnings_key_1"] = "value" def test_content_length() -> None: From de9d4906b8dda51fcb8496541c4b9d1dc32cfa5f Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 28 Nov 2025 17:32:27 +0000 Subject: [PATCH 09/93] Bump pypy to 
supported version (#11773) --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 104a80f26f1..51f34c8b908 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -132,7 +132,7 @@ jobs: - os: windows no-extensions: 'Y' include: - - pyver: pypy-3.9 + - pyver: pypy-3.11 no-extensions: 'Y' os: ubuntu experimental: false From 54647544a2d0a48ac1661c8f7cba1a7b2cf23169 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 16:31:37 +0000 Subject: [PATCH 10/93] Bump regex from 2025.10.23 to 2025.11.3 (#11742) Bumps [regex](https://github.com/mrabarnett/mrab-regex) from 2025.10.23 to 2025.11.3.
Changelog

Sourced from regex's changelog.

Version: 2025.11.3

Git issue 594: Support relative PARNO in recursive
subpatterns

Version: 2025.10.23

'setup.py' was missing from the source distribution.

Version: 2025.10.22

Fixed test in main.yml.

Version: 2025.10.21

Moved tests into subfolder.

Version: 2025.10.20

Re-organised files.

Updated to Unicode 17.0.0.

Version: 2025.9.20

Enable free-threading support in cibuildwheel in another
place.

Version: 2025.9.19

Enable free-threading support in cibuildwheel.

Version: 2025.9.18

Git issue 565: Support the free-threaded build of CPython
3.13

Version: 2025.9.1

Git PR 585: Fix AttributeError: 'AnyAll' object has no
attribute '_key'

Version: 2025.8.29

Git issue 584: AttributeError: 'AnyAll' object has no
attribute 'positive'

Version: 2025.7.34

Git issue 575: Issues with ASCII/Unicode modifiers

Version: 2025.7.33

Updated main.yml and pyproject.toml.

... (truncated)

Commits
  • b01d6e7 Git issue 594: Support relative PARNO in recursive subpatterns
  • dd256cd Merge pull request #591 from foosel/ci-tests
  • 1b2ca8c ci: run tests against sdist
  • 609733a ci: only release if the tests are green
  • fa08ff1 ci: make sure to always run tests against full matrix
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=regex&package-manager=pip&previous-version=2025.10.23&new-version=2025.11.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0b0147326b5..48b002db721 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -198,7 +198,7 @@ pyyaml==6.0.3 # via pre-commit re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.10.23 +regex==2025.11.3 # via re-assert requests==2.32.5 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index a9aa6edae19..3ea5a519ae9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -193,7 +193,7 @@ pyyaml==6.0.3 # via pre-commit re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.10.23 +regex==2025.11.3 # via re-assert requests==2.32.5 # via sphinx diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 7dbad4a84aa..f5898e7f951 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -85,7 +85,7 @@ python-on-whales==0.79.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.10.23 +regex==2025.11.3 # via re-assert rich==14.2.0 # via pytest-codspeed diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index b85c0acc12c..495fa71e570 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -120,7 +120,7 @@ python-on-whales==0.79.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.10.23 +regex==2025.11.3 # via re-assert rich==14.2.0 # via pytest-codspeed diff --git a/requirements/test.txt b/requirements/test.txt index 02fe687bca1..4bd208d8139 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -120,7 +120,7 @@ 
python-on-whales==0.79.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.10.23 +regex==2025.11.3 # via re-assert rich==14.2.0 # via pytest-codspeed From 092e29d23e9733c31ef27007d9819ee25878e409 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 17:44:03 +0000 Subject: [PATCH 11/93] Bump pre-commit from 4.3.0 to 4.4.0 (#11755) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 4.3.0 to 4.4.0.
Release notes

Sourced from pre-commit's releases.

pre-commit v4.4.0

Features

Fixes

Changelog

Sourced from pre-commit's changelog.

4.4.0 - 2025-11-08

Features

Fixes

Commits
  • 17cf886 v4.4.0
  • cb63a5c Merge pull request #3535 from br-rhrbacek/fix-cgroups
  • f80801d Fix docker-in-docker detection for cgroups v2
  • 9143fc3 Merge pull request #3577 from pre-commit/language-unsupported
  • 725acc9 rename system and script languages to unsupported / unsupported_script
  • 3815e2e Merge pull request #3576 from pre-commit/fix-stages-config-error
  • aa2961c fix missing context in error for stages
  • 46297f7 Merge pull request #3575 from pre-commit/rm-python3-hooks-repo
  • 95eec75 rm python3_hooks_repo
  • 5e4b354 Merge pull request #3574 from pre-commit/rm-hook-with-spaces-test
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pre-commit&package-manager=pip&previous-version=4.3.0&new-version=4.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 48b002db721..a384e46533b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -141,7 +141,7 @@ pluggy==1.6.0 # via # pytest # pytest-cov -pre-commit==4.3.0 +pre-commit==4.5.0 # via -r requirements/lint.in propcache==0.4.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 3ea5a519ae9..0cace107547 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -138,7 +138,7 @@ pluggy==1.6.0 # via # pytest # pytest-cov -pre-commit==4.3.0 +pre-commit==4.5.0 # via -r requirements/lint.in propcache==0.4.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index e748420422b..04165205f76 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -61,7 +61,7 @@ platformdirs==4.5.0 # via virtualenv pluggy==1.6.0 # via pytest -pre-commit==4.3.0 +pre-commit==4.5.0 # via -r requirements/lint.in pycares==4.11.0 # via aiodns From d4e14f552f04727ff9a6f2d13844c89d1f8e3428 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 18:10:34 +0000 Subject: [PATCH 12/93] Bump pytest from 8.4.2 to 9.0.0 (#11754) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [//]: # (dependabot-start) ⚠️ **Dependabot is rebasing this PR** ⚠️ Rebasing might not happen immediately, so don't worry if this takes some time. Note: if you make any changes to this PR yourself, they will take precedence over the rebase. --- [//]: # (dependabot-end) Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.4.2 to 9.0.0.
Release notes

Sourced from pytest's releases.

9.0.0

pytest 9.0.0 (2025-11-05)

New features

  • #1367: Support for subtests has been added.

    subtests <subtests> are an alternative to parametrization, useful in situations where the parametrization values are not all known at collection time.

    Example:

    def contains_docstring(p: Path) -> bool:
    """Return True if the given Python file contains a
    top-level docstring."""
        ...
    

    def test_py_files_contain_docstring(subtests: pytest.Subtests) -> None: for path in Path.cwd().glob("*.py"): with subtests.test(path=str(path)): assert contains_docstring(path)

    Each assert failure or error is caught by the context manager and reported individually, giving a clear picture of all files that are missing a docstring.

    In addition, unittest.TestCase.subTest is now also supported.

    This feature was originally implemented as a separate plugin in pytest-subtests, but since then has been merged into the core.

    [!NOTE] This feature is experimental and will likely evolve in future releases. By that we mean that we might change how subtests are reported on failure, but the functionality and how to use it are stable.

  • #13743: Added support for native TOML configuration files.

    While pytest, since version 6, supports configuration in pyproject.toml files under [tool.pytest.ini_options], it does so in an "INI compatibility mode", where all configuration values are treated as strings or list of strings. Now, pytest supports the native TOML data model.

    In pyproject.toml, the native TOML configuration is under the [tool.pytest] table.

    # pyproject.toml
    [tool.pytest]
    minversion = "9.0"
    addopts = ["-ra", "-q"]
    testpaths = [
        "tests",
        "integration",
    ]
    

... (truncated)

Commits
  • f4b0fd2 Prepare release version 9.0.0
  • 52d8e68 Merge pull request #13889 from bluetech/regendoc-restore
  • d6d3e4a doc: fixes for regendoc
  • 7cb3974 doc: restore missing "# content of pytest.toml" regendoc commands
  • 5ae9e47 build(deps): Bump django in /testing/plugins_integration (#13881)
  • adb3658 Merge pull request #13864 from bluetech/config-cleanups-2
  • a28c08e Merge pull request #13875 from bluetech/ci-tweaks
  • a250954 ci: split publish-to-pypi and push-tag jobs
  • ebc152f ci: update setup python's from 3.11 or 3.* to 3.13
  • dfd796f ci: move running update-plugin-list script to tox
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=8.4.2&new-version=9.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a384e46533b..770eed21f1b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -168,7 +168,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==8.4.2 +pytest==9.0.1 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 0cace107547..6f273ab8807 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -163,7 +163,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==8.4.2 +pytest==9.0.1 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 04165205f76..f2d4e420a8e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -75,7 +75,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==8.4.2 +pytest==9.0.1 # via # -r requirements/lint.in # pytest-codspeed diff --git a/requirements/test-common.txt b/requirements/test-common.txt index f5898e7f951..3a9830108f0 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -64,7 +64,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==8.4.2 +pytest==9.0.1 # via # -r requirements/test-common.in # pytest-codspeed diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 495fa71e570..5f0935a8067 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -99,7 +99,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==8.4.2 +pytest==9.0.1 # via # -r requirements/test-common.in # pytest-codspeed diff --git a/requirements/test.txt b/requirements/test.txt 
index 4bd208d8139..081a060c2f0 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -99,7 +99,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==8.4.2 +pytest==9.0.1 # via # -r requirements/test-common.in # pytest-codspeed From 3c15317c5087b10206f0899625ef0b8053834ce9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 21:25:44 +0000 Subject: [PATCH 13/93] Bump actions/upload-artifact from 4 to 5 (#11723) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4 to 5.
Release notes

Sourced from actions/upload-artifact's releases.

v5.0.0

What's Changed

BREAKING CHANGE: this update supports Node v24.x. This is not a breaking change per-se but we're treating it as such.

New Contributors

Full Changelog: https://github.com/actions/upload-artifact/compare/v4...v5.0.0

v4.6.2

What's Changed

New Contributors

Full Changelog: https://github.com/actions/upload-artifact/compare/v4...v4.6.2

v4.6.1

What's Changed

Full Changelog: https://github.com/actions/upload-artifact/compare/v4...v4.6.1

v4.6.0

What's Changed

Full Changelog: https://github.com/actions/upload-artifact/compare/v4...v4.6.0

v4.5.0

What's Changed

New Contributors

... (truncated)

Commits
  • 330a01c Merge pull request #734 from actions/danwkennedy/prepare-5.0.0
  • 03f2824 Update github.dep.yml
  • 905a1ec Prepare v5.0.0
  • 2d9f9cd Merge pull request #725 from patrikpolyak/patch-1
  • 9687587 Merge branch 'main' into patch-1
  • 2848b2c Merge pull request #727 from danwkennedy/patch-1
  • 9b51177 Spell out the first use of GHES
  • cd231ca Update GHES guidance to include reference to Node 20 version
  • de65e23 Merge pull request #712 from actions/nebuk89-patch-1
  • 8747d8c Update README.md
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/upload-artifact&package-manager=github_actions&previous-version=4&new-version=5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 0e3c9c81c0f..9b54cdd0b1a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -111,7 +111,7 @@ jobs: run: | make generate-llhttp - name: Upload llhttp generated files - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: llhttp path: vendor/llhttp/build @@ -339,7 +339,7 @@ jobs: run: | python -m build --sdist - name: Upload artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: dist-sdist path: dist @@ -431,7 +431,7 @@ jobs: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - name: Upload wheels - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: >- dist-${{ matrix.os }}-${{ matrix.musl }}-${{ From b7244d76e7cffc9ece30485b16048985654be0c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 22:09:05 +0000 Subject: [PATCH 14/93] Bump pydantic from 2.12.3 to 2.12.4 (#11747) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.12.3 to 2.12.4.
Release notes

Sourced from pydantic's releases.

v2.12.4 2025-11-05

v2.12.4 (2025-11-05)

This is the fourth 2.12 patch release, fixing more regressions, and reverting a change in the build() method of the AnyUrl and Dsn types.

This patch release also fixes an issue with the serialization of IP address types, when serialize_as_any is used. The next patch release will try to address the remaining issues with serialize as any behavior by introducing a new polymorphic serialization feature, that should be used in most cases in place of serialize as any.

Full Changelog: https://github.com/pydantic/pydantic/compare/v2.12.3...v2.12.4

Changelog

Sourced from pydantic's changelog.

v2.12.4 (2025-11-05)

GitHub release

This is the fourth 2.12 patch release, fixing more regressions, and reverting a change in the build() method of the AnyUrl and Dsn types.

This patch release also fixes an issue with the serialization of IP address types, when serialize_as_any is used. The next patch release will try to address the remaining issues with serialize as any behavior by introducing a new polymorphic serialization feature, that should be used in most cases in place of serialize as any.

Commits
  • 5c842df Prepare release v2.12.4
  • c678a71 Bump pydantic-core to v2.41.5
  • a7cd292 Bump cloudpickle to v3.1.2
  • 21f6278 Bump actions/setup-node from 5 to 6
  • 8d6be8f Bump astral-sh/setup-uv from 6 to 7
  • 17865ea Bump actions/upload-artifact from 4 to 5
  • 90ad0af Bump actions/download-artifact from 5 to 6
  • 18e6672 Drop testing under PyPy 3.9
  • 650215b Document workaround for MongoDsn default port
  • e326790 Fix example of for bytes_invalid_encoding validation error
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.12.3&new-version=2.12.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test-common.txt | 4 ++-- requirements/test-ft.txt | 4 ++-- requirements/test.txt | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 770eed21f1b..b7c406f3b04 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -153,9 +153,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.3 +pydantic==2.12.5 # via python-on-whales -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pyenchant==3.3.0 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index 6f273ab8807..c43b037ba96 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -150,9 +150,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.3 +pydantic==2.12.5 # via python-on-whales -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index f2d4e420a8e..d08c7ab3c88 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -67,9 +67,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.3 +pydantic==2.12.5 # via python-on-whales -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 3a9830108f0..426f3a78ed0 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -56,9 +56,9 @@ proxy-py==2.4.10 # via -r requirements/test-common.in pycparser==2.23 # via cffi -pydantic==2.12.3 +pydantic==2.12.5 # via python-on-whales -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test-ft.txt 
b/requirements/test-ft.txt index 5f0935a8067..26ac06eccd6 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -91,9 +91,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.3 +pydantic==2.12.5 # via python-on-whales -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test.txt b/requirements/test.txt index 081a060c2f0..bb4d9051118 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -91,9 +91,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.3 +pydantic==2.12.5 # via python-on-whales -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via From 4f7807928d4ddd936a8ce70fc1f51974b3923899 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 22:33:19 +0000 Subject: [PATCH 15/93] Bump cython from 3.1.6 to 3.2.0 (#11748) Bumps [cython](https://github.com/cython/cython) from 3.1.6 to 3.2.0.
Release notes

Sourced from cython's releases.

3.2.0

No release notes provided.

3.2.0b3

No release notes provided.

3.2.0b2

No release notes provided.

3.2.0b1-3

No release notes provided.

3.2.0b1

Not released due to package metadata problems.

Changelog

Sourced from cython's changelog.

3.2.0 (2025-11-05)

(Complete changelog for the 3.2.0 release, including pre-releases.)

Features added

  • Builtin exception types are now inferred. (Github issue :issue:6908)

  • The list of known, inferred and optimised Python builtins was updated. range is now considered a type. ascii, bin, format, hex, oct were added as functions. (Github issue :issue:6931)

  • The f-string syntax was extended according to PEP-701. (Github issue :issue:5452)

  • t-strings are implemented according to PEP-750. The implementation backports the template classes but prefers existing backports if installed separately. (Github issue :issue:6811)

  • Unknown return type annotations with -> are no longer rejected but produce warnings. This allows better integration with Python type hints that are not always usable for Cython. -> None is also allowed now. Patch by jpe. (Github issue :issue:6946)

  • The runtime Python dispatch for fused functions is substantially faster. (Github issues :issue:1385, :issue:6996)

  • Freelists (via cdef class decorator and for internally used types such as async) are now also used in the Limited API and with extension type specs enabled. (Github issue :issue:7151)

  • Module imports now quickly check for an already imported module to speed up reimports. Patch by Lysandros Nikolaou. (Github issue :issue:7035)

  • Type checks on PEP-604 union types (int | None) are optimised into separate checks. (Github issue :issue:6935)

  • Assignments to the PEP-604 union type float | None allow any suitable Python number as input and convert it to a Python float automatically. (Github issue :issue:5750)

  • Item type inference was improved for looping over literals. (Github issue :issue:6912)

  • Looping over literal sequences and strings now uses efficient C array looping if possible. (Github issue :issue:6926)

... (truncated)

Commits
  • e6533f8 Prepare release of 3.2.0.
  • 3748286 Update changelog.
  • 404f226 Docs: Add a note that the Limited API is currently only supported in CPython.
  • f216cfb Fix path building in cygdb script for Windows usage (GH-7285)
  • ae78273 Add PyPy 3.11 to test matrix (#7284)
  • b3528ac Bump the github-actions group with 2 updates (#7278)
  • 1e21023 Expand isolated limited API tests to cover more versions (#7280)
  • e2ef20c Move memoryview acquisition counting out of generic atomics code (GH-7277)
  • 15f5864 Docs: Fix C++ wrapping example in user guide (GH-7195)
  • 730a05b Prepare release of 3.2.0b3.
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.1.6&new-version=3.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b7c406f3b04..3deaae4448f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -61,7 +61,7 @@ coverage==7.11.0 # pytest-cov cryptography==46.0.3 # via trustme -cython==3.1.6 +cython==3.2.2 # via -r requirements/cython.in distlib==0.4.0 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 05f39110b33..6d9424a8755 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.1.6 +cython==3.2.2 # via -r requirements/cython.in multidict==6.7.0 # via -r requirements/multidict.in From 81160396facbbcdedb84b0e9e6459225d7af755e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 00:36:44 +0000 Subject: [PATCH 16/93] Bump brotli from 1.1.0 to 1.2.0 (#11749) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [brotli](https://github.com/google/brotli) from 1.1.0 to 1.2.0.
Release notes

Sourced from brotli's releases.

v1.2.0

SECURITY

  • python: added Decompressor::can_accept_more_data method and optional output_buffer_limit argument Decompressor::process; that allows mitigation of unexpectedly large output; reported by Charles Chan (https://github.com/charleswhchan)

Added

  • decoder / encoder: added static initialization to reduce binary size
  • python: allow limiting decoder output (see SECURITY section)
  • CLI: brcat alias; allow decoding concatenated brotli streams
  • kt: pure Kotlin decoder
  • cgo: support "raw" dictionaries
  • build: Bazel modules

Removed

  • java: dropped finalize() for native entities

Fixed

  • java: in compress pass correct length to native encoder

Improved

  • build: install man pages
  • build: updated / fixed / refined Bazel buildfiles
  • encoder: faster encoding
  • cgo: link via pkg-config
  • python: modernize extension / allow multi-phase module initialization

Changed

  • decoder / encoder: static tables use "small" model (allows 2GiB+ binaries)

v1.2.0 RC2

What's Changed (compared to RC1)

v1.2.0 RC1

IMPORTANT: though this is a pre-release for v1.2.0, it is expected that some changes will be added before release; most notably concerning build files: patches applied by Alpine, Debian, Conan, VCPKG will be partially/fully integrated.

SECURITY

  • python: added Decompressor::can_accept_more_data method and optional output_buffer_limit argument Decompressor::process; that allows mitigation of unexpectedly large output; reported by Charles Chan (https://github.com/charleswhchan)

Added

  • decoder / encoder: added static initialization to reduce binary size
  • python: allow limiting decoder output (see SECURITY section)

... (truncated)

Changelog

Sourced from brotli's changelog.

[1.2.0] - 2025-10-27

SECURITY

  • python: added Decompressor::can_accept_more_data method and optional output_buffer_limit argument Decompressor::process; that allows mitigation of unexpectedly large output; reported by Charles Chan (https://github.com/charleswhchan)

Added

  • decoder / encoder: added static initialization to reduce binary size
  • python: allow limiting decoder output (see SECURITY section)
  • CLI: brcat alias; allow decoding concatenated brotli streams
  • kt: pure Kotlin decoder
  • cgo: support "raw" dictionaries
  • build: Bazel modules

Removed

  • java: dropped finalize() for native entities

Fixed

  • java: in compress pass correct length to native encoder

Improved

  • build: install man pages
  • build: updated / fixed / refined Bazel buildfiles
  • encoder: faster encoding
  • cgo: link via pkg-config
  • python: modernize extension / allow multi-phase module initialization

Changed

  • decoder / encoder: static tables use "small" model (allows 2GiB+ binaries)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=brotli&package-manager=pip&previous-version=1.1.0&new-version=1.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index ec6026187e3..56257e289eb 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -16,7 +16,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via pycares diff --git a/requirements/base.txt b/requirements/base.txt index 0aedbd80e1d..fe01fe38c67 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -16,7 +16,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via pycares diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3deaae4448f..4d48b4f4770 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -34,7 +34,7 @@ blockbuster==1.5.25 # via # -r requirements/lint.in # -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.3.0 # via pip-tools diff --git a/requirements/dev.txt 
b/requirements/dev.txt index c43b037ba96..96d0802902e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -34,7 +34,7 @@ blockbuster==1.5.25 # via # -r requirements/lint.in # -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.3.0 # via pip-tools diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 4481c13811c..42ef4278320 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -16,7 +16,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via pycares diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 26ac06eccd6..817dace2e12 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -20,7 +20,7 @@ backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_v # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index bb4d9051118..0b2a1482440 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -20,7 +20,7 @@ backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_v # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r 
requirements/runtime-deps.in cffi==2.0.0 # via From 351989ce8fa435354450a5613806e6ee3ad33d6a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 01:10:06 +0000 Subject: [PATCH 17/93] [PR #11749/81160396 backport][3.13] Bump brotli from 1.1.0 to 1.2.0 (#11779) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #11749 as merged into 3.14 (81160396facbbcdedb84b0e9e6459225d7af755e).** Bumps [brotli](https://github.com/google/brotli) from 1.1.0 to 1.2.0.
Release notes

Sourced from brotli's releases.

v1.2.0

SECURITY

  • python: added Decompressor::can_accept_more_data method and optional output_buffer_limit argument Decompressor::process; that allows mitigation of unexpectedly large output; reported by Charles Chan (https://github.com/charleswhchan)

Added

  • decoder / encoder: added static initialization to reduce binary size
  • python: allow limiting decoder output (see SECURITY section)
  • CLI: brcat alias; allow decoding concatenated brotli streams
  • kt: pure Kotlin decoder
  • cgo: support "raw" dictionaries
  • build: Bazel modules

Removed

  • java: dropped finalize() for native entities

Fixed

  • java: in compress pass correct length to native encoder

Improved

  • build: install man pages
  • build: updated / fixed / refined Bazel buildfiles
  • encoder: faster encoding
  • cgo: link via pkg-config
  • python: modernize extension / allow multi-phase module initialization

Changed

  • decoder / encoder: static tables use "small" model (allows 2GiB+ binaries)

v1.2.0 RC2

What's Changed (compared to RC1)

v1.2.0 RC1

IMPORTANT: though this is a pre-release for v1.2.0, it is expected that some changes will be added before release; most notably concerning build files: patches applied by Alpine, Debian, Conan, VCPKG will be partially/fully integrated.

SECURITY

  • python: added Decompressor::can_accept_more_data method and optional output_buffer_limit argument Decompressor::process; that allows mitigation of unexpectedly large output; reported by Charles Chan (https://github.com/charleswhchan)

Added

  • decoder / encoder: added static initialization to reduce binary size
  • python: allow limiting decoder output (see SECURITY section)

... (truncated)

Changelog

Sourced from brotli's changelog.

[1.2.0] - 2025-10-27

SECURITY

  • python: added Decompressor::can_accept_more_data method and optional output_buffer_limit argument Decompressor::process; that allows mitigation of unexpectedly large output; reported by Charles Chan (https://github.com/charleswhchan)

Added

  • decoder / encoder: added static initialization to reduce binary size
  • python: allow limiting decoder output (see SECURITY section)
  • CLI: brcat alias; allow decoding concatenated brotli streams
  • kt: pure Kotlin decoder
  • cgo: support "raw" dictionaries
  • build: Bazel modules

Removed

  • java: dropped finalize() for native entities

Fixed

  • java: in compress pass correct length to native encoder

Improved

  • build: install man pages
  • build: updated / fixed / refined Bazel buildfiles
  • encoder: faster encoding
  • cgo: link via pkg-config
  • python: modernize extension / allow multi-phase module initialization

Changed

  • decoder / encoder: static tables use "small" model (allows 2GiB+ binaries)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=brotli&package-manager=pip&previous-version=1.1.0&new-version=1.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index 8a8d2a15499..cce3b614e26 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.3.0 # via -r requirements/runtime-deps.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via pycares diff --git a/requirements/base.txt b/requirements/base.txt index fa734658aba..f1f72f3ed05 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.3.0 # via -r requirements/runtime-deps.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via pycares diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ec3806c6977..533091d75a3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -30,7 +30,7 @@ blockbuster==1.5.25 # via # -r requirements/lint.in # -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.3.0 # via pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 63cc8589262..22d3952fccb 100644 --- a/requirements/dev.txt +++ 
b/requirements/dev.txt @@ -30,7 +30,7 @@ blockbuster==1.5.25 # via # -r requirements/lint.in # -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.3.0 # via pip-tools diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 232ae352db1..86d8f58e491 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.3.0 # via -r requirements/runtime-deps.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via pycares diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 04b1dcb86e4..d9960bcbd73 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -18,7 +18,7 @@ attrs==25.3.0 # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index b7c53b0b6d5..e946cb3e7db 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -18,7 +18,7 @@ attrs==25.3.0 # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in -brotli==1.1.0 ; platform_python_implementation == "CPython" +brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in cffi==2.0.0 # via From 96060aedf02228faaf0b6be56eb4bf5e6eb001d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 02:42:38 +0000 Subject: [PATCH 18/93] Bump actions/checkout 
from 5 to 6 (#11768) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
Release notes

Sourced from actions/checkout's releases.

v6.0.0

What's Changed

Full Changelog: https://github.com/actions/checkout/compare/v5.0.0...v6.0.0

v6-beta

What's Changed

Updated persist-credentials to store the credentials under $RUNNER_TEMP instead of directly in the local git config.

This requires a minimum Actions Runner version of v2.329.0 to access the persisted credentials for Docker container action scenarios.

v5.0.1

What's Changed

Full Changelog: https://github.com/actions/checkout/compare/v5...v5.0.1

Changelog

Sourced from actions/checkout's changelog.

Changelog

V6.0.0

V5.0.1

V5.0.0

V4.3.1

V4.3.0

v4.2.2

v4.2.1

v4.2.0

v4.1.7

v4.1.6

v4.1.5

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/checkout&package-manager=github_actions&previous-version=5&new-version=6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 14 +++++++------- .github/workflows/codeql.yml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 9b54cdd0b1a..c88140a590a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -32,7 +32,7 @@ jobs: timeout-minutes: 5 steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: >- @@ -92,7 +92,7 @@ jobs: timeout-minutes: 5 steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: Cache llhttp generated files @@ -145,7 +145,7 @@ jobs: continue-on-error: ${{ matrix.experimental }} steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: Setup Python ${{ matrix.pyver }} @@ -249,7 +249,7 @@ jobs: timeout-minutes: 12 steps: - name: Checkout project - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: Setup Python 3.13.2 @@ -315,7 +315,7 @@ jobs: needs: pre-deploy steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: Setup Python @@ -385,7 +385,7 @@ jobs: musl: musllinux steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: Set up QEMU @@ -456,7 +456,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 with: submodules: true - name: Login diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index ed447b9f29c..797b7591cd8 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -26,7 +26,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Initialize CodeQL uses: github/codeql-action/init@v4 From 
1951c2319e270f7c865b1dab621f99a2e36607c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 11:28:03 +0000 Subject: [PATCH 19/93] Bump certifi from 2025.10.5 to 2025.11.12 (#11788) Bumps [certifi](https://github.com/certifi/python-certifi) from 2025.10.5 to 2025.11.12.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=certifi&package-manager=pip&previous-version=2025.10.5&new-version=2025.11.12)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4d48b4f4770..6bd4f5a2e08 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -38,7 +38,7 @@ brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.3.0 # via pip-tools -certifi==2025.10.5 +certifi==2025.11.12 # via requests cffi==2.0.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 96d0802902e..a57e61b7157 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -38,7 +38,7 @@ brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in build==1.3.0 # via pip-tools -certifi==2025.10.5 +certifi==2025.11.12 # via requests cffi==2.0.0 # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 730accfac89..564a75d3f84 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.17.0 # via sphinx -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index d69dd77eafc..41c5958f162 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -10,7 +10,7 @@ alabaster==1.0.0 # via sphinx babel==2.17.0 # via sphinx -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests From e7420bf01e7834eb3f17b8a985c32b5f6d94a1e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 11:39:54 +0000 Subject: [PATCH 20/93] Bump execnet from 2.1.1 to 2.1.2 (#11789) Bumps 
[execnet](https://github.com/pytest-dev/execnet) from 2.1.1 to 2.1.2.
Changelog

Sourced from execnet's changelog.

2.1.2 (2025-11-11)

  • #376 (https://github.com/pytest-dev/execnet/issues/376): fix artifact building — pin minimal version of hatch.
Commits
  • 9313ece Release 2.1.2
  • 45a4d87 Test on pypy-3.11 instead of pypy-3.8 (#375)
  • 03462f5 Update hatchling version requirement in pyproject.toml (#377)
  • f056180 [pre-commit.ci] pre-commit autoupdate (#378)
  • a9d75e7 [pre-commit.ci] pre-commit autoupdate (#373)
  • 9bfff13 [pre-commit.ci] pre-commit autoupdate (#372)
  • caf108d build(deps): bump actions/download-artifact from 5 to 6 (#371)
  • d3074cd [pre-commit.ci] pre-commit autoupdate (#370)
  • 35aa807 [pre-commit.ci] pre-commit autoupdate (#369)
  • d823f13 build(deps): bump hynek/build-and-inspect-python-package (#368)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=execnet&package-manager=pip&previous-version=2.1.1&new-version=2.1.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 6bd4f5a2e08..a1c0b6c14ad 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -69,7 +69,7 @@ docutils==0.21.2 # via sphinx exceptiongroup==1.3.0 # via pytest -execnet==2.1.1 +execnet==2.1.2 # via pytest-xdist filelock==3.20.0 # via virtualenv diff --git a/requirements/dev.txt b/requirements/dev.txt index a57e61b7157..624bda6295d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -67,7 +67,7 @@ docutils==0.21.2 # via sphinx exceptiongroup==1.3.0 # via pytest -execnet==2.1.1 +execnet==2.1.2 # via pytest-xdist filelock==3.20.0 # via virtualenv diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 426f3a78ed0..059c9d1d538 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -22,7 +22,7 @@ cryptography==46.0.3 # via trustme exceptiongroup==1.3.0 # via pytest -execnet==2.1.1 +execnet==2.1.2 # via pytest-xdist forbiddenfruit==0.1.4 # via blockbuster diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 817dace2e12..9b3c00f39da 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -37,7 +37,7 @@ cryptography==46.0.3 # via trustme exceptiongroup==1.3.0 # via pytest -execnet==2.1.1 +execnet==2.1.2 # via pytest-xdist forbiddenfruit==0.1.4 # via blockbuster diff --git a/requirements/test.txt b/requirements/test.txt index 0b2a1482440..502a7334918 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -37,7 +37,7 @@ cryptography==46.0.3 # via trustme exceptiongroup==1.3.0 # via pytest -execnet==2.1.1 +execnet==2.1.2 # via pytest-xdist 
forbiddenfruit==0.1.4 # via blockbuster From ee2053836219741e7bc263342a42c34176cde3da Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 12:01:23 +0000 Subject: [PATCH 21/93] Bump backports-zstd from 1.0.0 to 1.1.0 (#11790) Bumps [backports-zstd](https://github.com/rogdham/backports.zstd) from 1.0.0 to 1.1.0.
Changelog

Sourced from backports-zstd's changelog.

1.1.0 - 2025-11-23

:rocket: Added

  • Shorten import time by lazy loading the register_shutil function

:bug: Fixes

  • Fix assertion on Python 3.13 when build with DEBUG
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=backports-zstd&package-manager=pip&previous-version=1.0.0&new-version=1.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index 56257e289eb..b63dd5acce6 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/base.txt b/requirements/base.txt index fe01fe38c67..8d8a2f8570b 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a1c0b6c14ad..f44c35f4a8d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,7 +26,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx 
-backports-zstd==1.0.0 ; implementation_name == "cpython" +backports-zstd==1.1.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 624bda6295d..712c148b219 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/lint.txt b/requirements/lint.txt index d08c7ab3c88..d06b3ec6b3d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey -backports-zstd==1.0.0 ; implementation_name == "cpython" +backports-zstd==1.1.0 ; implementation_name == "cpython" # via -r requirements/lint.in blockbuster==1.5.25 # via -r requirements/lint.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 42ef4278320..982687372ef 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 9b3c00f39da..0e68764176f 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -16,7 +16,7 @@ 
async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 502a7334918..d49707b5d94 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in From 1d651d306dbbaa12bd34d96bf18022fdde4a9ef3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 12:17:06 +0000 Subject: [PATCH 22/93] Bump sphinxcontrib-spelling from 8.0.1 to 8.0.2 (#11792) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [sphinxcontrib-spelling](https://github.com/sphinx-contrib/spelling) from 8.0.1 to 8.0.2.
Release notes

Sourced from sphinxcontrib-spelling's releases.

8.0.2

What's Changed

Full Changelog: https://github.com/sphinx-contrib/spelling/compare/8.0.1...8.0.2

Commits
  • c039e7a Merge pull request #240 from dhellmann/fix-issue-234-none-source
  • 92b510f fix: remove pkglint job
  • fe50721 fix: handle TypeError when source is None in Sphinx 8.2
  • 7b84d6a Merge pull request #239 from sphinx-contrib/dependabot/github_actions/actions...
  • 5eb3af9 build(deps): bump actions/checkout from 5 to 6
  • 9dde976 Merge pull request #238 from sphinx-contrib/dependabot/github_actions/actions...
  • 62a8631 build(deps): bump actions/setup-python from 5 to 6
  • f28e446 Merge pull request #237 from dhellmann/add-autodoc
  • 9159319 feat: add automatically generated documentation for key modules
  • 81ea0f0 Merge pull request #236 from dhellmann/python-3.13
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sphinxcontrib-spelling&package-manager=pip&previous-version=8.0.1&new-version=8.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/doc-spelling.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f44c35f4a8d..8276adf72c8 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -231,7 +231,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" +sphinxcontrib-spelling==8.0.2 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 564a75d3f84..5658563eef3 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -57,7 +57,7 @@ sphinxcontrib-qthelp==2.0.0 # via sphinx sphinxcontrib-serializinghtml==2.0.0 # via sphinx -sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" +sphinxcontrib-spelling==8.0.2 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in From 00c5965c9e6de854ad8124e23a0934e2ac72b2ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 12:31:23 +0000 Subject: [PATCH 23/93] Bump pip-tools from 7.5.1 to 7.5.2 (#11793) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pip-tools](https://github.com/jazzband/pip-tools) from 7.5.1 to 7.5.2.
Release notes

Sourced from pip-tools's releases.

v7.5.2

2025-11-11

Bug fixes

  • Fixed pip-compile to handle relative path includes which are not subpaths of the current working directory -- by @​sirosen.

    PRs and issues: #2231, #2260

  • Using --upgrade-package and dynamically building project metadata no longer causes an AttributeError when pip encounters an error during the build -- by @​Epic_Wink and @​tusharsadhwani.

    PRs and issues: #2258

Features

Packaging updates and notes for downstreams

  • pip-tools now supports installation from git archives by providing setuptools-scm with .git_archival.txt data.

    PRs and issues: #2225

Contributor-facing changes

  • The change log entry bot has been explicitly configured to stop requiring news fragments in pull requests having the bot:chronographer:skip label set -- by @​sirosen and @​webknjaz.

    It was also set up to reference our change log authoring document from the GitHub Checks pages. And the reported check name is now set to Change log entry.

    PRs and issues: #2201

  • The CI is now set up to invoke failed tests again with maximum level of detail -- by @​webknjaz.

    The change is aimed at helping troubleshoot failures that might be difficult to reproduce locally.

    PRs and issues: #2254

  • The integration with Codecov has been updated to ensure that reports are uploaded to the service even on failures -- by @​webknjaz.

    GitHub Actions is now configured to also send an explicit notification to Codecov about the completion of previously initiated uploads.

... (truncated)

Changelog

Sourced from pip-tools's changelog.

v7.5.2

2025-11-11

Bug fixes

  • Fixed pip-compile to handle relative path includes which are not subpaths of the current working directory -- by {user}sirosen.

    PRs and issues: {issue}2231, {issue}2260

  • Using --upgrade-package and dynamically building project metadata no longer causes an {exc}AttributeError when pip encounters an error during the build -- by {user}Epic_Wink and {user}tusharsadhwani.

    PRs and issues: {issue}2258

Features

  • Test and declare Python 3.13 support -- by {user}jayaddison (for OpenCulinary).

    PRs and issues: {issue}2251

  • pip-tools is now compatible with pip 25.3 -- by {user}shifqu.

    PRs and issues: {issue}2252, {issue}2253

Packaging updates and notes for downstreams

  • pip-tools now supports installation from git archives by providing setuptools-scm with .git_archival.txt data.

    PRs and issues: {issue}2225

Contributor-facing changes

  • The change log entry bot has been explicitly configured to stop requiring news fragments in pull requests having the bot:chronographer:skip label set -- by {user}sirosen and {user}webknjaz.

    It was also set up to reference our change log authoring document from the GitHub Checks pages. And the reported check name is now set to Change log entry.

    PRs and issues: {issue}2201

  • The CI is now set up to invoke failed tests again with maximum level of detail -- by {user}webknjaz.

... (truncated)

Commits
  • eb9606f Merge pull request #2270 from sirosen/release-7.5.2
  • 2cbb933 Update changelog for version 7.5.2
  • d33539c Merge pull request #2253 from shifqu/fix/remove-opt-pep517
  • 43e1159 Apply suggestions from code review
  • b7e8f9b changelog: add news fragment for towncrier
  • 88f2761 tests: lower required coverage for py38 to 98%
  • 608c47b feat: add deprecation warnings in cli and sync
  • 43952fc tests: ensure tox pipsupported uses pip 25.3
  • bfa96b5 tests: provide minimal_wheels_path as a fixture
  • 10a4b44 fix: remove deprecated options from resolver and install_requirement
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip-tools&package-manager=pip&previous-version=7.5.1&new-version=7.5.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8276adf72c8..727e983ca09 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -131,7 +131,7 @@ packaging==25.0 # sphinx pathspec==0.12.1 # via mypy -pip-tools==7.5.1 +pip-tools==7.5.2 # via -r requirements/dev.in pkgconfig==1.5.5 # via -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 712c148b219..3e0b0697305 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -128,7 +128,7 @@ packaging==25.0 # sphinx pathspec==0.12.1 # via mypy -pip-tools==7.5.1 +pip-tools==7.5.2 # via -r requirements/dev.in pkgconfig==1.5.5 # via -r requirements/test-common.in From 7a1dafc8a7e08a8bfd1de7aff9c25feca1dab9d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 12:47:11 +0000 Subject: [PATCH 24/93] Bump exceptiongroup from 1.3.0 to 1.3.1 (#11791) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [exceptiongroup](https://github.com/agronholm/exceptiongroup) from 1.3.0 to 1.3.1.
Release notes

Sourced from exceptiongroup's releases.

1.3.1

  • Fixed AttributeError: 'TracebackException' object has no attribute 'exceptions' when formatting unpickled TBEs from another Python process which did not apply the exceptiongroup patches (#144)
Changelog

Sourced from exceptiongroup's changelog.

Version history

This library adheres to Semantic Versioning 2.0 <http://semver.org/>_.

1.3.1

  • Fixed AttributeError: 'TracebackException' object has no attribute 'exceptions' when formatting unpickled TBEs from another Python process which did not apply the exceptiongroup patches ([#144](https://github.com/agronholm/exceptiongroup/issues/144) <https://github.com/agronholm/exceptiongroup/issues/144>_)

1.3.0

  • Added **kwargs to function and method signatures as appropriate to match the signatures in the standard library
  • In line with the stdlib typings in typeshed, updated (Base)ExceptionGroup generic types to define defaults for their generic arguments (defaulting to BaseExceptionGroup[BaseException] and ExceptionGroup[Exception]) (PR by @​mikenerone)
  • Changed BaseExceptionGroup.__init__() to directly call BaseException.__init__() instead of the superclass __init__() in order to emulate the CPython behavior (broken or not) (PR by @​cfbolz)
  • Changed the exceptions attribute to always return the same tuple of exceptions, created from the original exceptions sequence passed to BaseExceptionGroup to match CPython behavior ([#143](https://github.com/agronholm/exceptiongroup/issues/143) <https://github.com/agronholm/exceptiongroup/issues/143>_)

1.2.2

  • Removed an assert in exceptiongroup._formatting that caused compatibility issues with Sentry ([#123](https://github.com/agronholm/exceptiongroup/issues/123) <https://github.com/agronholm/exceptiongroup/issues/123>_)

1.2.1

  • Updated the copying of __notes__ to match CPython behavior (PR by CF Bolz-Tereick)
  • Corrected the type annotation of the exception handler callback to accept a BaseExceptionGroup instead of BaseException
  • Fixed type errors on Python < 3.10 and the type annotation of suppress() (PR by John Litborn)

1.2.0

  • Added special monkeypatching if Apport <https://github.com/canonical/apport>_ has overridden sys.excepthook so it will format exception groups correctly (PR by John Litborn)
  • Added a backport of contextlib.suppress() from Python 3.12.1 which also handles suppressing exceptions inside exception groups
  • Fixed bare raise in a handler reraising the original naked exception rather than an exception group which is what is raised when you do a raise in an except*

... (truncated)

Commits
  • ddddb6f Added the release version
  • 49c5e60 Fixed AttributeError when formatting unpickled TBEs from an unpatched process
  • 1be517f [pre-commit.ci] pre-commit autoupdate (#152)
  • af0ea2f [pre-commit.ci] pre-commit autoupdate (#149)
  • 7c980a8 Removed pin on pyright version
  • ef85336 Fixed typing job not finding Python 3.14
  • 080b3f4 Pinned pyright version to fix typeshed related failure
  • ac66090 Added Python 3.14 to the test matrix
  • a0da94d Fixed test failures on Python 3.14
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=exceptiongroup&package-manager=pip&previous-version=1.3.0&new-version=1.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 727e983ca09..bb1905ebb51 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -67,7 +67,7 @@ distlib==0.4.0 # via virtualenv docutils==0.21.2 # via sphinx -exceptiongroup==1.3.0 +exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist diff --git a/requirements/dev.txt b/requirements/dev.txt index 3e0b0697305..f239220751c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -65,7 +65,7 @@ distlib==0.4.0 # via virtualenv docutils==0.21.2 # via sphinx -exceptiongroup==1.3.0 +exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist diff --git a/requirements/lint.txt b/requirements/lint.txt index d06b3ec6b3d..1b77fc30d29 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -27,7 +27,7 @@ cryptography==46.0.3 # via trustme distlib==0.4.0 # via virtualenv -exceptiongroup==1.3.0 +exceptiongroup==1.3.1 # via pytest filelock==3.20.0 # via virtualenv diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 059c9d1d538..0326bb9e902 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -20,7 +20,7 @@ coverage==7.11.0 # pytest-cov cryptography==46.0.3 # via trustme -exceptiongroup==1.3.0 +exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 0e68764176f..dcaf6d237a9 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -35,7 +35,7 @@ coverage==7.11.0 # pytest-cov cryptography==46.0.3 # via trustme -exceptiongroup==1.3.0 
+exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist diff --git a/requirements/test.txt b/requirements/test.txt index d49707b5d94..897264289bc 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -35,7 +35,7 @@ coverage==7.11.0 # pytest-cov cryptography==46.0.3 # via trustme -exceptiongroup==1.3.0 +exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist From 149dfa78d8e48eac9b427f0fe3ba20b418fa645f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Dec 2025 15:33:22 +0000 Subject: [PATCH 25/93] Bump pypa/cibuildwheel from 3.2.1 to 3.3.0 (#11760) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.2.1 to 3.3.0.
Release notes

Sourced from pypa/cibuildwheel's releases.

v3.3.0

  • 🐛 Fix an incompatibility with Docker v29 (#2660)
  • ✨ Adds test-runtime option, to customise how tests on simulated/emulated environments are run (#2636)
  • ✨ Adds support for new manylinux_2_35 images on 32-bit ARM armv7l, offering better C++20 compatibility (#2656)
  • build[uv] is now supported on Android (#2587)
  • ✨ You can now install extras (such as uv) with a simple option on the GitHub Action (#2630)
  • {project} and {package} placeholders are now supported in repair-wheel-command (#2589)
  • 🛠 The versions set with dependency-versions no longer constrain packages specified by your build-system.requires. Previously, on platforms other than Linux, the constraints in this option would remain in the environment during the build. This has been tidied up to make behaviour more consistent between platforms, and to prevent version conflicts. (#2583)
  • 🛠 Improve the handling of test-command on Android, enabling more options to be passed (#2590)
  • 📚 Docs improvements (#2618)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v3.3.0

12 November 2025

  • 🐛 Fix an incompatibility with Docker v29 (#2660)
  • ✨ Adds test-runtime option, to customise how tests on simulated/emulated environments are run (#2636)
  • ✨ Adds support for new manylinux_2_35 images on 32-bit ARM armv7l, offering better C++20 compatibility (#2656)
  • build[uv] is now supported on Android (#2587)
  • ✨ You can now install extras (such as uv) with a simple option on the GitHub Action (#2630)
  • {project} and {package} placeholders are now supported in repair-wheel-command (#2589)
  • 🛠 The versions set with dependency-versions no longer constrain packages specified by your build-system.requires. Previously, on platforms other than Linux, the constraints in this option would remain in the environment during the build. This has been tidied up to make behaviour more consistent between platforms, and to prevent version conflicts. (#2583)
  • 🛠 Improve the handling of test-command on Android, enabling more options to be passed (#2590)
  • 📚 Docs improvements (#2618)
Commits
  • 63fd63b Bump version: v3.3.0
  • f4fe311 fix: support Docker 29 (#2660)
  • f6c8108 feat: make the {project} placeholder available to repair-wheel-command (#...
  • ccbae30 feat: support uv with Android (#2587)
  • 1337e50 chore: pytest log_level is better than log_cli_level (#2657)
  • 720f8e2 feat: add manylinux_2_35 (#2656)
  • 4c7f369 [pre-commit.ci] pre-commit autoupdate (#2658)
  • e1baa60 chore: enable more Ruff checks (#2654)
  • 1f2f8b2 fix: don't constrain build-system.requires with our dependency-versions (...
  • 8c5b02f [pre-commit.ci] pre-commit autoupdate (#2648)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=3.2.1&new-version=3.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index c88140a590a..d294d0fcbfe 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -426,7 +426,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v3.2.1 + uses: pypa/cibuildwheel@v3.3.0 env: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 From b796fce385917b9c51b409d98987e06893a1c0ac Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 3 Dec 2025 14:30:00 +0000 Subject: [PATCH 26/93] [PR #11795/d0970585 backport][3.14] Added regression test for cached logging status (#11803) **This is a backport of PR #11795 as merged into master (d097058504374b86c2738c2ebe6f9b445d38ad75).** --------- Co-authored-by: meehand <70619157+meehand@users.noreply.github.com> --- CHANGES/11778.misc.rst | 1 + tests/test_web_log.py | 43 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 CHANGES/11778.misc.rst diff --git a/CHANGES/11778.misc.rst b/CHANGES/11778.misc.rst new file mode 100644 index 00000000000..ad29aea920a --- /dev/null +++ b/CHANGES/11778.misc.rst @@ -0,0 +1 @@ +Added regression test for cached logging status -- by :user:`meehand`. 
diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 16c4b976daa..1cd9f88d57a 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -281,3 +281,46 @@ def log( resp = await client.get("/") assert 200 == resp.status assert "This should not be logged" not in caplog.text + + +@pytest.mark.xfail(reason="#11778") +async def test_logger_does_not_log_when_enabled_post_init( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test logger does nothing when not enabled even if enabled post init.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + enabled = False + + class Logger(AbstractAccessLogger): + + def log( + self, request: web.BaseRequest, response: web.StreamResponse, time: float + ) -> None: + self.logger.critical("This should not be logged") # pragma: no cover + + @property + def enabled(self) -> bool: + """Check if logger is enabled.""" + # Avoid formatting the log line if it will not be emitted. 
+ return enabled + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, access_log_class=Logger) + client = await aiohttp_client(server) + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text + assert not server.handler.connections[0]._force_close + + # mock enabling logging post-init + enabled = True + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text + assert not server.handler.connections[0]._force_close From ea6c06592ca71a8b3f545228806a1732c268b55b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 3 Dec 2025 14:30:32 +0000 Subject: [PATCH 27/93] [PR #11795/d0970585 backport][3.13] Added regression test for cached logging status (#11802) **This is a backport of PR #11795 as merged into master (d097058504374b86c2738c2ebe6f9b445d38ad75).** --------- Co-authored-by: meehand <70619157+meehand@users.noreply.github.com> --- CHANGES/11778.misc.rst | 1 + tests/test_web_log.py | 43 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 CHANGES/11778.misc.rst diff --git a/CHANGES/11778.misc.rst b/CHANGES/11778.misc.rst new file mode 100644 index 00000000000..ad29aea920a --- /dev/null +++ b/CHANGES/11778.misc.rst @@ -0,0 +1 @@ +Added regression test for cached logging status -- by :user:`meehand`. 
diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 16c4b976daa..1cd9f88d57a 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -281,3 +281,46 @@ def log( resp = await client.get("/") assert 200 == resp.status assert "This should not be logged" not in caplog.text + + +@pytest.mark.xfail(reason="#11778") +async def test_logger_does_not_log_when_enabled_post_init( + aiohttp_server: AiohttpServer, + aiohttp_client: AiohttpClient, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test logger does nothing when not enabled even if enabled post init.""" + + async def handler(request: web.Request) -> web.Response: + return web.Response() + + enabled = False + + class Logger(AbstractAccessLogger): + + def log( + self, request: web.BaseRequest, response: web.StreamResponse, time: float + ) -> None: + self.logger.critical("This should not be logged") # pragma: no cover + + @property + def enabled(self) -> bool: + """Check if logger is enabled.""" + # Avoid formatting the log line if it will not be emitted. 
+ return enabled + + app = web.Application() + app.router.add_get("/", handler) + server = await aiohttp_server(app, access_log_class=Logger) + client = await aiohttp_client(server) + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text + assert not server.handler.connections[0]._force_close + + # mock enabling logging post-init + enabled = True + resp = await client.get("/") + assert 200 == resp.status + assert "This should not be logged" not in caplog.text + assert not server.handler.connections[0]._force_close From 77ab8f3051c2de87624d23dda2afaa3fbd72b87e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Dec 2025 14:35:44 +0000 Subject: [PATCH 28/93] [PR #11804/bffff8cf backport][3.14] Bump blockbuster to 1.5.26 (#11805) **This is a backport of PR #11804 as merged into master (bffff8cfd3e9f1d078a4781912fc863c4d230a69).** Co-authored-by: Christophe Bornet --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bb1905ebb51..94d6327553e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -30,7 +30,7 @@ backports-zstd==1.1.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -blockbuster==1.5.25 +blockbuster==1.5.26 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index f239220751c..142472aa328 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -30,7 +30,7 @@ backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_v # via # -r requirements/lint.in # -r requirements/runtime-deps.in -blockbuster==1.5.25 
+blockbuster==1.5.26 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 1b77fc30d29..a590c750520 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -12,7 +12,7 @@ async-timeout==5.0.1 # via valkey backports-zstd==1.1.0 ; implementation_name == "cpython" # via -r requirements/lint.in -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/lint.in cffi==2.0.0 # via diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 0326bb9e902..c3bed2b501d 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -6,7 +6,7 @@ # annotated-types==0.7.0 # via pydantic -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/test-common.in cffi==2.0.0 # via diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index dcaf6d237a9..bf326af5d53 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -18,7 +18,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/test-common.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 897264289bc..c3a09e21bda 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -18,7 +18,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/test-common.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From 3e4c3befa80fc3913c80739e54f50b28c79055fd Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 5 Dec 2025 15:22:14 
+0000 Subject: [PATCH 29/93] Bump blockbuster to 1.5.26 (#11804) (#11806) (cherry picked from commit bffff8cfd3e9f1d078a4781912fc863c4d230a69) Co-authored-by: Christophe Bornet --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 533091d75a3..8cc65554bbd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,7 +26,7 @@ attrs==25.3.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -blockbuster==1.5.25 +blockbuster==1.5.26 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 22d3952fccb..86a0843a7c5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ attrs==25.3.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -blockbuster==1.5.25 +blockbuster==1.5.26 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index b3b12bdd62a..a9b088d96b7 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/lint.in cffi==2.0.0 # via diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 8e073988a37..1ab1c6b3550 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -6,7 +6,7 @@ # annotated-types==0.7.0 # via pydantic -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/test-common.in cffi==2.0.0 # via diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index d9960bcbd73..dd791bc78e7 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -16,7 
+16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.3.0 # via -r requirements/runtime-deps.in -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/test-common.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index e946cb3e7db..87faf489087 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.3.0 # via -r requirements/runtime-deps.in -blockbuster==1.5.25 +blockbuster==1.5.26 # via -r requirements/test-common.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in From 14a65ba0227f09f8befaf38062eb8ed1f100109a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Dec 2025 15:39:03 +0000 Subject: [PATCH 30/93] Bump cfgv from 3.4.0 to 3.5.0 (#11801) Bumps [cfgv](https://github.com/asottile/cfgv) from 3.4.0 to 3.5.0.
Commits
  • c734212 v3.5.0
  • d64e0cc Merge pull request #158 from asottile/key-value-map
  • 641559f add KeyValueMap
  • 82e2f93 Merge pull request #157 from asottile/pre-commit-ci-update-config
  • 69cd020 [pre-commit.ci] pre-commit autoupdate
  • 5ccc4d4 Merge pull request #156 from asottile/pre-commit-ci-update-config
  • 2e7ffe5 [pre-commit.ci] pre-commit autoupdate
  • ef78499 Merge pull request #155 from asottile/all-repos_autofix_all-repos-manual
  • b34b882 py310+
  • 724a279 Merge pull request #154 from asottile/pre-commit-ci-update-config
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cfgv&package-manager=pip&previous-version=3.4.0&new-version=3.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 94d6327553e..e6978d40a0b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -45,7 +45,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -cfgv==3.4.0 +cfgv==3.5.0 # via pre-commit charset-normalizer==3.4.4 # via requests diff --git a/requirements/dev.txt b/requirements/dev.txt index 142472aa328..6e41062eaa7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -45,7 +45,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -cfgv==3.4.0 +cfgv==3.5.0 # via pre-commit charset-normalizer==3.4.4 # via requests diff --git a/requirements/lint.txt b/requirements/lint.txt index a590c750520..6b495868cec 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -19,7 +19,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -cfgv==3.4.0 +cfgv==3.5.0 # via pre-commit click==8.3.0 # via slotscheck From 96c3b46f157dad8923149984f5b02374de6f88e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Dec 2025 15:47:57 +0000 Subject: [PATCH 31/93] Bump sigstore/gh-action-sigstore-python from 3.1.0 to 3.2.0 (#11798) Bumps [sigstore/gh-action-sigstore-python](https://github.com/sigstore/gh-action-sigstore-python) from 3.1.0 to 3.2.0.
Release notes

Sourced from sigstore/gh-action-sigstore-python's releases.

v3.2.0

gh-action-sigstore-python action now manages the used Python version internally, improving reliability.

Changed

  • Manage Python version internally (#242, #258)
  • Dependency updates
Changelog

Sourced from sigstore/gh-action-sigstore-python's changelog.

Changelog

All notable changes to gh-action-sigstore-python will be documented in this file.

The format is based on Keep a Changelog.

All versions prior to 3.0.0 are untracked.

[Unreleased]

Commits
  • a5caf34 build(deps): bump actions/checkout in the actions group (#265)
  • 7b8cfcb build(deps): bump the actions group with 2 updates (#264)
  • 270f433 build(deps-dev): bump ruff in the python-dependencies group (#263)
  • 034c8bf build(deps): bump actions/setup-python in the actions group (#260)
  • 5483fa8 Fix .python-version lookup (#258)
  • f962baa build(deps): bump github/codeql-action in the actions group (#259)
  • 225a312 build(deps): bump astral-sh/setup-uv in the actions group (#253)
  • b7c02b3 build(deps): bump actions/checkout in the actions group (#251)
  • 52bad44 build(deps-dev): bump ruff in the python-dependencies group (#252)
  • 68eceea build(deps): bump certifi in the python-dependencies group (#250)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sigstore/gh-action-sigstore-python&package-manager=github_actions&previous-version=3.1.0&new-version=3.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d294d0fcbfe..7176356f296 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -489,7 +489,7 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 - name: Sign the dists with Sigstore - uses: sigstore/gh-action-sigstore-python@v3.1.0 + uses: sigstore/gh-action-sigstore-python@v3.2.0 with: inputs: >- ./dist/*.tar.gz From 8b3bba81c962ca15678a5513c883339f98ab3a24 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Dec 2025 15:50:01 +0000 Subject: [PATCH 32/93] Bump mypy from 1.18.2 to 1.19.0 (#11800) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.18.2 to 1.19.0.
Changelog

Sourced from mypy's changelog.

Mypy Release Notes

Next Release

Drop Support for Python 3.9

Mypy no longer supports running with Python 3.9, which has reached end-of-life. When running mypy with Python 3.10+, it is still possible to type check code that needs to support Python 3.9 with the --python-version 3.9 argument. Support for this will be dropped in the first half of 2026!

Contributed by Marc Mueller (PR 20156).

Mypy 1.19

We’ve just uploaded mypy 1.19.0 to the Python Package Index (PyPI). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:

python3 -m pip install -U mypy

You can read the full documentation for this release on Read the Docs.

Python 3.9 Support Ending Soon

This is the last mypy feature release that supports Python 3.9, which reached end of life in October 2025.

Performance Improvements

  • Switch to a more dynamic SCC processing logic (Ivan Levkivskyi, PR 20053)
  • Speed up type aliases (Ivan Levkivskyi, PR 19810)

Fixed‑Format Cache Improvements

Mypy uses a cache by default to speed up incremental runs by reusing partial results from earlier runs. Mypy 1.18 added a new binary fixed-format cache representation as an experimental feature. The feature is no longer experimental, and we are planning to enable it by default in a future mypy release (possibly 1.20), since it's faster and uses less space than the original, JSON-based cache format. Use --fixed-format-cache to enable the fixed-format cache.

Mypy now has an extra dependency on the librt PyPI package, as it's needed for cache serialization and deserialization.

Mypy ships with a tool to convert fixed-format cache files to the old JSON format. Example of how to use this:

$ python -m mypy.exportjson .mypy_cache/.../my_module.data.ff

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.18.2&new-version=1.19.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 +++- requirements/dev.txt | 4 +++- requirements/lint.txt | 4 +++- requirements/test-common.txt | 4 +++- requirements/test-ft.txt | 4 +++- requirements/test.txt | 4 +++- 6 files changed, 18 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e6978d40a0b..2c009cd5630 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -104,6 +104,8 @@ jinja2==3.1.6 # via # sphinx # towncrier +librt==0.6.3 + # via mypy markdown-it-py==4.0.0 # via rich markupsafe==3.0.3 @@ -115,7 +117,7 @@ multidict==6.7.0 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.18.2 ; implementation_name == "cpython" +mypy==1.19.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 6e41062eaa7..b7482a723f4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -102,6 +102,8 @@ jinja2==3.1.6 # via # sphinx # towncrier +librt==0.6.3 + # via mypy markdown-it-py==4.0.0 # via rich markupsafe==3.0.3 @@ -112,7 +114,7 @@ multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.18.2 ; implementation_name == "cpython" +mypy==1.19.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 6b495868cec..00029ebe126 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -43,11 +43,13 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 # via -r requirements/lint.in +librt==0.6.3 + # via mypy markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.18.2 ; implementation_name == "cpython" +mypy==1.19.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.1.0 # via mypy 
diff --git a/requirements/test-common.txt b/requirements/test-common.txt index c3bed2b501d..ad3b92335fe 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -34,11 +34,13 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in +librt==0.6.3 + # via mypy markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.18.2 ; implementation_name == "cpython" +mypy==1.19.0 ; implementation_name == "cpython" # via -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index bf326af5d53..7e3c0b12454 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -57,6 +57,8 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in +librt==0.6.3 + # via mypy markdown-it-py==4.0.0 # via rich mdurl==0.1.2 @@ -65,7 +67,7 @@ multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.18.2 ; implementation_name == "cpython" +mypy==1.19.0 ; implementation_name == "cpython" # via -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index c3a09e21bda..3f7ae54122e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,6 +57,8 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in +librt==0.6.3 + # via mypy markdown-it-py==4.0.0 # via rich mdurl==0.1.2 @@ -65,7 +67,7 @@ multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.18.2 ; implementation_name == "cpython" +mypy==1.19.0 ; implementation_name == "cpython" # via -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy From a76721a3823f2faa3a6c4486c5e25f9af9c1e999 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Dec 2025 16:08:07 +0000 Subject: [PATCH 33/93] Bump click 
from 8.3.0 to 8.3.1 (#11799) Bumps [click](https://github.com/pallets/click) from 8.3.0 to 8.3.1.
Release notes

Sourced from click's releases.

8.3.1

This is the Click 8.3.1 fix release, which fixes bugs but does not otherwise change behavior and should not result in breaking changes compared to the latest feature release.

PyPI: https://pypi.org/project/click/8.3.1/ Changes: https://click.palletsprojects.com/page/changes/#version-8-3-1 Milestone: https://github.com/pallets/click/milestone/28

  • Don't discard pager arguments by correctly using subprocess.Popen. #3039 #3055
  • Replace Sentinel.UNSET default values by None as they're passed through the Context.invoke() method. #3066 #3065 #3068
  • Fix conversion of Sentinel.UNSET happening too early, which caused incorrect behavior for multiple parameters using the same name. #3071 #3079
  • Fix rendering when prompt and confirm parameter prompt_suffix is empty. #3019 #3021
  • When Sentinel.UNSET is found during parsing, it will skip calls to type_cast_value. #3069 #3090
  • Hide Sentinel.UNSET values as None when looking up for other parameters through the context inside parameter callbacks. #3136 #3137
Changelog

Sourced from click's changelog.

Version 8.3.1

Released 2025-11-15

  • Don't discard pager arguments by correctly using subprocess.Popen. :issue:3039 :pr:3055
  • Replace Sentinel.UNSET default values by None as they're passed through the Context.invoke() method. :issue:3066 :issue:3065 :pr:3068
  • Fix conversion of Sentinel.UNSET happening too early, which caused incorrect behavior for multiple parameters using the same name. :issue:3071 :pr:3079
  • Hide Sentinel.UNSET values as None when looking up for other parameters through the context inside parameter callbacks. :issue:3136 :pr:3137
  • Fix rendering when prompt and confirm parameter prompt_suffix is empty. :issue:3019 :pr:3021
  • When Sentinel.UNSET is found during parsing, it will skip calls to type_cast_value. :issue:3069 :pr:3090
Commits
  • 1d038f2 release version 8.3.1
  • 03f3889 Fix Ruff UP038 warning (#3141)
  • 3867781 Fix Ruff UP038 warning
  • b91bb95 Provide altered context to callbacks to hide UNSET values as None (#3137)
  • 437e1e3 Temporarily provide a fake context to the callback to hide UNSET values as ...
  • ea70da4 Don't test using a file in docs/ (#3102)
  • e27b307 Make uv run --all-extras pyright --verifytypes click pass (#3072)
  • a92c573 Fix test_edit to work with BSD sed (#3129)
  • bd131e1 Fix test_edit to work with BSD sed
  • 0b5c6b7 Add Best practices section (#3127)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=click&package-manager=pip&previous-version=8.3.0&new-version=8.3.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2c009cd5630..768636e7e41 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -49,7 +49,7 @@ cfgv==3.5.0 # via pre-commit charset-normalizer==3.4.4 # via requests -click==8.3.0 +click==8.3.1 # via # pip-tools # slotscheck diff --git a/requirements/dev.txt b/requirements/dev.txt index b7482a723f4..1c9537690fc 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -49,7 +49,7 @@ cfgv==3.5.0 # via pre-commit charset-normalizer==3.4.4 # via requests -click==8.3.0 +click==8.3.1 # via # pip-tools # slotscheck diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 5658563eef3..2f086169891 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -14,7 +14,7 @@ certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests -click==8.3.0 +click==8.3.1 # via towncrier docutils==0.21.2 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index 41c5958f162..8ada5342079 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -14,7 +14,7 @@ certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests -click==8.3.0 +click==8.3.1 # via towncrier docutils==0.21.2 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 00029ebe126..13bc3e24f48 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -21,7 +21,7 @@ cffi==2.0.0 # pytest-codspeed cfgv==3.5.0 # via pre-commit -click==8.3.0 +click==8.3.1 # via slotscheck 
cryptography==46.0.3 # via trustme diff --git a/requirements/test-common.txt b/requirements/test-common.txt index ad3b92335fe..1aa16173ad5 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -12,7 +12,7 @@ cffi==2.0.0 # via # cryptography # pytest-codspeed -click==8.3.0 +click==8.3.1 # via wait-for-it coverage==7.11.0 # via diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 7e3c0b12454..ced070f0e23 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -27,7 +27,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -click==8.3.0 +click==8.3.1 # via wait-for-it coverage==7.11.0 # via diff --git a/requirements/test.txt b/requirements/test.txt index 3f7ae54122e..44037b17ad9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -click==8.3.0 +click==8.3.1 # via wait-for-it coverage==7.11.0 # via From d66a69e629f8cd768ee1840d63acbd9b141ae5ef Mon Sep 17 00:00:00 2001 From: meehand <70619157+meehand@users.noreply.github.com> Date: Sun, 7 Dec 2025 09:36:14 -0800 Subject: [PATCH 34/93] Re-backport 10713 - add regression test (#11807) --- CHANGES/10713.misc.rst | 1 + aiohttp/web_protocol.py | 2 +- tests/test_web_log.py | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10713.misc.rst diff --git a/CHANGES/10713.misc.rst b/CHANGES/10713.misc.rst new file mode 100644 index 00000000000..a556d11e1e0 --- /dev/null +++ b/CHANGES/10713.misc.rst @@ -0,0 +1 @@ +Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index c4c61ccc82d..843d5504b8e 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -457,7 +457,7 @@ def force_close(self) -> None: def log_access( self, request: BaseRequest, response: StreamResponse, time: float | None ) -> None: - if self.access_logger is not None and self.access_logger.enabled: + if self._logging_enabled and self.access_logger is not None: if TYPE_CHECKING: assert time is not None self.access_logger.log(request, response, self._loop.time() - time) diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 1cd9f88d57a..82b74a6e713 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -283,7 +283,6 @@ def log( assert "This should not be logged" not in caplog.text -@pytest.mark.xfail(reason="#11778") async def test_logger_does_not_log_when_enabled_post_init( aiohttp_server: AiohttpServer, aiohttp_client: AiohttpClient, From 5ac4695d7353c27a0e682d2c814a387668177f2b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 7 Dec 2025 18:00:14 +0000 Subject: [PATCH 35/93] [PR #11807/d66a69e6 backport][3.13] Re-backport 10713 - add regression test (#11809) **This is a backport of PR #11807 as merged into 3.14 (d66a69e629f8cd768ee1840d63acbd9b141ae5ef).** Co-authored-by: meehand <70619157+meehand@users.noreply.github.com> --- CHANGES/10713.misc.rst | 1 + aiohttp/web_protocol.py | 2 +- tests/test_web_log.py | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 CHANGES/10713.misc.rst diff --git a/CHANGES/10713.misc.rst b/CHANGES/10713.misc.rst new file mode 100644 index 00000000000..a556d11e1e0 --- /dev/null +++ b/CHANGES/10713.misc.rst @@ -0,0 +1 @@ +Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. 
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index e1923aac24b..1bd344ae42a 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -468,7 +468,7 @@ def force_close(self) -> None: def log_access( self, request: BaseRequest, response: StreamResponse, time: Optional[float] ) -> None: - if self.access_logger is not None and self.access_logger.enabled: + if self._logging_enabled and self.access_logger is not None: if TYPE_CHECKING: assert time is not None self.access_logger.log(request, response, self._loop.time() - time) diff --git a/tests/test_web_log.py b/tests/test_web_log.py index 1cd9f88d57a..82b74a6e713 100644 --- a/tests/test_web_log.py +++ b/tests/test_web_log.py @@ -283,7 +283,6 @@ def log( assert "This should not be logged" not in caplog.text -@pytest.mark.xfail(reason="#11778") async def test_logger_does_not_log_when_enabled_post_init( aiohttp_server: AiohttpServer, aiohttp_client: AiohttpClient, From 668f7cec79f3ba0629d4c5f38947e2a4af4e600e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Dec 2025 11:02:14 +0000 Subject: [PATCH 36/93] Bump urllib3 from 2.5.0 to 2.6.0 (#11815) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.5.0 to 2.6.0.
Release notes

Sourced from urllib3's releases.

2.6.0

🚀 urllib3 is fundraising for HTTP/2 support

urllib3 is raising ~$40,000 USD to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects please consider contributing financially to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.

Thank you for your support.

Security

  • Fixed a security issue where streaming API could improperly handle highly compressed HTTP content ("decompression bombs") leading to excessive resource consumption even when a small amount of data was requested. Reading small chunks of compressed data is safer and much more efficient now. (CVE-2025-66471 reported by @​Cycloctane, 8.9 High, GHSA-2xpw-w6gg-jr37)
  • Fixed a security issue where an attacker could compose an HTTP response with virtually unlimited links in the Content-Encoding header, potentially leading to a denial of service (DoS) attack by exhausting system resources during decoding. The number of allowed chained encodings is now limited to 5. (CVE-2025-66418 reported by @​illia-v, 8.9 High, GHSA-gm62-xv2j-4w53)

[!IMPORTANT]

  • If urllib3 is not installed with the optional urllib3[brotli] extra, but your environment contains a Brotli/brotlicffi/brotlipy package anyway, make sure to upgrade it to at least Brotli 1.2.0 or brotlicffi 1.2.0.0 to benefit from the security fixes and avoid warnings. Prefer using urllib3[brotli] to install a compatible Brotli package automatically.
  • If you use custom decompressors, please make sure to update them to respect the changed API of urllib3.response.ContentDecoder.

Features

  • Enabled retrieval, deletion, and membership testing in HTTPHeaderDict using bytes keys. (#3653)
  • Added host and port information to string representations of HTTPConnection. (#3666)
  • Added support for Python 3.14 free-threading builds explicitly. (#3696)

Removals

  • Removed the HTTPResponse.getheaders() method in favor of HTTPResponse.headers. Removed the HTTPResponse.getheader(name, default) method in favor of HTTPResponse.headers.get(name, default). (#3622)

Bugfixes

  • Fixed redirect handling in urllib3.PoolManager when an integer is passed for the retries parameter. (#3649)
  • Fixed HTTPConnectionPool when used in Emscripten with no explicit port. (#3664)
  • Fixed handling of SSLKEYLOGFILE with expandable variables. (#3700)

Misc

  • Changed the zstd extra to install backports.zstd instead of zstandard on Python 3.13 and before. (#3693)
  • Improved the performance of content decoding by optimizing BytesQueueBuffer class. (#3710)
  • Allowed building the urllib3 package with newer setuptools-scm v9.x. (#3652)
  • Ensured successful urllib3 builds by setting Hatchling requirement to ≥ 1.27.0. (#3638)
Changelog

Sourced from urllib3's changelog.

2.6.0 (2025-12-05)

Security

  • Fixed a security issue where streaming API could improperly handle highly compressed HTTP content ("decompression bombs") leading to excessive resource consumption even when a small amount of data was requested. Reading small chunks of compressed data is safer and much more efficient now. (GHSA-2xpw-w6gg-jr37 <https://github.com/urllib3/urllib3/security/advisories/GHSA-2xpw-w6gg-jr37>__)
  • Fixed a security issue where an attacker could compose an HTTP response with virtually unlimited links in the Content-Encoding header, potentially leading to a denial of service (DoS) attack by exhausting system resources during decoding. The number of allowed chained encodings is now limited to 5. (GHSA-gm62-xv2j-4w53 <https://github.com/urllib3/urllib3/security/advisories/GHSA-gm62-xv2j-4w53>__)

.. caution::

  • If urllib3 is not installed with the optional urllib3[brotli] extra, but your environment contains a Brotli/brotlicffi/brotlipy package anyway, make sure to upgrade it to at least Brotli 1.2.0 or brotlicffi 1.2.0.0 to benefit from the security fixes and avoid warnings. Prefer using urllib3[brotli] to install a compatible Brotli package automatically.

  • If you use custom decompressors, please make sure to update them to respect the changed API of urllib3.response.ContentDecoder.

Features

  • Enabled retrieval, deletion, and membership testing in HTTPHeaderDict using bytes keys. ([#3653](https://github.com/urllib3/urllib3/issues/3653) <https://github.com/urllib3/urllib3/issues/3653>__)
  • Added host and port information to string representations of HTTPConnection. ([#3666](https://github.com/urllib3/urllib3/issues/3666) <https://github.com/urllib3/urllib3/issues/3666>__)
  • Added support for Python 3.14 free-threading builds explicitly. ([#3696](https://github.com/urllib3/urllib3/issues/3696) <https://github.com/urllib3/urllib3/issues/3696>__)

Removals

  • Removed the HTTPResponse.getheaders() method in favor of HTTPResponse.headers. Removed the HTTPResponse.getheader(name, default) method in favor of HTTPResponse.headers.get(name, default). ([#3622](https://github.com/urllib3/urllib3/issues/3622))

Bugfixes

  • Fixed redirect handling in urllib3.PoolManager when an integer is passed for the retries parameter. ([#3649](https://github.com/urllib3/urllib3/issues/3649))
  • Fixed HTTPConnectionPool when used in Emscripten with no explicit port. ([#3664](https://github.com/urllib3/urllib3/issues/3664))
  • Fixed handling of SSLKEYLOGFILE with expandable variables. ([#3700](https://github.com/urllib3/urllib3/issues/3700))

... (truncated)

Commits
  • 720f484 Release 2.6.0
  • 24d7b67 Merge commit from fork
  • c19571d Merge commit from fork
  • 816fcf0 Bump actions/setup-python from 6.0.0 to 6.1.0 (#3725)
  • 18af0a1 Improve speed of BytesQueueBuffer.get() by using memoryview (#3711)
  • 1f6abac Bump versions of pre-commit hooks (#3716)
  • 1c8fbf7 Bump actions/checkout from 5.0.0 to 6.0.0 (#3722)
  • 7784b9e Add Python 3.15 to CI (#3717)
  • 0241c9e Updated docs to reflect change in optional zstd dependency from zstandard t...
  • 7afcabb Expand environment variable of SSLKEYLOGFILE (#3705)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=2.5.0&new-version=2.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 768636e7e41..239db4dc40f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -269,7 +269,7 @@ typing-extensions==4.15.0 # virtualenv typing-inspection==0.4.2 # via pydantic -urllib3==2.5.0 +urllib3==2.6.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 1c9537690fc..d46a737697c 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -259,7 +259,7 @@ typing-extensions==4.15.0 # virtualenv typing-inspection==0.4.2 # via pydantic -urllib3==2.5.0 +urllib3==2.6.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 2f086169891..0f447f88d19 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -69,5 +69,5 @@ towncrier==25.8.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.5.0 +urllib3==2.6.0 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index 8ada5342079..d830f66199b 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -62,5 +62,5 @@ towncrier==25.8.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.5.0 +urllib3==2.6.0 # via requests From f7fab6d6652a47267fb11786e7c3dd27fc0b17b8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Dec 2025 11:37:58 +0000 Subject: [PATCH 37/93] Bump pytest from 9.0.1 to 9.0.2 (#11817) Bumps [pytest](https://github.com/pytest-dev/pytest) from 9.0.1 to 9.0.2.
Release notes

Sourced from pytest's releases.

9.0.2

pytest 9.0.2 (2025-12-06)

Bug fixes

  • #13896: The terminal progress feature added in pytest 9.0.0 has been disabled by default, except on Windows, due to compatibility issues with some terminal emulators.

    You may enable it again by passing -p terminalprogress. We may enable it by default again once compatibility improves in the future.

    Additionally, when the environment variable TERM is dumb, the escape codes are no longer emitted, even if the plugin is enabled.

  • #13904: Fixed the TOML type of the tmp_path_retention_count setting in the API reference from number to string.

  • #13946: The private config.inicfg attribute was changed in a breaking manner in pytest 9.0.0. Due to its usage in the ecosystem, it is now restored to working order using a compatibility shim. It will be deprecated in pytest 9.1 and removed in pytest 10.

  • #13965: Fixed quadratic-time behavior when handling unittest subtests in Python 3.10.

Improved documentation

  • #4492: The API Reference now contains cross-reference-able documentation of pytest's command-line flags.
Commits
  • 3d10b51 Prepare release version 9.0.2
  • 188750b Merge pull request #14030 from pytest-dev/patchback/backports/9.0.x/1e4b01d1f...
  • b7d7bef Merge pull request #14014 from bluetech/compat-note
  • bd08e85 Merge pull request #14013 from pytest-dev/patchback/backports/9.0.x/922b60377...
  • bc78386 Add CLI options reference documentation (#13930)
  • 5a4e398 Fix docs typo (#14005) (#14008)
  • d7ae6df Merge pull request #14006 from pytest-dev/maintenance/update-plugin-list-tmpl...
  • 556f6a2 pre-commit: fix rst-lint after new release (#13999) (#14001)
  • c60fbe6 Fix quadratic-time behavior when handling unittest subtests in Python 3.10 ...
  • 73d9b01 Merge pull request #13995 from nicoddemus/patchback/backports/9.0.x/1b5200c0f...
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest&package-manager=pip&previous-version=9.0.1&new-version=9.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 239db4dc40f..007c1a9790d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -170,7 +170,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==9.0.1 +pytest==9.0.2 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index d46a737697c..549748cbc12 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -165,7 +165,7 @@ pyproject-hooks==1.2.0 # via # build # pip-tools -pytest==9.0.1 +pytest==9.0.2 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 13bc3e24f48..ffdb1aafa78 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -77,7 +77,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==9.0.1 +pytest==9.0.2 # via # -r requirements/lint.in # pytest-codspeed diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 1aa16173ad5..183db69936e 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -66,7 +66,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==9.0.1 +pytest==9.0.2 # via # -r requirements/test-common.in # pytest-codspeed diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index ced070f0e23..4c949c46a26 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -101,7 +101,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==9.0.1 +pytest==9.0.2 # via # -r requirements/test-common.in # pytest-codspeed diff --git a/requirements/test.txt 
b/requirements/test.txt index 44037b17ad9..c6031683470 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -101,7 +101,7 @@ pygments==2.19.2 # via # pytest # rich -pytest==9.0.1 +pytest==9.0.2 # via # -r requirements/test-common.in # pytest-codspeed From 057806086e6915b8772bc74f206b964ee9bc16c7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Dec 2025 11:38:05 +0000 Subject: [PATCH 38/93] Bump platformdirs from 4.5.0 to 4.5.1 (#11820) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/tox-dev/platformdirs) from 4.5.0 to 4.5.1.
Release notes

Sourced from platformdirs's releases.

4.5.1

What's Changed

Full Changelog: https://github.com/tox-dev/platformdirs/compare/4.5.0...4.5.1

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.5.0&new-version=4.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 007c1a9790d..192f7b74df1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -137,7 +137,7 @@ pip-tools==7.5.2 # via -r requirements/dev.in pkgconfig==1.5.5 # via -r requirements/test-common.in -platformdirs==4.5.0 +platformdirs==4.5.1 # via virtualenv pluggy==1.6.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 549748cbc12..f1eb4e60059 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -134,7 +134,7 @@ pip-tools==7.5.2 # via -r requirements/dev.in pkgconfig==1.5.5 # via -r requirements/test-common.in -platformdirs==4.5.0 +platformdirs==4.5.1 # via virtualenv pluggy==1.6.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index ffdb1aafa78..c2f888bdd3a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -59,7 +59,7 @@ packaging==25.0 # via pytest pathspec==0.12.1 # via mypy -platformdirs==4.5.0 +platformdirs==4.5.1 # via virtualenv pluggy==1.6.0 # via pytest From d211915ed4df1a7893fada77825e1b026fd3dab6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Dec 2025 11:42:39 +0000 Subject: [PATCH 39/93] Bump aiodns from 3.5.0 to 3.6.0 (#11819) Bumps [aiodns](https://github.com/saghul/aiodns) from 3.5.0 to 3.6.0.
Changelog

Sourced from aiodns's changelog.

3.6.0

  • Fix resolver garbage collection during pending queries (#211)
    • Prevents resolver from being garbage collected while queries are in progress
  • Socket callback optimizations (#172)
    • Improved performance for socket state handling
  • Fixed RTD links (#176)
  • Added Python 3.14 to the CI (#212)
  • Updated dependencies
    • Bumped pycares from 4.9.0 to 4.11.0 (#186, #194)
    • Bumped pytest-asyncio from 1.0.0 to 1.2.0 (#181, #196)
    • Bumped pytest-cov from 6.2.1 to 7.0.0 (#193)
    • Bumped pytest from 8.4.0 to 8.4.2 (#171, #190)
    • Bumped mypy from 1.16.0 to 1.19.0 (#170, #179, #185, #195, #197, #207)
    • Bumped uvloop from 0.21.0 to 0.22.1 (#202)
    • Bumped winloop from 0.1.8 to 0.3.1 (#182, #183, #184, #187, #200, #201, #203)
    • Bumped actions/setup-python from 5 to 6 (#199)
    • Bumped actions/checkout from 4 to 6 (#188, #208)
    • Bumped actions/upload-artifact from 4 to 5 (#204)
    • Bumped actions/download-artifact from 4.3.0 to 6.0.0 (#205)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiodns&package-manager=pip&previous-version=3.5.0&new-version=3.6.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index b63dd5acce6..8c4aba70e90 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base-ft.txt --strip-extras requirements/base-ft.in # -aiodns==3.5.0 +aiodns==3.6.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/base.txt b/requirements/base.txt index 8d8a2f8570b..9c1c46507f4 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.5.0 +aiodns==3.6.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 192f7b74df1..abd0f1a6523 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --strip-extras requirements/constraints.in # -aiodns==3.5.0 +aiodns==3.6.0 # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index f1eb4e60059..d8856b82fd5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --strip-extras requirements/dev.in # -aiodns==3.5.0 +aiodns==3.6.0 # via # -r requirements/lint.in # -r 
requirements/runtime-deps.in diff --git a/requirements/lint.txt b/requirements/lint.txt index c2f888bdd3a..127203ae45e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in # -aiodns==3.5.0 +aiodns==3.6.0 # via -r requirements/lint.in annotated-types==0.7.0 # via pydantic diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 982687372ef..a560c167f25 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.5.0 +aiodns==3.6.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 4c949c46a26..7e2eae4e4da 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test-ft.txt --strip-extras requirements/test-ft.in # -aiodns==3.5.0 +aiodns==3.6.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index c6031683470..f91919772cb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --strip-extras requirements/test.in # -aiodns==3.5.0 +aiodns==3.6.0 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in From 8a4e76d6a958600d47581d27b3c57a0b4b67612f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Dec 2025 11:42:42 +0000 Subject: [PATCH 40/93] Bump librt from 0.6.3 to 0.7.3 (#11818) Bumps [librt](https://github.com/mypyc/librt) 
from 0.6.3 to 0.7.3.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=librt&package-manager=pip&previous-version=0.6.3&new-version=0.7.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index abd0f1a6523..8e3a62e21ac 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -104,7 +104,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.6.3 +librt==0.7.3 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/dev.txt b/requirements/dev.txt index d8856b82fd5..ea1a98f1dbe 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -102,7 +102,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.6.3 +librt==0.7.3 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/lint.txt b/requirements/lint.txt index 127203ae45e..356ebc3551e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -43,7 +43,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 # via -r requirements/lint.in -librt==0.6.3 +librt==0.7.3 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 183db69936e..a9cd41896e8 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -34,7 +34,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.6.3 +librt==0.7.3 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 7e2eae4e4da..48515c0823c 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.6.3 +librt==0.7.3 # via mypy markdown-it-py==4.0.0 # via 
rich diff --git a/requirements/test.txt b/requirements/test.txt index f91919772cb..d2d35d7ad43 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.6.3 +librt==0.7.3 # via mypy markdown-it-py==4.0.0 # via rich From bed4add8dab01703ddfa4f243b6f429d321f216d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=F0=9F=87=BA=F0=9F=87=A6=20Sviatoslav=20Sydorenko=20=28?= =?UTF-8?q?=D0=A1=D0=B2=D1=8F=D1=82=D0=BE=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1?= =?UTF-8?q?=D0=B8=D0=B4=D0=BE=D1=80=D0=B5=D0=BD=D0=BA=D0=BE=29?= Date: Tue, 9 Dec 2025 17:26:12 +0100 Subject: [PATCH 41/93] Bump coverage from 7.11.0 to 7.13.0 (#11826) This patch includes pinning the tracer to `ctrace` in the coveragepy config and migrating said config to TOML. (cherry picked from commit fb722b8bfe0ac8675673c9f64f8de97ed5d6c3e2) --- .coveragerc | 11 ----------- .coveragerc.toml | 22 ++++++++++++++++++++++ CHANGES/11826.contrib.rst | 7 +++++++ requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 34 insertions(+), 16 deletions(-) delete mode 100644 .coveragerc create mode 100644 .coveragerc.toml create mode 100644 CHANGES/11826.contrib.rst diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 7792266b114..00000000000 --- a/.coveragerc +++ /dev/null @@ -1,11 +0,0 @@ -[run] -branch = True -source = aiohttp, tests -omit = site-packages - -[report] -exclude_also = - if TYPE_CHECKING - assert False - : \.\.\.(\s*#.*)?$ - ^ +\.\.\.$ diff --git a/.coveragerc.toml b/.coveragerc.toml new file mode 100644 index 00000000000..4ca5d2808bd --- /dev/null +++ b/.coveragerc.toml @@ -0,0 +1,22 @@ +[run] +branch = true +# NOTE: `ctrace` tracing method is needed because the `sysmon` tracer +# NOTE: which is default on Python 3.14, causes unprecedented slow-down +# 
NOTE: of the test runs. +# Ref: https://github.com/coveragepy/coveragepy/issues/2099 +core = 'ctrace' +source = [ + 'aiohttp', + 'tests', +] +omit = [ + 'site-packages', +] + +[report] +exclude_also = [ + 'if TYPE_CHECKING', + 'assert False', + ': \.\.\.(\s*#.*)?$', + '^ +\.\.\.$', +] diff --git a/CHANGES/11826.contrib.rst b/CHANGES/11826.contrib.rst new file mode 100644 index 00000000000..134eda601c2 --- /dev/null +++ b/CHANGES/11826.contrib.rst @@ -0,0 +1,7 @@ +The coverage tool is now configured using the new native +auto-discovered :file:`.coveragerc.toml` file +-- by :user:`webknjaz`. + +It is also set up to use the ``ctrace`` core that works +around the performance issues in the ``sysmon`` tracer +which is default under Python 3.14. diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8cc65554bbd..35112e806ef 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.10.7 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 86a0843a7c5..935b3416a57 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.10.7 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 1ab1c6b3550..9b3e1c0c778 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -14,7 +14,7 @@ cffi==2.0.0 # pytest-codspeed click==8.2.1 # via wait-for-it -coverage==7.10.7 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index dd791bc78e7..71d928fcecd 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -27,7 +27,7 @@ cffi==2.0.0 # pytest-codspeed click==8.2.1 # via wait-for-it 
-coverage==7.10.7 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 87faf489087..a6377f3b337 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==2.0.0 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.10.7 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov From 624a38cfc6b45eac76bf79c4faf22569ced57a4a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 9 Dec 2025 16:44:22 +0000 Subject: [PATCH 42/93] [PR #11826/fb722b8b backport][3.14] Bump coverage from 7.11.0 to 7.13.0 (#11827) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #11826 as merged into master (fb722b8bfe0ac8675673c9f64f8de97ed5d6c3e2).** This patch includes pinning the tracer to `ctrace` in the coveragepy config and migrating said config to TOML. Co-authored-by: 🇺🇦 Sviatoslav Sydorenko (Святослав Сидоренко) --- .coveragerc | 11 ----------- .coveragerc.toml | 22 ++++++++++++++++++++++ CHANGES/11826.contrib.rst | 7 +++++++ requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 34 insertions(+), 16 deletions(-) delete mode 100644 .coveragerc create mode 100644 .coveragerc.toml create mode 100644 CHANGES/11826.contrib.rst diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 7792266b114..00000000000 --- a/.coveragerc +++ /dev/null @@ -1,11 +0,0 @@ -[run] -branch = True -source = aiohttp, tests -omit = site-packages - -[report] -exclude_also = - if TYPE_CHECKING - assert False - : \.\.\.(\s*#.*)?$ - ^ +\.\.\.$ diff --git a/.coveragerc.toml b/.coveragerc.toml new file mode 100644 index 00000000000..4ca5d2808bd --- /dev/null +++ b/.coveragerc.toml @@ -0,0 +1,22 @@ +[run] +branch = true +# NOTE: 
`ctrace` tracing method is needed because the `sysmon` tracer +# NOTE: which is default on Python 3.14, causes unprecedented slow-down +# NOTE: of the test runs. +# Ref: https://github.com/coveragepy/coveragepy/issues/2099 +core = 'ctrace' +source = [ + 'aiohttp', + 'tests', +] +omit = [ + 'site-packages', +] + +[report] +exclude_also = [ + 'if TYPE_CHECKING', + 'assert False', + ': \.\.\.(\s*#.*)?$', + '^ +\.\.\.$', +] diff --git a/CHANGES/11826.contrib.rst b/CHANGES/11826.contrib.rst new file mode 100644 index 00000000000..134eda601c2 --- /dev/null +++ b/CHANGES/11826.contrib.rst @@ -0,0 +1,7 @@ +The coverage tool is now configured using the new native +auto-discovered :file:`.coveragerc.toml` file +-- by :user:`webknjaz`. + +It is also set up to use the ``ctrace`` core that works +around the performance issues in the ``sysmon`` tracer +which is default under Python 3.14. diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8e3a62e21ac..63abff46de6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -55,7 +55,7 @@ click==8.3.1 # slotscheck # towncrier # wait-for-it -coverage==7.11.0 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index ea1a98f1dbe..ffe4bc1d10a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -55,7 +55,7 @@ click==8.3.1 # slotscheck # towncrier # wait-for-it -coverage==7.11.0 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-common.txt b/requirements/test-common.txt index a9cd41896e8..9c2ec12dd07 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -14,7 +14,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.1 # via wait-for-it -coverage==7.11.0 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 48515c0823c..7581fb78d95 100644 --- 
a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -29,7 +29,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.1 # via wait-for-it -coverage==7.11.0 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index d2d35d7ad43..ffc4b4785c2 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -29,7 +29,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.1 # via wait-for-it -coverage==7.11.0 +coverage==7.13.0 # via # -r requirements/test-common.in # pytest-cov From f53122c5f10c99492544935b5b91780087fbe566 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Dec 2025 10:44:56 +0000 Subject: [PATCH 43/93] Bump actions/cache from 4.3.0 to 5.0.0 (#11835) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.3.0 to 5.0.0.
Release notes

Sourced from actions/cache's releases.

v5.0.0

[!IMPORTANT] actions/cache@v5 runs on the Node.js 24 runtime and requires a minimum Actions Runner version of 2.327.1.

If you are using self-hosted runners, ensure they are updated before upgrading.


What's Changed

Full Changelog: https://github.com/actions/cache/compare/v4.3.0...v5.0.0

Changelog

Sourced from actions/cache's changelog.

5.0.0

[!IMPORTANT] actions/cache@v5 runs on the Node.js 24 runtime and requires a minimum Actions Runner version of 2.327.1. If you are using self-hosted runners, ensure they are updated before upgrading.

Commits
  • a783357 Merge pull request #1684 from actions/prepare-cache-v5-release
  • 3bb0d78 docs: highlight v5 runner requirement in releases
  • 054c1f2 readme note
  • 0762978 docs: update README for v5 release with Node 24 and runner version requirements
  • 9e02f8f Prepare v5.0.0 release
  • ffd11d2 Merge pull request #1630 from salmanmkc/node24
  • 5bb537b undo readme changes
  • 94a1e4b chore: regenerate package-lock.json
  • 70a5ee3 chore: rebuild dist with version 4.3.0
  • 5267c61 chore: set version to 4.3.0 for prepare release PR
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.3.0&new-version=5.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 7176356f296..b7b1e30e5ed 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v4.3.0 + uses: actions/cache@v5.0.0 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -96,7 +96,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.3.0 + uses: actions/cache@v5.0.0 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -160,7 +160,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v4.3.0 + uses: actions/cache@v5.0.0 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 3551e98f5d4fb958e633822645c6481a3fe0de33 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Dec 2025 10:49:32 +0000 Subject: [PATCH 44/93] Bump urllib3 from 2.6.0 to 2.6.2 (#11836) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.6.0 to 2.6.2.
Release notes

Sourced from urllib3's releases.

2.6.2

🚀 urllib3 is fundraising for HTTP/2 support

urllib3 is raising ~$40,000 USD to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects please consider contributing financially to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.

Thank you for your support.

Changes

  • Fixed HTTPResponse.read_chunked() to properly handle leftover data in the decoder's buffer when reading compressed chunked responses. (urllib3/urllib3#3734)

2.6.1

🚀 urllib3 is fundraising for HTTP/2 support

urllib3 is raising ~$40,000 USD to release HTTP/2 support and ensure long-term sustainable maintenance of the project after a sharp decline in financial support. If your company or organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and thousands of other projects please consider contributing financially to ensure HTTP/2 support is developed sustainably and maintained for the long-haul.

Thank you for your support.

Changes

  • Restore previously removed HTTPResponse.getheaders() and HTTPResponse.getheader() methods. (#3731)
Changelog

Sourced from urllib3's changelog.

2.6.2 (2025-12-11)

  • Fixed HTTPResponse.read_chunked() to properly handle leftover data in the decoder's buffer when reading compressed chunked responses. ([#3734](https://github.com/urllib3/urllib3/issues/3734) <https://github.com/urllib3/urllib3/issues/3734>__)

2.6.1 (2025-12-08)

  • Restore previously removed HTTPResponse.getheaders() and HTTPResponse.getheader() methods. ([#3731](https://github.com/urllib3/urllib3/issues/3731) <https://github.com/urllib3/urllib3/issues/3731>__)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=urllib3&package-manager=pip&previous-version=2.6.0&new-version=2.6.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 63abff46de6..790d8ce0c59 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -269,7 +269,7 @@ typing-extensions==4.15.0 # virtualenv typing-inspection==0.4.2 # via pydantic -urllib3==2.6.0 +urllib3==2.6.2 # via requests uvloop==0.21.0 ; platform_system != "Windows" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index ffe4bc1d10a..f676269d6aa 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -259,7 +259,7 @@ typing-extensions==4.15.0 # virtualenv typing-inspection==0.4.2 # via pydantic -urllib3==2.6.0 +urllib3==2.6.2 # via requests uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 0f447f88d19..ab23d91c402 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -69,5 +69,5 @@ towncrier==25.8.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.6.0 +urllib3==2.6.2 # via requests diff --git a/requirements/doc.txt b/requirements/doc.txt index d830f66199b..0efefba2adb 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -62,5 +62,5 @@ towncrier==25.8.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.6.0 +urllib3==2.6.2 # via requests From b76967098c599076a809fc49901860788b538ee5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Dec 2025 10:51:49 +0000 Subject: [PATCH 45/93] Bump aiodns from 3.6.0 to 3.6.1 (#11837) Bumps [aiodns](https://github.com/saghul/aiodns) from 3.6.0 to 3.6.1.
Changelog

Sourced from aiodns's changelog.

3.6.1

  • Pin pycares to < 5
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiodns&package-manager=pip&previous-version=3.6.0&new-version=3.6.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index 8c4aba70e90..a726e2563f5 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base-ft.txt --strip-extras requirements/base-ft.in # -aiodns==3.6.0 +aiodns==3.6.1 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/base.txt b/requirements/base.txt index 9c1c46507f4..f0fc1697948 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.6.0 +aiodns==3.6.1 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 790d8ce0c59..707d0d0972a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --strip-extras requirements/constraints.in # -aiodns==3.6.0 +aiodns==3.6.1 # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index f676269d6aa..b726cf4b956 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --strip-extras requirements/dev.in # -aiodns==3.6.0 +aiodns==3.6.1 # via # -r requirements/lint.in # -r 
requirements/runtime-deps.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 356ebc3551e..29e34ec0036 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in # -aiodns==3.6.0 +aiodns==3.6.1 # via -r requirements/lint.in annotated-types==0.7.0 # via pydantic diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index a560c167f25..650babd3dcc 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.6.0 +aiodns==3.6.1 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 7581fb78d95..d4536f90e96 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test-ft.txt --strip-extras requirements/test-ft.in # -aiodns==3.6.0 +aiodns==3.6.1 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index ffc4b4785c2..e1b15e6bb7e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --strip-extras requirements/test.in # -aiodns==3.6.0 +aiodns==3.6.1 # via -r requirements/runtime-deps.in aiohappyeyeballs==2.6.1 # via -r requirements/runtime-deps.in From 33469387c09428c05d9fce87c0db7b9072eb6db8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Dec 2025 11:18:27 +0000 Subject: [PATCH 46/93] Bump cython from 3.2.2 to 3.2.3 (#11850) Bumps 
[cython](https://github.com/cython/cython) from 3.2.2 to 3.2.3.
Changelog

Sourced from cython's changelog.

3.2.3 (2025-12-14)

Features added

  • The C-API declarations were updated to include the new PyList_*() functions. (Github issue :issue:7291)

  • The Py_mod_gil module setting can now be changed with a C macro, overriding the freethreading_compatible directive setting. (Github issue :issue:7404)

Bugs fixed

  • t-strings lost the last element when compiled for the Limited API. (Github issue :issue:7381)

  • The array.data property of the cpython.array declarations generated a useless exception check that degraded its use in nogil code. (Github issue :issue:7408)

  • Parallel builds with the cythonize command could request more processes than allowed by the platform, thus failing the build. (Github issue :issue:7384)

  • A minor thread sanitizer issue was resolved. (Github issue :issue:7383)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.2.2&new-version=3.2.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 707d0d0972a..86a2dd43900 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -61,7 +61,7 @@ coverage==7.13.0 # pytest-cov cryptography==46.0.3 # via trustme -cython==3.2.2 +cython==3.2.3 # via -r requirements/cython.in distlib==0.4.0 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 6d9424a8755..667d8f52cd0 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.2.2 +cython==3.2.3 # via -r requirements/cython.in multidict==6.7.0 # via -r requirements/multidict.in From ddd708c7b292dde00247c971b09dc658c035e58e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Dec 2025 11:20:33 +0000 Subject: [PATCH 47/93] Bump actions/upload-artifact from 5 to 6 (#11848) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 5 to 6.
Release notes

Sourced from actions/upload-artifact's releases.

v6.0.0

v6 - What's new

[!IMPORTANT] actions/upload-artifact@v6 now runs on Node.js 24 (runs.using: node24) and requires a minimum Actions Runner version of 2.327.1. If you are using self-hosted runners, ensure they are updated before upgrading.

Node.js 24

This release updates the runtime to Node.js 24. v5 had preliminary support for Node.js 24, however this action was by default still running on Node.js 20. Now this action by default will run on Node.js 24.

What's Changed

Full Changelog: https://github.com/actions/upload-artifact/compare/v5.0.0...v6.0.0

Commits
  • b7c566a Merge pull request #745 from actions/upload-artifact-v6-release
  • e516bc8 docs: correct description of Node.js 24 support in README
  • ddc45ed docs: update README to correct action name for Node.js 24 support
  • 615b319 chore: release v6.0.0 for Node.js 24 support
  • 017748b Merge pull request #744 from actions/fix-storage-blob
  • 38d4c79 chore: rebuild dist
  • 7d27270 chore: add missing license cache files for @actions/core, @actions/io, and mi...
  • 5f643d3 chore: update license files for @actions/artifact@5.0.1 dependencies
  • 1df1684 chore: update package-lock.json with @actions/artifact@5.0.1
  • b5b1a91 fix: update @actions/artifact to ^5.0.0 for Node.js 24 punycode fix
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/upload-artifact&package-manager=github_actions&previous-version=5&new-version=6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b7b1e30e5ed..2c256407efa 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -111,7 +111,7 @@ jobs: run: | make generate-llhttp - name: Upload llhttp generated files - uses: actions/upload-artifact@v5 + uses: actions/upload-artifact@v6 with: name: llhttp path: vendor/llhttp/build @@ -339,7 +339,7 @@ jobs: run: | python -m build --sdist - name: Upload artifacts - uses: actions/upload-artifact@v5 + uses: actions/upload-artifact@v6 with: name: dist-sdist path: dist @@ -431,7 +431,7 @@ jobs: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - name: Upload wheels - uses: actions/upload-artifact@v5 + uses: actions/upload-artifact@v6 with: name: >- dist-${{ matrix.os }}-${{ matrix.musl }}-${{ From 0d282934a2e542465c2f2200dc2e4fd32c708fe7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Dec 2025 11:34:16 +0000 Subject: [PATCH 48/93] Bump mypy from 1.19.0 to 1.19.1 (#11849) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.19.0 to 1.19.1.
Changelog

Sourced from mypy's changelog.

Mypy 1.19.1

  • Fix noncommutative joins with bounded TypeVars (Shantanu, PR 20345)
  • Respect output format for cached runs by serializing raw errors in cache metas (Ivan Levkivskyi, PR 20372)
  • Allow types.NoneType in match cases (A5rocks, PR 20383)
  • Fix mypyc generator regression with empty tuple (BobTheBuidler, PR 20371)
  • Fix crash involving Unpack-ed TypeVarTuple (Shantanu, PR 20323)
  • Fix crash on star import of redefinition (Ivan Levkivskyi, PR 20333)
  • Fix crash on typevar with forward ref used in other module (Ivan Levkivskyi, PR 20334)
  • Fail with an explicit error on PyPy (Ivan Levkivskyi, PR 20389)

Acknowledgements

Thanks to all mypy contributors who contributed to this release:

  • A5rocks
  • BobTheBuidler
  • bzoracler
  • Chainfire
  • Christoph Tyralla
  • David Foster
  • Frank Dana
  • Guo Ci
  • iap
  • Ivan Levkivskyi
  • James Hilton-Balfe
  • jhance
  • Joren Hammudoglu
  • Jukka Lehtosalo
  • KarelKenens
  • Kevin Kannammalil
  • Marc Mueller
  • Michael Carlstrom
  • Michael J. Sullivan
  • Piotr Sawicki
  • Randolf Scholz
  • Shantanu
  • Sigve Sebastian Farstad
  • sobolevn
  • Stanislav Terliakov
  • Stephen Morton
  • Theodore Ando
  • Thiago J. Barbalho
  • wyattscarpenter

I’d also like to thank my employer, Dropbox, for supporting mypy development.

Mypy 1.18

We’ve just uploaded mypy 1.18.1 to the Python Package Index (PyPI). Mypy is a static type checker for Python. This release includes new features, performance

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.19.0&new-version=1.19.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 86a2dd43900..ff98ff3313d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -117,7 +117,7 @@ multidict==6.7.0 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.19.0 ; implementation_name == "cpython" +mypy==1.19.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index b726cf4b956..5cd31b832aa 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -114,7 +114,7 @@ multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.19.0 ; implementation_name == "cpython" +mypy==1.19.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 29e34ec0036..99c27ce826a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -49,7 +49,7 @@ markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.19.0 ; implementation_name == "cpython" +mypy==1.19.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.1.0 # via mypy diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 9c2ec12dd07..c59496ed68f 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -40,7 +40,7 @@ markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.19.0 ; implementation_name == "cpython" +mypy==1.19.1 ; implementation_name == "cpython" # via -r 
requirements/test-common.in mypy-extensions==1.1.0 # via mypy diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index d4536f90e96..068e6455cbb 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -67,7 +67,7 @@ multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.19.0 ; implementation_name == "cpython" +mypy==1.19.1 ; implementation_name == "cpython" # via -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index e1b15e6bb7e..f26d6f3bb11 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -67,7 +67,7 @@ multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl -mypy==1.19.0 ; implementation_name == "cpython" +mypy==1.19.1 ; implementation_name == "cpython" # via -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy From 1d0ea97ff7033cd4774f2b2851208f825b758185 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Dec 2025 10:43:41 +0000 Subject: [PATCH 49/93] Bump filelock from 3.20.0 to 3.20.1 (#11853) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.20.0 to 3.20.1.
Release notes

Sourced from filelock's releases.

3.20.1

What's Changed

Full Changelog: https://github.com/tox-dev/filelock/compare/3.20.0...3.20.1

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.20.0&new-version=3.20.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index ff98ff3313d..f40cc0be467 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -71,7 +71,7 @@ exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist -filelock==3.20.0 +filelock==3.20.1 # via virtualenv forbiddenfruit==0.1.4 # via blockbuster diff --git a/requirements/dev.txt b/requirements/dev.txt index 5cd31b832aa..157eb18d9ed 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -69,7 +69,7 @@ exceptiongroup==1.3.1 # via pytest execnet==2.1.2 # via pytest-xdist -filelock==3.20.0 +filelock==3.20.1 # via virtualenv forbiddenfruit==0.1.4 # via blockbuster diff --git a/requirements/lint.txt b/requirements/lint.txt index 99c27ce826a..761f3ea7c1d 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -29,7 +29,7 @@ distlib==0.4.0 # via virtualenv exceptiongroup==1.3.1 # via pytest -filelock==3.20.0 +filelock==3.20.1 # via virtualenv forbiddenfruit==0.1.4 # via blockbuster From e82db38d73a2b748e73982b4c879893b6ba567dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Dec 2025 10:51:08 +0000 Subject: [PATCH 50/93] Bump librt from 0.7.3 to 0.7.4 (#11854) Bumps [librt](https://github.com/mypyc/librt) from 0.7.3 to 0.7.4.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=librt&package-manager=pip&previous-version=0.7.3&new-version=0.7.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f40cc0be467..2b2a843549d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -104,7 +104,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.7.3 +librt==0.7.4 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/dev.txt b/requirements/dev.txt index 157eb18d9ed..6bffc717c2f 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -102,7 +102,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.7.3 +librt==0.7.4 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/lint.txt b/requirements/lint.txt index 761f3ea7c1d..1e513f824c7 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -43,7 +43,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 # via -r requirements/lint.in -librt==0.7.3 +librt==0.7.4 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-common.txt b/requirements/test-common.txt index c59496ed68f..84c1bd637ae 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -34,7 +34,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.3 +librt==0.7.4 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 068e6455cbb..6f7e2e35130 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.3 +librt==0.7.4 # via mypy markdown-it-py==4.0.0 # via 
rich diff --git a/requirements/test.txt b/requirements/test.txt index f26d6f3bb11..e89a50c6ecd 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.3 +librt==0.7.4 # via mypy markdown-it-py==4.0.0 # via rich From fdd9c1d7d267169c7fbaa9359236e16259018938 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Dec 2025 10:42:32 +0000 Subject: [PATCH 51/93] Bump pre-commit from 4.5.0 to 4.5.1 (#11856) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 4.5.0 to 4.5.1.
Release notes

Sourced from pre-commit's releases.

pre-commit v4.5.1

Fixes

  • Fix language: python with repo: local without additional_dependencies.
Changelog

Sourced from pre-commit's changelog.

4.5.1 - 2025-12-16

Fixes

  • Fix language: python with repo: local without additional_dependencies.
Commits
  • 8a0630c v4.5.1
  • fcbc745 Merge pull request #3597 from pre-commit/empty-setup-py
  • 51592ee fix python local template when artifact dirs are present
  • 67e8faf Merge pull request #3596 from pre-commit/pre-commit-ci-update-config
  • c251e6b [pre-commit.ci] pre-commit autoupdate
  • 98ccafa Merge pull request #3593 from pre-commit/pre-commit-ci-update-config
  • 4895355 [pre-commit.ci] pre-commit autoupdate
  • 2cedd58 Merge pull request #3588 from pre-commit/pre-commit-ci-update-config
  • 465192d [pre-commit.ci] pre-commit autoupdate
  • fd42f96 Merge pull request #3586 from pre-commit/zipapp-sha256-file-not-needed
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pre-commit&package-manager=pip&previous-version=4.5.0&new-version=4.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2b2a843549d..7f849caceb6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -143,7 +143,7 @@ pluggy==1.6.0 # via # pytest # pytest-cov -pre-commit==4.5.0 +pre-commit==4.5.1 # via -r requirements/lint.in propcache==0.4.1 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 6bffc717c2f..6c13de4b50e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -140,7 +140,7 @@ pluggy==1.6.0 # via # pytest # pytest-cov -pre-commit==4.5.0 +pre-commit==4.5.1 # via -r requirements/lint.in propcache==0.4.1 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 1e513f824c7..2f3c8ed8950 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -63,7 +63,7 @@ platformdirs==4.5.1 # via virtualenv pluggy==1.6.0 # via pytest -pre-commit==4.5.0 +pre-commit==4.5.1 # via -r requirements/lint.in pycares==4.11.0 # via aiodns From 7c6ae23fa01f398f4e202c658b76a5be710138bc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 19 Dec 2025 14:09:57 -1000 Subject: [PATCH 52/93] Add decode_text parameter to WebSocket for receiving TEXT as bytes (#11764) (#11858) Co-authored-by: Sam Bull Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES/11763.feature.rst | 1 + CHANGES/11764.feature.rst | 1 + aiohttp/_websocket/models.py | 22 +++ aiohttp/_websocket/reader_c.pxd | 2 + aiohttp/_websocket/reader_py.py | 57 ++++--- aiohttp/client.py | 136 ++++++++++++++-- aiohttp/client_ws.py | 107 ++++++++++-- aiohttp/http.py | 6 + aiohttp/http_websocket.py | 7 +- aiohttp/test_utils.py | 51 +++++- aiohttp/web_ws.py | 115 +++++++++++-- docs/client_reference.rst | 11 +- docs/web_reference.rst | 10 +- pyproject.toml | 1 + requirements/base.txt | 3 +- requirements/runtime-deps.in | 1 + requirements/runtime-deps.txt | 5 +- requirements/test.txt | 3 +- tests/test_client_ws_functional.py | 206 ++++++++++++++++++++++- tests/test_web_websocket_functional.py | 216 ++++++++++++++++++++++++- 20 files changed, 888 insertions(+), 73 deletions(-) create mode 100644 CHANGES/11763.feature.rst create mode 120000 CHANGES/11764.feature.rst diff --git a/CHANGES/11763.feature.rst b/CHANGES/11763.feature.rst new file mode 100644 index 00000000000..b34bfafaca8 --- /dev/null +++ b/CHANGES/11763.feature.rst @@ -0,0 +1 @@ +Added ``decode_text`` parameter to :meth:`~aiohttp.ClientSession.ws_connect` and :class:`~aiohttp.web.WebSocketResponse` to receive WebSocket TEXT messages as raw bytes instead of decoded strings, enabling direct use with high-performance JSON parsers like ``orjson`` -- by :user:`bdraco`. 
diff --git a/CHANGES/11764.feature.rst b/CHANGES/11764.feature.rst new file mode 120000 index 00000000000..0860becd808 --- /dev/null +++ b/CHANGES/11764.feature.rst @@ -0,0 +1 @@ +11763.feature.rst \ No newline at end of file diff --git a/aiohttp/_websocket/models.py b/aiohttp/_websocket/models.py index 4c29ea9a984..3cdaa924dd7 100644 --- a/aiohttp/_websocket/models.py +++ b/aiohttp/_websocket/models.py @@ -62,6 +62,28 @@ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: return loads(self.data) +class WSMessageTextBytes(NamedTuple): + """WebSocket TEXT message with raw bytes (no UTF-8 decoding).""" + + type: WSMsgType + # To type correctly, this would need some kind of tagged union for each type. + # In 4.0, we use a union of message types to properly type data, but in 3.x + # we keep it as Any to avoid a breaking change. + data: Any + extra: str | None + + def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: + """Return parsed JSON data.""" + return loads(self.data) + + +# Type aliases for message types based on decode_text setting +# When decode_text=True, TEXT messages have str data (WSMessage) +# When decode_text=False, TEXT messages have bytes data (WSMessageTextBytes) +WSMessageDecodeText = WSMessage +WSMessageNoDecodeText = WSMessage | WSMessageTextBytes + + # Constructing the tuple directly to avoid the overhead of # the lambda and arg processing since NamedTuples are constructed # with a run time built lambda diff --git a/aiohttp/_websocket/reader_c.pxd b/aiohttp/_websocket/reader_c.pxd index a7620d8e87f..5aa067fa2a3 100644 --- a/aiohttp/_websocket/reader_c.pxd +++ b/aiohttp/_websocket/reader_c.pxd @@ -26,6 +26,7 @@ cdef object TUPLE_NEW cdef object WSMsgType cdef object WSMessage +cdef object WSMessageTextBytes cdef object WS_MSG_TYPE_TEXT cdef object WS_MSG_TYPE_BINARY @@ -60,6 +61,7 @@ cdef class WebSocketReader: cdef WebSocketDataQueue queue cdef unsigned int _max_msg_size + cdef bint _decode_text cdef Exception _exc 
cdef bytearray _partial diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index 38682fcdf75..f196020c61f 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -15,6 +15,7 @@ WebSocketError, WSCloseCode, WSMessage, + WSMessageTextBytes, WSMsgType, ) @@ -67,7 +68,7 @@ def __init__( self._eof = False self._waiter: asyncio.Future[None] | None = None self._exception: BaseException | None = None - self._buffer: deque[tuple[WSMessage, int]] = deque() + self._buffer: deque[tuple[WSMessage | WSMessageTextBytes, int]] = deque() self._get_buffer = self._buffer.popleft self._put_buffer = self._buffer.append @@ -100,14 +101,16 @@ def feed_eof(self) -> None: self._release_waiter() self._exception = None # Break cyclic references - def feed_data(self, data: "WSMessage", size: "cython_int") -> None: + def feed_data( + self, data: "WSMessage | WSMessageTextBytes", size: "cython_int" + ) -> None: self._size += size self._put_buffer((data, size)) self._release_waiter() if self._size > self._limit and not self._protocol._reading_paused: self._protocol.pause_reading() - async def read(self) -> WSMessage: + async def read(self) -> WSMessage | WSMessageTextBytes: if not self._buffer and not self._eof: assert not self._waiter self._waiter = self._loop.create_future() @@ -118,7 +121,7 @@ async def read(self) -> WSMessage: raise return self._read_from_buffer() - def _read_from_buffer(self) -> WSMessage: + def _read_from_buffer(self) -> WSMessage | WSMessageTextBytes: if self._buffer: data, size = self._get_buffer() self._size -= size @@ -132,10 +135,15 @@ def _read_from_buffer(self) -> WSMessage: class WebSocketReader: def __init__( - self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True + self, + queue: WebSocketDataQueue, + max_msg_size: int, + compress: bool = True, + decode_text: bool = True, ) -> None: self.queue = queue self._max_msg_size = max_msg_size + self._decode_text = decode_text self._exc: 
Exception | None = None self._partial = bytearray() @@ -262,21 +270,30 @@ def _handle_frame( payload_merged = bytes(assembled_payload) if opcode == OP_CODE_TEXT: - try: - text = payload_merged.decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - - # XXX: The Text and Binary messages here can be a performance - # bottleneck, so we use tuple.__new__ to improve performance. - # This is not type safe, but many tests should fail in - # test_client_ws_functional.py if this is wrong. - self.queue.feed_data( - TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), - len(payload_merged), - ) + if self._decode_text: + try: + text = payload_merged.decode("utf-8") + except UnicodeDecodeError as exc: + raise WebSocketError( + WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" + ) from exc + + # XXX: The Text and Binary messages here can be a performance + # bottleneck, so we use tuple.__new__ to improve performance. + # This is not type safe, but many tests should fail in + # test_client_ws_functional.py if this is wrong. 
+ self.queue.feed_data( + TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")), + len(payload_merged), + ) + else: + # Return raw bytes for TEXT messages when decode_text=False + self.queue.feed_data( + TUPLE_NEW( + WSMessageTextBytes, (WS_MSG_TYPE_TEXT, payload_merged, "") + ), + len(payload_merged), + ) else: self.queue.feed_data( TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")), diff --git a/aiohttp/client.py b/aiohttp/client.py index 7b19ee154f6..fda3cb4f5df 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -19,7 +19,16 @@ ) from contextlib import suppress from types import TracebackType -from typing import TYPE_CHECKING, Any, Final, Generic, TypedDict, TypeVar +from typing import ( + TYPE_CHECKING, + Any, + Final, + Generic, + Literal, + TypedDict, + TypeVar, + overload, +) import attr from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr @@ -186,6 +195,30 @@ class _RequestOptions(TypedDict, total=False): middlewares: Sequence[ClientMiddlewareType] | None +class _WSConnectOptions(TypedDict, total=False): + method: str + protocols: Iterable[str] + timeout: "ClientWSTimeout | _SENTINEL" + receive_timeout: float | None + autoclose: bool + autoping: bool + heartbeat: float | None + auth: BasicAuth | None + origin: str | None + params: Query + headers: LooseHeaders | None + proxy: StrOrURL | None + proxy_auth: BasicAuth | None + ssl: SSLContext | bool | Fingerprint + verify_ssl: bool | None + fingerprint: bytes | None + ssl_context: SSLContext | None + server_hostname: str | None + proxy_headers: LooseHeaders | None + compress: int + max_msg_size: int + + @attr.s(auto_attribs=True, frozen=True, slots=True) class ClientTimeout: total: float | None = None @@ -214,7 +247,11 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) -_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse) +_RetType_co = TypeVar( + 
"_RetType_co", + bound="ClientResponse | ClientWebSocketResponse[bool]", + covariant=True, +) _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -917,6 +954,35 @@ async def _connect_and_send_request( ) raise + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + @overload + def ws_connect( + self, + url: StrOrURL, + *, + decode_text: Literal[True] = ..., + **kwargs: Unpack[_WSConnectOptions], + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[Literal[True]]]": ... + + @overload + def ws_connect( + self, + url: StrOrURL, + *, + decode_text: Literal[False], + **kwargs: Unpack[_WSConnectOptions], + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[Literal[False]]]": ... + + @overload + def ws_connect( + self, + url: StrOrURL, + *, + decode_text: bool = ..., + **kwargs: Unpack[_WSConnectOptions], + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[bool]]": ... + def ws_connect( self, url: StrOrURL, @@ -942,7 +1008,8 @@ def ws_connect( proxy_headers: LooseHeaders | None = None, compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, - ) -> "_WSRequestContextManager": + decode_text: bool = True, + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[bool]]": """Initiate websocket connection.""" return _WSRequestContextManager( self._ws_connect( @@ -968,9 +1035,39 @@ def ws_connect( proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size, + decode_text=decode_text, ) ) + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + @overload + async def _ws_connect( + self, + url: StrOrURL, + *, + decode_text: Literal[True] = ..., + **kwargs: Unpack[_WSConnectOptions], + ) -> "ClientWebSocketResponse[Literal[True]]": ... + + @overload + async def _ws_connect( + self, + url: StrOrURL, + *, + decode_text: Literal[False], + **kwargs: Unpack[_WSConnectOptions], + ) -> "ClientWebSocketResponse[Literal[False]]": ... 
+ + @overload + async def _ws_connect( + self, + url: StrOrURL, + *, + decode_text: bool = ..., + **kwargs: Unpack[_WSConnectOptions], + ) -> "ClientWebSocketResponse[bool]": ... + async def _ws_connect( self, url: StrOrURL, @@ -996,7 +1093,8 @@ async def _ws_connect( proxy_headers: LooseHeaders | None = None, compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, - ) -> ClientWebSocketResponse: + decode_text: bool = True, + ) -> "ClientWebSocketResponse[bool]": if timeout is not sentinel: if isinstance(timeout, ClientWSTimeout): ws_timeout = timeout @@ -1162,7 +1260,9 @@ async def _ws_connect( transport = conn.transport assert transport is not None reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop) - conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + conn_proto.set_parser( + WebSocketReader(reader, max_msg_size, decode_text=decode_text), reader + ) writer = WebSocketWriter( conn_proto, transport, @@ -1467,32 +1567,34 @@ async def __aexit__( await self.close() -class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): +class _BaseRequestContextManager( + Coroutine[Any, Any, _RetType_co], Generic[_RetType_co] +): __slots__ = ("_coro", "_resp") - def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro: Coroutine[asyncio.Future[Any], None, _RetType] = coro + def __init__(self, coro: Coroutine[asyncio.Future[Any], None, _RetType_co]) -> None: + self._coro: Coroutine[asyncio.Future[Any], None, _RetType_co] = coro - def send(self, arg: None) -> "asyncio.Future[Any]": + def send(self, arg: None) -> asyncio.Future[Any]: return self._coro.send(arg) - def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]": + def throw(self, *args: Any, **kwargs: Any) -> asyncio.Future[Any]: return self._coro.throw(*args, **kwargs) def close(self) -> None: return self._coro.close() - def __await__(self) -> Generator[Any, None, _RetType]: + def __await__(self) -> Generator[Any, 
None, _RetType_co]: ret = self._coro.__await__() return ret - def __iter__(self) -> Generator[Any, None, _RetType]: + def __iter__(self) -> Generator[Any, None, _RetType_co]: return self.__await__() - async def __aenter__(self) -> _RetType: - self._resp: _RetType = await self._coro - return await self._resp.__aenter__() + async def __aenter__(self) -> _RetType_co: + self._resp: _RetType_co = await self._coro + return await self._resp.__aenter__() # type: ignore[return-value] async def __aexit__( self, @@ -1504,7 +1606,7 @@ async def __aexit__( _RequestContextManager = _BaseRequestContextManager[ClientResponse] -_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse] +_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse[bool]] class _SessionRequestContextManager: @@ -1513,7 +1615,7 @@ class _SessionRequestContextManager: def __init__( self, - coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], + coro: Coroutine[asyncio.Future[Any], None, ClientResponse], session: ClientSession, ) -> None: self._coro = coro diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 25b04cf12d6..eca6927b38e 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -2,8 +2,9 @@ import asyncio import sys +from collections.abc import Callable from types import TracebackType -from typing import Any, Optional, cast +from typing import Any, Generic, Literal, Optional, cast, overload import attr @@ -17,6 +18,8 @@ WebSocketError, WSCloseCode, WSMessage, + WSMessageDecodeText, + WSMessageNoDecodeText, WSMsgType, ) from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter @@ -28,10 +31,21 @@ JSONEncoder, ) +if sys.version_info >= (3, 13): + from typing import TypeVar +else: + from typing_extensions import TypeVar + if sys.version_info >= (3, 11): import asyncio as async_timeout + from typing import Self else: import async_timeout + from typing_extensions import Self + +# TypeVar for whether text messages are decoded 
to str (True) or kept as bytes (False) +# Covariant because it only affects return types, not input types +_DecodeText = TypeVar("_DecodeText", bound=bool, covariant=True, default=Literal[True]) @attr.s(frozen=True, slots=True) @@ -43,7 +57,7 @@ class ClientWSTimeout: DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0) -class ClientWebSocketResponse: +class ClientWebSocketResponse(Generic[_DecodeText]): def __init__( self, reader: WebSocketDataQueue, @@ -309,7 +323,24 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._response.close() return True - async def receive(self, timeout: float | None = None) -> WSMessage: + @overload + async def receive( + self: "ClientWebSocketResponse[Literal[True]]", timeout: float | None = None + ) -> WSMessageDecodeText: ... + + @overload + async def receive( + self: "ClientWebSocketResponse[Literal[False]]", timeout: float | None = None + ) -> WSMessageNoDecodeText: ... + + @overload + async def receive( + self: "ClientWebSocketResponse[_DecodeText]", timeout: float | None = None + ) -> WSMessageDecodeText | WSMessageNoDecodeText: ... + + async def receive( + self, timeout: float | None = None + ) -> WSMessageDecodeText | WSMessageNoDecodeText: receive_timeout = timeout or self._timeout.ws_receive while True: @@ -382,7 +413,26 @@ async def receive(self, timeout: float | None = None) -> WSMessage: return msg - async def receive_str(self, *, timeout: float | None = None) -> str: + @overload + async def receive_str( + self: "ClientWebSocketResponse[Literal[True]]", *, timeout: float | None = None + ) -> str: ... + + @overload + async def receive_str( + self: "ClientWebSocketResponse[Literal[False]]", *, timeout: float | None = None + ) -> bytes: ... + + @overload + async def receive_str( + self: "ClientWebSocketResponse[_DecodeText]", *, timeout: float | None = None + ) -> str | bytes: ... 
+ + async def receive_str(self, *, timeout: float | None = None) -> str | bytes: + """Receive TEXT message. + + Returns str when decode_text=True (default), bytes when decode_text=False. + """ msg = await self.receive(timeout) if msg.type is not WSMsgType.TEXT: raise WSMessageTypeError( @@ -398,25 +448,64 @@ async def receive_bytes(self, *, timeout: float | None = None) -> bytes: ) return cast(bytes, msg.data) + @overload + async def receive_json( + self: "ClientWebSocketResponse[Literal[True]]", + *, + loads: JSONDecoder = ..., + timeout: float | None = None, + ) -> Any: ... + + @overload + async def receive_json( + self: "ClientWebSocketResponse[Literal[False]]", + *, + loads: Callable[[bytes], Any] = ..., + timeout: float | None = None, + ) -> Any: ... + + @overload + async def receive_json( + self: "ClientWebSocketResponse[_DecodeText]", + *, + loads: JSONDecoder | Callable[[bytes], Any] = ..., + timeout: float | None = None, + ) -> Any: ... + async def receive_json( self, *, - loads: JSONDecoder = DEFAULT_JSON_DECODER, + loads: JSONDecoder | Callable[[bytes], Any] = DEFAULT_JSON_DECODER, timeout: float | None = None, ) -> Any: data = await self.receive_str(timeout=timeout) - return loads(data) + return loads(data) # type: ignore[arg-type] - def __aiter__(self) -> "ClientWebSocketResponse": + def __aiter__(self) -> Self: return self - async def __anext__(self) -> WSMessage: + @overload + async def __anext__( + self: "ClientWebSocketResponse[Literal[True]]", + ) -> WSMessageDecodeText: ... + + @overload + async def __anext__( + self: "ClientWebSocketResponse[Literal[False]]", + ) -> WSMessageNoDecodeText: ... + + @overload + async def __anext__( + self: "ClientWebSocketResponse[_DecodeText]", + ) -> WSMessageDecodeText | WSMessageNoDecodeText: ... 
+ + async def __anext__(self) -> WSMessageDecodeText | WSMessageNoDecodeText: msg = await self.receive() if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): raise StopAsyncIteration return msg - async def __aenter__(self) -> "ClientWebSocketResponse": + async def __aenter__(self) -> Self: return self async def __aexit__( diff --git a/aiohttp/http.py b/aiohttp/http.py index ed7158d5f51..28c8fee8922 100644 --- a/aiohttp/http.py +++ b/aiohttp/http.py @@ -21,6 +21,9 @@ WebSocketWriter as WebSocketWriter, WSCloseCode as WSCloseCode, WSMessage as WSMessage, + WSMessageDecodeText as WSMessageDecodeText, + WSMessageNoDecodeText as WSMessageNoDecodeText, + WSMessageTextBytes as WSMessageTextBytes, WSMsgType as WSMsgType, ws_ext_gen as ws_ext_gen, ws_ext_parse as ws_ext_parse, @@ -57,6 +60,9 @@ "ws_ext_gen", "ws_ext_parse", "WSMessage", + "WSMessageDecodeText", + "WSMessageNoDecodeText", + "WSMessageTextBytes", "WebSocketError", "WSMsgType", "WSCloseCode", diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index 6b4b30e02b2..9fb9fba0a57 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -8,6 +8,9 @@ WSCloseCode, WSHandshakeError, WSMessage, + WSMessageDecodeText, + WSMessageNoDecodeText, + WSMessageTextBytes, WSMsgType, ) from ._websocket.reader import WebSocketReader @@ -26,11 +29,13 @@ "WebSocketReader", "WebSocketWriter", "WSMessage", + "WSMessageDecodeText", + "WSMessageNoDecodeText", + "WSMessageTextBytes", "WebSocketError", "WSMsgType", "WSCloseCode", "ws_ext_gen", "ws_ext_parse", "WSHandshakeError", - "WSMessage", ) diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 7b1b7103cef..64ad80d2b3d 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -12,7 +12,7 @@ from abc import ABC, abstractmethod from collections.abc import Callable, Iterator from types import TracebackType -from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast, overload +from typing import TYPE_CHECKING, Any, 
Generic, Literal, TypeVar, cast, overload from unittest import IsolatedAsyncioTestCase, mock from aiosignal import Signal @@ -21,6 +21,7 @@ import aiohttp from aiohttp.client import ( + _BaseRequestContextManager, _RequestContextManager, _RequestOptions, _WSRequestContextManager, @@ -305,7 +306,7 @@ def __init__( self._session._retry_connection = False self._closed = False self._responses: list[ClientResponse] = [] - self._websockets: list[ClientWebSocketResponse] = [] + self._websockets: list[ClientWebSocketResponse[bool]] = [] async def start_server(self) -> None: await self._server.start_server(loop=self._loop) @@ -444,18 +445,54 @@ def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: self._request(hdrs.METH_DELETE, path, **kwargs) ) - def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: + @overload + def ws_connect( + self, path: StrOrURL, *, decode_text: Literal[True] = ..., **kwargs: Any + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[Literal[True]]]": ... + + @overload + def ws_connect( + self, path: StrOrURL, *, decode_text: Literal[False], **kwargs: Any + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[Literal[False]]]": ... + + @overload + def ws_connect( + self, path: StrOrURL, *, decode_text: bool = ..., **kwargs: Any + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[bool]]": ... + + def ws_connect( + self, path: StrOrURL, *, decode_text: bool = True, **kwargs: Any + ) -> "_BaseRequestContextManager[ClientWebSocketResponse[bool]]": """Initiate websocket connection. The api corresponds to aiohttp.ClientSession.ws_connect. 
""" - return _WSRequestContextManager(self._ws_connect(path, **kwargs)) + return _WSRequestContextManager( + self._ws_connect(path, decode_text=decode_text, **kwargs) + ) + @overload async def _ws_connect( - self, path: StrOrURL, **kwargs: Any - ) -> ClientWebSocketResponse: - ws = await self._session.ws_connect(self.make_url(path), **kwargs) + self, path: StrOrURL, *, decode_text: Literal[True] = ..., **kwargs: Any + ) -> "ClientWebSocketResponse[Literal[True]]": ... + + @overload + async def _ws_connect( + self, path: StrOrURL, *, decode_text: Literal[False], **kwargs: Any + ) -> "ClientWebSocketResponse[Literal[False]]": ... + + @overload + async def _ws_connect( + self, path: StrOrURL, *, decode_text: bool = ..., **kwargs: Any + ) -> "ClientWebSocketResponse[bool]": ... + + async def _ws_connect( + self, path: StrOrURL, *, decode_text: bool = True, **kwargs: Any + ) -> "ClientWebSocketResponse[bool]": + ws = await self._session.ws_connect( + self.make_url(path), decode_text=decode_text, **kwargs + ) self._websockets.append(ws) return ws diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 2e55038a130..227b8962d59 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -4,8 +4,8 @@ import hashlib import json import sys -from collections.abc import Iterable -from typing import Any, Final, cast +from collections.abc import Callable, Iterable +from typing import Any, Final, Generic, Literal, cast, overload import attr from multidict import CIMultiDict @@ -25,6 +25,8 @@ WebSocketWriter, WSCloseCode, WSMessage, + WSMessageDecodeText, + WSMessageNoDecodeText, WSMsgType as WSMsgType, ws_ext_gen, ws_ext_parse, @@ -37,10 +39,17 @@ from .web_request import BaseRequest from .web_response import StreamResponse +if sys.version_info >= (3, 13): + from typing import TypeVar +else: + from typing_extensions import TypeVar + if sys.version_info >= (3, 11): import asyncio as async_timeout + from typing import Self else: import async_timeout + from typing_extensions import 
Self __all__ = ( "WebSocketResponse", @@ -50,6 +59,9 @@ THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 +# TypeVar for whether text messages are decoded to str (True) or kept as bytes (False) +_DecodeText = TypeVar("_DecodeText", bound=bool, covariant=True, default=Literal[True]) + @attr.s(auto_attribs=True, frozen=True, slots=True) class WebSocketReady: @@ -60,7 +72,7 @@ def __bool__(self) -> bool: return self.ok -class WebSocketResponse(StreamResponse): +class WebSocketResponse(StreamResponse, Generic[_DecodeText]): _length_check: bool = False _ws_protocol: str | None = None @@ -91,6 +103,7 @@ def __init__( compress: bool = True, max_msg_size: int = 4 * 1024 * 1024, writer_limit: int = DEFAULT_LIMIT, + decode_text: bool = True, ) -> None: super().__init__(status=101) self._protocols = protocols @@ -104,6 +117,7 @@ def __init__( self._compress: bool | int = compress self._max_msg_size = max_msg_size self._writer_limit = writer_limit + self._decode_text = decode_text def _cancel_heartbeat(self) -> None: self._cancel_pong_response_cb() @@ -337,7 +351,10 @@ def _post_start( self._reader = WebSocketDataQueue(request._protocol, 2**16, loop=loop) request.protocol.set_parser( WebSocketReader( - self._reader, self._max_msg_size, compress=bool(self._compress) + self._reader, + self._max_msg_size, + compress=bool(self._compress), + decode_text=self._decode_text, ) ) # disable HTTP keepalive for WebSocket @@ -513,7 +530,24 @@ def _close_transport(self) -> None: if self._req is not None and self._req.transport is not None: self._req.transport.close() - async def receive(self, timeout: float | None = None) -> WSMessage: + @overload + async def receive( + self: "WebSocketResponse[Literal[True]]", timeout: float | None = None + ) -> WSMessageDecodeText: ... + + @overload + async def receive( + self: "WebSocketResponse[Literal[False]]", timeout: float | None = None + ) -> WSMessageNoDecodeText: ... 
+ + @overload + async def receive( + self: "WebSocketResponse[_DecodeText]", timeout: float | None = None + ) -> WSMessageDecodeText | WSMessageNoDecodeText: ... + + async def receive( + self, timeout: float | None = None + ) -> WSMessageDecodeText | WSMessageNoDecodeText: if self._reader is None: raise RuntimeError("Call .prepare() first") @@ -587,7 +621,26 @@ async def receive(self, timeout: float | None = None) -> WSMessage: return msg - async def receive_str(self, *, timeout: float | None = None) -> str: + @overload + async def receive_str( + self: "WebSocketResponse[Literal[True]]", *, timeout: float | None = None + ) -> str: ... + + @overload + async def receive_str( + self: "WebSocketResponse[Literal[False]]", *, timeout: float | None = None + ) -> bytes: ... + + @overload + async def receive_str( + self: "WebSocketResponse[_DecodeText]", *, timeout: float | None = None + ) -> str | bytes: ... + + async def receive_str(self, *, timeout: float | None = None) -> str | bytes: + """Receive TEXT message. + + Returns str when decode_text=True (default), bytes when decode_text=False. + """ msg = await self.receive(timeout) if msg.type is not WSMsgType.TEXT: raise WSMessageTypeError( @@ -603,19 +656,61 @@ async def receive_bytes(self, *, timeout: float | None = None) -> bytes: ) return cast(bytes, msg.data) + @overload async def receive_json( - self, *, loads: JSONDecoder = json.loads, timeout: float | None = None + self: "WebSocketResponse[Literal[True]]", + *, + loads: JSONDecoder = ..., + timeout: float | None = None, + ) -> Any: ... + + @overload + async def receive_json( + self: "WebSocketResponse[Literal[False]]", + *, + loads: Callable[[bytes], Any] = ..., + timeout: float | None = None, + ) -> Any: ... + + @overload + async def receive_json( + self: "WebSocketResponse[_DecodeText]", + *, + loads: JSONDecoder | Callable[[bytes], Any] = ..., + timeout: float | None = None, + ) -> Any: ... 
+ + async def receive_json( + self, + *, + loads: JSONDecoder | Callable[[bytes], Any] = json.loads, + timeout: float | None = None, ) -> Any: data = await self.receive_str(timeout=timeout) - return loads(data) + return loads(data) # type: ignore[arg-type] async def write(self, data: bytes) -> None: raise RuntimeError("Cannot call .write() for websocket") - def __aiter__(self) -> "WebSocketResponse": + def __aiter__(self) -> Self: return self - async def __anext__(self) -> WSMessage: + @overload + async def __anext__( + self: "WebSocketResponse[Literal[True]]", + ) -> WSMessageDecodeText: ... + + @overload + async def __anext__( + self: "WebSocketResponse[Literal[False]]", + ) -> WSMessageNoDecodeText: ... + + @overload + async def __anext__( + self: "WebSocketResponse[_DecodeText]", + ) -> WSMessageDecodeText | WSMessageNoDecodeText: ... + + async def __anext__(self) -> WSMessageDecodeText | WSMessageNoDecodeText: msg = await self.receive() if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): raise StopAsyncIteration diff --git a/docs/client_reference.rst b/docs/client_reference.rst index d8b36b95c91..9a9506da382 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -738,7 +738,8 @@ The client session supports the context manager protocol for self closing. proxy=None, proxy_auth=None, ssl=True, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None, \ - compress=0, max_msg_size=4194304) + compress=0, max_msg_size=4194304, \ + decode_text=True) :async: Create a websocket connection. Returns a @@ -867,6 +868,14 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.5 + :param bool decode_text: If ``True`` (default), TEXT messages are + decoded to strings. If ``False``, TEXT messages + are returned as raw bytes, which can improve + performance when using JSON parsers like + ``orjson`` that accept bytes directly. + + .. versionadded:: 3.14 + .. 
method:: close() :async: diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 6e466308533..c523cdfa9be 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -970,7 +970,7 @@ and :ref:`aiohttp-web-signals` handlers:: .. class:: WebSocketResponse(*, timeout=10.0, receive_timeout=None, \ autoclose=True, autoping=True, heartbeat=None, \ protocols=(), compress=True, max_msg_size=4194304, \ - writer_limit=65536) + writer_limit=65536, decode_text=True) Class for handling server-side websockets, inherited from :class:`StreamResponse`. @@ -1033,6 +1033,14 @@ and :ref:`aiohttp-web-signals` handlers:: .. versionadded:: 3.11 + :param bool decode_text: If ``True`` (default), TEXT messages are + decoded to strings. If ``False``, TEXT messages + are returned as raw bytes, which can improve + performance when using JSON parsers like + ``orjson`` that accept bytes directly. + + .. versionadded:: 3.14 + The class supports ``async for`` statement for iterating over incoming messages:: diff --git a/pyproject.toml b/pyproject.toml index 7856bf4b326..c1c61b01eab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ dependencies = [ "frozenlist >= 1.1.1", "multidict >=4.5, < 7.0", "propcache >= 0.2.0", + "typing_extensions >= 4.4 ; python_version < '3.13'", "yarl >= 1.17.0, < 2.0", ] dynamic = [ diff --git a/requirements/base.txt b/requirements/base.txt index f0fc1697948..f733fb7a8a7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -42,8 +42,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 95db17e158d..289997df564 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -11,4 +11,5 @@ 
brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 propcache >= 0.2.0 +typing_extensions >= 4.4 ; python_version < '3.13' yarl >= 1.17.0, < 2.0 diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 650babd3dcc..035c3e6636e 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.12 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in @@ -38,8 +38,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # multidict yarl==1.22.0 diff --git a/requirements/test.txt b/requirements/test.txt index e89a50c6ecd..da377b8e25f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -137,8 +137,9 @@ tomli==2.3.0 # pytest trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test-common.in -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # cryptography # exceptiongroup diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 8c24c077e6a..47e293e5f70 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,7 @@ import asyncio +import json import sys -from typing import Any, NoReturn +from typing import Any, Literal, NoReturn from unittest import mock import pytest @@ -1248,7 +1249,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() app.router.add_route("GET", "/", handler) - sync_future: asyncio.Future[list[aiohttp.ClientWebSocketResponse]] = ( + sync_future: 
asyncio.Future[list[aiohttp.ClientWebSocketResponse[bool]]] = ( loop.create_future() ) client = await aiohttp_client(app) @@ -1276,3 +1277,204 @@ async def websocket_task() -> None: # Cleanup properly websocket._response = mock.Mock() await websocket.close() + + +async def test_receive_text_as_bytes_client_side(aiohttp_client: AiohttpClient) -> None: + """Test client receiving TEXT messages as raw bytes with decode_text=False.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + + msg = await ws.receive_str() + await ws.send_str(msg + "/answer") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + # Connect with decode_text=False + resp = await client.ws_connect("/", decode_text=False) + await resp.send_str("ask") + + # Receive TEXT message as bytes + msg = await resp.receive() + assert msg.type is WSMsgType.TEXT + assert isinstance(msg.data, bytes) + assert msg.data == b"ask/answer" + + await resp.close() + + +async def test_receive_text_as_bytes_server_side(aiohttp_client: AiohttpClient) -> None: + """Test server receiving TEXT messages as raw bytes with decode_text=False.""" + + async def handler(request: web.Request) -> web.WebSocketResponse[Literal[False]]: + ws: web.WebSocketResponse[Literal[False]] = web.WebSocketResponse( + decode_text=False + ) + await ws.prepare(request) + + # Receive TEXT message as bytes + msg = await ws.receive() + assert msg.type is WSMsgType.TEXT + assert isinstance(msg.data, bytes) + assert msg.data == b"test message" + + # Send response + await ws.send_bytes(msg.data + b"/reply") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + resp = await client.ws_connect("/") + await resp.send_str("test message") + + msg = await resp.receive() + assert msg.type is WSMsgType.BINARY + 
assert msg.data == b"test message/reply" + + await resp.close() + + +async def test_receive_text_as_bytes_json_parsing( + aiohttp_client: AiohttpClient, +) -> None: + """Test using orjson or similar parsers with raw bytes from TEXT messages.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + + msg = await ws.receive_str() + data = json.loads(msg) + await ws.send_str(json.dumps({"response": data["value"] * 2})) + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + # Connect with decode_text=False to get raw bytes + resp = await client.ws_connect("/", decode_text=False) + await resp.send_str(json.dumps({"value": 42})) + + # Receive TEXT message as bytes + msg = await resp.receive() + assert msg.type is WSMsgType.TEXT + assert isinstance(msg.data, bytes) + + # Parse JSON using msg.json() method (covers WSMessageTextBytes.json()) + data = msg.json() + assert data == {"response": 84} + + await resp.close() + + +async def test_decode_text_default_true(aiohttp_client: AiohttpClient) -> None: + """Test that decode_text defaults to True for backward compatibility.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + + msg = await ws.receive_str() + await ws.send_str(msg + "/reply") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + # Default behavior (decode_text=True) + resp = await client.ws_connect("/") + await resp.send_str("test") + + # Should receive TEXT message as string + msg = await resp.receive() + assert msg.type is WSMsgType.TEXT + assert isinstance(msg.data, str) + assert msg.data == "test/reply" + + await resp.close() + + +async def test_receive_str_returns_bytes_with_decode_text_false( + aiohttp_client: AiohttpClient, 
+) -> None: + """Test that receive_str() returns bytes when decode_text=False.""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.send_str("hello world") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/", decode_text=False) as ws: + # receive_str() should return bytes when decode_text=False + data = await ws.receive_str() + assert isinstance(data, bytes) + assert data == b"hello world" + + +async def test_receive_str_returns_str_with_decode_text_true( + aiohttp_client: AiohttpClient, +) -> None: + """Test that receive_str() returns str when decode_text=True (default).""" + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.send_str("hello world") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/") as ws: + # receive_str() should return str when decode_text=True (default) + data = await ws.receive_str() + assert isinstance(data, str) + assert data == "hello world" + + +async def test_receive_json_with_orjson_style_loads( + aiohttp_client: AiohttpClient, +) -> None: + """Test receive_json() with orjson-style loads that accepts bytes.""" + + def orjson_style_loads(data: bytes) -> dict[str, int]: + """Mock orjson.loads that accepts bytes.""" + assert isinstance(data, bytes) + result: dict[str, int] = json.loads(data) + return result + + async def handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.send_str('{"value": 42}') + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + + async 
with client.ws_connect("/", decode_text=False) as ws: + # receive_json() with orjson-style loads should work with bytes + data = await ws.receive_json(loads=orjson_style_loads) + assert data == {"value": 42} diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index c071b5839ed..d52282ab774 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -2,9 +2,10 @@ import asyncio import contextlib +import json import sys import weakref -from typing import Any, NoReturn +from typing import Any, Literal, NoReturn from unittest import mock import pytest @@ -1394,3 +1395,216 @@ async def handler(request: web.Request) -> web.WebSocketResponse: assert msg.type is WSMsgType.TEXT assert msg.data == "test" await ws.close() + + +async def test_receive_text_as_bytes_server_side(aiohttp_client: AiohttpClient) -> None: + """Test server receiving TEXT messages as raw bytes with decode_text=False.""" + + async def websocket_handler( + request: web.Request, + ) -> web.WebSocketResponse[Literal[False]]: + ws: web.WebSocketResponse[Literal[False]] = web.WebSocketResponse( + decode_text=False + ) + await ws.prepare(request) + + # Receive TEXT message as bytes + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert isinstance(msg.data, bytes) + assert msg.data == b"test message" + + # Send response + await ws.send_bytes(msg.data + b"/reply") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", websocket_handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/") as ws: + await ws.send_str("test message") + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.BINARY + assert msg.data == b"test message/reply" + + await ws.close() + + +async def test_receive_text_as_bytes_server_iteration( + aiohttp_client: AiohttpClient, +) -> None: + """Test server iterating over WebSocket with decode_text=False.""" + + async 
def websocket_handler( + request: web.Request, + ) -> web.WebSocketResponse[Literal[False]]: + ws: web.WebSocketResponse[Literal[False]] = web.WebSocketResponse( + decode_text=False + ) + await ws.prepare(request) + + async for msg in ws: + if msg.type is aiohttp.WSMsgType.TEXT: + # msg.data should be bytes + assert isinstance(msg.data, bytes) + # Echo back + await ws.send_bytes(msg.data) + else: + assert msg.type is aiohttp.WSMsgType.BINARY + assert isinstance(msg.data, bytes) + await ws.send_bytes(msg.data) + + return ws + + app = web.Application() + app.router.add_route("GET", "/", websocket_handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/") as ws: + # Send TEXT message + await ws.send_str("hello") + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.BINARY + assert msg.data == b"hello" + + # Send BINARY message + await ws.send_bytes(b"world") + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.BINARY + assert msg.data == b"world" + + await ws.close() + + +async def test_server_decode_text_default_true(aiohttp_client: AiohttpClient) -> None: + """Test that server decode_text defaults to True for backward compatibility.""" + + async def websocket_handler(request: web.Request) -> web.WebSocketResponse: + # No decode_text parameter - should default to True + ws = web.WebSocketResponse() + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert isinstance(msg.data, str) + assert msg.data == "test" + + await ws.send_str(msg.data + "/reply") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", websocket_handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/") as ws: + await ws.send_str("test") + + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert isinstance(msg.data, str) + assert msg.data == "test/reply" + + await ws.close() + + +async def 
test_server_receive_str_returns_bytes_with_decode_text_false( + aiohttp_client: AiohttpClient, +) -> None: + """Test that server receive_str() returns bytes when decode_text=False.""" + + async def websocket_handler( + request: web.Request, + ) -> web.WebSocketResponse[Literal[False]]: + ws: web.WebSocketResponse[Literal[False]] = web.WebSocketResponse( + decode_text=False + ) + await ws.prepare(request) + + # receive_str() should return bytes when decode_text=False + data = await ws.receive_str() + assert isinstance(data, bytes) + assert data == b"hello server" + + await ws.send_str("got bytes") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", websocket_handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/") as ws: + await ws.send_str("hello server") + msg = await ws.receive() + assert msg.data == "got bytes" + + +async def test_server_receive_str_returns_str_with_decode_text_true( + aiohttp_client: AiohttpClient, +) -> None: + """Test that server receive_str() returns str when decode_text=True (default).""" + + async def websocket_handler(request: web.Request) -> web.WebSocketResponse: + ws = web.WebSocketResponse() # decode_text=True by default + await ws.prepare(request) + + # receive_str() should return str when decode_text=True + data = await ws.receive_str() + assert isinstance(data, str) + assert data == "hello server" + + await ws.send_str("got string") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", websocket_handler) + client = await aiohttp_client(app) + + async with client.ws_connect("/") as ws: + await ws.send_str("hello server") + msg = await ws.receive() + assert msg.data == "got string" + + +async def test_server_receive_json_with_orjson_style_loads( + aiohttp_client: AiohttpClient, +) -> None: + """Test server receive_json() with orjson-style loads that accepts bytes.""" + + def orjson_style_loads(data: bytes) -> dict[str, str]: + 
"""Mock orjson.loads that accepts bytes.""" + assert isinstance(data, bytes) + result: dict[str, str] = json.loads(data) + return result + + async def websocket_handler( + request: web.Request, + ) -> web.WebSocketResponse[Literal[False]]: + ws: web.WebSocketResponse[Literal[False]] = web.WebSocketResponse( + decode_text=False + ) + await ws.prepare(request) + + # receive_json() with orjson-style loads should work with bytes + data = await ws.receive_json(loads=orjson_style_loads) + assert data == {"test": "value"} + + await ws.send_str("success") + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", websocket_handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/") + await ws.send_str('{"test": "value"}') + msg = await ws.receive() + assert msg.type is aiohttp.WSMsgType.TEXT + assert msg.data == "success" + await ws.close() From 564d9325b97ddfee93202d8bae7939c8c6bdde51 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 21 Dec 2025 15:59:29 +0000 Subject: [PATCH 53/93] fix(connector): propagate proxy headers on connection reuse (#11777) (#11860) (cherry picked from commit 7bbf17d09d5f87b93022d340e39d53f386d5d485) --------- Co-authored-by: GLeurquin --- CHANGES/2596.bugfix.rst | 2 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 54 ++++++++++++++----------- tests/test_connector.py | 90 ++++++++++++++++++++++++++++++++++++++++- 4 files changed, 123 insertions(+), 24 deletions(-) create mode 100644 CHANGES/2596.bugfix.rst diff --git a/CHANGES/2596.bugfix.rst b/CHANGES/2596.bugfix.rst new file mode 100644 index 00000000000..e172506bcde --- /dev/null +++ b/CHANGES/2596.bugfix.rst @@ -0,0 +1,2 @@ +Fixed proxy authorization headers not being passed when reusing a connection, which caused 407 (Proxy authentication required) errors +-- by :user:`GLeurquin`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e54cd6e8cc8..c30d3b76b60 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -144,6 +144,7 @@ Georges Dubus Greg Holt Gregory Haynes Grigoriy Soldatov +Guillaume Leurquin Gus Goulart Gustavo Carneiro Günther Jena diff --git a/aiohttp/connector.py b/aiohttp/connector.py index abca2605601..93cdfa57d24 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -592,6 +592,32 @@ def _available_connections(self, key: "ConnectionKey") -> int: return total_remain + def _update_proxy_auth_header_and_build_proxy_req( + self, req: ClientRequest + ) -> ClientRequest: + """Set Proxy-Authorization header for non-SSL proxy requests and builds the proxy request for SSL proxy requests.""" + url = req.proxy + assert url is not None + headers: dict[str, str] = {} + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore[assignment] + headers[hdrs.HOST] = req.headers[hdrs.HOST] + proxy_req = ClientRequest( + hdrs.METH_GET, + url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl, + ) + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + return proxy_req + async def connect( self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> Connection: @@ -600,12 +626,16 @@ async def connect( if (conn := await self._get(key, traces)) is not None: # If we do not have to wait and we can get a connection from the pool # we can avoid the timeout ceil logic and directly return the connection + if req.proxy: + self._update_proxy_auth_header_and_build_proxy_req(req) return conn async with ceil_timeout(timeout.connect, timeout.ceil_threshold): if self._available_connections(key) <= 0: await self._wait_for_available_connection(key, traces) if (conn := await self._get(key, traces)) is not None: + if req.proxy: + 
self._update_proxy_auth_header_and_build_proxy_req(req) return conn placeholder = cast( @@ -1566,35 +1596,13 @@ async def _create_proxy_connection( ) -> tuple[asyncio.BaseTransport, ResponseHandler]: self._fail_on_no_start_tls(req) runtime_has_start_tls = self._loop_supports_start_tls() - - headers: dict[str, str] = {} - if req.proxy_headers is not None: - headers = req.proxy_headers # type: ignore[assignment] - headers[hdrs.HOST] = req.headers[hdrs.HOST] - - url = req.proxy - assert url is not None - proxy_req = ClientRequest( - hdrs.METH_GET, - url, - headers=headers, - auth=req.proxy_auth, - loop=self._loop, - ssl=req.ssl, - ) + proxy_req = self._update_proxy_auth_header_and_build_proxy_req(req) # create connection to proxy server transport, proto = await self._create_direct_connection( proxy_req, [], timeout, client_error=ClientProxyConnectionError ) - auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) - if auth is not None: - if not req.is_ssl(): - req.headers[hdrs.PROXY_AUTHORIZATION] = auth - else: - proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth - if req.is_ssl(): if runtime_has_start_tls: self._warn_about_tls_in_tls(transport, req) diff --git a/tests/test_connector.py b/tests/test_connector.py index 09a8b26c193..15709db9b83 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,5 +1,6 @@ # Tests of http client with custom Connector import asyncio +import contextlib import gc import hashlib import logging @@ -17,11 +18,12 @@ from unittest import mock import pytest +from multidict import CIMultiDict from pytest_mock import MockerFixture from yarl import URL import aiohttp -from aiohttp import client, connector as connector_module, web +from aiohttp import client, connector as connector_module, hdrs, web from aiohttp.client import ClientRequest, ClientTimeout from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ConnectionKey @@ -3181,6 +3183,92 @@ async def test_connect_reuseconn_tracing(loop, key) -> 
None: await conn.close() +@pytest.mark.parametrize( + "test_case,wait_for_con,expect_proxy_auth_header", + [ + ("use_proxy_with_embedded_auth", False, True), + ("use_proxy_with_auth_headers", True, True), + ("use_proxy_no_auth", False, False), + ("dont_use_proxy", False, False), + ], +) +async def test_connect_reuse_proxy_headers( # type: ignore[misc] + loop: asyncio.AbstractEventLoop, + test_case: str, + wait_for_con: bool, + expect_proxy_auth_header: bool, +) -> None: + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + if test_case != "dont_use_proxy": + proxy = ( + URL("http://user:password@example.com") + if test_case == "use_proxy_with_embedded_auth" + else URL("http://example.com") + ) + proxy_headers = ( + CIMultiDict({hdrs.AUTHORIZATION: "Basic dXNlcjpwYXNzd29yZA=="}) + if test_case == "use_proxy_with_auth_headers" + else None + ) + else: + proxy = None + proxy_headers = None + key = ConnectionKey( + "localhost", + 80, + False, + True, + proxy, + None, + hash(tuple(proxy_headers.items())) if proxy_headers else None, + ) + req = ClientRequest( + "GET", + URL("http://localhost:80"), + loop=loop, + response_class=mock.Mock(), + proxy=proxy, + proxy_headers=proxy_headers, + ) + + conn = aiohttp.BaseConnector(limit=1) + + async def _create_con(*args: Any, **kwargs: Any) -> None: + conn._conns[key] = deque([(proto, loop.time())]) + + with contextlib.ExitStack() as stack: + if wait_for_con: + # Simulate no available connections + stack.enter_context( + mock.patch.object( + conn, "_available_connections", autospec=True, return_value=0 + ) + ) + # Upon waiting for a connection, populate _conns with our proto, + # mocking a connection becoming immediately available + stack.enter_context( + mock.patch.object( + conn, + "_wait_for_available_connection", + autospec=True, + side_effect=_create_con, + ) + ) + else: + await _create_con() + # Call function to test + conn2 = await conn.connect(req, [], ClientTimeout()) + conn2.release() + await 
conn.close() + + if expect_proxy_auth_header: + assert req.headers[hdrs.PROXY_AUTHORIZATION] == "Basic dXNlcjpwYXNzd29yZA==" + else: + assert hdrs.PROXY_AUTHORIZATION not in req.headers + + async def test_connect_with_limit_and_limit_per_host(loop, key) -> None: proto = mock.Mock() proto.is_connected.return_value = True From 36649001bf3b479ce2e377a08d8708b35ac6c385 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 21 Dec 2025 16:04:48 +0000 Subject: [PATCH 54/93] Revert "Bump coverage from 7.11.0 to 7.13.0 (#11826)" This reverts commit bed4add8dab01703ddfa4f243b6f429d321f216d. --- .coveragerc | 11 +++++++++++ .coveragerc.toml | 22 ---------------------- CHANGES/11826.contrib.rst | 7 ------- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 16 insertions(+), 34 deletions(-) create mode 100644 .coveragerc delete mode 100644 .coveragerc.toml delete mode 100644 CHANGES/11826.contrib.rst diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000000..7792266b114 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,11 @@ +[run] +branch = True +source = aiohttp, tests +omit = site-packages + +[report] +exclude_also = + if TYPE_CHECKING + assert False + : \.\.\.(\s*#.*)?$ + ^ +\.\.\.$ diff --git a/.coveragerc.toml b/.coveragerc.toml deleted file mode 100644 index 4ca5d2808bd..00000000000 --- a/.coveragerc.toml +++ /dev/null @@ -1,22 +0,0 @@ -[run] -branch = true -# NOTE: `ctrace` tracing method is needed because the `sysmon` tracer -# NOTE: which is default on Python 3.14, causes unprecedented slow-down -# NOTE: of the test runs. 
-# Ref: https://github.com/coveragepy/coveragepy/issues/2099 -core = 'ctrace' -source = [ - 'aiohttp', - 'tests', -] -omit = [ - 'site-packages', -] - -[report] -exclude_also = [ - 'if TYPE_CHECKING', - 'assert False', - ': \.\.\.(\s*#.*)?$', - '^ +\.\.\.$', -] diff --git a/CHANGES/11826.contrib.rst b/CHANGES/11826.contrib.rst deleted file mode 100644 index 134eda601c2..00000000000 --- a/CHANGES/11826.contrib.rst +++ /dev/null @@ -1,7 +0,0 @@ -The coverage tool is now configured using the new native -auto-discovered :file:`.coveragerc.toml` file --- by :user:`webknjaz`. - -It is also set up to use the ``ctrace`` core that works -around the performance issues in the ``sysmon`` tracer -which is default under Python 3.14. diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 35112e806ef..8cc65554bbd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.13.0 +coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 935b3416a57..86a0843a7c5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -54,7 +54,7 @@ click==8.1.8 # slotscheck # towncrier # wait-for-it -coverage==7.13.0 +coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 9b3e1c0c778..1ab1c6b3550 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -14,7 +14,7 @@ cffi==2.0.0 # pytest-codspeed click==8.2.1 # via wait-for-it -coverage==7.13.0 +coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 71d928fcecd..dd791bc78e7 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -27,7 +27,7 @@ cffi==2.0.0 # pytest-codspeed click==8.2.1 # via wait-for-it -coverage==7.13.0 
+coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index a6377f3b337..87faf489087 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -27,7 +27,7 @@ cffi==2.0.0 # pytest-codspeed click==8.1.8 # via wait-for-it -coverage==7.13.0 +coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov From de6de9134abaae324da4fcfaa6b65797aee3dc8c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 21 Dec 2025 16:28:17 +0000 Subject: [PATCH 55/93] fix(connector): propagate proxy headers on connection reuse (#11777) (#11861) (cherry picked from commit 7bbf17d09d5f87b93022d340e39d53f386d5d485) --------- Co-authored-by: GLeurquin --- CHANGES/2596.bugfix.rst | 2 + CONTRIBUTORS.txt | 1 + aiohttp/connector.py | 54 ++++++++++++++----------- tests/test_connector.py | 90 ++++++++++++++++++++++++++++++++++++++++- 4 files changed, 123 insertions(+), 24 deletions(-) create mode 100644 CHANGES/2596.bugfix.rst diff --git a/CHANGES/2596.bugfix.rst b/CHANGES/2596.bugfix.rst new file mode 100644 index 00000000000..e172506bcde --- /dev/null +++ b/CHANGES/2596.bugfix.rst @@ -0,0 +1,2 @@ +Fixed proxy authorization headers not being passed when reusing a connection, which caused 407 (Proxy authentication required) errors +-- by :user:`GLeurquin`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 290be0205f1..9f05d211516 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -143,6 +143,7 @@ Gennady Andreyev Georges Dubus Greg Holt Gregory Haynes +Guillaume Leurquin Gus Goulart Gustavo Carneiro Günther Jena diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 0fbacde3b42..290a42400f9 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -609,6 +609,32 @@ def _available_connections(self, key: "ConnectionKey") -> int: return total_remain + def _update_proxy_auth_header_and_build_proxy_req( + self, req: ClientRequest + ) -> ClientRequest: + """Set Proxy-Authorization header for non-SSL proxy requests and builds the proxy request for SSL proxy requests.""" + url = req.proxy + assert url is not None + headers: Dict[str, str] = {} + if req.proxy_headers is not None: + headers = req.proxy_headers # type: ignore[assignment] + headers[hdrs.HOST] = req.headers[hdrs.HOST] + proxy_req = ClientRequest( + hdrs.METH_GET, + url, + headers=headers, + auth=req.proxy_auth, + loop=self._loop, + ssl=req.ssl, + ) + auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) + if auth is not None: + if not req.is_ssl(): + req.headers[hdrs.PROXY_AUTHORIZATION] = auth + else: + proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth + return proxy_req + async def connect( self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" ) -> Connection: @@ -617,12 +643,16 @@ async def connect( if (conn := await self._get(key, traces)) is not None: # If we do not have to wait and we can get a connection from the pool # we can avoid the timeout ceil logic and directly return the connection + if req.proxy: + self._update_proxy_auth_header_and_build_proxy_req(req) return conn async with ceil_timeout(timeout.connect, timeout.ceil_threshold): if self._available_connections(key) <= 0: await self._wait_for_available_connection(key, traces) if (conn := await self._get(key, traces)) is not None: + if req.proxy: + 
self._update_proxy_auth_header_and_build_proxy_req(req) return conn placeholder = cast( @@ -1585,35 +1615,13 @@ async def _create_proxy_connection( ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: self._fail_on_no_start_tls(req) runtime_has_start_tls = self._loop_supports_start_tls() - - headers: Dict[str, str] = {} - if req.proxy_headers is not None: - headers = req.proxy_headers # type: ignore[assignment] - headers[hdrs.HOST] = req.headers[hdrs.HOST] - - url = req.proxy - assert url is not None - proxy_req = ClientRequest( - hdrs.METH_GET, - url, - headers=headers, - auth=req.proxy_auth, - loop=self._loop, - ssl=req.ssl, - ) + proxy_req = self._update_proxy_auth_header_and_build_proxy_req(req) # create connection to proxy server transport, proto = await self._create_direct_connection( proxy_req, [], timeout, client_error=ClientProxyConnectionError ) - auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) - if auth is not None: - if not req.is_ssl(): - req.headers[hdrs.PROXY_AUTHORIZATION] = auth - else: - proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth - if req.is_ssl(): if runtime_has_start_tls: self._warn_about_tls_in_tls(transport, req) diff --git a/tests/test_connector.py b/tests/test_connector.py index 9048bf61e2f..cf187d66b08 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1,5 +1,6 @@ # Tests of http client with custom Connector import asyncio +import contextlib import gc import hashlib import logging @@ -26,11 +27,12 @@ from unittest import mock import pytest +from multidict import CIMultiDict from pytest_mock import MockerFixture from yarl import URL import aiohttp -from aiohttp import client, connector as connector_module, web +from aiohttp import client, connector as connector_module, hdrs, web from aiohttp.client import ClientRequest, ClientTimeout from aiohttp.client_proto import ResponseHandler from aiohttp.client_reqrep import ConnectionKey @@ -3190,6 +3192,92 @@ async def test_connect_reuseconn_tracing(loop, key) -> 
None: await conn.close() +@pytest.mark.parametrize( + "test_case,wait_for_con,expect_proxy_auth_header", + [ + ("use_proxy_with_embedded_auth", False, True), + ("use_proxy_with_auth_headers", True, True), + ("use_proxy_no_auth", False, False), + ("dont_use_proxy", False, False), + ], +) +async def test_connect_reuse_proxy_headers( # type: ignore[misc] + loop: asyncio.AbstractEventLoop, + test_case: str, + wait_for_con: bool, + expect_proxy_auth_header: bool, +) -> None: + proto = create_mocked_conn(loop) + proto.is_connected.return_value = True + + if test_case != "dont_use_proxy": + proxy = ( + URL("http://user:password@example.com") + if test_case == "use_proxy_with_embedded_auth" + else URL("http://example.com") + ) + proxy_headers = ( + CIMultiDict({hdrs.AUTHORIZATION: "Basic dXNlcjpwYXNzd29yZA=="}) + if test_case == "use_proxy_with_auth_headers" + else None + ) + else: + proxy = None + proxy_headers = None + key = ConnectionKey( + "localhost", + 80, + False, + True, + proxy, + None, + hash(tuple(proxy_headers.items())) if proxy_headers else None, + ) + req = ClientRequest( + "GET", + URL("http://localhost:80"), + loop=loop, + response_class=mock.Mock(), + proxy=proxy, + proxy_headers=proxy_headers, + ) + + conn = aiohttp.BaseConnector(limit=1) + + async def _create_con(*args: Any, **kwargs: Any) -> None: + conn._conns[key] = deque([(proto, loop.time())]) + + with contextlib.ExitStack() as stack: + if wait_for_con: + # Simulate no available connections + stack.enter_context( + mock.patch.object( + conn, "_available_connections", autospec=True, return_value=0 + ) + ) + # Upon waiting for a connection, populate _conns with our proto, + # mocking a connection becoming immediately available + stack.enter_context( + mock.patch.object( + conn, + "_wait_for_available_connection", + autospec=True, + side_effect=_create_con, + ) + ) + else: + await _create_con() + # Call function to test + conn2 = await conn.connect(req, [], ClientTimeout()) + conn2.release() + await 
conn.close() + + if expect_proxy_auth_header: + assert req.headers[hdrs.PROXY_AUTHORIZATION] == "Basic dXNlcjpwYXNzd29yZA==" + else: + assert hdrs.PROXY_AUTHORIZATION not in req.headers + + async def test_connect_with_limit_and_limit_per_host(loop, key) -> None: proto = mock.Mock() proto.is_connected.return_value = True From d6885d58556286045ca8c985b37dde908a151e5f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Dec 2025 16:30:28 +0000 Subject: [PATCH 56/93] Bump actions/download-artifact from 6 to 7 (#11847) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 6 to 7.
Release notes

Sourced from actions/download-artifact's releases.

v7.0.0

v7 - What's new

[!IMPORTANT] actions/download-artifact@v7 now runs on Node.js 24 (runs.using: node24) and requires a minimum Actions Runner version of 2.327.1. If you are using self-hosted runners, ensure they are updated before upgrading.

Node.js 24

This release updates the runtime to Node.js 24. v6 had preliminary support for Node 24, however this action was by default still running on Node.js 20. Now this action by default will run on Node.js 24.

What's Changed

New Contributors

Full Changelog: https://github.com/actions/download-artifact/compare/v6.0.0...v7.0.0

Commits
  • 37930b1 Merge pull request #452 from actions/download-artifact-v7-release
  • 72582b9 doc: update readme
  • 0d2ec9d chore: release v7.0.0 for Node.js 24 support
  • fd7ae8f Merge pull request #451 from actions/fix-storage-blob
  • d484700 chore: restore minimatch.dep.yml license file
  • 03a8080 chore: remove obsolete dependency license files
  • 56fe6d9 chore: update @​actions/artifact license file to 5.0.1
  • 8e3ebc4 chore: update package-lock.json with @​actions/artifact@​5.0.1
  • 1e3c4b4 fix: update @​actions/artifact to ^5.0.0 for Node.js 24 punycode fix
  • 458627d chore: use local @​actions/artifact package for Node.js 24 testing
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/download-artifact&package-manager=github_actions&previous-version=6&new-version=7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2c256407efa..279e687de3d 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -179,7 +179,7 @@ jobs: run: echo "PYTHON_GIL=0" >> $GITHUB_ENV - name: Restore llhttp generated files if: ${{ matrix.no-extensions == '' }} - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v7 with: name: llhttp path: vendor/llhttp/build/ @@ -266,7 +266,7 @@ jobs: run: | python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v7 with: name: llhttp path: vendor/llhttp/build/ @@ -328,7 +328,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v7 with: name: llhttp path: vendor/llhttp/build/ @@ -418,7 +418,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v7 with: name: llhttp path: vendor/llhttp/build/ @@ -463,7 +463,7 @@ jobs: run: | echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token - name: Download distributions - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v7 with: path: dist pattern: dist-* From 93c24f78f49f4e1d58bff9d3dbfd436240c09c6d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 21 Dec 2025 16:30:47 +0000 Subject: [PATCH 57/93] Bump actions/cache from 5.0.0 to 5.0.1 (#11846) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 5.0.0 to 5.0.1.
Release notes

Sourced from actions/cache's releases.

v5.0.1

[!IMPORTANT] actions/cache@v5 runs on the Node.js 24 runtime and requires a minimum Actions Runner version of 2.327.1.

If you are using self-hosted runners, ensure they are updated before upgrading.


v5.0.1

What's Changed

v5.0.0

What's Changed

Full Changelog: https://github.com/actions/cache/compare/v5...v5.0.1

Changelog

Sourced from actions/cache's changelog.

5.0.1

  • Update @azure/storage-blob to ^12.29.1 via @actions/cache@5.0.1 #1685
Commits
  • 9255dc7 Merge pull request #1686 from actions/cache-v5.0.1-release
  • 8ff5423 chore: release v5.0.1
  • 9233019 Merge pull request #1685 from salmanmkc/node24-storage-blob-fix
  • b975f2b fix: add peer property to package-lock.json for dependencies
  • d0a0e18 fix: update license files for @​actions/cache, fast-xml-parser, and strnum
  • 74de208 fix: update @​actions/cache to ^5.0.1 for Node.js 24 punycode fix
  • ac7f115 peer
  • b0f846b fix: update @​actions/cache with storage-blob fix for Node.js 24 punycode depr...
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=5.0.0&new-version=5.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 279e687de3d..cc36f6f7239 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.11 - name: Cache PyPI - uses: actions/cache@v5.0.0 + uses: actions/cache@v5.0.1 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -96,7 +96,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v5.0.0 + uses: actions/cache@v5.0.1 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -160,7 +160,7 @@ jobs: echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" shell: bash - name: Cache PyPI - uses: actions/cache@v5.0.0 + uses: actions/cache@v5.0.1 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 0ba7d581345a230455cc6d0396f5a42f3013829a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Dec 2025 10:40:46 +0000 Subject: [PATCH 58/93] Bump nodeenv from 1.9.1 to 1.10.0 (#11864) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [nodeenv](https://github.com/ekalinin/nodeenv) from 1.9.1 to 1.10.0.
Release notes

Sourced from nodeenv's releases.

1.10.0 - drop

What's Changed

Fixed bugs 🐛

Improvements 🛠

Other Changes

New Contributors

Full Changelog: https://github.com/ekalinin/nodeenv/compare/1.9.1...1.10.0

Commits
  • 9dee547 chore: bump nodeenv version to 1.10.0
  • d45aabb chore: add pyright ignore comments for compatibility
  • 55d6c21 chore: update AUTHORS
  • 5f694e6 test: update test test_node_system_creates_shim
  • fa3fdfb Merge branch 'master' of github.com:ekalinin/nodeenv
  • e868dbe Replace additional use of which(1) with shutil.which() (#355)
  • b4cd00d test: enhance activation tests for nodeenv with custom prompts and file handling
  • 0b5ea9d refactor(tests): improve readability of mock patches in nodeenv tests
  • 37c0c30 ci: add GH workflow for testing and coverage in PR
  • 326a7a4 test: add comprehensive tests for install_npm and install_npm_win functions
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=nodeenv&package-manager=pip&previous-version=1.9.1&new-version=1.10.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 5 +++-- requirements/dev.txt | 5 +++-- requirements/lint.txt | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7f849caceb6..56d972750bd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -123,7 +123,7 @@ mypy==1.19.1 ; implementation_name == "cpython" # -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy -nodeenv==1.9.1 +nodeenv==1.10.0 # via pre-commit packaging==25.0 # via @@ -255,8 +255,9 @@ trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test-common.in -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # cryptography # exceptiongroup diff --git a/requirements/dev.txt b/requirements/dev.txt index 6c13de4b50e..952e2f6203a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -120,7 +120,7 @@ mypy==1.19.1 ; implementation_name == "cpython" # -r requirements/test-common.in mypy-extensions==1.1.0 # via mypy -nodeenv==1.9.1 +nodeenv==1.10.0 # via pre-commit packaging==25.0 # via @@ -245,8 +245,9 @@ trustme==1.2.1 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test-common.in -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # cryptography # exceptiongroup diff --git a/requirements/lint.txt b/requirements/lint.txt index 2f3c8ed8950..238b22c63eb 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -53,7 +53,7 @@ mypy==1.19.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.1.0 # via mypy -nodeenv==1.9.1 +nodeenv==1.10.0 # via pre-commit packaging==25.0 # via pytest From 
c15746e07e2552ff2b7160e0509ee545b6b5a3c8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 22 Dec 2025 16:03:25 +0000 Subject: [PATCH 59/93] [PR #11865/963ca767 backport][3.14] Add python-proxy-headers to third_party.rst (#11866) **This is a backport of PR #11865 as merged into master (963ca76708cd4bc52e726db10fd296c2025d003e).** Co-authored-by: ProxyMesh <57071867+proxymesh@users.noreply.github.com> --- docs/third_party.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/third_party.rst b/docs/third_party.rst index c01023c1f1b..12380d390fe 100644 --- a/docs/third_party.rst +++ b/docs/third_party.rst @@ -149,6 +149,9 @@ ask to raise the status. - `pytest-aiohttp-client `_ Pytest fixture with simpler api, payload decoding and status code assertions. +- `python-proxy-headers `_ + provides ``aiohttp_proxy`` extension for receiving custom response headers from a proxy server + - `octomachinery `_ A framework for developing GitHub Apps and GitHub Actions. From dcc02a8c91167f1786b9102aacfe3fa2ad2ff6a3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 23 Dec 2025 20:10:02 +0000 Subject: [PATCH 60/93] [PR #11867/c3b08f73 backport][3.14] Fix flaky test (#11869) **This is a backport of PR #11867 as merged into master (c3b08f73e5531fb4436c6b90725aa5009fb1d705).** Co-authored-by: Sam Bull --- tests/test_run_app.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 6cb19b1def2..c2ec32b2390 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -1234,6 +1234,7 @@ async def run_test(app: web.Application) -> None: nonlocal t t = asyncio.create_task(test()) yield + await asyncio.sleep(0) # In case test() hasn't resumed yet. 
t.cancel() with contextlib.suppress(asyncio.CancelledError): await t From 0d2d1d60ff4ca4e99272b1806027e61b66cb51d9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 23 Dec 2025 20:14:04 +0000 Subject: [PATCH 61/93] [PR #11867/c3b08f73 backport][3.13] Fix flaky test (#11868) **This is a backport of PR #11867 as merged into master (c3b08f73e5531fb4436c6b90725aa5009fb1d705).** Co-authored-by: Sam Bull --- tests/test_run_app.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index c4c4d1784d9..47087c22dff 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -1243,6 +1243,7 @@ async def run_test(app: web.Application) -> None: nonlocal t t = asyncio.create_task(test()) yield + await asyncio.sleep(0) # In case test() hasn't resumed yet. t.cancel() with contextlib.suppress(asyncio.CancelledError): await t From 49a440533b937a4d086cfcb41bd73e77e2cc1cae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 25 Dec 2025 10:37:28 +0000 Subject: [PATCH 62/93] Bump librt from 0.7.4 to 0.7.5 (#11872) Bumps [librt](https://github.com/mypyc/librt) from 0.7.4 to 0.7.5.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=librt&package-manager=pip&previous-version=0.7.4&new-version=0.7.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 5 +++-- requirements/test.txt | 2 +- 6 files changed, 8 insertions(+), 7 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 56d972750bd..7a2e296b8ef 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -104,7 +104,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.7.4 +librt==0.7.5 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/dev.txt b/requirements/dev.txt index 952e2f6203a..c05e060878e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -102,7 +102,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.7.4 +librt==0.7.5 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/lint.txt b/requirements/lint.txt index 238b22c63eb..3b752ec4e1b 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -43,7 +43,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 # via -r requirements/lint.in -librt==0.7.4 +librt==0.7.5 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 84c1bd637ae..6898a032aa3 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -34,7 +34,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.4 +librt==0.7.5 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 6f7e2e35130..685b8c3d677 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.4 +librt==0.7.5 # via mypy markdown-it-py==4.0.0 # 
via rich @@ -137,8 +137,9 @@ tomli==2.3.0 # pytest trustme==1.2.1 ; platform_machine != "i686" # via -r requirements/test-common.in -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # cryptography # exceptiongroup diff --git a/requirements/test.txt b/requirements/test.txt index da377b8e25f..b1a4034f512 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.4 +librt==0.7.5 # via mypy markdown-it-py==4.0.0 # via rich From 0c26d876ec804ba9f1b8091569deacc4a8c06ff8 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 27 Dec 2025 22:50:37 +0000 Subject: [PATCH 63/93] Accept async context managers for cleanup contexts (#11681) (#11704) (#11873) (cherry picked from commit 4379717c7cfc428de415d47832c787d49d7b946d) --------- Co-authored-by: Parman Mohammadalizadeh --- CHANGES/11681.feature.rst | 6 +++ CONTRIBUTORS.txt | 1 + aiohttp/web_app.py | 29 +++++++------ docs/faq.rst | 1 + docs/web_advanced.rst | 4 ++ docs/web_reference.rst | 3 +- examples/background_tasks.py | 3 +- tests/test_web_app.py | 83 ++++++++++++++++++++++++++++++++++-- 8 files changed, 111 insertions(+), 19 deletions(-) create mode 100644 CHANGES/11681.feature.rst diff --git a/CHANGES/11681.feature.rst b/CHANGES/11681.feature.rst new file mode 100644 index 00000000000..21b0ab1f7c7 --- /dev/null +++ b/CHANGES/11681.feature.rst @@ -0,0 +1,6 @@ +Started accepting :term:`asynchronous context managers ` for cleanup contexts. +Legacy single-yield :term:`asynchronous generator` cleanup contexts continue to be +supported; async context managers are adapted internally so they are +entered at startup and exited during cleanup. + +-- by :user:`MannXo`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c30d3b76b60..115e26bb428 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -283,6 +283,7 @@ Pahaz Blinov Panagiotis Kolokotronis Pankaj Pandey Parag Jain +Parman Mohammadalizadeh Patrick Lee Pau Freixes Paul Colomiets diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 1a9489ad500..dc748afa419 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -11,6 +11,7 @@ MutableMapping, Sequence, ) +from contextlib import AbstractAsyncContextManager, asynccontextmanager from functools import lru_cache, partial, update_wrapper from typing import TYPE_CHECKING, Any, Optional, TypeVar, cast, overload @@ -574,34 +575,34 @@ def exceptions(self) -> list[BaseException]: return cast(list[BaseException], self.args[1]) -if TYPE_CHECKING: - _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] -else: - _CleanupContextBase = FrozenList +_CleanupContextCallable = ( + Callable[[Application], AbstractAsyncContextManager[None]] + | Callable[[Application], AsyncIterator[None]] +) -class CleanupContext(_CleanupContextBase): +class CleanupContext(FrozenList[_CleanupContextCallable]): def __init__(self) -> None: super().__init__() - self._exits: list[AsyncIterator[None]] = [] + self._exits: list[AbstractAsyncContextManager[None]] = [] async def _on_startup(self, app: Application) -> None: for cb in self: - it = cb(app).__aiter__() - await it.__anext__() - self._exits.append(it) + ctx = cb(app) + + if not isinstance(ctx, AbstractAsyncContextManager): + ctx = asynccontextmanager(cb)(app) # type: ignore[arg-type] + + await ctx.__aenter__() + self._exits.append(ctx) async def _on_cleanup(self, app: Application) -> None: errors = [] for it in reversed(self._exits): try: - await it.__anext__() - except StopAsyncIteration: - pass + await it.__aexit__(None, None, None) except (Exception, asyncio.CancelledError) as exc: errors.append(exc) - else: - errors.append(RuntimeError(f"{it!r} has more than one 
'yield'")) if errors: if len(errors) == 1: raise errors[0] diff --git a/docs/faq.rst b/docs/faq.rst index b3d844c3636..009169a98ae 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -290,6 +290,7 @@ database object, do it explicitly:: This can also be done from a :ref:`cleanup context`:: + @contextlib.asynccontextmanager async def db_context(app: web.Application) -> AsyncIterator[None]: async with create_db() as db: mainapp[db_key] = mainapp[subapp_key][db_key] = db diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 35286f8da6e..dbde4e960f0 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -853,6 +853,7 @@ knowledge about startup/cleanup pairs and their execution state. The solution is :attr:`Application.cleanup_ctx` usage:: + @contextlib.asynccontextmanager async def pg_engine(app: web.Application): app[pg_engine] = await create_async_engine( "postgresql+asyncpg://postgre:@localhost:5432/postgre" @@ -1164,6 +1165,7 @@ below:: await ws.send_str("{}: {}".format(channel, msg)) + @contextlib.asynccontextmanager async def background_tasks(app): app[redis_listener] = asyncio.create_task(listen_to_redis(app)) @@ -1203,6 +1205,7 @@ For example, running a long-lived task alongside the :class:`Application` can be done with a :ref:`aiohttp-web-cleanup-ctx` function like:: + @contextlib.asynccontextmanager async def run_other_task(_app): task = asyncio.create_task(other_long_task()) @@ -1218,6 +1221,7 @@ can be done with a :ref:`aiohttp-web-cleanup-ctx` function like:: Or a separate process can be run with something like:: + @contextlib.asynccontextmanager async def run_process(_app): proc = await asyncio.create_subprocess_exec(path) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index c523cdfa9be..4a130e32f1d 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1618,7 +1618,8 @@ Application and Router Signal handlers should have the following signature:: - async def context(app): + @contextlib.asynccontextmanager + 
async def context(app: web.Application) -> AsyncIterator[None]: # do startup stuff yield # do cleanup diff --git a/examples/background_tasks.py b/examples/background_tasks.py index 121111d6857..b4e84207bdb 100755 --- a/examples/background_tasks.py +++ b/examples/background_tasks.py @@ -2,7 +2,7 @@ """Example of aiohttp.web.Application.on_startup signal handler""" import asyncio from collections.abc import AsyncIterator -from contextlib import suppress +from contextlib import asynccontextmanager, suppress import valkey.asyncio as valkey @@ -44,6 +44,7 @@ async def listen_to_valkey(app: web.Application) -> None: print(f"message in {channel}: {msg}") +@asynccontextmanager async def background_tasks(app: web.Application) -> AsyncIterator[None]: app[valkey_listener] = asyncio.create_task(listen_to_valkey(app)) diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 3905f4a0f0b..d83c292b6d0 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -2,6 +2,7 @@ import gc import sys from collections.abc import AsyncIterator, Callable, Iterator +from contextlib import asynccontextmanager from typing import NoReturn from unittest import mock @@ -10,6 +11,7 @@ from aiohttp import log, web from aiohttp.abc import AbstractAccessLogger, AbstractRouter from aiohttp.helpers import DEBUG +from aiohttp.pytest_plugin import AiohttpClient from aiohttp.typedefs import Handler @@ -556,13 +558,88 @@ async def inner(app: web.Application) -> AsyncIterator[None]: app.freeze() await app.startup() assert out == ["pre_1"] - with pytest.raises(RuntimeError) as ctx: + with pytest.raises(RuntimeError): await app.cleanup() - assert "has more than one 'yield'" in str(ctx.value) assert out == ["pre_1", "post_1"] -async def test_subapp_chained_config_dict_visibility(aiohttp_client) -> None: +async def test_cleanup_ctx_with_async_generator_and_asynccontextmanager() -> None: + entered = [] + + async def gen_ctx(app: web.Application) -> AsyncIterator[None]: + entered.append("enter-gen") 
+ try: + yield + finally: + entered.append("exit-gen") + + @asynccontextmanager + async def cm_ctx(app: web.Application) -> AsyncIterator[None]: + entered.append("enter-cm") + try: + yield + finally: + entered.append("exit-cm") + + app = web.Application() + app.cleanup_ctx.append(gen_ctx) + app.cleanup_ctx.append(cm_ctx) + app.freeze() + await app.startup() + assert "enter-gen" in entered and "enter-cm" in entered + await app.cleanup() + assert "exit-gen" in entered and "exit-cm" in entered + + +async def test_cleanup_ctx_exception_in_cm_exit() -> None: + app = web.Application() + + exc = RuntimeError("exit failed") + + @asynccontextmanager + async def failing_exit_ctx(app: web.Application) -> AsyncIterator[None]: + yield + raise exc + + app.cleanup_ctx.append(failing_exit_ctx) + app.freeze() + await app.startup() + with pytest.raises(RuntimeError) as ctx: + await app.cleanup() + assert ctx.value is exc + + +async def test_cleanup_ctx_mixed_with_exception_in_cm_exit() -> None: + app = web.Application() + out = [] + + async def working_gen(app: web.Application) -> AsyncIterator[None]: + out.append("pre_gen") + yield + out.append("post_gen") + + exc = RuntimeError("cm exit failed") + + @asynccontextmanager + async def failing_exit_cm(app: web.Application) -> AsyncIterator[None]: + out.append("pre_cm") + yield + out.append("post_cm") + raise exc + + app.cleanup_ctx.append(working_gen) + app.cleanup_ctx.append(failing_exit_cm) + app.freeze() + await app.startup() + with pytest.raises(RuntimeError) as ctx: + await app.cleanup() + assert ctx.value is exc + assert out == ["pre_gen", "pre_cm", "post_cm", "post_gen"] + + +async def test_subapp_chained_config_dict_visibility( + aiohttp_client: AiohttpClient, +) -> None: key1 = web.AppKey("key1", str) key2 = web.AppKey("key2", str) From 44f619b0dc61a0505114abc62211b75978707741 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Dec 2025 10:40:25 +0000 Subject: 
[PATCH 64/93] Bump coverage from 7.13.0 to 7.13.1 (#11875) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/coveragepy/coveragepy) from 7.13.0 to 7.13.1.
Changelog

Sourced from coverage's changelog.

Version 7.13.1 — 2025-12-28

  • Added: the JSON report now includes a "start_line" key for function and class regions, indicating the first line of the region in the source. Closes issue 2110_.

  • Added: The debug data command now takes file names as arguments on the command line, so you can inspect specific data files without needing to set the COVERAGE_FILE environment variable.

  • Fix: the JSON report used to report module docstrings as executed lines, which no other report did, as described in issue 2105_. This is now fixed, thanks to Jianrong Zhao.

  • Fix: coverage.py uses a more disciplined approach to detecting where third-party code is installed, and avoids measuring it. This shouldn't change any behavior. If you find that it does, please get in touch.

  • Performance: data files that will be combined now record their hash as part of the file name. This lets us skip duplicate data more quickly, speeding the combining step.

  • Docs: added a section explaining more about what is considered a missing branch and how it is reported: :ref:`branch_explain`, as requested in `issue 1597`_. Thanks to Ayisha Mohammed (`pull 2092`_).

  • Tests: the test suite misunderstood what core was being tested if COVERAGE_CORE wasn't set on 3.14+. This is now fixed, closing issue 2109_.

.. _issue 1597: coveragepy/coveragepy#1597 .. _pull 2092: coveragepy/coveragepy#2092 .. _issue 2105: coveragepy/coveragepy#2105 .. _issue 2109: coveragepy/coveragepy#2109 .. _issue 2110: coveragepy/coveragepy#2110

.. _changes_7-13-0:

Commits
  • a6afdc3 docs: sample HTML for 7.13.1
  • a497081 docs: prep for 7.13.1
  • e992033 docs: polish up CHANGES
  • 18bba6e chore: bump the action-dependencies group with 4 updates (#2111)
  • 80fb808 refactor: (?x:...) lets us use re.VERBOSE even when combining later
  • cc272bd docs: leave a comment so we'll find this when 3.12 is the minimum
  • 70d007d types: be explicit
  • a2c1940 types: fully import modules that will be patched
  • 57b975d types: explicit Protocol inheritance permits changing parameter names
  • 63ec12d types: clarify that morfs arguments can be a single morf
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.13.0&new-version=7.13.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7a2e296b8ef..f5206aa3c2c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -55,7 +55,7 @@ click==8.3.1 # slotscheck # towncrier # wait-for-it -coverage==7.13.0 +coverage==7.13.1 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index c05e060878e..99ea3d23625 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -55,7 +55,7 @@ click==8.3.1 # slotscheck # towncrier # wait-for-it -coverage==7.13.0 +coverage==7.13.1 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 6898a032aa3..88341c752db 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -14,7 +14,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.1 # via wait-for-it -coverage==7.13.0 +coverage==7.13.1 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 685b8c3d677..01ef1e6f477 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -29,7 +29,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.1 # via wait-for-it -coverage==7.13.0 +coverage==7.13.1 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index b1a4034f512..4be603a46f2 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -29,7 +29,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.1 # via wait-for-it -coverage==7.13.0 +coverage==7.13.1 # via # -r requirements/test-common.in # pytest-cov From 
90f7143d02c41d8c05088b2285d0979f139b64a7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Dec 2025 10:34:10 +0000 Subject: [PATCH 65/93] Bump backports-zstd from 1.1.0 to 1.3.0 (#11878) Bumps [backports-zstd](https://github.com/rogdham/backports.zstd) from 1.1.0 to 1.3.0.
Changelog

Sourced from backports-zstd's changelog.

1.3.0 - 2025-12-29

:rocket: Added

  • Update type hints with typeshed bbddfee

:bug: Fixes

  • Fix conflict on __init__.py file on backports root module by transforming backports.zstd into an implicit namespace package (see PEP-420)
  • Raise an exception at both build and runtime when using an unsupported Python version, instead of crashing with a segmentation fault at runtime (in the rare cases where backports.zstd was installed despite the requires-python marker)

1.2.0 - 2025-12-06

:rocket: Added

  • Update code with CPython 3.14.2 version
  • Build wheels for riscv64
Commits
  • b729048 chore: prepare v1.3.0
  • 052b14a chore: update type hints from upstream
  • fd7b6ed chore: improve sync script
  • 1862c94 fix: use pep-420 implicit namespace package
  • 5d92e80 chore: exception on unsupported Python versions
  • 4356f1a chore: prepare v1.2.0
  • 8497104 chore: add CPython sync script
  • 0745dc4 feat: build wheels for riscv64
  • a6e0e9f feat: update code from upstream 3.14.2
  • 6af4e64 feat: update code from upstream 3.14.1
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=backports-zstd&package-manager=pip&previous-version=1.1.0&new-version=1.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 5 +++-- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 10 insertions(+), 9 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index a726e2563f5..3e57dbe03ae 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.3.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in @@ -42,8 +42,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -typing-extensions==4.15.0 +typing-extensions==4.15.0 ; python_version < "3.13" # via + # -r requirements/runtime-deps.in # aiosignal # multidict yarl==1.22.0 diff --git a/requirements/base.txt b/requirements/base.txt index f733fb7a8a7..22ed18b1e71 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.3.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in brotli==1.2.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt 
b/requirements/constraints.txt index f5206aa3c2c..e3f62e819f6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -26,7 +26,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -backports-zstd==1.1.0 ; implementation_name == "cpython" +backports-zstd==1.3.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 99ea3d23625..8940dea1520 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ attrs==25.4.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx -backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.3.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via # -r requirements/lint.in # -r requirements/runtime-deps.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 3b752ec4e1b..9bdb1e66c86 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -10,7 +10,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 # via valkey -backports-zstd==1.1.0 ; implementation_name == "cpython" +backports-zstd==1.3.0 ; implementation_name == "cpython" # via -r requirements/lint.in blockbuster==1.5.26 # via -r requirements/lint.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 035c3e6636e..bace48ca565 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -14,7 +14,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.3.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in brotli==1.2.0 ; platform_python_implementation == "CPython" # 
via -r requirements/runtime-deps.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 01ef1e6f477..238d46dc066 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.3.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in blockbuster==1.5.26 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 4be603a46f2..e5702f873cb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -16,7 +16,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in attrs==25.4.0 # via -r requirements/runtime-deps.in -backports-zstd==1.1.0 ; platform_python_implementation == "CPython" and python_version < "3.14" +backports-zstd==1.3.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in blockbuster==1.5.26 # via -r requirements/test-common.in From b0bdefdc71b8af6cdd35cf6b0e0781c8c6a0c3e2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Jan 2026 01:50:24 +0000 Subject: [PATCH 66/93] [PR #11857/0a915b8f backport][3.14] Fix multipart parsing for empty body parts (#11880) **This is a backport of PR #11857 as merged into master (0a915b8fb0fc02904b59ede9af8328b9ad305a1b).** Co-authored-by: Mark Larah Co-authored-by: Sam Bull --- CHANGES/11857.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 10 +++++----- tests/test_multipart.py | 27 +++++++++++++++++++++++++++ 4 files changed, 34 insertions(+), 5 deletions(-) create mode 100644 CHANGES/11857.bugfix.rst diff --git a/CHANGES/11857.bugfix.rst b/CHANGES/11857.bugfix.rst 
new file mode 100644 index 00000000000..7933efeb074 --- /dev/null +++ b/CHANGES/11857.bugfix.rst @@ -0,0 +1 @@ +Fixed multipart reading failing when encountering an empty body part -- by :user:`Dreamsorcerer`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 115e26bb428..f31a3623140 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -244,6 +244,7 @@ Marco Paolini Marcus Stojcevich Mariano Anaya Mariusz Masztalerczuk +Mark Larah Marko Kohtala Martijn Pieters Martin Melka diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e643976ce4c..4149f5560a5 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -374,7 +374,8 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: ), "Chunk size must be greater or equal than boundary length + 2" first_chunk = self._prev_chunk is None if first_chunk: - self._prev_chunk = await self._content.read(size) + # We need to re-add the CRLF that got removed from headers parsing. + self._prev_chunk = b"\r\n" + await self._content.read(size) chunk = b"" # content.read() may return less than size, so we need to loop to ensure @@ -401,12 +402,11 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=DeprecationWarning) self._content.unread_data(window[idx:]) - if size > idx: - self._prev_chunk = self._prev_chunk[:idx] + self._prev_chunk = self._prev_chunk[:idx] chunk = window[len(self._prev_chunk) : idx] if not chunk: self._at_eof = True - result = self._prev_chunk + result = self._prev_chunk[2 if first_chunk else 0 :] # Strip initial CRLF self._prev_chunk = chunk return result @@ -772,7 +772,7 @@ async def _read_headers(self) -> "CIMultiDictProxy[str]": lines = [] while True: chunk = await self._content.readline() - chunk = chunk.strip() + chunk = chunk.rstrip(b"\r\n") lines.append(chunk) if not chunk: break diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 3380a811651..ad45d28f403 100644 --- 
a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1032,6 +1032,33 @@ async def test_reading_skips_prelude(self) -> None: assert first.at_eof() assert not second.at_eof() + async def test_read_empty_body_part(self) -> None: + with Stream(b"--:\r\n\r\n--:--") as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary=":"'}, + stream, + ) + body_parts = [] + async for part in reader: + assert isinstance(part, BodyPartReader) + body_parts.append(await part.read()) + + assert body_parts == [b""] + + async def test_read_body_part_headers_only(self) -> None: + with Stream(b"--:\r\nContent-Type: text/plain\r\n\r\n--:--") as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary=":"'}, + stream, + ) + body_parts = [] + async for part in reader: + assert isinstance(part, BodyPartReader) + assert "Content-Type" in part.headers + body_parts.append(await part.read()) + + assert body_parts == [b""] + async def test_read_form_default_encoding(self) -> None: with Stream( b"--:\r\n" From 344f6f986335729d4cc1f5fd4bf01cff7127cf03 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Jan 2026 01:50:39 +0000 Subject: [PATCH 67/93] [PR #11857/0a915b8f backport][3.13] Fix multipart parsing for empty body parts (#11879) **This is a backport of PR #11857 as merged into master (0a915b8fb0fc02904b59ede9af8328b9ad305a1b).** Co-authored-by: Mark Larah Co-authored-by: Sam Bull --- CHANGES/11857.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 10 +++++----- tests/test_multipart.py | 27 +++++++++++++++++++++++++++ 4 files changed, 34 insertions(+), 5 deletions(-) create mode 100644 CHANGES/11857.bugfix.rst diff --git a/CHANGES/11857.bugfix.rst b/CHANGES/11857.bugfix.rst new file mode 100644 index 00000000000..7933efeb074 --- /dev/null +++ b/CHANGES/11857.bugfix.rst @@ -0,0 +1 @@ +Fixed multipart reading failing when encountering an empty body part -- 
by :user:`Dreamsorcerer`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 9f05d211516..0c506668d90 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -243,6 +243,7 @@ Marco Paolini Marcus Stojcevich Mariano Anaya Mariusz Masztalerczuk +Mark Larah Marko Kohtala Martijn Pieters Martin Melka diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 3464b1c2307..e6de2b88e21 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -386,7 +386,8 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: ), "Chunk size must be greater or equal than boundary length + 2" first_chunk = self._prev_chunk is None if first_chunk: - self._prev_chunk = await self._content.read(size) + # We need to re-add the CRLF that got removed from headers parsing. + self._prev_chunk = b"\r\n" + await self._content.read(size) chunk = b"" # content.read() may return less than size, so we need to loop to ensure @@ -413,12 +414,11 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=DeprecationWarning) self._content.unread_data(window[idx:]) - if size > idx: - self._prev_chunk = self._prev_chunk[:idx] + self._prev_chunk = self._prev_chunk[:idx] chunk = window[len(self._prev_chunk) : idx] if not chunk: self._at_eof = True - result = self._prev_chunk + result = self._prev_chunk[2 if first_chunk else 0 :] # Strip initial CRLF self._prev_chunk = chunk return result @@ -784,7 +784,7 @@ async def _read_headers(self) -> "CIMultiDictProxy[str]": lines = [] while True: chunk = await self._content.readline() - chunk = chunk.strip() + chunk = chunk.rstrip(b"\r\n") lines.append(chunk) if not chunk: break diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 75b73a78070..21dc935d536 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1034,6 +1034,33 @@ async def test_reading_skips_prelude(self) -> None: assert first.at_eof() assert not second.at_eof() + async def 
test_read_empty_body_part(self) -> None: + with Stream(b"--:\r\n\r\n--:--") as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary=":"'}, + stream, + ) + body_parts = [] + async for part in reader: + assert isinstance(part, BodyPartReader) + body_parts.append(await part.read()) + + assert body_parts == [b""] + + async def test_read_body_part_headers_only(self) -> None: + with Stream(b"--:\r\nContent-Type: text/plain\r\n\r\n--:--") as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/related;boundary=":"'}, + stream, + ) + body_parts = [] + async for part in reader: + assert isinstance(part, BodyPartReader) + assert "Content-Type" in part.headers + body_parts.append(await part.read()) + + assert body_parts == [b""] + async def test_read_form_default_encoding(self) -> None: + with Stream( + b"--:\r\n" From a5d6456b7168bdc0c9a7a5a1ba7e68641febe78b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 2 Jan 2026 03:07:41 +0000 Subject: [PATCH 68/93] Fixing test for Continuation frame without initial frame (#11862) (#11881) --------- (cherry picked from commit 18bc50bdca83113b20abb1820a6ae63709769247) Co-authored-by: Aditya Nath <928.aditya@gmail.com> --- CHANGES/11862.bugfix.rst | 1 + aiohttp/_websocket/reader_py.py | 12 +++++++----- tests/test_websocket_parser.py | 12 +++++------- 3 files changed, 13 insertions(+), 12 deletions(-) create mode 100644 CHANGES/11862.bugfix.rst diff --git a/CHANGES/11862.bugfix.rst b/CHANGES/11862.bugfix.rst new file mode 100644 index 00000000000..c2ce176c2c3 --- /dev/null +++ b/CHANGES/11862.bugfix.rst @@ -0,0 +1 @@ +A test for websocket parser was marked to fail, which was actually failing because the parser wasn't raising an exception for a continuation frame when there was no initial frame in context.
diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index f196020c61f..ca15c3b2b5c 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -195,6 +195,13 @@ def _handle_frame( ) -> None: msg: WSMessage if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: + # Validate continuation frames before processing + if opcode == OP_CODE_CONTINUATION and self._opcode == OP_CODE_NOT_SET: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + # load text/binary if not fin: # got partial frame payload @@ -211,11 +218,6 @@ def _handle_frame( has_partial = bool(self._partial) if opcode == OP_CODE_CONTINUATION: - if self._opcode == OP_CODE_NOT_SET: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) opcode = self._opcode self._opcode = OP_CODE_NOT_SET # previous frame was non finished diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index abf4e0ad726..d98ceb4b19c 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -231,13 +231,11 @@ def test_parse_frame_header_control_frame( raise out.exception() -@pytest.mark.xfail() -def test_parse_frame_header_new_data_err( - out: WebSocketDataQueue, parser: PatchableWebSocketReader -) -> None: - with pytest.raises(WebSocketError): - parser.parse_frame(struct.pack("!BB", 0b000000000, 0b00000000)) - raise out.exception() +def test_parse_frame_header_new_data_err(parser: PatchableWebSocketReader) -> None: + with pytest.raises(WebSocketError) as msg: + parser._feed_data(struct.pack("!BB", 0b00000000, 0b00000000)) + assert msg.value.code == WSCloseCode.PROTOCOL_ERROR + assert str(msg.value) == "Continuation frame for non started message" def test_parse_frame_header_payload_size( From fafe1eb359627aadc223beaa5858c05090ab4b60 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 2 Jan 2026 03:36:20 +0000 Subject: [PATCH 
69/93] Fixing test for Continuation frame without initial frame (#11862) (#11882) --------- (cherry picked from commit 18bc50bdca83113b20abb1820a6ae63709769247) Co-authored-by: Aditya Nath <928.aditya@gmail.com> --- CHANGES/11862.bugfix.rst | 1 + aiohttp/_websocket/reader_py.py | 12 +++++++----- tests/test_websocket_parser.py | 12 +++++------- 3 files changed, 13 insertions(+), 12 deletions(-) create mode 100644 CHANGES/11862.bugfix.rst diff --git a/CHANGES/11862.bugfix.rst b/CHANGES/11862.bugfix.rst new file mode 100644 index 00000000000..c2ce176c2c3 --- /dev/null +++ b/CHANGES/11862.bugfix.rst @@ -0,0 +1 @@ +A test for websocket parser was marked to fail, which was actually failing because the parser wasn't raising an exception for a continuation frame when there was no initial frame in context. diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index f966a1593c5..5166d7ec260 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -189,6 +189,13 @@ def _handle_frame( ) -> None: msg: WSMessage if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: + # Validate continuation frames before processing + if opcode == OP_CODE_CONTINUATION and self._opcode == OP_CODE_NOT_SET: + raise WebSocketError( + WSCloseCode.PROTOCOL_ERROR, + "Continuation frame for non started message", + ) + # load text/binary if not fin: # got partial frame payload @@ -205,11 +212,6 @@ def _handle_frame( has_partial = bool(self._partial) if opcode == OP_CODE_CONTINUATION: - if self._opcode == OP_CODE_NOT_SET: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Continuation frame for non started message", - ) opcode = self._opcode self._opcode = OP_CODE_NOT_SET # previous frame was non finished diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 37e15b64c18..60331095857 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -232,13 +232,11 @@ def
test_parse_frame_header_control_frame( raise out.exception() -@pytest.mark.xfail() -def test_parse_frame_header_new_data_err( - out: WebSocketDataQueue, parser: PatchableWebSocketReader -) -> None: - with pytest.raises(WebSocketError): - parser.parse_frame(struct.pack("!BB", 0b000000000, 0b00000000)) - raise out.exception() +def test_parse_frame_header_new_data_err(parser: PatchableWebSocketReader) -> None: + with pytest.raises(WebSocketError) as msg: + parser._feed_data(struct.pack("!BB", 0b00000000, 0b00000000)) + assert msg.value.code == WSCloseCode.PROTOCOL_ERROR + assert str(msg.value) == "Continuation frame for non started message" def test_parse_frame_header_payload_size( From a969cc52d6797366db9711b9f63c5df1f08a7fab Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Jan 2026 11:02:18 +0000 Subject: [PATCH 70/93] Bump librt from 0.7.5 to 0.7.7 (#11884) Bumps [librt](https://github.com/mypyc/librt) from 0.7.5 to 0.7.7.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=librt&package-manager=pip&previous-version=0.7.5&new-version=0.7.7)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e3f62e819f6..52f834ac9c4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -104,7 +104,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.7.5 +librt==0.7.7 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/dev.txt b/requirements/dev.txt index 8940dea1520..61437065baa 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -102,7 +102,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -librt==0.7.5 +librt==0.7.7 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/lint.txt b/requirements/lint.txt index 9bdb1e66c86..a2a3710deb2 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -43,7 +43,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 # via -r requirements/lint.in -librt==0.7.5 +librt==0.7.7 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 88341c752db..e3fd94fb778 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -34,7 +34,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.5 +librt==0.7.7 # via mypy markdown-it-py==4.0.0 # via rich diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 238d46dc066..fab9c52071c 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.5 +librt==0.7.7 # via mypy markdown-it-py==4.0.0 # via 
rich diff --git a/requirements/test.txt b/requirements/test.txt index e5702f873cb..cbc6806568f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in -librt==0.7.5 +librt==0.7.7 # via mypy markdown-it-py==4.0.0 # via rich From f7a7043ec25a5af6b9b2bcb24677adba177b2660 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Jan 2026 20:09:39 +0000 Subject: [PATCH 71/93] [PR #11893/fb93442c backport][3.14] Add tests for static route resolution with trailing slash (#11896) Co-authored-by: J. Nick Koston --- tests/test_web_urldispatcher.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 1daa3f496fd..9fc00a4ad37 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -49,6 +49,26 @@ b"my_file_in_dir\n\n\n", id="index_subdir", ), + pytest.param( + True, + 200, + "/static", + "/static/", + b"\n\nIndex of /.\n\n\n

Index of" + b' /.

\n\n\n', + id="index_static_trailing_slash", + ), + pytest.param( + True, + 200, + "/static", + "/static/my_dir/", + b"\n\nIndex of /my_dir\n\n\n

" + b'Index of /my_dir

\n\n\n", + id="index_subdir_trailing_slash", + ), ], ) async def test_access_root_of_static_handler( From 14d0c813e7b5ad5468c36418257692ee39201f00 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Jan 2026 20:12:44 +0000 Subject: [PATCH 72/93] [PR #11893/fb93442c backport][3.13] Add tests for static route resolution with trailing slash (#11895) Co-authored-by: J. Nick Koston --- tests/test_web_urldispatcher.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 11ec47c1730..311849ffce8 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -48,6 +48,26 @@ b"my_file_in_dir\n\n\n", id="index_subdir", ), + pytest.param( + True, + 200, + "/static", + "/static/", + b"\n\nIndex of /.\n\n\n

Index of" + b' /.

\n\n\n', + id="index_static_trailing_slash", + ), + pytest.param( + True, + 200, + "/static", + "/static/my_dir/", + b"\n\nIndex of /my_dir\n\n\n

" + b'Index of /my_dir

\n\n\n", + id="index_subdir_trailing_slash", + ), ], ) async def test_access_root_of_static_handler( From 6dde72efcb77433646fcfadc7951ea05a5ecda22 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 2 Jan 2026 23:35:04 +0000 Subject: [PATCH 73/93] Improve regex performance (#11885) (#11899) (cherry picked from commit bcf0a36e296d4311ce61df4966ab7ff3cb6635fa) --- aiohttp/_cookie_helpers.py | 2 +- aiohttp/client_middleware_digest_auth.py | 40 +++++++++++---------- aiohttp/web_request.py | 1 + tests/test_client_middleware_digest_auth.py | 14 ++++++++ tests/test_cookie_helpers.py | 21 ++++++++--- tests/test_web_request.py | 15 +++++++- 6 files changed, 68 insertions(+), 25 deletions(-) diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 6628a3c5d0c..1dbe9ad7655 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -52,7 +52,7 @@ \s* # Optional whitespace at start of cookie (?P # Start of group 'key' # aiohttp has extended to include [] for compatibility with real-world cookies - [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]+? # Any word of at least one letter + [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\[\]]+ # Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. 
\s*=\s* # Equal Sign diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index 18d47c96219..43e398ebd11 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -10,6 +10,7 @@ import hashlib import os import re +import sys import time from collections.abc import Callable from typing import Final, Literal, TypedDict @@ -51,24 +52,27 @@ class DigestAuthChallenge(TypedDict, total=False): # Compile the regex pattern once at module level for performance _HEADER_PAIRS_PATTERN = re.compile( - r'(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' - # | | | | | | | | | || | - # +----|--|-|-|--|----|------|----|--||-----|--> alphanumeric key - # +--|-|-|--|----|------|----|--||-----|--> maybe whitespace - # | | | | | | | || | - # +-|-|--|----|------|----|--||-----|--> = (delimiter) - # +-|--|----|------|----|--||-----|--> maybe whitespace - # | | | | | || | - # +--|----|------|----|--||-----|--> group quoted or unquoted - # | | | | || | - # +----|------|----|--||-----|--> if quoted... - # +------|----|--||-----|--> anything but " or \ - # +----|--||-----|--> escaped characters allowed - # +--||-----|--> or can be empty string - # || | - # +|-----|--> if unquoted... 
- # +-----|--> anything but , or - # +--> at least one char req'd + r'(?:^|\s|,\s*)(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' + if sys.version_info < (3, 11) + else r'(?:^|\s|,\s*)((?>\w+))\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' + # +------------|--------|--|-|-|--|----|------|----|--||-----|-> Match valid start/sep + # +--------|--|-|-|--|----|------|----|--||-----|-> alphanumeric key (atomic + # | | | | | | | | || | group reduces backtracking) + # +--|-|-|--|----|------|----|--||-----|-> maybe whitespace + # | | | | | | | || | + # +-|-|--|----|------|----|--||-----|-> = (delimiter) + # +-|--|----|------|----|--||-----|-> maybe whitespace + # | | | | | || | + # +--|----|------|----|--||-----|-> group quoted or unquoted + # | | | | || | + # +----|------|----|--||-----|-> if quoted... + # +------|----|--||-----|-> anything but " or \ + # +----|--||-----|-> escaped characters allowed + # +--||-----|-> or can be empty string + # || | + # +|-----|-> if unquoted... + # +-----|-> anything but , or + # +-> at least one char req'd ) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index dea5eb34e55..2cc69637b82 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -90,6 +90,7 @@ class FileField: _QUOTED_STRING: Final[str] = rf'"(?:{_QUOTED_PAIR}|{_QDTEXT})*"' +# This does not have a ReDOS/performance concern as long as it used with re.match(). _FORWARDED_PAIR: Final[str] = rf"({_TOKEN})=({_TOKEN}|{_QUOTED_STRING})(:\d{{1,4}})?" 
_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index 064d4d78239..c15bf1b422e 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -2,6 +2,7 @@ import io import re +import time from collections.abc import Generator from hashlib import md5, sha1 from typing import Literal @@ -13,6 +14,7 @@ from aiohttp import ClientSession, hdrs from aiohttp.client_exceptions import ClientError from aiohttp.client_middleware_digest_auth import ( + _HEADER_PAIRS_PATTERN, DigestAuthChallenge, DigestAuthMiddleware, DigestFunctions, @@ -1327,3 +1329,15 @@ async def handler(request: Request) -> Response: assert request_count == 2 # Initial 401 + successful retry assert len(auth_algorithms) == 1 assert auth_algorithms[0] == "MD5-sess" # Not "MD5-SESS" + + +def test_regex_performance() -> None: + value = "0" * 54773 + "\\0=a" + start = time.perf_counter() + matches = _HEADER_PAIRS_PATTERN.findall(value) + end = time.perf_counter() + + # If this is taking more than 10ms, there's probably a performance/ReDoS issue. + assert (end - start) < 0.01 + # This example probably shouldn't produce a match either. 
+ assert not matches diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 577e3156560..8dbdd5ccb3d 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1,6 +1,7 @@ """Tests for internal cookie helper functions.""" import sys +import time from http.cookies import ( CookieError, Morsel, @@ -635,6 +636,18 @@ def test_cookie_pattern_matches_partitioned_attribute(test_string: str) -> None: assert match.group("key").lower() == "partitioned" +def test_cookie_pattern_performance() -> None: + value = "a" + "=" * 21651 + "\x00" + start = time.perf_counter() + match = helpers._COOKIE_PATTERN.match(value) + end = time.perf_counter() + + # If this is taking more than 10ms, there's probably a performance/ReDoS issue. + assert (end - start) < 0.01 + # This example shouldn't produce a match either. + assert match is None + + def test_parse_set_cookie_headers_issue_7993_double_quotes() -> None: """ Test that cookies with unmatched opening quotes don't break parsing of subsequent cookies. 
@@ -1299,11 +1312,9 @@ def test_parse_cookie_header_malformed() -> None: # Missing name header = "=value; name=value2" result = parse_cookie_header(header) - assert len(result) == 2 - assert result[0][0] == "=value" - assert result[0][1].value == "" - assert result[1][0] == "name" - assert result[1][1].value == "value2" + assert len(result) == 1 + assert result[0][0] == "name" + assert result[0][1].value == "value2" def test_parse_cookie_header_complex_quoted() -> None: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index d98fb58170f..1d963444853 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -2,6 +2,7 @@ import datetime import socket import sys +import time from collections.abc import Iterator, MutableMapping from typing import Any from unittest import mock @@ -15,7 +16,7 @@ from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request from aiohttp.web import BaseRequest, HTTPRequestEntityTooLarge, Request, RequestKey -from aiohttp.web_request import ETag +from aiohttp.web_request import _FORWARDED_PAIR_RE, ETag @pytest.fixture @@ -614,6 +615,18 @@ def test_single_forwarded_header() -> None: assert req.forwarded[0]["proto"] == "identifier" +def test_forwarded_re_performance() -> None: + value = "{" + "f" * 54773 + "z\x00a=v" + start = time.perf_counter() + match = _FORWARDED_PAIR_RE.match(value) + end = time.perf_counter() + + # If this is taking more than 10ms, there's probably a performance/ReDoS issue. + assert (end - start) < 0.01 + # This example shouldn't produce a match either. 
+ assert match is None + + @pytest.mark.parametrize( "forward_for_in, forward_for_out", [ From ff3f0e2f654f60cad8e25c2719ddc4e33693205b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 2 Jan 2026 23:36:13 +0000 Subject: [PATCH 74/93] Reject non-ascii characters in some headers (#11886) (#11901) (cherry picked from commit 5affd64f86d28a16a8f8e6fea2d217c99bf7831f) --- aiohttp/_http_parser.pyx | 6 +++--- aiohttp/http_parser.py | 16 +++++++++------- tests/test_http_parser.py | 31 ++++++++++++++++++++++++++++++- 3 files changed, 42 insertions(+), 11 deletions(-) diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index b2ddeb30d83..4a7101edbcb 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -419,7 +419,8 @@ cdef class HttpParser: headers = CIMultiDictProxy(CIMultiDict(self._headers)) if self._cparser.type == cparser.HTTP_REQUEST: - allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + h_upg = headers.get("upgrade", "") + allowed = upgrade and h_upg.isascii() and h_upg.lower() in ALLOWED_UPGRADES if allowed or self._cparser.method == cparser.HTTP_CONNECT: self._upgraded = True else: @@ -434,8 +435,7 @@ cdef class HttpParser: enc = self._content_encoding if enc is not None: self._content_encoding = None - enc = enc.lower() - if enc in ('gzip', 'deflate', 'br', 'zstd'): + if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: encoding = enc if self._cparser.type == cparser.HTTP_REQUEST: diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 98097dc76ce..0aa78959523 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -224,7 +224,9 @@ def parse_headers( def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: """Check if the upgrade header is supported.""" - return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + u = headers.get(hdrs.UPGRADE, "") + # .lower() can transform non-ascii characters. 
+ return u.isascii() and u.lower() in {"tcp", "websocket"} class HttpParser(abc.ABC, Generic[_MsgT]): @@ -535,11 +537,9 @@ def parse_headers( upgrade = True # encoding - enc = headers.get(hdrs.CONTENT_ENCODING) - if enc: - enc = enc.lower() - if enc in ("gzip", "deflate", "br", "zstd"): - encoding = enc + enc = headers.get(hdrs.CONTENT_ENCODING, "") + if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: + encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) @@ -656,7 +656,9 @@ def parse_message(self, lines: list[bytes]) -> RawRequestMessage: ) def _is_chunked_te(self, te: str) -> bool: - if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + te = te.rsplit(",", maxsplit=1)[-1].strip(" \t") + # .lower() transforms some non-ascii chars, so must check first. + if te.isascii() and te.lower() == "chunked": return True # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 raise BadHttpMessage("Request has invalid `Transfer-Encoding`") diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 35390232d1e..d11f991f302 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -476,7 +476,21 @@ def test_request_chunked(parser) -> None: assert isinstance(payload, streams.StreamReader) -def test_request_te_chunked_with_content_length(parser: Any) -> None: +def test_te_header_non_ascii(parser: HttpRequestParser) -> None: + # K = Kelvin sign, not valid ascii. + text = "GET /test HTTP/1.1\r\nTransfer-Encoding: chunKed\r\n\r\n" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text.encode()) + + +def test_upgrade_header_non_ascii(parser: HttpRequestParser) -> None: + # K = Kelvin sign, not valid ascii. 
+ text = "GET /test HTTP/1.1\r\nUpgrade: websocKet\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text.encode()) + assert not upgrade + + +def test_request_te_chunked_with_content_length(parser: HttpRequestParser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"content-length: 1234\r\n" @@ -574,6 +588,21 @@ def test_compression_zstd(parser: HttpRequestParser) -> None: assert msg.compression == "zstd" +@pytest.mark.parametrize( + "enc", + ( + "zstd".encode(), # "st".upper() == "ST" + "deflate".encode(), # "fl".upper() == "FL" + ), +) +def test_compression_non_ascii(parser: HttpRequestParser, enc: bytes) -> None: + text = b"GET /test HTTP/1.1\r\ncontent-encoding: " + enc + b"\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + msg = messages[0][0] + # Non-ascii input should not evaluate to a valid encoding scheme. + assert msg.compression is None + + def test_compression_unknown(parser: HttpRequestParser) -> None: text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) From 15a367e032fb18ef5a16158baa0ce348284da464 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 00:01:45 +0000 Subject: [PATCH 75/93] Improve regex performance (#11885) (#11900) (cherry picked from commit bcf0a36e296d4311ce61df4966ab7ff3cb6635fa) --- aiohttp/_cookie_helpers.py | 2 +- aiohttp/client_middleware_digest_auth.py | 40 +++++++++++---------- tests/test_client_middleware_digest_auth.py | 14 ++++++++ tests/test_cookie_helpers.py | 21 ++++++++--- tests/test_web_request.py | 15 +++++++- 5 files changed, 67 insertions(+), 25 deletions(-) diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 837893e5626..19a495a96a8 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -51,7 +51,7 @@ \s* # Optional whitespace at start of cookie (?P # Start of group 'key' # aiohttp has extended to include [] for compatibility with real-world cookies - 
[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]]+? # Any word of at least one letter + [\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\[\]]+ # Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index c1ed7ca0fdd..5aab5acb85a 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -10,6 +10,7 @@ import hashlib import os import re +import sys import time from typing import ( Callable, @@ -60,24 +61,27 @@ class DigestAuthChallenge(TypedDict, total=False): # Compile the regex pattern once at module level for performance _HEADER_PAIRS_PATTERN = re.compile( - r'(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' - # | | | | | | | | | || | - # +----|--|-|-|--|----|------|----|--||-----|--> alphanumeric key - # +--|-|-|--|----|------|----|--||-----|--> maybe whitespace - # | | | | | | | || | - # +-|-|--|----|------|----|--||-----|--> = (delimiter) - # +-|--|----|------|----|--||-----|--> maybe whitespace - # | | | | | || | - # +--|----|------|----|--||-----|--> group quoted or unquoted - # | | | | || | - # +----|------|----|--||-----|--> if quoted... - # +------|----|--||-----|--> anything but " or \ - # +----|--||-----|--> escaped characters allowed - # +--||-----|--> or can be empty string - # || | - # +|-----|--> if unquoted... 
- # +-----|--> anything but , or - # +--> at least one char req'd + r'(?:^|\s|,\s*)(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' + if sys.version_info < (3, 11) + else r'(?:^|\s|,\s*)((?>\w+))\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))' + # +------------|--------|--|-|-|--|----|------|----|--||-----|-> Match valid start/sep + # +--------|--|-|-|--|----|------|----|--||-----|-> alphanumeric key (atomic + # | | | | | | | | || | group reduces backtracking) + # +--|-|-|--|----|------|----|--||-----|-> maybe whitespace + # | | | | | | | || | + # +-|-|--|----|------|----|--||-----|-> = (delimiter) + # +-|--|----|------|----|--||-----|-> maybe whitespace + # | | | | | || | + # +--|----|------|----|--||-----|-> group quoted or unquoted + # | | | | || | + # +----|------|----|--||-----|-> if quoted... + # +------|----|--||-----|-> anything but " or \ + # +----|--||-----|-> escaped characters allowed + # +--||-----|-> or can be empty string + # || | + # +|-----|-> if unquoted... + # +-----|-> anything but , or + # +-> at least one char req'd ) diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index 2059bfea337..40ebadf6e37 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -2,6 +2,7 @@ import io import re +import time from hashlib import md5, sha1 from typing import Generator, Literal, Union from unittest import mock @@ -12,6 +13,7 @@ from aiohttp import ClientSession, hdrs from aiohttp.client_exceptions import ClientError from aiohttp.client_middleware_digest_auth import ( + _HEADER_PAIRS_PATTERN, DigestAuthChallenge, DigestAuthMiddleware, DigestFunctions, @@ -1326,3 +1328,15 @@ async def handler(request: Request) -> Response: assert request_count == 2 # Initial 401 + successful retry assert len(auth_algorithms) == 1 assert auth_algorithms[0] == "MD5-sess" # Not "MD5-SESS" + + +def test_regex_performance() -> None: + value = "0" * 54773 + "\\0=a" + start = 
time.perf_counter() + matches = _HEADER_PAIRS_PATTERN.findall(value) + end = time.perf_counter() + + # If this is taking more than 10ms, there's probably a performance/ReDoS issue. + assert (end - start) < 0.01 + # This example probably shouldn't produce a match either. + assert not matches diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 577e3156560..8dbdd5ccb3d 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1,6 +1,7 @@ """Tests for internal cookie helper functions.""" import sys +import time from http.cookies import ( CookieError, Morsel, @@ -635,6 +636,18 @@ def test_cookie_pattern_matches_partitioned_attribute(test_string: str) -> None: assert match.group("key").lower() == "partitioned" +def test_cookie_pattern_performance() -> None: + value = "a" + "=" * 21651 + "\x00" + start = time.perf_counter() + match = helpers._COOKIE_PATTERN.match(value) + end = time.perf_counter() + + # If this is taking more than 10ms, there's probably a performance/ReDoS issue. + assert (end - start) < 0.01 + # This example shouldn't produce a match either. + assert match is None + + def test_parse_set_cookie_headers_issue_7993_double_quotes() -> None: """ Test that cookies with unmatched opening quotes don't break parsing of subsequent cookies. 
@@ -1299,11 +1312,9 @@ def test_parse_cookie_header_malformed() -> None: # Missing name header = "=value; name=value2" result = parse_cookie_header(header) - assert len(result) == 2 - assert result[0][0] == "=value" - assert result[0][1].value == "" - assert result[1][0] == "name" - assert result[1][1].value == "value2" + assert len(result) == 1 + assert result[0][0] == "name" + assert result[0][1].value == "value2" def test_parse_cookie_header_complex_quoted() -> None: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index e706e18dee5..00d22b8e9ab 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -1,6 +1,7 @@ import asyncio import datetime import socket +import time from collections.abc import MutableMapping from typing import Any from unittest import mock @@ -14,7 +15,7 @@ from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request from aiohttp.web import BaseRequest, HTTPRequestEntityTooLarge -from aiohttp.web_request import ETag +from aiohttp.web_request import _FORWARDED_PAIR_RE, ETag @pytest.fixture @@ -530,6 +531,18 @@ def test_single_forwarded_header() -> None: assert req.forwarded[0]["proto"] == "identifier" +def test_forwarded_re_performance() -> None: + value = "{" + "f" * 54773 + "z\x00a=v" + start = time.perf_counter() + match = _FORWARDED_PAIR_RE.match(value) + end = time.perf_counter() + + # If this is taking more than 10ms, there's probably a performance/ReDoS issue. + assert (end - start) < 0.01 + # This example shouldn't produce a match either. 
+ assert match is None + + @pytest.mark.parametrize( "forward_for_in, forward_for_out", [ From 32677f2adfd907420c078dda6b79225c6f4ebce0 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 00:02:45 +0000 Subject: [PATCH 76/93] Reject non-ascii characters in some headers (#11886) (#11902) (cherry picked from commit 5affd64f86d28a16a8f8e6fea2d217c99bf7831f) --- aiohttp/_http_parser.pyx | 6 +++--- aiohttp/http_parser.py | 16 +++++++++------- tests/test_http_parser.py | 31 ++++++++++++++++++++++++++++++- 3 files changed, 42 insertions(+), 11 deletions(-) diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index b2ddeb30d83..4a7101edbcb 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -419,7 +419,8 @@ cdef class HttpParser: headers = CIMultiDictProxy(CIMultiDict(self._headers)) if self._cparser.type == cparser.HTTP_REQUEST: - allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES + h_upg = headers.get("upgrade", "") + allowed = upgrade and h_upg.isascii() and h_upg.lower() in ALLOWED_UPGRADES if allowed or self._cparser.method == cparser.HTTP_CONNECT: self._upgraded = True else: @@ -434,8 +435,7 @@ cdef class HttpParser: enc = self._content_encoding if enc is not None: self._content_encoding = None - enc = enc.lower() - if enc in ('gzip', 'deflate', 'br', 'zstd'): + if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: encoding = enc if self._cparser.type == cparser.HTTP_REQUEST: diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 9a2c00e6542..90327dbe661 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -238,7 +238,9 @@ def parse_headers( def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: """Check if the upgrade header is supported.""" - return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + u = headers.get(hdrs.UPGRADE, "") + # .lower() can transform non-ascii characters. 
+ return u.isascii() and u.lower() in {"tcp", "websocket"} class HttpParser(abc.ABC, Generic[_MsgT]): @@ -549,11 +551,9 @@ def parse_headers( upgrade = True # encoding - enc = headers.get(hdrs.CONTENT_ENCODING) - if enc: - enc = enc.lower() - if enc in ("gzip", "deflate", "br", "zstd"): - encoding = enc + enc = headers.get(hdrs.CONTENT_ENCODING, "") + if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: + encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) @@ -670,7 +670,9 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: ) def _is_chunked_te(self, te: str) -> bool: - if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + te = te.rsplit(",", maxsplit=1)[-1].strip(" \t") + # .lower() transforms some non-ascii chars, so must check first. + if te.isascii() and te.lower() == "chunked": return True # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 raise BadHttpMessage("Request has invalid `Transfer-Encoding`") diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 7717e56f45e..8841a4c6a30 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -476,7 +476,21 @@ def test_request_chunked(parser) -> None: assert isinstance(payload, streams.StreamReader) -def test_request_te_chunked_with_content_length(parser: Any) -> None: +def test_te_header_non_ascii(parser: HttpRequestParser) -> None: + # K = Kelvin sign, not valid ascii. + text = "GET /test HTTP/1.1\r\nTransfer-Encoding: chunKed\r\n\r\n" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text.encode()) + + +def test_upgrade_header_non_ascii(parser: HttpRequestParser) -> None: + # K = Kelvin sign, not valid ascii. 
+ text = "GET /test HTTP/1.1\r\nUpgrade: websocKet\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text.encode()) + assert not upgrade + + +def test_request_te_chunked_with_content_length(parser: HttpRequestParser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"content-length: 1234\r\n" @@ -574,6 +588,21 @@ def test_compression_zstd(parser: HttpRequestParser) -> None: assert msg.compression == "zstd" +@pytest.mark.parametrize( + "enc", + ( + "zstd".encode(), # "st".upper() == "ST" + "deflate".encode(), # "fl".upper() == "FL" + ), +) +def test_compression_non_ascii(parser: HttpRequestParser, enc: bytes) -> None: + text = b"GET /test HTTP/1.1\r\ncontent-encoding: " + enc + b"\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + msg = messages[0][0] + # Non-ascii input should not evaluate to a valid encoding scheme. + assert msg.compression is None + + def test_compression_unknown(parser: HttpRequestParser) -> None: text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) From 6d76ac39a52129be4e5e47532c8d50ec2d4fe872 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 00:30:40 +0000 Subject: [PATCH 77/93] [PR #11887/7a067d19 backport][3.14] Reject non-ascii digits in Range header (#11904) **This is a backport of PR #11887 as merged into master (7a067d1905e1eeb921a50010dd0004990dbb3bf0).** Co-authored-by: Sam Bull --- aiohttp/web_request.py | 2 +- tests/test_web_request.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 2cc69637b82..674b3269ca0 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -616,7 +616,7 @@ def http_range(self) -> slice: if rng is not None: try: pattern = r"^bytes=(\d*)-(\d*)$" - start, end = re.findall(pattern, rng)[0] + start, end = re.findall(pattern, rng, re.ASCII)[0] except IndexError: # pattern was not found 
in header raise ValueError("range not in acceptable format") diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 1d963444853..ea60f41a921 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -245,6 +245,13 @@ def bytes_gen(size): assert req.content[req.http_range] == payload[-500:] +def test_range_non_ascii() -> None: + # ५ = DEVANAGARI DIGIT FIVE + req = make_mocked_request("GET", "/", headers=CIMultiDict([("RANGE", "bytes=4-५")])) + with pytest.raises(ValueError, match="range not in acceptable format"): + req.http_range + + def test_non_keepalive_on_http10() -> None: req = make_mocked_request("GET", "/", version=HttpVersion(1, 0)) assert not req.keep_alive From c7b7a044f88c71cefda95ec75cdcfaa4792b3b96 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 00:39:41 +0000 Subject: [PATCH 78/93] [PR #11887/7a067d19 backport][3.13] Reject non-ascii digits in Range header (#11903) **This is a backport of PR #11887 as merged into master (7a067d1905e1eeb921a50010dd0004990dbb3bf0).** Co-authored-by: Sam Bull --- aiohttp/web_request.py | 2 +- tests/test_web_request.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 0bc69b74db9..6e090270d1a 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -607,7 +607,7 @@ def http_range(self) -> slice: if rng is not None: try: pattern = r"^bytes=(\d*)-(\d*)$" - start, end = re.findall(pattern, rng)[0] + start, end = re.findall(pattern, rng, re.ASCII)[0] except IndexError: # pattern was not found in header raise ValueError("range not in acceptable format") diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 00d22b8e9ab..22871d2687b 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -244,6 +244,13 @@ def bytes_gen(size): assert req.content[req.http_range] == payload[-500:] +def 
test_range_non_ascii() -> None: + # ५ = DEVANAGARI DIGIT FIVE + req = make_mocked_request("GET", "/", headers=CIMultiDict([("RANGE", "bytes=4-५")])) + with pytest.raises(ValueError, match="range not in acceptable format"): + req.http_range + + def test_non_keepalive_on_http10() -> None: req = make_mocked_request("GET", "/", version=HttpVersion(1, 0)) assert not req.keep_alive From f2a86fd5ac0383000d1715afddfa704413f0711e Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 01:55:05 +0000 Subject: [PATCH 79/93] Reject static URLs that traverse outside static root (#11888) (#11906) (cherry picked from commit 63961fa77fa2443109f25c3d8ab94772d3878626) Co-authored-by: J. Nick Koston --- aiohttp/web_urldispatcher.py | 18 +++++++++--------- tests/test_urldispatch.py | 18 +++++++++++++++++- tests/test_web_sendfile_functional.py | 2 +- tests/test_web_urldispatcher.py | 4 ++-- 4 files changed, 29 insertions(+), 13 deletions(-) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 8213456c5f5..cfa57a31004 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -7,6 +7,7 @@ import inspect import keyword import os +import platform import re import sys import warnings @@ -94,6 +95,7 @@ ) PATH_SEP: Final[str] = re.escape("/") +IS_WINDOWS: Final[bool] = platform.system() == "Windows" _ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] @@ -651,7 +653,12 @@ def set_options_route(self, handler: Handler) -> None: async def resolve(self, request: Request) -> _Resolve: path = request.rel_url.path_safe method = request.method - if not path.startswith(self._prefix2) and path != self._prefix: + # We normalise here to avoid matches that traverse below the static root. + # e.g. 
/static/../../../../home/user/webapp/static/ + norm_path = os.path.normpath(path) + if IS_WINDOWS: + norm_path = norm_path.replace("\\", "/") + if not norm_path.startswith(self._prefix2) and norm_path != self._prefix: return None, set() allowed_methods = self._allowed_methods @@ -668,14 +675,7 @@ def __iter__(self) -> Iterator[AbstractRoute]: return iter(self._routes.values()) async def _handle(self, request: Request) -> StreamResponse: - rel_url = request.match_info["filename"] - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden() - + filename = request.match_info["filename"] unresolved_path = self._directory.joinpath(filename) loop = asyncio.get_running_loop() return await loop.run_in_executor( diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index ba6bdff23a0..e329ea2cb87 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,4 +1,5 @@ import pathlib +import platform import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized from typing import NoReturn @@ -1041,7 +1042,22 @@ async def test_405_for_resource_adapter(router) -> None: assert (None, {"HEAD", "GET"}) == ret -async def test_check_allowed_method_for_found_resource(router) -> None: +@pytest.mark.skipif(platform.system() == "Windows", reason="Different path formats") +async def test_static_resource_outside_traversal(router: web.UrlDispatcher) -> None: + """Test relative path traversing outside root does not resolve.""" + static_file = pathlib.Path(aiohttp.__file__) + request_path = "/st" + "/.." 
* (len(static_file.parts) - 2) + str(static_file) + assert pathlib.Path(request_path).resolve() == static_file + + resource = router.add_static("/st", static_file.parent) + ret = await resource.resolve(make_mocked_request("GET", request_path)) + # Should not resolve, otherwise filesystem information may be leaked. + assert (None, set()) == ret + + +async def test_check_allowed_method_for_found_resource( + router: web.UrlDispatcher, +) -> None: handler = make_handler() resource = router.add_resource("/") resource.add_route("GET", handler) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 0325a4658e2..3207623a0d8 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -638,7 +638,7 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: url_abspath = "/static/" + str(full_path.resolve()) resp = await client.get(url_abspath) - assert 403 == resp.status + assert resp.status == 404 await resp.release() await client.close() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 311849ffce8..efaecfd6468 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -858,8 +858,8 @@ async def test_static_absolute_url( here = pathlib.Path(__file__).parent app.router.add_static("/static", here) client = await aiohttp_client(app) - resp = await client.get("/static/" + str(file_path.resolve())) - assert resp.status == 403 + async with client.get("/static/" + str(file_path.resolve())) as resp: + assert resp.status == 404 async def test_for_issue_5250( From 94fb7fcfce9cdd5fc7ab3c8d0f9bfa394be69eb1 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 02:28:04 +0000 Subject: [PATCH 80/93] Reject static URLs that traverse outside static root (#11888) (#11905) (cherry picked from commit 63961fa77fa2443109f25c3d8ab94772d3878626) Co-authored-by: J. 
Nick Koston --- aiohttp/web_urldispatcher.py | 18 +++++++++--------- tests/test_urldispatch.py | 18 +++++++++++++++++- tests/test_web_sendfile_functional.py | 2 +- tests/test_web_urldispatcher.py | 4 ++-- 4 files changed, 29 insertions(+), 13 deletions(-) diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index e50b5212bb5..01ee8a7808b 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -7,6 +7,7 @@ import inspect import keyword import os +import platform import re import sys import warnings @@ -78,6 +79,7 @@ ) PATH_SEP: Final[str] = re.escape("/") +IS_WINDOWS: Final[bool] = platform.system() == "Windows" _ExpectHandler = Callable[[Request], Awaitable[StreamResponse | None]] _Resolve = tuple[Optional["UrlMappingMatchInfo"], set[str]] @@ -629,7 +631,12 @@ def set_options_route(self, handler: Handler) -> None: async def resolve(self, request: Request) -> _Resolve: path = request.rel_url.path_safe method = request.method - if not path.startswith(self._prefix2) and path != self._prefix: + # We normalise here to avoid matches that traverse below the static root. + # e.g. 
/static/../../../../home/user/webapp/static/ + norm_path = os.path.normpath(path) + if IS_WINDOWS: + norm_path = norm_path.replace("\\", "/") + if not norm_path.startswith(self._prefix2) and norm_path != self._prefix: return None, set() allowed_methods = self._allowed_methods @@ -646,14 +653,7 @@ def __iter__(self) -> Iterator[AbstractRoute]: return iter(self._routes.values()) async def _handle(self, request: Request) -> StreamResponse: - rel_url = request.match_info["filename"] - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden() - + filename = request.match_info["filename"] unresolved_path = self._directory.joinpath(filename) loop = asyncio.get_running_loop() return await loop.run_in_executor( diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index ba6bdff23a0..e329ea2cb87 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,4 +1,5 @@ import pathlib +import platform import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized from typing import NoReturn @@ -1041,7 +1042,22 @@ async def test_405_for_resource_adapter(router) -> None: assert (None, {"HEAD", "GET"}) == ret -async def test_check_allowed_method_for_found_resource(router) -> None: +@pytest.mark.skipif(platform.system() == "Windows", reason="Different path formats") +async def test_static_resource_outside_traversal(router: web.UrlDispatcher) -> None: + """Test relative path traversing outside root does not resolve.""" + static_file = pathlib.Path(aiohttp.__file__) + request_path = "/st" + "/.." 
* (len(static_file.parts) - 2) + str(static_file) + assert pathlib.Path(request_path).resolve() == static_file + + resource = router.add_static("/st", static_file.parent) + ret = await resource.resolve(make_mocked_request("GET", request_path)) + # Should not resolve, otherwise filesystem information may be leaked. + assert (None, set()) == ret + + +async def test_check_allowed_method_for_found_resource( + router: web.UrlDispatcher, +) -> None: handler = make_handler() resource = router.add_resource("/") resource.add_route("GET", handler) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 36d4d54032f..56899d118ee 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -639,7 +639,7 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: url_abspath = "/static/" + str(full_path.resolve()) resp = await client.get(url_abspath) - assert 403 == resp.status + assert resp.status == 404 await resp.release() await client.close() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 9fc00a4ad37..85138a343b0 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -859,8 +859,8 @@ async def test_static_absolute_url( here = pathlib.Path(__file__).parent app.router.add_static("/static", here) client = await aiohttp_client(app) - resp = await client.get("/static/" + str(file_path.resolve())) - assert resp.status == 403 + async with client.get("/static/" + str(file_path.resolve())) as resp: + assert resp.status == 404 async def test_for_issue_5250( From fae376c571776bcfd49eb2b235fe72b1ccf5c953 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 02:35:25 +0000 Subject: [PATCH 81/93] Enforce client_max_size over entire multipart form (#11889) (#11907) (cherry picked from commit ed90718fab5d34c127a283e10385f19440df7dd0) --- aiohttp/web_request.py | 2 +- tests/test_web_functional.py | 4 ++-- 2 files 
changed, 3 insertions(+), 3 deletions(-) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 674b3269ca0..1c24d9b749e 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -730,9 +730,9 @@ async def post(self) -> "MultiDictProxy[str | bytes | FileField]": multipart = await self.multipart() max_size = self._client_max_size + size = 0 field = await multipart.next() while field is not None: - size = 0 field_ct = field.headers.get(hdrs.CONTENT_TYPE) if isinstance(field, BodyPartReader): diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index d3b139fce85..0d9ab86f036 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -1706,8 +1706,8 @@ async def handler(request): await resp.release() -async def test_app_max_client_size_adjusted(aiohttp_client) -> None: - async def handler(request): +async def test_app_max_client_size_adjusted(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: await request.post() return web.Response(body=b"ok") From b7dbd35375aedbcd712cbae8ad513d56d11cce60 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 02:48:45 +0000 Subject: [PATCH 82/93] Enforce client_max_size over entire multipart form (#11889) (#11908) (cherry picked from commit ed90718fab5d34c127a283e10385f19440df7dd0) --- aiohttp/web_request.py | 2 +- tests/test_web_functional.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 6e090270d1a..745d70fc351 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -721,9 +721,9 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": multipart = await self.multipart() max_size = self._client_max_size + size = 0 field = await multipart.next() while field is not None: - size = 0 field_ct = field.headers.get(hdrs.CONTENT_TYPE) if isinstance(field, BodyPartReader): diff --git a/tests/test_web_functional.py 
b/tests/test_web_functional.py index e0f123def0d..fe9cce27b8e 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -1705,8 +1705,8 @@ async def handler(request): await resp.release() -async def test_app_max_client_size_adjusted(aiohttp_client) -> None: - async def handler(request): +async def test_app_max_client_size_adjusted(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: await request.post() return web.Response(body=b"ok") From 19335712fabc4678e2ae5484e06127a93608d3fd Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 02:51:44 +0000 Subject: [PATCH 83/93] [PR #11890/384a1730 backport][3.14] Log once per cookie header (#11910) **This is a backport of PR #11890 as merged into master (384a173022c9d057110c1418c5c4ff83a321900f).** Co-authored-by: Sam Bull --- aiohttp/_cookie_helpers.py | 12 ++++++++---- tests/test_cookie_helpers.py | 20 +++++++++++++------- tests/test_web_request.py | 17 +++++++++++++++++ 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 1dbe9ad7655..aca86b7f771 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -185,6 +185,7 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: i = 0 n = len(header) + invalid_names = [] while i < n: # Use the same pattern as parse_set_cookie_headers to find cookies match = _COOKIE_PATTERN.match(header, i) @@ -202,9 +203,7 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: # Validate the name (same as regex path) if not _COOKIE_NAME_RE.match(key): - internal_logger.warning( - "Can not load cookie: Illegal cookie name %r", key - ) + invalid_names.append(key) else: morsel = Morsel() morsel.__setstate__( # type: ignore[attr-defined] @@ -222,7 +221,7 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: # Validate 
the name if not key or not _COOKIE_NAME_RE.match(key): - internal_logger.warning("Can not load cookie: Illegal cookie name %r", key) + invalid_names.append(key) continue # Create new morsel @@ -238,6 +237,11 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: cookies.append((key, morsel)) + if invalid_names: + internal_logger.debug( + "Cannot load cookie. Illegal cookie names: %r", invalid_names + ) + return cookies diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 8dbdd5ccb3d..38a44972c09 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1,5 +1,6 @@ """Tests for internal cookie helper functions.""" +import logging import sys import time from http.cookies import ( @@ -1444,14 +1445,16 @@ def test_parse_cookie_header_illegal_names(caplog: pytest.LogCaptureFixture) -> """Test parse_cookie_header warns about illegal cookie names.""" # Cookie name with comma (not allowed in _COOKIE_NAME_RE) header = "good=value; invalid,cookie=bad; another=test" - result = parse_cookie_header(header) + with caplog.at_level(logging.DEBUG): + result = parse_cookie_header(header) # Should skip the invalid cookie but continue parsing assert len(result) == 2 assert result[0][0] == "good" assert result[0][1].value == "value" assert result[1][0] == "another" assert result[1][1].value == "test" - assert "Can not load cookie: Illegal cookie name 'invalid,cookie'" in caplog.text + assert "Cannot load cookie. 
Illegal cookie name" in caplog.text + assert "'invalid,cookie'" in caplog.text def test_parse_cookie_header_large_value() -> None: @@ -1554,7 +1557,8 @@ def test_parse_cookie_header_invalid_name_in_fallback( """Test that fallback parser rejects cookies with invalid names.""" header = 'normal=value; invalid,name={"x":"y"}; another=test' - result = parse_cookie_header(header) + with caplog.at_level(logging.DEBUG): + result = parse_cookie_header(header) assert len(result) == 2 @@ -1566,7 +1570,8 @@ def test_parse_cookie_header_invalid_name_in_fallback( assert name2 == "another" assert morsel2.value == "test" - assert "Can not load cookie: Illegal cookie name 'invalid,name'" in caplog.text + assert "Cannot load cookie. Illegal cookie name" in caplog.text + assert "'invalid,name'" in caplog.text def test_parse_cookie_header_empty_key_in_fallback( @@ -1574,8 +1579,8 @@ def test_parse_cookie_header_empty_key_in_fallback( ) -> None: """Test that fallback parser logs warning for empty cookie names.""" header = 'normal=value; ={"malformed":"json"}; another=test' - - result = parse_cookie_header(header) + with caplog.at_level(logging.DEBUG): + result = parse_cookie_header(header) assert len(result) == 2 @@ -1587,7 +1592,8 @@ def test_parse_cookie_header_empty_key_in_fallback( assert name2 == "another" assert morsel2.value == "test" - assert "Can not load cookie: Illegal cookie name ''" in caplog.text + assert "Cannot load cookie. 
Illegal cookie name" in caplog.text + assert "''" in caplog.text @pytest.mark.parametrize( diff --git a/tests/test_web_request.py b/tests/test_web_request.py index ea60f41a921..a6b73d75eac 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -1,5 +1,6 @@ import asyncio import datetime +import logging import socket import sys import time @@ -381,6 +382,22 @@ def test_request_cookies_edge_cases() -> None: assert req.cookies == {"test": "quoted value", "normal": "unquoted"} +def test_request_cookies_many_invalid(caplog: pytest.LogCaptureFixture) -> None: + """Test many invalid cookies doesn't cause too many logs.""" + bad = "bad" + chr(1) + "name" + cookie = "; ".join(f"{bad}{i}=1" for i in range(3000)) + req = make_mocked_request("GET", "/", headers=CIMultiDict(COOKIE=cookie)) + + with caplog.at_level(logging.DEBUG): + cookies = req.cookies + + assert len(caplog.record_tuples) == 1 + _, level, msg = caplog.record_tuples[0] + assert level is logging.DEBUG + assert "Cannot load cookie" in msg + assert cookies == {} + + def test_request_cookies_no_500_error() -> None: """Test that cookies with special characters don't cause 500 errors. 
From 64629a0834f94e46d9881f4e99c41a137e1f3326 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 02:53:03 +0000 Subject: [PATCH 84/93] [PR #11890/384a1730 backport][3.13] Log once per cookie header (#11909) **This is a backport of PR #11890 as merged into master (384a173022c9d057110c1418c5c4ff83a321900f).** Co-authored-by: Sam Bull --- aiohttp/_cookie_helpers.py | 12 ++++++++---- tests/test_cookie_helpers.py | 20 +++++++++++++------- tests/test_web_request.py | 17 +++++++++++++++++ 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 19a495a96a8..10e2e0eff9d 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -184,6 +184,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: i = 0 n = len(header) + invalid_names = [] while i < n: # Use the same pattern as parse_set_cookie_headers to find cookies match = _COOKIE_PATTERN.match(header, i) @@ -201,9 +202,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: # Validate the name (same as regex path) if not _COOKIE_NAME_RE.match(key): - internal_logger.warning( - "Can not load cookie: Illegal cookie name %r", key - ) + invalid_names.append(key) else: morsel = Morsel() morsel.__setstate__( # type: ignore[attr-defined] @@ -221,7 +220,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: # Validate the name if not key or not _COOKIE_NAME_RE.match(key): - internal_logger.warning("Can not load cookie: Illegal cookie name %r", key) + invalid_names.append(key) continue # Create new morsel @@ -237,6 +236,11 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: cookies.append((key, morsel)) + if invalid_names: + internal_logger.debug( + "Cannot load cookie. 
Illegal cookie names: %r", invalid_names + ) + return cookies diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 8dbdd5ccb3d..38a44972c09 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1,5 +1,6 @@ """Tests for internal cookie helper functions.""" +import logging import sys import time from http.cookies import ( @@ -1444,14 +1445,16 @@ def test_parse_cookie_header_illegal_names(caplog: pytest.LogCaptureFixture) -> """Test parse_cookie_header warns about illegal cookie names.""" # Cookie name with comma (not allowed in _COOKIE_NAME_RE) header = "good=value; invalid,cookie=bad; another=test" - result = parse_cookie_header(header) + with caplog.at_level(logging.DEBUG): + result = parse_cookie_header(header) # Should skip the invalid cookie but continue parsing assert len(result) == 2 assert result[0][0] == "good" assert result[0][1].value == "value" assert result[1][0] == "another" assert result[1][1].value == "test" - assert "Can not load cookie: Illegal cookie name 'invalid,cookie'" in caplog.text + assert "Cannot load cookie. Illegal cookie name" in caplog.text + assert "'invalid,cookie'" in caplog.text def test_parse_cookie_header_large_value() -> None: @@ -1554,7 +1557,8 @@ def test_parse_cookie_header_invalid_name_in_fallback( """Test that fallback parser rejects cookies with invalid names.""" header = 'normal=value; invalid,name={"x":"y"}; another=test' - result = parse_cookie_header(header) + with caplog.at_level(logging.DEBUG): + result = parse_cookie_header(header) assert len(result) == 2 @@ -1566,7 +1570,8 @@ def test_parse_cookie_header_invalid_name_in_fallback( assert name2 == "another" assert morsel2.value == "test" - assert "Can not load cookie: Illegal cookie name 'invalid,name'" in caplog.text + assert "Cannot load cookie. 
Illegal cookie name" in caplog.text + assert "'invalid,name'" in caplog.text def test_parse_cookie_header_empty_key_in_fallback( @@ -1574,8 +1579,8 @@ def test_parse_cookie_header_empty_key_in_fallback( ) -> None: """Test that fallback parser logs warning for empty cookie names.""" header = 'normal=value; ={"malformed":"json"}; another=test' - - result = parse_cookie_header(header) + with caplog.at_level(logging.DEBUG): + result = parse_cookie_header(header) assert len(result) == 2 @@ -1587,7 +1592,8 @@ def test_parse_cookie_header_empty_key_in_fallback( assert name2 == "another" assert morsel2.value == "test" - assert "Can not load cookie: Illegal cookie name ''" in caplog.text + assert "Cannot load cookie. Illegal cookie name" in caplog.text + assert "''" in caplog.text @pytest.mark.parametrize( diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 22871d2687b..64af7cd410e 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -1,5 +1,6 @@ import asyncio import datetime +import logging import socket import time from collections.abc import MutableMapping @@ -380,6 +381,22 @@ def test_request_cookies_edge_cases() -> None: assert req.cookies == {"test": "quoted value", "normal": "unquoted"} +def test_request_cookies_many_invalid(caplog: pytest.LogCaptureFixture) -> None: + """Test many invalid cookies doesn't cause too many logs.""" + bad = "bad" + chr(1) + "name" + cookie = "; ".join(f"{bad}{i}=1" for i in range(3000)) + req = make_mocked_request("GET", "/", headers=CIMultiDict(COOKIE=cookie)) + + with caplog.at_level(logging.DEBUG): + cookies = req.cookies + + assert len(caplog.record_tuples) == 1 + _, level, msg = caplog.record_tuples[0] + assert level is logging.DEBUG + assert "Cannot load cookie" in msg + assert cookies == {} + + def test_request_cookies_no_500_error() -> None: """Test that cookies with special characters don't cause 500 errors. 
From 540053a09138b1acd832c0d7338e3db5c4de8074 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 3 Jan 2026 03:48:31 +0000 Subject: [PATCH 85/93] [PR #11892/271532ea backport][3.14] Use collections.deque for chunk splits (#11911) **This is a backport of PR #11892 as merged into master (271532ea355c65480c8ecc14137dfbb72aec8f6f).** Co-authored-by: Sam Bull Co-authored-by: Finder --- aiohttp/streams.py | 8 ++++---- tests/test_http_parser.py | 14 +++++++++----- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 6c147953d0d..08a7db254c0 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -140,7 +140,7 @@ def __init__( self._loop = loop self._size = 0 self._cursor = 0 - self._http_chunk_splits: list[int] | None = None + self._http_chunk_splits: collections.deque[int] | None = None self._buffer: collections.deque[bytes] = collections.deque() self._buffer_offset = 0 self._eof = False @@ -294,7 +294,7 @@ def begin_http_chunk_receiving(self) -> None: raise RuntimeError( "Called begin_http_chunk_receiving when some data was already fed" ) - self._http_chunk_splits = [] + self._http_chunk_splits = collections.deque() def end_http_chunk_receiving(self) -> None: if self._http_chunk_splits is None: @@ -453,7 +453,7 @@ async def readchunk(self) -> tuple[bytes, bool]: raise self._exception while self._http_chunk_splits: - pos = self._http_chunk_splits.pop(0) + pos = self._http_chunk_splits.popleft() if pos == self._cursor: return (b"", True) if pos > self._cursor: @@ -526,7 +526,7 @@ def _read_nowait_chunk(self, n: int) -> bytes: chunk_splits = self._http_chunk_splits # Prevent memory leak: drop useless chunk splits while chunk_splits and chunk_splits[0] < self._cursor: - chunk_splits.pop(0) + chunk_splits.popleft() if self._size < self._low_water and self._protocol._reading_paused: self._protocol.resume_reading() diff --git a/tests/test_http_parser.py 
b/tests/test_http_parser.py index d11f991f302..eb0287f37d9 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1254,7 +1254,8 @@ def test_http_request_chunked_payload(parser) -> None: parser.feed_data(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() @@ -1269,7 +1270,8 @@ def test_http_request_chunked_payload_and_next_message(parser) -> None: ) assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() assert len(messages) == 1 @@ -1293,12 +1295,13 @@ def test_http_request_chunked_payload_chunks(parser) -> None: parser.feed_data(b"test: test\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert not payload.is_eof() parser.feed_data(b"\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() @@ -1309,7 +1312,8 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None: parser.feed_data(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() From dc3170b56904bdf814228fae70a5501a42a6c712 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 03:57:17 +0000 Subject: [PATCH 86/93] Use collections.deque for chunk splits (#11892) (#11912) 
(cherry picked from commit 271532ea355c65480c8ecc14137dfbb72aec8f6f) --------- Co-authored-by: Finder --- aiohttp/streams.py | 8 ++++---- tests/test_http_parser.py | 14 +++++++++----- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index e2bc04dd99c..88d9e174ef2 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -149,7 +149,7 @@ def __init__( self._loop = loop self._size = 0 self._cursor = 0 - self._http_chunk_splits: Optional[List[int]] = None + self._http_chunk_splits: Optional[Deque[int]] = None self._buffer: Deque[bytes] = collections.deque() self._buffer_offset = 0 self._eof = False @@ -303,7 +303,7 @@ def begin_http_chunk_receiving(self) -> None: raise RuntimeError( "Called begin_http_chunk_receiving when some data was already fed" ) - self._http_chunk_splits = [] + self._http_chunk_splits = collections.deque() def end_http_chunk_receiving(self) -> None: if self._http_chunk_splits is None: @@ -462,7 +462,7 @@ async def readchunk(self) -> Tuple[bytes, bool]: raise self._exception while self._http_chunk_splits: - pos = self._http_chunk_splits.pop(0) + pos = self._http_chunk_splits.popleft() if pos == self._cursor: return (b"", True) if pos > self._cursor: @@ -535,7 +535,7 @@ def _read_nowait_chunk(self, n: int) -> bytes: chunk_splits = self._http_chunk_splits # Prevent memory leak: drop useless chunk splits while chunk_splits and chunk_splits[0] < self._cursor: - chunk_splits.pop(0) + chunk_splits.popleft() if self._size < self._low_water and self._protocol._reading_paused: self._protocol.resume_reading() diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 8841a4c6a30..cc0a812f958 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1254,7 +1254,8 @@ def test_http_request_chunked_payload(parser) -> None: parser.feed_data(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == 
payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() @@ -1269,7 +1270,8 @@ def test_http_request_chunked_payload_and_next_message(parser) -> None: ) assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() assert len(messages) == 1 @@ -1293,12 +1295,13 @@ def test_http_request_chunked_payload_chunks(parser) -> None: parser.feed_data(b"test: test\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert not payload.is_eof() parser.feed_data(b"\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() @@ -1309,7 +1312,8 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None: parser.feed_data(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n") assert b"dataline" == b"".join(d for d in payload._buffer) - assert [4, 8] == payload._http_chunk_splits + assert payload._http_chunk_splits is not None + assert [4, 8] == list(payload._http_chunk_splits) assert payload.is_eof() From ed4ab6fbcd74d8e509cae587333cb5fd43d2001a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 04:39:12 +0000 Subject: [PATCH 87/93] Replace asserts with exceptions (#11897) (#11913) (cherry picked from commit d5bf65f15c0c718b6b95e9bc9d0914a92c51e60f) Co-authored-by: J. 
Nick Koston --- aiohttp/multipart.py | 10 ++++------ aiohttp/web_request.py | 8 +++----- tests/test_multipart.py | 12 +++++++++++- tests/test_web_request.py | 24 +++++++++++++++++++++++- 4 files changed, 41 insertions(+), 13 deletions(-) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 4149f5560a5..a152f2630b3 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -349,11 +349,8 @@ async def read_chunk(self, size: int = chunk_size) -> bytes: self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True - if self._at_eof: - clrf = await self._content.readline() - assert ( - b"\r\n" == clrf - ), "reader did not read all the data or it is malformed" + if self._at_eof and await self._content.readline() != b"\r\n": + raise ValueError("Reader did not read all the data or it is malformed") return chunk async def _read_chunk_from_length(self, size: int) -> bytes: @@ -383,7 +380,8 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: while len(chunk) < self._boundary_len: chunk += await self._content.read(size) self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" + if self._content_eof > 2: + raise ValueError("Reading after EOF") if self._content_eof: break if len(chunk) > size: diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 1c24d9b749e..3928f4da254 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -731,12 +731,12 @@ async def post(self) -> "MultiDictProxy[str | bytes | FileField]": max_size = self._client_max_size size = 0 - field = await multipart.next() - while field is not None: + while (field := await multipart.next()) is not None: field_ct = field.headers.get(hdrs.CONTENT_TYPE) if isinstance(field, BodyPartReader): - assert field.name is not None + if field.name is None: + raise ValueError("Multipart field missing name.") # Note that according to RFC 7578, the Content-Type header # is optional, even for files, so we can't 
assume it's @@ -788,8 +788,6 @@ async def post(self) -> "MultiDictProxy[str | bytes | FileField]": raise ValueError( "To decode nested multipart you need to use custom reader", ) - - field = await multipart.next() else: data = await self.read() if data: diff --git a/tests/test_multipart.py b/tests/test_multipart.py index ad45d28f403..1751609680f 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -220,11 +220,21 @@ async def test_read_incomplete_body_chunked(self) -> None: with Stream(data) as stream: obj = aiohttp.BodyPartReader(BOUNDARY, {}, stream) result = b"" - with pytest.raises(AssertionError): + with pytest.raises(ValueError): for _ in range(4): result += await obj.read_chunk(7) assert data == result + async def test_read_with_content_length_malformed_crlf(self) -> None: + # Content-Length is correct but data after content is not \r\n + content = b"Hello" + h = CIMultiDictProxy(CIMultiDict({"CONTENT-LENGTH": str(len(content))})) + # Malformed: "XX" instead of "\r\n" after content + with Stream(content + b"XX--:--") as stream: + obj = aiohttp.BodyPartReader(BOUNDARY, h, stream) + with pytest.raises(ValueError, match="malformed"): + await obj.read() + async def test_read_boundary_with_incomplete_chunk(self) -> None: with Stream(b"") as stream: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index a6b73d75eac..0e40b0dfea6 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -13,6 +13,7 @@ from yarl import URL from aiohttp import HttpVersion +from aiohttp.base_protocol import BaseProtocol from aiohttp.http_parser import RawRequestMessage from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request @@ -929,7 +930,28 @@ async def test_multipart_formdata(protocol) -> None: assert dict(result) == {"a": "b", "c": "d"} -async def test_multipart_formdata_file(protocol) -> None: +async def test_multipart_formdata_field_missing_name(protocol: BaseProtocol) -> None: + # Ensure 
ValueError is raised when Content-Disposition has no name + payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) + payload.feed_data( + b"-----------------------------326931944431359\r\n" + b"Content-Disposition: form-data\r\n" # Missing name! + b"\r\n" + b"value\r\n" + b"-----------------------------326931944431359--\r\n" + ) + content_type = ( + "multipart/form-data; boundary=---------------------------326931944431359" + ) + payload.feed_eof() + req = make_mocked_request( + "POST", "/", headers={"CONTENT-TYPE": content_type}, payload=payload + ) + with pytest.raises(ValueError, match="Multipart field missing name"): + await req.post() + + +async def test_multipart_formdata_file(protocol: BaseProtocol) -> None: # Make sure file uploads work, even without a content type payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) payload.feed_data( From bc1319ec3cbff9438a758951a30907b072561259 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 04:53:29 +0000 Subject: [PATCH 88/93] Replace asserts with exceptions (#11897) (#11914) (cherry picked from commit d5bf65f15c0c718b6b95e9bc9d0914a92c51e60f) Co-authored-by: J. 
Nick Koston --- aiohttp/multipart.py | 10 ++++------ aiohttp/web_request.py | 8 +++----- tests/test_multipart.py | 12 +++++++++++- tests/test_web_request.py | 24 +++++++++++++++++++++++- 4 files changed, 41 insertions(+), 13 deletions(-) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e6de2b88e21..872294e4fe3 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -361,11 +361,8 @@ async def read_chunk(self, size: int = chunk_size) -> bytes: self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True - if self._at_eof: - clrf = await self._content.readline() - assert ( - b"\r\n" == clrf - ), "reader did not read all the data or it is malformed" + if self._at_eof and await self._content.readline() != b"\r\n": + raise ValueError("Reader did not read all the data or it is malformed") return chunk async def _read_chunk_from_length(self, size: int) -> bytes: @@ -395,7 +392,8 @@ async def _read_chunk_from_stream(self, size: int) -> bytes: while len(chunk) < self._boundary_len: chunk += await self._content.read(size) self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" + if self._content_eof > 2: + raise ValueError("Reading after EOF") if self._content_eof: break if len(chunk) > size: diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 745d70fc351..b5fa40c2637 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -722,12 +722,12 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": max_size = self._client_max_size size = 0 - field = await multipart.next() - while field is not None: + while (field := await multipart.next()) is not None: field_ct = field.headers.get(hdrs.CONTENT_TYPE) if isinstance(field, BodyPartReader): - assert field.name is not None + if field.name is None: + raise ValueError("Multipart field missing name.") # Note that according to RFC 7578, the Content-Type header # is optional, even for files, so we 
can't assume it's @@ -779,8 +779,6 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": raise ValueError( "To decode nested multipart you need to use custom reader", ) - - field = await multipart.next() else: data = await self.read() if data: diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 21dc935d536..2fa2e331df2 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -221,11 +221,21 @@ async def test_read_incomplete_body_chunked(self) -> None: with Stream(data) as stream: obj = aiohttp.BodyPartReader(BOUNDARY, {}, stream) result = b"" - with pytest.raises(AssertionError): + with pytest.raises(ValueError): for _ in range(4): result += await obj.read_chunk(7) assert data == result + async def test_read_with_content_length_malformed_crlf(self) -> None: + # Content-Length is correct but data after content is not \r\n + content = b"Hello" + h = CIMultiDictProxy(CIMultiDict({"CONTENT-LENGTH": str(len(content))})) + # Malformed: "XX" instead of "\r\n" after content + with Stream(content + b"XX--:--") as stream: + obj = aiohttp.BodyPartReader(BOUNDARY, h, stream) + with pytest.raises(ValueError, match="malformed"): + await obj.read() + async def test_read_boundary_with_incomplete_chunk(self) -> None: with Stream(b"") as stream: diff --git a/tests/test_web_request.py b/tests/test_web_request.py index 64af7cd410e..dffc691dff0 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -12,6 +12,7 @@ from yarl import URL from aiohttp import HttpVersion +from aiohttp.base_protocol import BaseProtocol from aiohttp.http_parser import RawRequestMessage from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request @@ -845,7 +846,28 @@ async def test_multipart_formdata(protocol) -> None: assert dict(result) == {"a": "b", "c": "d"} -async def test_multipart_formdata_file(protocol) -> None: +async def test_multipart_formdata_field_missing_name(protocol: BaseProtocol) -> None: + # 
Ensure ValueError is raised when Content-Disposition has no name + payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) + payload.feed_data( + b"-----------------------------326931944431359\r\n" + b"Content-Disposition: form-data\r\n" # Missing name! + b"\r\n" + b"value\r\n" + b"-----------------------------326931944431359--\r\n" + ) + content_type = ( + "multipart/form-data; boundary=---------------------------326931944431359" + ) + payload.feed_eof() + req = make_mocked_request( + "POST", "/", headers={"CONTENT-TYPE": content_type}, payload=payload + ) + with pytest.raises(ValueError, match="Multipart field missing name"): + await req.post() + + +async def test_multipart_formdata_file(protocol: BaseProtocol) -> None: # Make sure file uploads work, even without a content type payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) payload.feed_data( From 0d3328e94abde8aeb5cda4b32b996da5c430f04c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 15:20:15 +0000 Subject: [PATCH 89/93] Limit number of chunks before pausing reading (#11894) (#11915) (cherry picked from commit 1e4120e87daec963c67f956111e6bca44d7c3dea) Co-authored-by: J. Nick Koston --- aiohttp/streams.py | 25 ++++++- tests/test_streams.py | 170 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 194 insertions(+), 1 deletion(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 08a7db254c0..3a424c28558 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -107,6 +107,8 @@ class StreamReader(AsyncStreamReaderMixin): "_protocol", "_low_water", "_high_water", + "_low_water_chunks", + "_high_water_chunks", "_loop", "_size", "_cursor", @@ -137,6 +139,11 @@ def __init__( self._high_water = limit * 2 if loop is None: loop = asyncio.get_event_loop() + # Ensure high_water_chunks >= 3 so it's always > low_water_chunks. + self._high_water_chunks = max(3, limit // 4) + # Use max(2, ...) 
because there's always at least 1 chunk split remaining + # (the current position), so we need low_water >= 2 to allow resume. + self._low_water_chunks = max(2, self._high_water_chunks // 2) self._loop = loop self._size = 0 self._cursor = 0 @@ -320,6 +327,15 @@ def end_http_chunk_receiving(self) -> None: self._http_chunk_splits.append(self.total_bytes) + # If we get too many small chunks before self._high_water is reached, then any + # .read() call becomes computationally expensive, and could block the event loop + # for too long, hence an additional self._high_water_chunks here. + if ( + len(self._http_chunk_splits) > self._high_water_chunks + and not self._protocol._reading_paused + ): + self._protocol.pause_reading() + # wake up readchunk when end of http chunk received waiter = self._waiter if waiter is not None: @@ -528,7 +544,14 @@ def _read_nowait_chunk(self, n: int) -> bytes: while chunk_splits and chunk_splits[0] < self._cursor: chunk_splits.popleft() - if self._size < self._low_water and self._protocol._reading_paused: + if ( + self._protocol._reading_paused + and self._size < self._low_water + and ( + self._http_chunk_splits is None + or len(self._http_chunk_splits) < self._low_water_chunks + ) + ): self._protocol.resume_reading() return data diff --git a/tests/test_streams.py b/tests/test_streams.py index 1b65f771c77..c5bc6716b7f 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -1552,3 +1552,173 @@ async def test_stream_reader_iter_chunks_chunked_encoding(protocol) -> None: def test_isinstance_check() -> None: assert isinstance(streams.EMPTY_PAYLOAD, streams.StreamReader) + + +async def test_stream_reader_pause_on_high_water_chunks( + protocol: mock.Mock, +) -> None: + """Test that reading is paused when chunk count exceeds high water mark.""" + loop = asyncio.get_event_loop() + # Use small limit so high_water_chunks is small: limit // 4 = 10 + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + assert 
stream._high_water_chunks == 10 + assert stream._low_water_chunks == 5 + + # Feed chunks until we exceed high_water_chunks + for i in range(12): + stream.begin_http_chunk_receiving() + stream.feed_data(b"x") # 1 byte per chunk + stream.end_http_chunk_receiving() + + # pause_reading should have been called when chunk count exceeded 10 + protocol.pause_reading.assert_called() + + +async def test_stream_reader_resume_on_low_water_chunks( + protocol: mock.Mock, +) -> None: + """Test that reading resumes when chunk count drops below low water mark.""" + loop = asyncio.get_event_loop() + # Use small limit so high_water_chunks is small: limit // 4 = 10 + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + assert stream._high_water_chunks == 10 + assert stream._low_water_chunks == 5 + + # Feed chunks until we exceed high_water_chunks + for i in range(12): + stream.begin_http_chunk_receiving() + stream.feed_data(b"x") # 1 byte per chunk + stream.end_http_chunk_receiving() + + # Simulate that reading was paused + protocol._reading_paused = True + protocol.pause_reading.reset_mock() + + # Read data to reduce both size and chunk count + # Reading will consume chunks and reduce _http_chunk_splits + data = await stream.read(10) + assert data == b"xxxxxxxxxx" + + # resume_reading should have been called when both size and chunk count + # dropped below their respective low water marks + protocol.resume_reading.assert_called() + + +async def test_stream_reader_no_resume_when_chunks_still_high( + protocol: mock.Mock, +) -> None: + """Test that reading doesn't resume if chunk count is still above low water.""" + loop = asyncio.get_event_loop() + # Use small limit so high_water_chunks is small: limit // 4 = 10 + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + # Feed many chunks + for i in range(12): + stream.begin_http_chunk_receiving() + stream.feed_data(b"x") + stream.end_http_chunk_receiving() + + # Simulate that reading was paused + 
protocol._reading_paused = True + + # Read only a few bytes - chunk count will still be high + data = await stream.read(2) + assert data == b"xx" + + # resume_reading should NOT be called because chunk count is still >= low_water_chunks + protocol.resume_reading.assert_not_called() + + +async def test_stream_reader_read_non_chunked_response( + protocol: mock.Mock, +) -> None: + """Test that non-chunked responses work correctly (no chunk tracking).""" + loop = asyncio.get_event_loop() + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + # Non-chunked: just feed data without begin/end_http_chunk_receiving + stream.feed_data(b"Hello World") + + # _http_chunk_splits should be None for non-chunked responses + assert stream._http_chunk_splits is None + + # Reading should work without issues + data = await stream.read(5) + assert data == b"Hello" + + data = await stream.read(6) + assert data == b" World" + + +async def test_stream_reader_resume_non_chunked_when_paused( + protocol: mock.Mock, +) -> None: + """Test that resume works for non-chunked responses when paused due to size.""" + loop = asyncio.get_event_loop() + # Small limit so we can trigger pause via size + stream = streams.StreamReader(protocol, limit=10, loop=loop) + + # Feed data that exceeds high_water (limit * 2 = 20) + stream.feed_data(b"x" * 25) + + # Simulate that reading was paused due to size + protocol._reading_paused = True + protocol.pause_reading.assert_called() + + # Read enough to drop below low_water (limit = 10) + data = await stream.read(20) + assert data == b"x" * 20 + + # resume_reading should be called (size is now 5 < low_water 10) + protocol.resume_reading.assert_called() + + +@pytest.mark.parametrize("limit", [1, 2, 4]) +async def test_stream_reader_small_limit_resumes_reading( + protocol: mock.Mock, + limit: int, +) -> None: + """Test that small limits still allow resume_reading to be called. 
+ + Even with very small limits, high_water_chunks should be at least 3 + and low_water_chunks should be at least 2, with high > low to ensure + proper flow control. + """ + loop = asyncio.get_event_loop() + stream = streams.StreamReader(protocol, limit=limit, loop=loop) + + # Verify minimum thresholds are enforced and high > low + assert stream._high_water_chunks >= 3 + assert stream._low_water_chunks >= 2 + assert stream._high_water_chunks > stream._low_water_chunks + + # Set up pause/resume side effects + def pause_reading() -> None: + protocol._reading_paused = True + + protocol.pause_reading.side_effect = pause_reading + + def resume_reading() -> None: + protocol._reading_paused = False + + protocol.resume_reading.side_effect = resume_reading + + # Feed 4 chunks (triggers pause at > high_water_chunks which is >= 3) + for char in b"abcd": + stream.begin_http_chunk_receiving() + stream.feed_data(bytes([char])) + stream.end_http_chunk_receiving() + + # Reading should now be paused + assert protocol._reading_paused is True + assert protocol.pause_reading.called + + # Read all data - should resume (chunk count drops below low_water_chunks) + data = stream.read_nowait() + assert data == b"abcd" + assert stream._size == 0 + + protocol.resume_reading.assert_called() + assert protocol._reading_paused is False From 4ed97a4e46eaf61bd0f05063245f613469700229 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 15:23:14 +0000 Subject: [PATCH 90/93] Limit number of chunks before pausing reading (#11894) (#11916) (cherry picked from commit 1e4120e87daec963c67f956111e6bca44d7c3dea) Co-authored-by: J. 
Nick Koston --- aiohttp/streams.py | 25 ++++++- tests/test_streams.py | 170 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 194 insertions(+), 1 deletion(-) diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 88d9e174ef2..6cc74fc9cbd 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -116,6 +116,8 @@ class StreamReader(AsyncStreamReaderMixin): "_protocol", "_low_water", "_high_water", + "_low_water_chunks", + "_high_water_chunks", "_loop", "_size", "_cursor", @@ -146,6 +148,11 @@ def __init__( self._high_water = limit * 2 if loop is None: loop = asyncio.get_event_loop() + # Ensure high_water_chunks >= 3 so it's always > low_water_chunks. + self._high_water_chunks = max(3, limit // 4) + # Use max(2, ...) because there's always at least 1 chunk split remaining + # (the current position), so we need low_water >= 2 to allow resume. + self._low_water_chunks = max(2, self._high_water_chunks // 2) self._loop = loop self._size = 0 self._cursor = 0 @@ -329,6 +336,15 @@ def end_http_chunk_receiving(self) -> None: self._http_chunk_splits.append(self.total_bytes) + # If we get too many small chunks before self._high_water is reached, then any + # .read() call becomes computationally expensive, and could block the event loop + # for too long, hence an additional self._high_water_chunks here. 
+ if ( + len(self._http_chunk_splits) > self._high_water_chunks + and not self._protocol._reading_paused + ): + self._protocol.pause_reading() + # wake up readchunk when end of http chunk received waiter = self._waiter if waiter is not None: @@ -537,7 +553,14 @@ def _read_nowait_chunk(self, n: int) -> bytes: while chunk_splits and chunk_splits[0] < self._cursor: chunk_splits.popleft() - if self._size < self._low_water and self._protocol._reading_paused: + if ( + self._protocol._reading_paused + and self._size < self._low_water + and ( + self._http_chunk_splits is None + or len(self._http_chunk_splits) < self._low_water_chunks + ) + ): self._protocol.resume_reading() return data diff --git a/tests/test_streams.py b/tests/test_streams.py index 1b65f771c77..c5bc6716b7f 100644 --- a/tests/test_streams.py +++ b/tests/test_streams.py @@ -1552,3 +1552,173 @@ async def test_stream_reader_iter_chunks_chunked_encoding(protocol) -> None: def test_isinstance_check() -> None: assert isinstance(streams.EMPTY_PAYLOAD, streams.StreamReader) + + +async def test_stream_reader_pause_on_high_water_chunks( + protocol: mock.Mock, +) -> None: + """Test that reading is paused when chunk count exceeds high water mark.""" + loop = asyncio.get_event_loop() + # Use small limit so high_water_chunks is small: limit // 4 = 10 + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + assert stream._high_water_chunks == 10 + assert stream._low_water_chunks == 5 + + # Feed chunks until we exceed high_water_chunks + for i in range(12): + stream.begin_http_chunk_receiving() + stream.feed_data(b"x") # 1 byte per chunk + stream.end_http_chunk_receiving() + + # pause_reading should have been called when chunk count exceeded 10 + protocol.pause_reading.assert_called() + + +async def test_stream_reader_resume_on_low_water_chunks( + protocol: mock.Mock, +) -> None: + """Test that reading resumes when chunk count drops below low water mark.""" + loop = asyncio.get_event_loop() + # Use small limit 
so high_water_chunks is small: limit // 4 = 10 + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + assert stream._high_water_chunks == 10 + assert stream._low_water_chunks == 5 + + # Feed chunks until we exceed high_water_chunks + for i in range(12): + stream.begin_http_chunk_receiving() + stream.feed_data(b"x") # 1 byte per chunk + stream.end_http_chunk_receiving() + + # Simulate that reading was paused + protocol._reading_paused = True + protocol.pause_reading.reset_mock() + + # Read data to reduce both size and chunk count + # Reading will consume chunks and reduce _http_chunk_splits + data = await stream.read(10) + assert data == b"xxxxxxxxxx" + + # resume_reading should have been called when both size and chunk count + # dropped below their respective low water marks + protocol.resume_reading.assert_called() + + +async def test_stream_reader_no_resume_when_chunks_still_high( + protocol: mock.Mock, +) -> None: + """Test that reading doesn't resume if chunk count is still above low water.""" + loop = asyncio.get_event_loop() + # Use small limit so high_water_chunks is small: limit // 4 = 10 + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + # Feed many chunks + for i in range(12): + stream.begin_http_chunk_receiving() + stream.feed_data(b"x") + stream.end_http_chunk_receiving() + + # Simulate that reading was paused + protocol._reading_paused = True + + # Read only a few bytes - chunk count will still be high + data = await stream.read(2) + assert data == b"xx" + + # resume_reading should NOT be called because chunk count is still >= low_water_chunks + protocol.resume_reading.assert_not_called() + + +async def test_stream_reader_read_non_chunked_response( + protocol: mock.Mock, +) -> None: + """Test that non-chunked responses work correctly (no chunk tracking).""" + loop = asyncio.get_event_loop() + stream = streams.StreamReader(protocol, limit=40, loop=loop) + + # Non-chunked: just feed data without begin/end_http_chunk_receiving 
+ stream.feed_data(b"Hello World") + + # _http_chunk_splits should be None for non-chunked responses + assert stream._http_chunk_splits is None + + # Reading should work without issues + data = await stream.read(5) + assert data == b"Hello" + + data = await stream.read(6) + assert data == b" World" + + +async def test_stream_reader_resume_non_chunked_when_paused( + protocol: mock.Mock, +) -> None: + """Test that resume works for non-chunked responses when paused due to size.""" + loop = asyncio.get_event_loop() + # Small limit so we can trigger pause via size + stream = streams.StreamReader(protocol, limit=10, loop=loop) + + # Feed data that exceeds high_water (limit * 2 = 20) + stream.feed_data(b"x" * 25) + + # Simulate that reading was paused due to size + protocol._reading_paused = True + protocol.pause_reading.assert_called() + + # Read enough to drop below low_water (limit = 10) + data = await stream.read(20) + assert data == b"x" * 20 + + # resume_reading should be called (size is now 5 < low_water 10) + protocol.resume_reading.assert_called() + + +@pytest.mark.parametrize("limit", [1, 2, 4]) +async def test_stream_reader_small_limit_resumes_reading( + protocol: mock.Mock, + limit: int, +) -> None: + """Test that small limits still allow resume_reading to be called. + + Even with very small limits, high_water_chunks should be at least 3 + and low_water_chunks should be at least 2, with high > low to ensure + proper flow control. 
+ """ + loop = asyncio.get_event_loop() + stream = streams.StreamReader(protocol, limit=limit, loop=loop) + + # Verify minimum thresholds are enforced and high > low + assert stream._high_water_chunks >= 3 + assert stream._low_water_chunks >= 2 + assert stream._high_water_chunks > stream._low_water_chunks + + # Set up pause/resume side effects + def pause_reading() -> None: + protocol._reading_paused = True + + protocol.pause_reading.side_effect = pause_reading + + def resume_reading() -> None: + protocol._reading_paused = False + + protocol.resume_reading.side_effect = resume_reading + + # Feed 4 chunks (triggers pause at > high_water_chunks which is >= 3) + for char in b"abcd": + stream.begin_http_chunk_receiving() + stream.feed_data(bytes([char])) + stream.end_http_chunk_receiving() + + # Reading should now be paused + assert protocol._reading_paused is True + assert protocol.pause_reading.called + + # Read all data - should resume (chunk count drops below low_water_chunks) + data = stream.read_nowait() + assert data == b"abcd" + assert stream._size == 0 + + protocol.resume_reading.assert_called() + assert protocol._reading_paused is False From 6237b5192d70b223943888d71117b826653ea53a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 15:41:15 +0000 Subject: [PATCH 91/93] Use decompressor max_length parameter (#11898) (#11917) (cherry picked from commit 92477c5a74c43dfe0474bd24f8de11875daa2298) --------- Co-authored-by: J. 
Nick Koston --- CHANGES/11898.breaking.rst | 2 + aiohttp/compression_utils.py | 121 ++++++++++++++++++++------------ aiohttp/http_exceptions.py | 4 ++ aiohttp/http_parser.py | 23 +++++- aiohttp/multipart.py | 31 +++++--- aiohttp/web_request.py | 2 +- docs/spelling_wordlist.txt | 1 + pyproject.toml | 4 +- requirements/runtime-deps.in | 4 +- tests/test_client_functional.py | 114 +++++++++++++++++++++++++++++- tests/test_http_parser.py | 34 +++++++++ tests/test_multipart.py | 83 ++++++++++++++++------ 12 files changed, 335 insertions(+), 88 deletions(-) create mode 100644 CHANGES/11898.breaking.rst diff --git a/CHANGES/11898.breaking.rst b/CHANGES/11898.breaking.rst new file mode 100644 index 00000000000..cfbf2ae4727 --- /dev/null +++ b/CHANGES/11898.breaking.rst @@ -0,0 +1,2 @@ +``Brotli`` and ``brotlicffi`` minimum version is now 1.2. +Decompression now has a default maximum output size of 32MiB per decompress call -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index 7b222f1b639..9fb595e9bb2 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -1,6 +1,7 @@ import asyncio import sys import zlib +from abc import ABC, abstractmethod from concurrent.futures import Executor from typing import Any, Final, Protocol, TypedDict, cast @@ -32,7 +33,12 @@ HAS_ZSTD = False -MAX_SYNC_CHUNK_SIZE = 1024 +MAX_SYNC_CHUNK_SIZE = 4096 +DEFAULT_MAX_DECOMPRESS_SIZE = 2**25 # 32MiB + +# Unlimited decompression constants - different libraries use different conventions +ZLIB_MAX_LENGTH_UNLIMITED = 0 # zlib uses 0 to mean unlimited +ZSTD_MAX_LENGTH_UNLIMITED = -1 # zstd uses -1 to mean unlimited class ZLibCompressObjProtocol(Protocol): @@ -144,19 +150,37 @@ def encoding_to_mode( return -ZLibBackend.MAX_WBITS if suppress_deflate_header else ZLibBackend.MAX_WBITS -class ZlibBaseHandler: +class DecompressionBaseHandler(ABC): def __init__( self, - mode: int, executor: Executor | None = None, max_sync_chunk_size: 
int | None = MAX_SYNC_CHUNK_SIZE, ): - self._mode = mode + """Base class for decompression handlers.""" self._executor = executor self._max_sync_chunk_size = max_sync_chunk_size + @abstractmethod + def decompress_sync( + self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + """Decompress the given data.""" + + async def decompress( + self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + """Decompress the given data.""" + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.decompress_sync, data, max_length + ) + return self.decompress_sync(data, max_length) + -class ZLibCompressor(ZlibBaseHandler): +class ZLibCompressor: def __init__( self, encoding: str | None = None, @@ -167,14 +191,12 @@ def __init__( executor: Executor | None = None, max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, ): - super().__init__( - mode=( - encoding_to_mode(encoding, suppress_deflate_header) - if wbits is None - else wbits - ), - executor=executor, - max_sync_chunk_size=max_sync_chunk_size, + self._executor = executor + self._max_sync_chunk_size = max_sync_chunk_size + self._mode = ( + encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits ) self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) @@ -233,7 +255,7 @@ def flush(self, mode: int | None = None) -> bytes: ) -class ZLibDecompressor(ZlibBaseHandler): +class ZLibDecompressor(DecompressionBaseHandler): def __init__( self, encoding: str | None = None, @@ -241,33 +263,16 @@ def __init__( executor: Executor | None = None, max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, ): - super().__init__( - mode=encoding_to_mode(encoding, suppress_deflate_header), - executor=executor, - max_sync_chunk_size=max_sync_chunk_size, - ) + super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) + 
self._mode = encoding_to_mode(encoding, suppress_deflate_header) self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) self._decompressor = self._zlib_backend.decompressobj(wbits=self._mode) - def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: + def decompress_sync( + self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: return self._decompressor.decompress(data, max_length) - async def decompress(self, data: bytes, max_length: int = 0) -> bytes: - """Decompress the data and return the decompressed bytes. - - If the data size is large than the max_sync_chunk_size, the decompression - will be done in the executor. Otherwise, the decompression will be done - in the event loop. - """ - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_running_loop().run_in_executor( - self._executor, self._decompressor.decompress, data, max_length - ) - return self.decompress_sync(data, max_length) - def flush(self, length: int = 0) -> bytes: return ( self._decompressor.flush(length) @@ -280,40 +285,64 @@ def eof(self) -> bool: return self._decompressor.eof -class BrotliDecompressor: +class BrotliDecompressor(DecompressionBaseHandler): # Supports both 'brotlipy' and 'Brotli' packages # since they share an import name. The top branches # are for 'brotlipy' and bottom branches for 'Brotli' - def __init__(self) -> None: + def __init__( + self, + executor: Executor | None = None, + max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, + ) -> None: + """Decompress data using the Brotli library.""" if not HAS_BROTLI: raise RuntimeError( "The brotli decompression is not available. 
" "Please install `Brotli` module" ) self._obj = brotli.Decompressor() + super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) - def decompress_sync(self, data: bytes) -> bytes: + def decompress_sync( + self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + """Decompress the given data.""" if hasattr(self._obj, "decompress"): - return cast(bytes, self._obj.decompress(data)) - return cast(bytes, self._obj.process(data)) + return cast(bytes, self._obj.decompress(data, max_length)) + return cast(bytes, self._obj.process(data, max_length)) def flush(self) -> bytes: + """Flush the decompressor.""" if hasattr(self._obj, "flush"): return cast(bytes, self._obj.flush()) return b"" -class ZSTDDecompressor: - def __init__(self) -> None: +class ZSTDDecompressor(DecompressionBaseHandler): + def __init__( + self, + executor: Executor | None = None, + max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, + ) -> None: if not HAS_ZSTD: raise RuntimeError( "The zstd decompression is not available. 
" "Please install `backports.zstd` module" ) self._obj = ZstdDecompressor() - - def decompress_sync(self, data: bytes) -> bytes: - return self._obj.decompress(data) + super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) + + def decompress_sync( + self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + # zstd uses -1 for unlimited, while zlib uses 0 for unlimited + # Convert the zlib convention (0=unlimited) to zstd convention (-1=unlimited) + zstd_max_length = ( + ZSTD_MAX_LENGTH_UNLIMITED + if max_length == ZLIB_MAX_LENGTH_UNLIMITED + else max_length + ) + return self._obj.decompress(data, zstd_max_length) def flush(self) -> bytes: return b"" diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index f8ea08162c8..ac6745acf21 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -73,6 +73,10 @@ class ContentLengthError(PayloadEncodingError): """Not enough data to satisfy content length header.""" +class DecompressSizeError(PayloadEncodingError): + """Decompressed size exceeds the configured limit.""" + + class LineTooLong(BadHttpMessage): def __init__( self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 0aa78959523..6fbe04a3203 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -13,6 +13,7 @@ from . 
import hdrs from .base_protocol import BaseProtocol from .compression_utils import ( + DEFAULT_MAX_DECOMPRESS_SIZE, HAS_BROTLI, HAS_ZSTD, BrotliDecompressor, @@ -34,6 +35,7 @@ BadStatusLine, ContentEncodingError, ContentLengthError, + DecompressSizeError, InvalidHeader, InvalidURLError, LineTooLong, @@ -949,7 +951,12 @@ class DeflateBuffer: decompressor: Any - def __init__(self, out: StreamReader, encoding: str | None) -> None: + def __init__( + self, + out: StreamReader, + encoding: str | None, + max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, + ) -> None: self.out = out self.size = 0 out.total_compressed_bytes = self.size @@ -974,6 +981,8 @@ def __init__(self, out: StreamReader, encoding: str | None) -> None: else: self.decompressor = ZLibDecompressor(encoding=encoding) + self._max_decompress_size = max_decompress_size + def set_exception( self, exc: BaseException, @@ -1003,7 +1012,10 @@ def feed_data(self, chunk: bytes, size: int) -> None: ) try: - chunk = self.decompressor.decompress_sync(chunk) + # Decompress with limit + 1 so we can detect if output exceeds limit + chunk = self.decompressor.decompress_sync( + chunk, max_length=self._max_decompress_size + 1 + ) except Exception: raise ContentEncodingError( "Can not decode content-encoding: %s" % self.encoding @@ -1011,6 +1023,13 @@ def feed_data(self, chunk: bytes, size: int) -> None: self._started_decoding = True + # Check if decompression limit was exceeded + if len(chunk) > self._max_decompress_size: + raise DecompressSizeError( + "Decompressed data exceeds the configured limit of %d bytes" + % self._max_decompress_size + ) + if chunk: self.out.feed_data(chunk, len(chunk)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index a152f2630b3..ba41659d90c 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -13,7 +13,12 @@ from multidict import CIMultiDict, CIMultiDictProxy -from .compression_utils import ZLibCompressor, ZLibDecompressor +from .abc import AbstractStreamWriter +from 
.compression_utils import ( + DEFAULT_MAX_DECOMPRESS_SIZE, + ZLibCompressor, + ZLibDecompressor, +) from .hdrs import ( CONTENT_DISPOSITION, CONTENT_ENCODING, @@ -261,6 +266,7 @@ def __init__( *, subtype: str = "mixed", default_charset: str | None = None, + max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, ) -> None: self.headers = headers self._boundary = boundary @@ -277,6 +283,7 @@ def __init__( self._prev_chunk: bytes | None = None self._content_eof = 0 self._cache: dict[str, Any] = {} + self._max_decompress_size = max_decompress_size def __aiter__(self: Self) -> Self: return self @@ -306,7 +313,7 @@ async def read(self, *, decode: bool = False) -> bytes: while not self._at_eof: data.extend(await self.read_chunk(self.chunk_size)) if decode: - return self.decode(data) + return await self.decode(data) return data async def read_chunk(self, size: int = chunk_size) -> bytes: @@ -484,7 +491,7 @@ def at_eof(self) -> bool: """Returns True if the boundary was reached or False otherwise.""" return self._at_eof - def decode(self, data: bytes) -> bytes: + async def decode(self, data: bytes) -> bytes: """Decodes data. 
Decoding is done according the specified Content-Encoding @@ -494,18 +501,18 @@ def decode(self, data: bytes) -> bytes: data = self._decode_content_transfer(data) # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 if not self._is_form_data and CONTENT_ENCODING in self.headers: - return self._decode_content(data) + return await self._decode_content(data) return data - def _decode_content(self, data: bytes) -> bytes: + async def _decode_content(self, data: bytes) -> bytes: encoding = self.headers.get(CONTENT_ENCODING, "").lower() if encoding == "identity": return data if encoding in {"deflate", "gzip"}: - return ZLibDecompressor( + return await ZLibDecompressor( encoding=encoding, suppress_deflate_header=True, - ).decompress_sync(data) + ).decompress(data, max_length=self._max_decompress_size) raise RuntimeError(f"unknown content encoding: {encoding}") @@ -576,11 +583,11 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt """ raise TypeError("Unable to read body part as bytes. 
Use write() to consume.") - async def write(self, writer: Any) -> None: + async def write(self, writer: AbstractStreamWriter) -> None: field = self._value chunk = await field.read_chunk(size=2**16) while chunk: - await writer.write(field.decode(chunk)) + await writer.write(await field.decode(chunk)) chunk = await field.read_chunk(size=2**16) @@ -1020,7 +1027,9 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt return b"".join(parts) - async def write(self, writer: Any, close_boundary: bool = True) -> None: + async def write( + self, writer: AbstractStreamWriter, close_boundary: bool = True + ) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: if self._is_form_data: @@ -1074,7 +1083,7 @@ async def close(self) -> None: class MultipartPayloadWriter: - def __init__(self, writer: Any) -> None: + def __init__(self, writer: AbstractStreamWriter) -> None: self._writer = writer self._encoding: str | None = None self._compress: ZLibCompressor | None = None diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 3928f4da254..c2185a7a214 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -749,7 +749,7 @@ async def post(self) -> "MultiDictProxy[str | bytes | FileField]": ) chunk = await field.read_chunk(size=2**16) while chunk: - chunk = field.decode(chunk) + chunk = await field.decode(chunk) await self._loop.run_in_executor(None, tmp.write, chunk) size += len(chunk) if 0 < max_size < size: diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 63626cb35e2..9b5eafcea4a 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -192,6 +192,7 @@ lowercased Mako manylinux metadata +MiB microservice middleware middlewares diff --git a/pyproject.toml b/pyproject.toml index c1c61b01eab..72eaf0e04f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,8 +50,8 @@ dynamic = [ [project.optional-dependencies] speedups = [ "aiodns >= 3.3.0", - "Brotli; 
platform_python_implementation == 'CPython'", - "brotlicffi; platform_python_implementation != 'CPython'", + "Brotli >= 1.2; platform_python_implementation == 'CPython'", + "brotlicffi >= 1.2; platform_python_implementation != 'CPython'", "backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14'", ] diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 289997df564..cf8f209b4ac 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -6,8 +6,8 @@ aiosignal >= 1.4.0 async-timeout >= 4.0, < 6.0 ; python_version < '3.11' attrs >= 17.3.0 backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14' -Brotli; platform_python_implementation == 'CPython' -brotlicffi; platform_python_implementation != 'CPython' +Brotli >= 1.2; platform_python_implementation == 'CPython' +brotlicffi >= 1.2; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 propcache >= 0.2.0 diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 4cf18b9e5ed..ae584a1f737 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -13,11 +13,25 @@ import tarfile import time import zipfile +import zlib from collections.abc import AsyncIterator, Awaitable, Callable from contextlib import suppress from typing import Any, NoReturn from unittest import mock +try: + try: + import brotlicffi as brotli + except ImportError: + import brotli +except ImportError: + brotli = None # pragma: no cover + +try: + from backports.zstd import ZstdCompressor +except ImportError: + ZstdCompressor = None # type: ignore[assignment,misc] # pragma: no cover + import pytest from multidict import MultiDict from pytest_mock import MockerFixture @@ -37,7 +51,9 @@ TooManyRedirects, ) from aiohttp.client_reqrep import ClientRequest +from aiohttp.compression_utils import DEFAULT_MAX_DECOMPRESS_SIZE from aiohttp.connector import Connection +from 
aiohttp.http_exceptions import DecompressSizeError from aiohttp.http_writer import StreamWriter from aiohttp.payload import ( AsyncIterablePayload, @@ -2411,8 +2427,102 @@ async def handler(request): resp.close() -async def test_bad_payload_chunked_encoding(aiohttp_client) -> None: - async def handler(request): +async def test_payload_decompress_size_limit(aiohttp_client: AiohttpClient) -> None: + """Test that decompression size limit triggers DecompressSizeError. + + When a compressed payload expands beyond the configured limit, + we raise DecompressSizeError. + """ + # Create a highly compressible payload that exceeds the decompression limit. + # 64MiB of repeated bytes compresses to ~32KB but expands beyond the + # 32MiB per-call limit. + original = b"A" * (64 * 2**20) + compressed = zlib.compress(original) + assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE + + async def handler(request: web.Request) -> web.Response: + # Send compressed data with Content-Encoding header + resp = web.Response(body=compressed) + resp.headers["Content-Encoding"] = "deflate" + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + + with pytest.raises(aiohttp.ClientPayloadError) as exc_info: + await resp.read() + + assert isinstance(exc_info.value.__cause__, DecompressSizeError) + assert "Decompressed data exceeds" in str(exc_info.value.__cause__) + + +@pytest.mark.skipif(brotli is None, reason="brotli is not installed") +async def test_payload_decompress_size_limit_brotli( + aiohttp_client: AiohttpClient, +) -> None: + """Test that brotli decompression size limit triggers DecompressSizeError.""" + assert brotli is not None + # Create a highly compressible payload that exceeds the decompression limit. 
+ original = b"A" * (64 * 2**20) + compressed = brotli.compress(original) + assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=compressed) + resp.headers["Content-Encoding"] = "br" + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + + with pytest.raises(aiohttp.ClientPayloadError) as exc_info: + await resp.read() + + assert isinstance(exc_info.value.__cause__, DecompressSizeError) + assert "Decompressed data exceeds" in str(exc_info.value.__cause__) + + +@pytest.mark.skipif(ZstdCompressor is None, reason="backports.zstd is not installed") +async def test_payload_decompress_size_limit_zstd( + aiohttp_client: AiohttpClient, +) -> None: + """Test that zstd decompression size limit triggers DecompressSizeError.""" + assert ZstdCompressor is not None + # Create a highly compressible payload that exceeds the decompression limit. 
+ original = b"A" * (64 * 2**20) + compressor = ZstdCompressor() + compressed = compressor.compress(original) + compressor.flush() + assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=compressed) + resp.headers["Content-Encoding"] = "zstd" + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + + with pytest.raises(aiohttp.ClientPayloadError) as exc_info: + await resp.read() + + assert isinstance(exc_info.value.__cause__, DecompressSizeError) + assert "Decompressed data exceeds" in str(exc_info.value.__cause__) + + +async def test_bad_payload_chunked_encoding(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.StreamResponse: resp = web.StreamResponse() resp.force_close() resp._length_check = False diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index eb0287f37d9..1a426825ede 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -3,6 +3,7 @@ import asyncio import re import sys +import zlib from contextlib import nullcontext from typing import Any from unittest import mock @@ -1919,3 +1920,36 @@ async def test_empty_body(self, protocol: BaseProtocol) -> None: dbuf.feed_eof() assert buf.at_eof() + + @pytest.mark.parametrize( + "chunk_size", + [1024, 2**14, 2**16], # 1KB, 16KB, 64KB + ids=["1KB", "16KB", "64KB"], + ) + async def test_streaming_decompress_large_payload( + self, protocol: BaseProtocol, chunk_size: int + ) -> None: + """Test that large payloads decompress correctly when streamed in chunks. + + This simulates real HTTP streaming where compressed data arrives in + small network chunks. Each chunk's decompressed output should be within + the max_decompress_size limit, allowing full recovery of the original data. 
+ """ + # Create a large payload (3MiB) that compresses well + original = b"A" * (3 * 2**20) + compressed = zlib.compress(original) + + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) + dbuf = DeflateBuffer(buf, "deflate") + + # Feed compressed data in chunks (simulating network streaming) + for i in range(0, len(compressed), chunk_size): + chunk = compressed[i : i + chunk_size] + dbuf.feed_data(chunk, len(chunk)) + + dbuf.feed_eof() + + # Read all decompressed data + result = b"".join(buf._buffer) + assert len(result) == len(original) + assert result == original diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 1751609680f..af091e2947f 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -9,6 +9,7 @@ import aiohttp from aiohttp import payload +from aiohttp.abc import AbstractStreamWriter from aiohttp.compression_utils import ZLibBackend from aiohttp.hdrs import ( CONTENT_DISPOSITION, @@ -36,14 +37,14 @@ def buf(): @pytest.fixture -def stream(buf): - writer = mock.Mock() +def stream(buf: bytearray) -> AbstractStreamWriter: + writer = mock.create_autospec(AbstractStreamWriter, instance=True, spec_set=True) async def write(chunk): buf.extend(chunk) writer.write.side_effect = write - return writer + return writer # type: ignore[no-any-return] @pytest.fixture @@ -415,7 +416,7 @@ async def test_decode_with_content_transfer_encoding_base64(self) -> None: result = b"" while not obj.at_eof(): chunk = await obj.read_chunk(size=6) - result += obj.decode(chunk) + result += await obj.decode(chunk) assert b"Time to Relax!" 
== result @pytest.mark.parametrize("encoding", ("binary", "8bit", "7bit")) @@ -1127,7 +1128,9 @@ async def test_writer(writer) -> None: assert writer.boundary == ":" -async def test_writer_serialize_io_chunk(buf, stream, writer) -> None: +async def test_writer_serialize_io_chunk( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: with io.BytesIO(b"foobarbaz") as file_handle: writer.append(file_handle) await writer.write(stream) @@ -1137,7 +1140,9 @@ async def test_writer_serialize_io_chunk(buf, stream, writer) -> None: ) -async def test_writer_serialize_json(buf, stream, writer) -> None: +async def test_writer_serialize_json( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append_json({"привет": "мир"}) await writer.write(stream) assert ( @@ -1146,7 +1151,9 @@ async def test_writer_serialize_json(buf, stream, writer) -> None: ) -async def test_writer_serialize_form(buf, stream, writer) -> None: +async def test_writer_serialize_form( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: data = [("foo", "bar"), ("foo", "baz"), ("boo", "zoo")] writer.append_form(data) await writer.write(stream) @@ -1154,7 +1161,9 @@ async def test_writer_serialize_form(buf, stream, writer) -> None: assert b"foo=bar&foo=baz&boo=zoo" in buf -async def test_writer_serialize_form_dict(buf, stream, writer) -> None: +async def test_writer_serialize_form_dict( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: data = {"hello": "мир"} writer.append_form(data) await writer.write(stream) @@ -1162,7 +1171,9 @@ async def test_writer_serialize_form_dict(buf, stream, writer) -> None: assert b"hello=%D0%BC%D0%B8%D1%80" in buf -async def test_writer_write(buf, stream, writer) -> None: +async def test_writer_write( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("foo-bar-baz") 
writer.append_json({"test": "passed"}) writer.append_form({"test": "passed"}) @@ -1208,7 +1219,9 @@ async def test_writer_write(buf, stream, writer) -> None: ) == bytes(buf) -async def test_writer_write_no_close_boundary(buf, stream) -> None: +async def test_writer_write_no_close_boundary( + buf: bytearray, stream: AbstractStreamWriter +) -> None: writer = aiohttp.MultipartWriter(boundary=":") writer.append("foo-bar-baz") writer.append_json({"test": "passed"}) @@ -1240,13 +1253,19 @@ async def test_writer_write_no_close_boundary(buf, stream) -> None: ) == bytes(buf) -async def test_writer_write_no_parts(buf, stream, writer) -> None: +async def test_writer_write_no_parts( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: await writer.write(stream) assert b"--:--\r\n" == bytes(buf) @pytest.mark.usefixtures("parametrize_zlib_backend") -async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): +async def test_writer_serialize_with_content_encoding_gzip( + buf: bytearray, + stream: AbstractStreamWriter, + writer: aiohttp.MultipartWriter, +) -> None: writer.append("Time to Relax!", {CONTENT_ENCODING: "gzip"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1262,7 +1281,9 @@ async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): assert b"Time to Relax!" 
== data -async def test_writer_serialize_with_content_encoding_deflate(buf, stream, writer): +async def test_writer_serialize_with_content_encoding_deflate( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("Time to Relax!", {CONTENT_ENCODING: "deflate"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1276,7 +1297,9 @@ async def test_writer_serialize_with_content_encoding_deflate(buf, stream, write assert thing == message -async def test_writer_serialize_with_content_encoding_identity(buf, stream, writer): +async def test_writer_serialize_with_content_encoding_identity( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: thing = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00" writer.append(thing, {CONTENT_ENCODING: "identity"}) await writer.write(stream) @@ -1291,12 +1314,16 @@ async def test_writer_serialize_with_content_encoding_identity(buf, stream, writ assert thing == message.split(b"\r\n")[0] -def test_writer_serialize_with_content_encoding_unknown(buf, stream, writer): +def test_writer_serialize_with_content_encoding_unknown( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: with pytest.raises(RuntimeError): writer.append("Time to Relax!", {CONTENT_ENCODING: "snappy"}) -async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer): +async def test_writer_with_content_transfer_encoding_base64( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "base64"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1309,7 +1336,9 @@ async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer) assert b"VGltZSB0byBSZWxheCE=" == message.split(b"\r\n")[0] -async def test_writer_content_transfer_encoding_quote_printable(buf, stream, 
writer): +async def test_writer_content_transfer_encoding_quote_printable( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("Привет, мир!", {CONTENT_TRANSFER_ENCODING: "quoted-printable"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1325,7 +1354,9 @@ async def test_writer_content_transfer_encoding_quote_printable(buf, stream, wri ) -def test_writer_content_transfer_encoding_unknown(buf, stream, writer) -> None: +def test_writer_content_transfer_encoding_unknown( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: with pytest.raises(RuntimeError): writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "unknown"}) @@ -1449,7 +1480,9 @@ def test_append_none_not_allowed(self) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: writer.append(None) - async def test_write_preserves_content_disposition(self, buf, stream) -> None: + async def test_write_preserves_content_disposition( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: part = writer.append(b"foo", headers={CONTENT_TYPE: "test/passed"}) part.set_content_disposition("form-data", filename="bug") @@ -1466,7 +1499,9 @@ async def test_write_preserves_content_disposition(self, buf, stream) -> None: ) assert message == b"foo\r\n--:--\r\n" - async def test_preserve_content_disposition_header(self, buf, stream): + async def test_preserve_content_disposition_header( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: @@ -1490,7 +1525,9 @@ async def test_preserve_content_disposition_header(self, buf, stream): b'Content-Disposition: attachments; filename="bug.py"' ) - async def 
test_set_content_disposition_override(self, buf, stream): + async def test_set_content_disposition_override( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: @@ -1514,7 +1551,9 @@ async def test_set_content_disposition_override(self, buf, stream): b'Content-Disposition: attachments; filename="bug.py"' ) - async def test_reset_content_disposition_header(self, buf, stream): + async def test_reset_content_disposition_header( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: From 2b920c39002cee0ec5b402581779bbaaf7c9138a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 15:56:02 +0000 Subject: [PATCH 92/93] Use decompressor max_length parameter (#11898) (#11918) (cherry picked from commit 92477c5a74c43dfe0474bd24f8de11875daa2298) --------- Co-authored-by: J. 
Nick Koston --- CHANGES/11898.breaking.rst | 2 + aiohttp/compression_utils.py | 121 ++++++++++++++++++++------------ aiohttp/http_exceptions.py | 4 ++ aiohttp/http_parser.py | 23 +++++- aiohttp/multipart.py | 31 +++++--- aiohttp/web_request.py | 2 +- docs/spelling_wordlist.txt | 1 + pyproject.toml | 4 +- requirements/runtime-deps.in | 4 +- tests/test_client_functional.py | 114 +++++++++++++++++++++++++++++- tests/test_http_parser.py | 34 +++++++++ tests/test_multipart.py | 83 ++++++++++++++++------ 12 files changed, 335 insertions(+), 88 deletions(-) create mode 100644 CHANGES/11898.breaking.rst diff --git a/CHANGES/11898.breaking.rst b/CHANGES/11898.breaking.rst new file mode 100644 index 00000000000..cfbf2ae4727 --- /dev/null +++ b/CHANGES/11898.breaking.rst @@ -0,0 +1,2 @@ +``Brotli`` and ``brotlicffi`` minimum version is now 1.2. +Decompression now has a default maximum output size of 32MiB per decompress call -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index c51fc524f98..e478d24c3d7 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -1,6 +1,7 @@ import asyncio import sys import zlib +from abc import ABC, abstractmethod from concurrent.futures import Executor from typing import Any, Final, Optional, Protocol, TypedDict, cast @@ -32,7 +33,12 @@ HAS_ZSTD = False -MAX_SYNC_CHUNK_SIZE = 1024 +MAX_SYNC_CHUNK_SIZE = 4096 +DEFAULT_MAX_DECOMPRESS_SIZE = 2**25 # 32MiB + +# Unlimited decompression constants - different libraries use different conventions +ZLIB_MAX_LENGTH_UNLIMITED = 0 # zlib uses 0 to mean unlimited +ZSTD_MAX_LENGTH_UNLIMITED = -1 # zstd uses -1 to mean unlimited class ZLibCompressObjProtocol(Protocol): @@ -144,19 +150,37 @@ def encoding_to_mode( return -ZLibBackend.MAX_WBITS if suppress_deflate_header else ZLibBackend.MAX_WBITS -class ZlibBaseHandler: +class DecompressionBaseHandler(ABC): def __init__( self, - mode: int, executor: Optional[Executor] = None, 
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ): - self._mode = mode + """Base class for decompression handlers.""" self._executor = executor self._max_sync_chunk_size = max_sync_chunk_size + @abstractmethod + def decompress_sync( + self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + """Decompress the given data.""" + + async def decompress( + self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + """Decompress the given data.""" + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.decompress_sync, data, max_length + ) + return self.decompress_sync(data, max_length) + -class ZLibCompressor(ZlibBaseHandler): +class ZLibCompressor: def __init__( self, encoding: Optional[str] = None, @@ -167,14 +191,12 @@ def __init__( executor: Optional[Executor] = None, max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ): - super().__init__( - mode=( - encoding_to_mode(encoding, suppress_deflate_header) - if wbits is None - else wbits - ), - executor=executor, - max_sync_chunk_size=max_sync_chunk_size, + self._executor = executor + self._max_sync_chunk_size = max_sync_chunk_size + self._mode = ( + encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits ) self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) @@ -233,7 +255,7 @@ def flush(self, mode: Optional[int] = None) -> bytes: ) -class ZLibDecompressor(ZlibBaseHandler): +class ZLibDecompressor(DecompressionBaseHandler): def __init__( self, encoding: Optional[str] = None, @@ -241,33 +263,16 @@ def __init__( executor: Optional[Executor] = None, max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ): - super().__init__( - mode=encoding_to_mode(encoding, suppress_deflate_header), - executor=executor, - max_sync_chunk_size=max_sync_chunk_size, - ) + super().__init__(executor=executor, 
max_sync_chunk_size=max_sync_chunk_size) + self._mode = encoding_to_mode(encoding, suppress_deflate_header) self._zlib_backend: Final = ZLibBackendWrapper(ZLibBackend._zlib_backend) self._decompressor = self._zlib_backend.decompressobj(wbits=self._mode) - def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: + def decompress_sync( + self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: return self._decompressor.decompress(data, max_length) - async def decompress(self, data: bytes, max_length: int = 0) -> bytes: - """Decompress the data and return the decompressed bytes. - - If the data size is large than the max_sync_chunk_size, the decompression - will be done in the executor. Otherwise, the decompression will be done - in the event loop. - """ - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_running_loop().run_in_executor( - self._executor, self._decompressor.decompress, data, max_length - ) - return self.decompress_sync(data, max_length) - def flush(self, length: int = 0) -> bytes: return ( self._decompressor.flush(length) @@ -280,40 +285,64 @@ def eof(self) -> bool: return self._decompressor.eof -class BrotliDecompressor: +class BrotliDecompressor(DecompressionBaseHandler): # Supports both 'brotlipy' and 'Brotli' packages # since they share an import name. The top branches # are for 'brotlipy' and bottom branches for 'Brotli' - def __init__(self) -> None: + def __init__( + self, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ) -> None: + """Decompress data using the Brotli library.""" if not HAS_BROTLI: raise RuntimeError( "The brotli decompression is not available. 
" "Please install `Brotli` module" ) self._obj = brotli.Decompressor() + super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) - def decompress_sync(self, data: bytes) -> bytes: + def decompress_sync( + self, data: Buffer, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + """Decompress the given data.""" if hasattr(self._obj, "decompress"): - return cast(bytes, self._obj.decompress(data)) - return cast(bytes, self._obj.process(data)) + return cast(bytes, self._obj.decompress(data, max_length)) + return cast(bytes, self._obj.process(data, max_length)) def flush(self) -> bytes: + """Flush the decompressor.""" if hasattr(self._obj, "flush"): return cast(bytes, self._obj.flush()) return b"" -class ZSTDDecompressor: - def __init__(self) -> None: +class ZSTDDecompressor(DecompressionBaseHandler): + def __init__( + self, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ) -> None: if not HAS_ZSTD: raise RuntimeError( "The zstd decompression is not available. 
" "Please install `backports.zstd` module" ) self._obj = ZstdDecompressor() - - def decompress_sync(self, data: bytes) -> bytes: - return self._obj.decompress(data) + super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) + + def decompress_sync( + self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED + ) -> bytes: + # zstd uses -1 for unlimited, while zlib uses 0 for unlimited + # Convert the zlib convention (0=unlimited) to zstd convention (-1=unlimited) + zstd_max_length = ( + ZSTD_MAX_LENGTH_UNLIMITED + if max_length == ZLIB_MAX_LENGTH_UNLIMITED + else max_length + ) + return self._obj.decompress(data, zstd_max_length) def flush(self) -> bytes: return b"" diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index 773830211e6..0b5867c7861 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -74,6 +74,10 @@ class ContentLengthError(PayloadEncodingError): """Not enough data to satisfy content length header.""" +class DecompressSizeError(PayloadEncodingError): + """Decompressed size exceeds the configured limit.""" + + class LineTooLong(BadHttpMessage): def __init__( self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 90327dbe661..393e76a1586 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -27,6 +27,7 @@ from . 
import hdrs from .base_protocol import BaseProtocol from .compression_utils import ( + DEFAULT_MAX_DECOMPRESS_SIZE, HAS_BROTLI, HAS_ZSTD, BrotliDecompressor, @@ -48,6 +49,7 @@ BadStatusLine, ContentEncodingError, ContentLengthError, + DecompressSizeError, InvalidHeader, InvalidURLError, LineTooLong, @@ -963,7 +965,12 @@ class DeflateBuffer: decompressor: Any - def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + def __init__( + self, + out: StreamReader, + encoding: Optional[str], + max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, + ) -> None: self.out = out self.size = 0 out.total_compressed_bytes = self.size @@ -988,6 +995,8 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: else: self.decompressor = ZLibDecompressor(encoding=encoding) + self._max_decompress_size = max_decompress_size + def set_exception( self, exc: BaseException, @@ -1017,7 +1026,10 @@ def feed_data(self, chunk: bytes, size: int) -> None: ) try: - chunk = self.decompressor.decompress_sync(chunk) + # Decompress with limit + 1 so we can detect if output exceeds limit + chunk = self.decompressor.decompress_sync( + chunk, max_length=self._max_decompress_size + 1 + ) except Exception: raise ContentEncodingError( "Can not decode content-encoding: %s" % self.encoding @@ -1025,6 +1037,13 @@ def feed_data(self, chunk: bytes, size: int) -> None: self._started_decoding = True + # Check if decompression limit was exceeded + if len(chunk) > self._max_decompress_size: + raise DecompressSizeError( + "Decompressed data exceeds the configured limit of %d bytes" + % self._max_decompress_size + ) + if chunk: self.out.feed_data(chunk, len(chunk)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 872294e4fe3..9c37f0bb716 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -25,7 +25,12 @@ from multidict import CIMultiDict, CIMultiDictProxy -from .compression_utils import ZLibCompressor, ZLibDecompressor +from .abc import AbstractStreamWriter 
+from .compression_utils import ( + DEFAULT_MAX_DECOMPRESS_SIZE, + ZLibCompressor, + ZLibDecompressor, +) from .hdrs import ( CONTENT_DISPOSITION, CONTENT_ENCODING, @@ -273,6 +278,7 @@ def __init__( *, subtype: str = "mixed", default_charset: Optional[str] = None, + max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, ) -> None: self.headers = headers self._boundary = boundary @@ -289,6 +295,7 @@ def __init__( self._prev_chunk: Optional[bytes] = None self._content_eof = 0 self._cache: Dict[str, Any] = {} + self._max_decompress_size = max_decompress_size def __aiter__(self: Self) -> Self: return self @@ -318,7 +325,7 @@ async def read(self, *, decode: bool = False) -> bytes: while not self._at_eof: data.extend(await self.read_chunk(self.chunk_size)) if decode: - return self.decode(data) + return await self.decode(data) return data async def read_chunk(self, size: int = chunk_size) -> bytes: @@ -496,7 +503,7 @@ def at_eof(self) -> bool: """Returns True if the boundary was reached or False otherwise.""" return self._at_eof - def decode(self, data: bytes) -> bytes: + async def decode(self, data: bytes) -> bytes: """Decodes data. 
Decoding is done according the specified Content-Encoding @@ -506,18 +513,18 @@ def decode(self, data: bytes) -> bytes: data = self._decode_content_transfer(data) # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 if not self._is_form_data and CONTENT_ENCODING in self.headers: - return self._decode_content(data) + return await self._decode_content(data) return data - def _decode_content(self, data: bytes) -> bytes: + async def _decode_content(self, data: bytes) -> bytes: encoding = self.headers.get(CONTENT_ENCODING, "").lower() if encoding == "identity": return data if encoding in {"deflate", "gzip"}: - return ZLibDecompressor( + return await ZLibDecompressor( encoding=encoding, suppress_deflate_header=True, - ).decompress_sync(data) + ).decompress(data, max_length=self._max_decompress_size) raise RuntimeError(f"unknown content encoding: {encoding}") @@ -588,11 +595,11 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt """ raise TypeError("Unable to read body part as bytes. 
Use write() to consume.") - async def write(self, writer: Any) -> None: + async def write(self, writer: AbstractStreamWriter) -> None: field = self._value chunk = await field.read_chunk(size=2**16) while chunk: - await writer.write(field.decode(chunk)) + await writer.write(await field.decode(chunk)) chunk = await field.read_chunk(size=2**16) @@ -1032,7 +1039,9 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt return b"".join(parts) - async def write(self, writer: Any, close_boundary: bool = True) -> None: + async def write( + self, writer: AbstractStreamWriter, close_boundary: bool = True + ) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: if self._is_form_data: @@ -1086,7 +1095,7 @@ async def close(self) -> None: class MultipartPayloadWriter: - def __init__(self, writer: Any) -> None: + def __init__(self, writer: AbstractStreamWriter) -> None: self._writer = writer self._encoding: Optional[str] = None self._compress: Optional[ZLibCompressor] = None diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index b5fa40c2637..0eafcd6e34c 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -740,7 +740,7 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": ) chunk = await field.read_chunk(size=2**16) while chunk: - chunk = field.decode(chunk) + chunk = await field.decode(chunk) await self._loop.run_in_executor(None, tmp.write, chunk) size += len(chunk) if 0 < max_size < size: diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 0912c312f6d..d6e1acb15db 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -189,6 +189,7 @@ lowercased Mako manylinux metadata +MiB microservice middleware middlewares diff --git a/pyproject.toml b/pyproject.toml index 8d5719fc871..d1550fee500 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,8 +50,8 @@ dynamic = [ [project.optional-dependencies] speedups = [ "aiodns >= 3.3.0", - "Brotli; 
platform_python_implementation == 'CPython'", - "brotlicffi; platform_python_implementation != 'CPython'", + "Brotli >= 1.2; platform_python_implementation == 'CPython'", + "brotlicffi >= 1.2; platform_python_implementation != 'CPython'", "backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14'", ] diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 95db17e158d..02e4664704c 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -6,8 +6,8 @@ aiosignal >= 1.4.0 async-timeout >= 4.0, < 6.0 ; python_version < '3.11' attrs >= 17.3.0 backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14' -Brotli; platform_python_implementation == 'CPython' -brotlicffi; platform_python_implementation != 'CPython' +Brotli >= 1.2; platform_python_implementation == 'CPython' +brotlicffi >= 1.2; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 multidict >=4.5, < 7.0 propcache >= 0.2.0 diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 34cc69f88a7..ebada9f572f 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -13,6 +13,7 @@ import tarfile import time import zipfile +import zlib from contextlib import suppress from typing import ( Any, @@ -26,6 +27,19 @@ ) from unittest import mock +try: + try: + import brotlicffi as brotli + except ImportError: + import brotli +except ImportError: + brotli = None # pragma: no cover + +try: + from backports.zstd import ZstdCompressor +except ImportError: + ZstdCompressor = None # type: ignore[assignment,misc] # pragma: no cover + import pytest from multidict import MultiDict from pytest_mock import MockerFixture @@ -45,7 +59,9 @@ TooManyRedirects, ) from aiohttp.client_reqrep import ClientRequest +from aiohttp.compression_utils import DEFAULT_MAX_DECOMPRESS_SIZE from aiohttp.connector import Connection +from aiohttp.http_exceptions import DecompressSizeError from 
aiohttp.http_writer import StreamWriter from aiohttp.payload import ( AsyncIterablePayload, @@ -2419,8 +2435,102 @@ async def handler(request): resp.close() -async def test_bad_payload_chunked_encoding(aiohttp_client) -> None: - async def handler(request): +async def test_payload_decompress_size_limit(aiohttp_client: AiohttpClient) -> None: + """Test that decompression size limit triggers DecompressSizeError. + + When a compressed payload expands beyond the configured limit, + we raise DecompressSizeError. + """ + # Create a highly compressible payload that exceeds the decompression limit. + # 64MiB of repeated bytes compresses to ~32KB but expands beyond the + # 32MiB per-call limit. + original = b"A" * (64 * 2**20) + compressed = zlib.compress(original) + assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE + + async def handler(request: web.Request) -> web.Response: + # Send compressed data with Content-Encoding header + resp = web.Response(body=compressed) + resp.headers["Content-Encoding"] = "deflate" + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + + with pytest.raises(aiohttp.ClientPayloadError) as exc_info: + await resp.read() + + assert isinstance(exc_info.value.__cause__, DecompressSizeError) + assert "Decompressed data exceeds" in str(exc_info.value.__cause__) + + +@pytest.mark.skipif(brotli is None, reason="brotli is not installed") +async def test_payload_decompress_size_limit_brotli( + aiohttp_client: AiohttpClient, +) -> None: + """Test that brotli decompression size limit triggers DecompressSizeError.""" + assert brotli is not None + # Create a highly compressible payload that exceeds the decompression limit. 
+ original = b"A" * (64 * 2**20) + compressed = brotli.compress(original) + assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=compressed) + resp.headers["Content-Encoding"] = "br" + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + + with pytest.raises(aiohttp.ClientPayloadError) as exc_info: + await resp.read() + + assert isinstance(exc_info.value.__cause__, DecompressSizeError) + assert "Decompressed data exceeds" in str(exc_info.value.__cause__) + + +@pytest.mark.skipif(ZstdCompressor is None, reason="backports.zstd is not installed") +async def test_payload_decompress_size_limit_zstd( + aiohttp_client: AiohttpClient, +) -> None: + """Test that zstd decompression size limit triggers DecompressSizeError.""" + assert ZstdCompressor is not None + # Create a highly compressible payload that exceeds the decompression limit. 
+ original = b"A" * (64 * 2**20) + compressor = ZstdCompressor() + compressed = compressor.compress(original) + compressor.flush() + assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE + + async def handler(request: web.Request) -> web.Response: + resp = web.Response(body=compressed) + resp.headers["Content-Encoding"] = "zstd" + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == 200 + + with pytest.raises(aiohttp.ClientPayloadError) as exc_info: + await resp.read() + + assert isinstance(exc_info.value.__cause__, DecompressSizeError) + assert "Decompressed data exceeds" in str(exc_info.value.__cause__) + + +async def test_bad_payload_chunked_encoding(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.StreamResponse: resp = web.StreamResponse() resp.force_close() resp._length_check = False diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index cc0a812f958..bf3428e770b 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -3,6 +3,7 @@ import asyncio import re import sys +import zlib from contextlib import nullcontext from typing import Any, Dict, List from unittest import mock @@ -1919,3 +1920,36 @@ async def test_empty_body(self, protocol: BaseProtocol) -> None: dbuf.feed_eof() assert buf.at_eof() + + @pytest.mark.parametrize( + "chunk_size", + [1024, 2**14, 2**16], # 1KB, 16KB, 64KB + ids=["1KB", "16KB", "64KB"], + ) + async def test_streaming_decompress_large_payload( + self, protocol: BaseProtocol, chunk_size: int + ) -> None: + """Test that large payloads decompress correctly when streamed in chunks. + + This simulates real HTTP streaming where compressed data arrives in + small network chunks. Each chunk's decompressed output should be within + the max_decompress_size limit, allowing full recovery of the original data. 
+ """ + # Create a large payload (3MiB) that compresses well + original = b"A" * (3 * 2**20) + compressed = zlib.compress(original) + + buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) + dbuf = DeflateBuffer(buf, "deflate") + + # Feed compressed data in chunks (simulating network streaming) + for i in range(0, len(compressed), chunk_size): + chunk = compressed[i : i + chunk_size] + dbuf.feed_data(chunk, len(chunk)) + + dbuf.feed_eof() + + # Read all decompressed data + result = b"".join(buf._buffer) + assert len(result) == len(original) + assert result == original diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 2fa2e331df2..4e52a3cba1c 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -10,6 +10,7 @@ import aiohttp from aiohttp import payload +from aiohttp.abc import AbstractStreamWriter from aiohttp.compression_utils import ZLibBackend from aiohttp.hdrs import ( CONTENT_DISPOSITION, @@ -37,14 +38,14 @@ def buf(): @pytest.fixture -def stream(buf): - writer = mock.Mock() +def stream(buf: bytearray) -> AbstractStreamWriter: + writer = mock.create_autospec(AbstractStreamWriter, instance=True, spec_set=True) async def write(chunk): buf.extend(chunk) writer.write.side_effect = write - return writer + return writer # type: ignore[no-any-return] @pytest.fixture @@ -416,7 +417,7 @@ async def test_decode_with_content_transfer_encoding_base64(self) -> None: result = b"" while not obj.at_eof(): chunk = await obj.read_chunk(size=6) - result += obj.decode(chunk) + result += await obj.decode(chunk) assert b"Time to Relax!" 
== result @pytest.mark.parametrize("encoding", ("binary", "8bit", "7bit")) @@ -1129,7 +1130,9 @@ async def test_writer(writer) -> None: assert writer.boundary == ":" -async def test_writer_serialize_io_chunk(buf, stream, writer) -> None: +async def test_writer_serialize_io_chunk( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: with io.BytesIO(b"foobarbaz") as file_handle: writer.append(file_handle) await writer.write(stream) @@ -1139,7 +1142,9 @@ async def test_writer_serialize_io_chunk(buf, stream, writer) -> None: ) -async def test_writer_serialize_json(buf, stream, writer) -> None: +async def test_writer_serialize_json( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append_json({"привет": "мир"}) await writer.write(stream) assert ( @@ -1148,7 +1153,9 @@ async def test_writer_serialize_json(buf, stream, writer) -> None: ) -async def test_writer_serialize_form(buf, stream, writer) -> None: +async def test_writer_serialize_form( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: data = [("foo", "bar"), ("foo", "baz"), ("boo", "zoo")] writer.append_form(data) await writer.write(stream) @@ -1156,7 +1163,9 @@ async def test_writer_serialize_form(buf, stream, writer) -> None: assert b"foo=bar&foo=baz&boo=zoo" in buf -async def test_writer_serialize_form_dict(buf, stream, writer) -> None: +async def test_writer_serialize_form_dict( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: data = {"hello": "мир"} writer.append_form(data) await writer.write(stream) @@ -1164,7 +1173,9 @@ async def test_writer_serialize_form_dict(buf, stream, writer) -> None: assert b"hello=%D0%BC%D0%B8%D1%80" in buf -async def test_writer_write(buf, stream, writer) -> None: +async def test_writer_write( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("foo-bar-baz") 
writer.append_json({"test": "passed"}) writer.append_form({"test": "passed"}) @@ -1210,7 +1221,9 @@ async def test_writer_write(buf, stream, writer) -> None: ) == bytes(buf) -async def test_writer_write_no_close_boundary(buf, stream) -> None: +async def test_writer_write_no_close_boundary( + buf: bytearray, stream: AbstractStreamWriter +) -> None: writer = aiohttp.MultipartWriter(boundary=":") writer.append("foo-bar-baz") writer.append_json({"test": "passed"}) @@ -1242,13 +1255,19 @@ async def test_writer_write_no_close_boundary(buf, stream) -> None: ) == bytes(buf) -async def test_writer_write_no_parts(buf, stream, writer) -> None: +async def test_writer_write_no_parts( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: await writer.write(stream) assert b"--:--\r\n" == bytes(buf) @pytest.mark.usefixtures("parametrize_zlib_backend") -async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): +async def test_writer_serialize_with_content_encoding_gzip( + buf: bytearray, + stream: AbstractStreamWriter, + writer: aiohttp.MultipartWriter, +) -> None: writer.append("Time to Relax!", {CONTENT_ENCODING: "gzip"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1264,7 +1283,9 @@ async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): assert b"Time to Relax!" 
== data -async def test_writer_serialize_with_content_encoding_deflate(buf, stream, writer): +async def test_writer_serialize_with_content_encoding_deflate( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("Time to Relax!", {CONTENT_ENCODING: "deflate"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1278,7 +1299,9 @@ async def test_writer_serialize_with_content_encoding_deflate(buf, stream, write assert thing == message -async def test_writer_serialize_with_content_encoding_identity(buf, stream, writer): +async def test_writer_serialize_with_content_encoding_identity( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: thing = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00" writer.append(thing, {CONTENT_ENCODING: "identity"}) await writer.write(stream) @@ -1293,12 +1316,16 @@ async def test_writer_serialize_with_content_encoding_identity(buf, stream, writ assert thing == message.split(b"\r\n")[0] -def test_writer_serialize_with_content_encoding_unknown(buf, stream, writer): +def test_writer_serialize_with_content_encoding_unknown( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: with pytest.raises(RuntimeError): writer.append("Time to Relax!", {CONTENT_ENCODING: "snappy"}) -async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer): +async def test_writer_with_content_transfer_encoding_base64( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "base64"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1311,7 +1338,9 @@ async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer) assert b"VGltZSB0byBSZWxheCE=" == message.split(b"\r\n")[0] -async def test_writer_content_transfer_encoding_quote_printable(buf, stream, 
writer): +async def test_writer_content_transfer_encoding_quote_printable( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: writer.append("Привет, мир!", {CONTENT_TRANSFER_ENCODING: "quoted-printable"}) await writer.write(stream) headers, message = bytes(buf).split(b"\r\n\r\n", 1) @@ -1327,7 +1356,9 @@ async def test_writer_content_transfer_encoding_quote_printable(buf, stream, wri ) -def test_writer_content_transfer_encoding_unknown(buf, stream, writer) -> None: +def test_writer_content_transfer_encoding_unknown( + buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter +) -> None: with pytest.raises(RuntimeError): writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "unknown"}) @@ -1451,7 +1482,9 @@ def test_append_none_not_allowed(self) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: writer.append(None) - async def test_write_preserves_content_disposition(self, buf, stream) -> None: + async def test_write_preserves_content_disposition( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: with aiohttp.MultipartWriter(boundary=":") as writer: part = writer.append(b"foo", headers={CONTENT_TYPE: "test/passed"}) part.set_content_disposition("form-data", filename="bug") @@ -1468,7 +1501,9 @@ async def test_write_preserves_content_disposition(self, buf, stream) -> None: ) assert message == b"foo\r\n--:--\r\n" - async def test_preserve_content_disposition_header(self, buf, stream): + async def test_preserve_content_disposition_header( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: @@ -1492,7 +1527,9 @@ async def test_preserve_content_disposition_header(self, buf, stream): b'Content-Disposition: attachments; filename="bug.py"' ) - async def 
test_set_content_disposition_override(self, buf, stream): + async def test_set_content_disposition_override( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: @@ -1516,7 +1553,9 @@ async def test_set_content_disposition_override(self, buf, stream): b'Content-Disposition: attachments; filename="bug.py"' ) - async def test_reset_content_disposition_header(self, buf, stream): + async def test_reset_content_disposition_header( + self, buf: bytearray, stream: AbstractStreamWriter + ) -> None: # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: From 41f01ed680b7efc8f97677df45731b80328aad6c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 3 Jan 2026 16:25:40 +0000 Subject: [PATCH 93/93] Release v3.13.3 (#11919) --- CHANGES.rst | 96 +++++++++++++++++++++++++++++++++++++ CHANGES/10713.misc.rst | 1 - CHANGES/11643.packaging.rst | 2 - CHANGES/11689.contrib.rst | 1 - CHANGES/11778.misc.rst | 1 - CHANGES/11857.bugfix.rst | 1 - CHANGES/11862.bugfix.rst | 1 - CHANGES/11898.breaking.rst | 2 - CHANGES/2596.bugfix.rst | 2 - aiohttp/__init__.py | 2 +- 10 files changed, 97 insertions(+), 12 deletions(-) delete mode 100644 CHANGES/10713.misc.rst delete mode 100644 CHANGES/11643.packaging.rst delete mode 100644 CHANGES/11689.contrib.rst delete mode 100644 CHANGES/11778.misc.rst delete mode 100644 CHANGES/11857.bugfix.rst delete mode 100644 CHANGES/11862.bugfix.rst delete mode 100644 CHANGES/11898.breaking.rst delete mode 100644 CHANGES/2596.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index fd193db6959..466fce8795d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,102 @@ .. 
towncrier release notes start +3.13.3 (2026-01-03) +=================== + +This release contains fixes for several vulnerabilities. It is advised to +upgrade as soon as possible. + +Bug fixes +--------- + +- Fixed proxy authorization headers not being passed when reusing a connection, which caused 407 (Proxy authentication required) errors + -- by :user:`GLeurquin`. + + + *Related issues and pull requests on GitHub:* + :issue:`2596`. + + + +- Fixed multipart reading failing when encountering an empty body part -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`11857`. + + + +- Fixed a case where the parser wasn't raising an exception for a websocket continuation frame when there was no initial frame in context. + + + *Related issues and pull requests on GitHub:* + :issue:`11862`. + + + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- ``Brotli`` and ``brotlicffi`` minimum version is now 1.2. + Decompression now has a default maximum output size of 32MiB per decompress call -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`11898`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Moved dependency metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` + -- by :user:`cdce8p`. + + + *Related issues and pull requests on GitHub:* + :issue:`11643`. + + + + +Contributor-facing changes +-------------------------- + +- Removed unused ``update-pre-commit`` github action workflow -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`11689`. + + + + +Miscellaneous internal changes +------------------------------ + +- Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`10713`. 
+ + + +- Added regression test for cached logging status -- by :user:`meehand`. + + + *Related issues and pull requests on GitHub:* + :issue:`11778`. + + + + +---- + + 3.13.2 (2025-10-28) =================== diff --git a/CHANGES/10713.misc.rst b/CHANGES/10713.misc.rst deleted file mode 100644 index a556d11e1e0..00000000000 --- a/CHANGES/10713.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Optimized web server performance when access logging is disabled by reducing time syscalls -- by :user:`bdraco`. diff --git a/CHANGES/11643.packaging.rst b/CHANGES/11643.packaging.rst deleted file mode 100644 index 8ef91a18788..00000000000 --- a/CHANGES/11643.packaging.rst +++ /dev/null @@ -1,2 +0,0 @@ -Moved dependency metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` --- by :user:`cdce8p`. diff --git a/CHANGES/11689.contrib.rst b/CHANGES/11689.contrib.rst deleted file mode 100644 index 1f6404aa4bd..00000000000 --- a/CHANGES/11689.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Removed unused ``update-pre-commit`` github action workflow -- by :user:`Cycloctane`. diff --git a/CHANGES/11778.misc.rst b/CHANGES/11778.misc.rst deleted file mode 100644 index ad29aea920a..00000000000 --- a/CHANGES/11778.misc.rst +++ /dev/null @@ -1 +0,0 @@ -Added regression test for cached logging status -- by :user:`meehand`. diff --git a/CHANGES/11857.bugfix.rst b/CHANGES/11857.bugfix.rst deleted file mode 100644 index 7933efeb074..00000000000 --- a/CHANGES/11857.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed multipart reading failing when encountering an empty body part -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/11862.bugfix.rst b/CHANGES/11862.bugfix.rst deleted file mode 100644 index c2ce176c2c3..00000000000 --- a/CHANGES/11862.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -A test for websocket parser was marked to fail, which was actually failing because the parser wasn't raising an exception for a continuation frame when there was no initial frame in context. 
diff --git a/CHANGES/11898.breaking.rst b/CHANGES/11898.breaking.rst deleted file mode 100644 index cfbf2ae4727..00000000000 --- a/CHANGES/11898.breaking.rst +++ /dev/null @@ -1,2 +0,0 @@ -``Brotli`` and ``brotlicffi`` minimum version is now 1.2. -Decompression now has a default maximum output size of 32MiB per decompress call -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/2596.bugfix.rst b/CHANGES/2596.bugfix.rst deleted file mode 100644 index e172506bcde..00000000000 --- a/CHANGES/2596.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed proxy authorization headers not being passed when reusing a connection, which caused 407 (Proxy authentication required) errors --- by :user:`GLeurquin`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 7200d24dd1c..357baf019de 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.13.3.dev0" +__version__ = "3.13.3" from typing import TYPE_CHECKING, Tuple