diff --git a/.agents/skills/buildkite-get-results/scripts/get_buildkite_results.py b/.agents/skills/buildkite-get-results/scripts/get_buildkite_results.py
index eb5532e4d6..769c253c6d 100755
--- a/.agents/skills/buildkite-get-results/scripts/get_buildkite_results.py
+++ b/.agents/skills/buildkite-get-results/scripts/get_buildkite_results.py
@@ -17,9 +17,7 @@ def get_pr_checks(pr_number):
stderr=subprocess.DEVNULL,
)
except FileNotFoundError:
- print(
- "Error: 'gh' (GitHub CLI) is not installed or not in PATH.", file=sys.stderr
- )
+ print("Error: 'gh' (GitHub CLI) is not installed or not in PATH.", file=sys.stderr)
sys.exit(1)
except subprocess.CalledProcessError:
print("Error: 'gh' command failed. Is it installed?", file=sys.stderr)
@@ -165,15 +163,18 @@ def main():
build_state = data.get("state", "Unknown")
print(f"Build State: {build_state}")
-
+
jobs = data.get("jobs", [])
jobs_count = data.get("statistics", {}).get("jobs_count", 0)
-
+
print(f"Total jobs reported: {jobs_count}")
print(f"Jobs found in data: {len(jobs)}")
-
+
if jobs_count != len(jobs):
- print(f"WARNING: Reported job count ({jobs_count}) does not match jobs found ({len(jobs)}).", file=sys.stderr)
+ print(
+ f"WARNING: Reported job count ({jobs_count}) does not match jobs found ({len(jobs)}).",
+ file=sys.stderr,
+ )
print("-" * 40)
diff --git a/.agents/skills/buildkite-retry-job/scripts/retry_buildkite_jobs.py b/.agents/skills/buildkite-retry-job/scripts/retry_buildkite_jobs.py
index 67385fb8fd..e7e4304a39 100755
--- a/.agents/skills/buildkite-retry-job/scripts/retry_buildkite_jobs.py
+++ b/.agents/skills/buildkite-retry-job/scripts/retry_buildkite_jobs.py
@@ -31,9 +31,7 @@ def make_request(url, method="GET", data=None, token=None):
def main():
- parser = argparse.ArgumentParser(
- description="Retry failed jobs in a Buildkite build."
- )
+ parser = argparse.ArgumentParser(description="Retry failed jobs in a Buildkite build.")
parser.add_argument("org", help="Organization slug")
parser.add_argument("pipeline", help="Pipeline slug")
parser.add_argument("build", help="Build number")
@@ -46,9 +44,7 @@ def main():
token = os.environ.get("BUILDKITE_API_TOKEN")
if not token:
- print(
- "Please set the BUILDKITE_API_TOKEN environment variable.", file=sys.stderr
- )
+ print("Please set the BUILDKITE_API_TOKEN environment variable.", file=sys.stderr)
sys.exit(1)
url = f"https://api.buildkite.com/v2/organizations/{args.org}/pipelines/{args.pipeline}/builds/{args.build}"
diff --git a/.github/workflows/ruff.yaml b/.github/workflows/ruff.yaml
new file mode 100644
index 0000000000..154b8c229c
--- /dev/null
+++ b/.github/workflows/ruff.yaml
@@ -0,0 +1,26 @@
+name: ruff
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+ types:
+ - opened
+ - synchronize
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - name: Install ruff
+ run: pip install ruff==0.15.13
+ - name: Run ruff check
+ run: ruff check --output-format=github
+ - name: Run ruff format check
+ run: ruff format --check
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 57d31f5f5f..0a442425a9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,18 +29,14 @@ repos:
- --warnings=all
- id: buildifier-lint
args: *args
- - repo: https://github.com/pycqa/isort
- rev: 5.12.0
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.15.13
hooks:
- - id: isort
- name: isort (python)
- args:
- - --profile
- - black
- - repo: https://github.com/psf/black
- rev: 25.1.0
- hooks:
- - id: black
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix]
+ exclude: ^(gazelle/python/testdata|tests/pypi/whl_library/testdata)
+ - id: ruff-format
+ exclude: ^(gazelle/python/testdata|tests/pypi/whl_library/testdata)
- repo: local
hooks:
- id: update-deleted-packages
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 917110978b..10d4d0db3d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -374,7 +374,7 @@ If you did `pre-commit install`, various tools are run when you do `git commit`.
This might show as an error such as:
```
-[INFO] Installing environment for https://github.com/psf/black.
+[INFO] Installing environment for https://github.com/astral-sh/ruff-pre-commit.
[INFO] Once installed this environment will be reused.
[INFO] This may take a few minutes...
An unexpected error has occurred: CalledProcessError: command: ...
@@ -382,6 +382,6 @@ An unexpected error has occurred: CalledProcessError: command: ...
To fix, you'll need to figure out what command is failing and why. Because these
are tools that run locally, its likely you'll need to fix something with your
-environment or the installation of the tools. For Python tools (e.g. black or
-isort), you can try using a different Python version in your shell by using
-tools such as [pyenv](https://github.com/pyenv/pyenv).
+environment or the installation of the tools. For Python tools (e.g. ruff), you
+can try using a different Python version in your shell by using tools such as
+[pyenv](https://github.com/pyenv/pyenv).
diff --git a/docs/conf.py b/docs/conf.py
index ef7b66acfa..e65f4855cc 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -69,26 +69,26 @@
"api/python/defs": "/api/rules_python/python/defs.html",
"api/python/index": "/api/rules_python/python/index.html",
"api/python/py_runtime_info": "/api/rules_python/python/py_runtime_info.html",
- "api/python/private/common/py_library_rule_bazel": "/api/rules_python/python/private/py_library_rule.html",
- "api/python/private/common/py_test_rule_bazel": "/api/rules_python/python/private/py_test_rule_bazel.html",
- "api/python/private/common/py_binary_rule_bazel": "/api/rules_python/python/private/py_binary_rule.html",
- "api/python/private/common/py_runtime_rule": "/api/rules_python/python/private/py_runtime_rule.html",
- "api/python/extensions/pip": "/api/rules_python/python/extensions/pip.html",
- "api/python/extensions/python": "/api/rules_python/python/extensions/python.html",
- "api/python/entry_points/py_console_script_binary": "/api/rules_python/python/entry_points/py_console_script_binary.html",
- "api/python/cc/py_cc_toolchain_info": "/api/rules_python/python/cc/py_cc_toolchain_info.html",
- "api/python/cc/index": "/api/rules_python/python/cc/index.html",
- "api/python/py_cc_link_params_info": "/api/rules_python/python/py_cc_link_params_info.html",
- "api/python/runtime_env_toolchains/index": "/api/rules_python/python/runtime_env_toolchains/index.html",
- "api/python/pip": "/api/rules_python/python/pip.html",
- "api/python/config_settings/index": "/api/rules_python/python/config_settings/index.html",
- "api/python/packaging": "/api/rules_python/python/packaging.html",
- "api/python/py_runtime": "/api/rules_python/python/py_runtime.html",
- "api/sphinxdocs/sphinx": "/api/sphinxdocs/sphinxdocs/sphinx.html",
- "api/sphinxdocs/sphinx_stardoc": "/api/sphinxdocs/sphinxdocs/sphinx_stardoc.html",
- "api/sphinxdocs/readthedocs": "/api/sphinxdocs/sphinxdocs/readthedocs.html",
- "api/sphinxdocs/index": "sphinxdocs/index.html",
- "api/sphinxdocs/private/sphinx_docs_library": "/api/sphinxdocs/sphinxdocs/private/sphinx_docs_library.html",
+ "api/python/private/common/py_library_rule_bazel": "/api/rules_python/python/private/py_library_rule.html", # noqa: E501
+ "api/python/private/common/py_test_rule_bazel": "/api/rules_python/python/private/py_test_rule_bazel.html", # noqa: E501
+ "api/python/private/common/py_binary_rule_bazel": "/api/rules_python/python/private/py_binary_rule.html", # noqa: E501
+ "api/python/private/common/py_runtime_rule": "/api/rules_python/python/private/py_runtime_rule.html", # noqa: E501
+ "api/python/extensions/pip": "/api/rules_python/python/extensions/pip.html", # noqa: E501
+ "api/python/extensions/python": "/api/rules_python/python/extensions/python.html", # noqa: E501
+ "api/python/entry_points/py_console_script_binary": "/api/rules_python/python/entry_points/py_console_script_binary.html", # noqa: E501
+ "api/python/cc/py_cc_toolchain_info": "/api/rules_python/python/cc/py_cc_toolchain_info.html", # noqa: E501
+ "api/python/cc/index": "/api/rules_python/python/cc/index.html", # noqa: E501
+ "api/python/py_cc_link_params_info": "/api/rules_python/python/py_cc_link_params_info.html", # noqa: E501
+ "api/python/runtime_env_toolchains/index": "/api/rules_python/python/runtime_env_toolchains/index.html", # noqa: E501
+ "api/python/pip": "/api/rules_python/python/pip.html", # noqa: E501
+ "api/python/config_settings/index": "/api/rules_python/python/config_settings/index.html", # noqa: E501
+ "api/python/packaging": "/api/rules_python/python/packaging.html", # noqa: E501
+ "api/python/py_runtime": "/api/rules_python/python/py_runtime.html", # noqa: E501
+ "api/sphinxdocs/sphinx": "/api/sphinxdocs/sphinxdocs/sphinx.html", # noqa: E501
+ "api/sphinxdocs/sphinx_stardoc": "/api/sphinxdocs/sphinxdocs/sphinx_stardoc.html", # noqa: E501
+ "api/sphinxdocs/readthedocs": "/api/sphinxdocs/sphinxdocs/readthedocs.html", # noqa: E501
+ "api/sphinxdocs/index": "sphinxdocs/index.html", # noqa: E501
+ "api/sphinxdocs/private/sphinx_docs_library": "/api/sphinxdocs/sphinxdocs/private/sphinx_docs_library.html", # noqa: E501
"api/sphinxdocs/sphinx_docs_library": "/api/sphinxdocs/sphinxdocs/sphinx_docs_library.html",
"api/sphinxdocs/inventories/index": "/api/sphinxdocs/sphinxdocs/inventories/index.html",
"pip.html": "pypi/index.html",
@@ -133,11 +133,11 @@
# --- Extlinks configuration
extlinks = {
"gh-issue": (
- f"https://github.com/bazel-contrib/rules_python/issues/%s",
+ "https://github.com/bazel-contrib/rules_python/issues/%s",
"#%s issue",
),
- "gh-path": (f"https://github.com/bazel-contrib/rules_python/tree/main/%s", "%s"),
- "gh-pr": (f"https://github.com/bazel-contrib/rules_python/pull/%s", "#%s PR"),
+ "gh-path": ("https://github.com/bazel-contrib/rules_python/tree/main/%s", "%s"),
+ "gh-pr": ("https://github.com/bazel-contrib/rules_python/pull/%s", "#%s PR"),
}
# --- MyST configuration
diff --git a/examples/bzlmod/entry_points/tests/pylint_deps_test.py b/examples/bzlmod/entry_points/tests/pylint_deps_test.py
index f6743ce9b5..47ae9896b7 100644
--- a/examples/bzlmod/entry_points/tests/pylint_deps_test.py
+++ b/examples/bzlmod/entry_points/tests/pylint_deps_test.py
@@ -15,7 +15,6 @@
import os
import pathlib
import subprocess
-import tempfile
import unittest
from python.runfiles import runfiles
@@ -29,17 +28,17 @@ def __init__(self, *args, **kwargs):
def test_pylint_entry_point(self):
rlocation_path = os.environ.get("ENTRY_POINT")
- assert (
- rlocation_path is not None
- ), "expected 'ENTRY_POINT' env variable to be set to rlocation of the tool"
+ assert rlocation_path is not None, (
+ "expected 'ENTRY_POINT' env variable to be set to rlocation of the tool"
+ )
entry_point = pathlib.Path(runfiles.Create().Rlocation(rlocation_path))
self.assertTrue(entry_point.exists(), f"'{entry_point}' does not exist")
# Let's run the entrypoint and check the tool version.
#
- # NOTE @aignas 2023-08-24: the Windows python launcher with Python 3.9 and bazel 6 is not happy if we start
- # passing extra files via `subprocess.run` and it starts to fail with an error that the file which is the
+ # NOTE @aignas 2023-08-24: the Windows python launcher with Python 3.9 and bazel 6 is not happy if we start # noqa: E501
+ # passing extra files via `subprocess.run` and it starts to fail with an error that the file which is the # noqa: E501
# entry_point cannot be found. However, just calling `--version` seems to be fine.
proc = subprocess.run(
[str(entry_point), "--version"],
@@ -51,20 +50,20 @@ def test_pylint_entry_point(self):
"",
proc.stderr.decode("utf-8").strip(),
)
- self.assertRegex(proc.stdout.decode("utf-8").strip(), "^pylint 2\.15\.9")
+ self.assertRegex(proc.stdout.decode("utf-8").strip(), r"^pylint 2\.15\.9")
def test_pylint_report_has_expected_warnings(self):
rlocation_path = os.environ.get("PYLINT_REPORT")
- assert (
- rlocation_path is not None
- ), "expected 'PYLINT_REPORT' env variable to be set to rlocation of the report"
+ assert rlocation_path is not None, (
+ "expected 'PYLINT_REPORT' env variable to be set to rlocation of the report"
+ )
pylint_report = pathlib.Path(runfiles.Create().Rlocation(rlocation_path))
self.assertTrue(pylint_report.exists(), f"'{pylint_report}' does not exist")
self.assertRegex(
pylint_report.read_text().strip(),
- "W8201: Logging should be used instead of the print\(\) function\. \(print-function\)",
+ r"W8201: Logging should be used instead of the print\(\) function\. \(print-function\)",
)
diff --git a/examples/bzlmod/entry_points/tests/pylint_test.py b/examples/bzlmod/entry_points/tests/pylint_test.py
index c2532938d8..0ae493b02c 100644
--- a/examples/bzlmod/entry_points/tests/pylint_test.py
+++ b/examples/bzlmod/entry_points/tests/pylint_test.py
@@ -28,17 +28,17 @@ def __init__(self, *args, **kwargs):
def test_pylint_entry_point(self):
rlocation_path = os.environ.get("ENTRY_POINT")
- assert (
- rlocation_path is not None
- ), "expected 'ENTRY_POINT' env variable to be set to rlocation of the tool"
+ assert rlocation_path is not None, (
+ "expected 'ENTRY_POINT' env variable to be set to rlocation of the tool"
+ )
entry_point = pathlib.Path(runfiles.Create().Rlocation(rlocation_path))
self.assertTrue(entry_point.exists(), f"'{entry_point}' does not exist")
# Let's run the entrypoint and check the tool version.
#
- # NOTE @aignas 2023-08-24: the Windows python launcher with Python 3.9 and bazel 6 is not happy if we start
- # passing extra files via `subprocess.run` and it starts to fail with an error that the file which is the
+ # NOTE @aignas 2023-08-24: the Windows python launcher with Python 3.9 and bazel 6 is not happy if we start # noqa: E501
+ # passing extra files via `subprocess.run` and it starts to fail with an error that the file which is the # noqa: E501
# entry_point cannot be found. However, just calling `--version` seems to be fine.
proc = subprocess.run(
[str(entry_point), "--version"],
@@ -50,7 +50,7 @@ def test_pylint_entry_point(self):
"",
proc.stderr.decode("utf-8").strip(),
)
- self.assertRegex(proc.stdout.decode("utf-8").strip(), "^pylint 2\.15\.9")
+ self.assertRegex(proc.stdout.decode("utf-8").strip(), r"^pylint 2\.15\.9")
if __name__ == "__main__":
diff --git a/examples/bzlmod/entry_points/tests/yamllint_test.py b/examples/bzlmod/entry_points/tests/yamllint_test.py
index 0a0235793b..528802229d 100644
--- a/examples/bzlmod/entry_points/tests/yamllint_test.py
+++ b/examples/bzlmod/entry_points/tests/yamllint_test.py
@@ -28,17 +28,17 @@ def __init__(self, *args, **kwargs):
def test_yamllint_entry_point(self):
rlocation_path = os.environ.get("ENTRY_POINT")
- assert (
- rlocation_path is not None
- ), "expected 'ENTRY_POINT' env variable to be set to rlocation of the tool"
+ assert rlocation_path is not None, (
+ "expected 'ENTRY_POINT' env variable to be set to rlocation of the tool"
+ )
entry_point = pathlib.Path(runfiles.Create().Rlocation(rlocation_path))
self.assertTrue(entry_point.exists(), f"'{entry_point}' does not exist")
# Let's run the entrypoint and check the tool version.
#
- # NOTE @aignas 2023-08-24: the Windows python launcher with Python 3.9 and bazel 6 is not happy if we start
- # passing extra files via `subprocess.run` and it starts to fail with an error that the file which is the
+ # NOTE @aignas 2023-08-24: the Windows python launcher with Python 3.9 and bazel 6 is not happy if we start # noqa: E501
+ # passing extra files via `subprocess.run` and it starts to fail with an error that the file which is the # noqa: E501
# entry_point cannot be found. However, just calling `--version` seems to be fine.
proc = subprocess.run(
[str(entry_point), "--version"],
diff --git a/examples/bzlmod/runfiles/runfiles_test.py b/examples/bzlmod/runfiles/runfiles_test.py
index 7b7e87726a..06c6ef0a89 100644
--- a/examples/bzlmod/runfiles/runfiles_test.py
+++ b/examples/bzlmod/runfiles/runfiles_test.py
@@ -37,9 +37,7 @@ def testRunfileWithRlocationpath(self):
self.assertEqual(f.read().strip(), "Hello, example_bzlmod!")
def testRunfileInOtherModuleWithOurRepoMapping(self):
- data_path = runfiles.Create().Rlocation(
- "our_other_module/other_module/pkg/data/data.txt"
- )
+ data_path = runfiles.Create().Rlocation("our_other_module/other_module/pkg/data/data.txt")
with open(data_path, "rt", encoding="utf-8", newline="\n") as f:
self.assertEqual(f.read().strip(), "Hello, other_module!")
diff --git a/examples/bzlmod/test.py b/examples/bzlmod/test.py
index 24be3ba3fe..3febed7585 100644
--- a/examples/bzlmod/test.py
+++ b/examples/bzlmod/test.py
@@ -13,7 +13,6 @@
# limitations under the License.
import os
-import pathlib
import re
import sys
import unittest
@@ -59,7 +58,7 @@ def test_coverage_sys_path(self):
f"sys.path has {len(sys.path)} items:\n {all_paths}",
)
- first_item, last_item = sys.path[0], sys.path[-1]
+ first_item, _ = sys.path[0], sys.path[-1]
self.assertFalse(
first_item.endswith("coverage"),
f"Expected the first item in sys.path '{first_item}' to not be related to coverage",
diff --git a/examples/bzlmod/tests/cross_version_test.py b/examples/bzlmod/tests/cross_version_test.py
index 437be2ed5a..bcb8f3f5c9 100644
--- a/examples/bzlmod/tests/cross_version_test.py
+++ b/examples/bzlmod/tests/cross_version_test.py
@@ -27,7 +27,11 @@
if subprocess_current != subprocess_expected:
print(
- f"expected subprocess version '{subprocess_expected}' is different than returned '{subprocess_current}'"
+ (
+ "expected subprocess version"
+ f" '{subprocess_expected}'"
+ f" is different than returned '{subprocess_current}'"
+ )
)
sys.exit(1)
diff --git a/examples/bzlmod/tests/my_lib_test.py b/examples/bzlmod/tests/my_lib_test.py
index b06374c983..019d29e31f 100644
--- a/examples/bzlmod/tests/my_lib_test.py
+++ b/examples/bzlmod/tests/my_lib_test.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
import sys
import libs.my_lib as my_lib
diff --git a/examples/bzlmod/tests/other_module/other_module_import_test.py b/examples/bzlmod/tests/other_module/other_module_import_test.py
index 6b92a853e0..b5b15c383d 100644
--- a/examples/bzlmod/tests/other_module/other_module_import_test.py
+++ b/examples/bzlmod/tests/other_module/other_module_import_test.py
@@ -1,8 +1,10 @@
"""Regression test for https://github.com/bazel-contrib/rules_python/issues/3563"""
+
import os
import subprocess
import sys
+
def main():
# The rlocation path for the bin_zipapp. It is in the "our_other_module" repository.
zipapp_path = os.environ.get("ZIPAPP_PATH")
@@ -18,5 +20,6 @@ def main():
print(f"bin_zippapp failed with return code {result.returncode}")
sys.exit(result.returncode)
+
if __name__ == "__main__":
main()
diff --git a/examples/bzlmod/whl_mods/pip_whl_mods_test.py b/examples/bzlmod/whl_mods/pip_whl_mods_test.py
index 3d7d161f1f..626664c74e 100644
--- a/examples/bzlmod/whl_mods/pip_whl_mods_test.py
+++ b/examples/bzlmod/whl_mods/pip_whl_mods_test.py
@@ -71,9 +71,7 @@ def test_copy_files(self):
self.assertEqual(content, "Hello world from copied file")
def test_copy_executables(self):
- executable_name = (
- "executable.exe" if platform.system() == "windows" else "executable.py"
- )
+ executable_name = "executable.exe" if platform.system() == "windows" else "executable.py"
r = runfiles.Create()
rpath = r.Rlocation(
@@ -111,9 +109,7 @@ def test_data_exclude_glob(self):
wheel_path = r.Rlocation("{}/WHEEL".format(dist_info_dir))
self.assertTrue(Path(metadata_path).exists(), f"Could not find {metadata_path}")
- self.assertFalse(
- Path(wheel_path).exists(), f"Expected to not find {wheel_path}"
- )
+ self.assertFalse(Path(wheel_path).exists(), f"Expected to not find {wheel_path}")
def test_extra(self):
# This test verifies that annotations work correctly for pip packages with extras
diff --git a/examples/multi_python_versions/tests/cross_version_test.py b/examples/multi_python_versions/tests/cross_version_test.py
index 437be2ed5a..bcb8f3f5c9 100644
--- a/examples/multi_python_versions/tests/cross_version_test.py
+++ b/examples/multi_python_versions/tests/cross_version_test.py
@@ -27,7 +27,11 @@
if subprocess_current != subprocess_expected:
print(
- f"expected subprocess version '{subprocess_expected}' is different than returned '{subprocess_current}'"
+ (
+ "expected subprocess version"
+ f" '{subprocess_expected}'"
+ f" is different than returned '{subprocess_current}'"
+ )
)
sys.exit(1)
diff --git a/examples/multi_python_versions/tests/my_lib_test.py b/examples/multi_python_versions/tests/my_lib_test.py
index 449cb8473c..b6c577c55a 100644
--- a/examples/multi_python_versions/tests/my_lib_test.py
+++ b/examples/multi_python_versions/tests/my_lib_test.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
import sys
import libs.my_lib as my_lib
diff --git a/examples/wheel/private/directory_writer.py b/examples/wheel/private/directory_writer.py
index 4b69f3a5d0..85e09441c4 100644
--- a/examples/wheel/private/directory_writer.py
+++ b/examples/wheel/private/directory_writer.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""The action executable of the `@rules_python//examples/wheel/private:wheel_utils.bzl%directory_writer` rule."""
+"""The action executable of the wheel_utils.bzl directory_writer rule."""
import argparse
import json
diff --git a/examples/wheel/test_publish.py b/examples/wheel/test_publish.py
index 4bc657c52e..9d6d63ee0e 100644
--- a/examples/wheel/test_publish.py
+++ b/examples/wheel/test_publish.py
@@ -96,8 +96,9 @@ def test_upload_and_query_simple_api(self):
# Then I should be able to get its contents
with urlopen(self.url + "/example-minimal-library/") as response:
got_content = response.read().decode("utf-8")
- want_content = """
-
+ want_content = (
+ "\n"
+ + """\
@@ -106,7 +107,11 @@ def test_upload_and_query_simple_api(self):
Links for example-minimal-library
- example_minimal_library-0.0.1-py3-none-any.whl
+ example_minimal_library-0.0.1-py3-none-any.whl
"""
self.assertEqual(
diff --git a/examples/wheel/wheel_test.py b/examples/wheel/wheel_test.py
index 9ed2b842e5..a781581b5b 100644
--- a/examples/wheel/wheel_test.py
+++ b/examples/wheel/wheel_test.py
@@ -39,9 +39,7 @@ def _get_path(self, filename):
raise AssertionError(f"Runfiles failed to resolve {runfiles_path}")
elif not os.path.exists(path):
# A non-None value doesn't mean the file actually exists, though
- raise AssertionError(
- f"Path {path} does not exist (from runfiles path {runfiles_path}"
- )
+ raise AssertionError(f"Path {path} does not exist (from runfiles path {runfiles_path}")
else:
return path
@@ -64,9 +62,7 @@ def assertAllEntriesHasReproducibleMetadata(self, zf):
(stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO | stat.S_IFREG) << 16,
msg=zinfo.filename,
)
- self.assertEqual(
- zinfo.compress_type, zipfile.ZIP_DEFLATED, msg=zinfo.filename
- )
+ self.assertEqual(zinfo.compress_type, zipfile.ZIP_DEFLATED, msg=zinfo.filename)
def test_py_library_wheel(self):
filename = self._get_path("example_minimal_library-0.0.1-py3-none-any.whl")
@@ -140,9 +136,7 @@ def test_customized_wheel(self):
record_contents = zf.read("example_customized-0.0.1.dist-info/RECORD")
wheel_contents = zf.read("example_customized-0.0.1.dist-info/WHEEL")
metadata_contents = zf.read("example_customized-0.0.1.dist-info/METADATA")
- entry_point_contents = zf.read(
- "example_customized-0.0.1.dist-info/entry_points.txt"
- )
+ entry_point_contents = zf.read("example_customized-0.0.1.dist-info/entry_points.txt")
print(record_contents)
self.assertEqual(
@@ -232,9 +226,7 @@ def test_filename_escaping(self):
"file_name_escaping-0.0.1rc1+ubuntu.r7.dist-info/RECORD",
],
)
- metadata_contents = zf.read(
- "file_name_escaping-0.0.1rc1+ubuntu.r7.dist-info/METADATA"
- )
+ metadata_contents = zf.read("file_name_escaping-0.0.1rc1+ubuntu.r7.dist-info/METADATA")
self.assertEqual(
metadata_contents,
b"""\
@@ -270,9 +262,9 @@ def test_custom_package_root_wheel(self):
],
)
- record_contents = zf.read(
- "examples_custom_package_root-0.0.1.dist-info/RECORD"
- ).decode("utf-8")
+ record_contents = zf.read("examples_custom_package_root-0.0.1.dist-info/RECORD").decode(
+ "utf-8"
+ )
# Ensure RECORD files do not have leading forward slashes
for line in record_contents.splitlines():
@@ -390,9 +382,7 @@ def test_python_abi3_binary_wheel(self):
)
with zipfile.ZipFile(filename) as zf:
self.assertAllEntriesHasReproducibleMetadata(zf)
- metadata_contents = zf.read(
- "example_python_abi3_binary_wheel-0.0.1.dist-info/METADATA"
- )
+ metadata_contents = zf.read("example_python_abi3_binary_wheel-0.0.1.dist-info/METADATA")
# The entries are guaranteed to be sorted.
self.assertEqual(
metadata_contents,
@@ -405,9 +395,7 @@ def test_python_abi3_binary_wheel(self):
UNKNOWN
""",
)
- wheel_contents = zf.read(
- "example_python_abi3_binary_wheel-0.0.1.dist-info/WHEEL"
- )
+ wheel_contents = zf.read("example_python_abi3_binary_wheel-0.0.1.dist-info/WHEEL")
self.assertEqual(
wheel_contents.decode(),
f"""\
@@ -489,8 +477,8 @@ def test_requires_file_and_extra_requires_files(self):
"Requires-Dist: tomli>=2.0.0",
"Requires-Dist: starlark",
"Requires-Dist: pyyaml!=6.0.1,>=6.0.0; extra == 'example'",
- 'Requires-Dist: toml; ((python_version == "3.11" or python_version == "3.12") and python_version != "3.8") and extra == \'example\'',
- 'Requires-Dist: wheel; (python_version == "3.11" or python_version == "3.12") and extra == \'example\'',
+ 'Requires-Dist: toml; ((python_version == "3.11" or python_version == "3.12") and python_version != "3.8") and extra == \'example\'', # noqa: E501
+ 'Requires-Dist: wheel; (python_version == "3.11" or python_version == "3.12") and extra == \'example\'', # noqa: E501
],
requires,
)
@@ -523,7 +511,6 @@ def test_minimal_data_files(self):
with zipfile.ZipFile(filename) as zf:
self.assertAllEntriesHasReproducibleMetadata(zf)
- metadata_file = None
self.assertEqual(
zf.namelist(),
[
@@ -559,8 +546,8 @@ def test_extra_requires(self):
"Requires-Dist: starlark",
'Requires-Dist: pytest; python_version != "3.8"',
"Requires-Dist: pyyaml!=6.0.1,>=6.0.0; extra == 'example'",
- 'Requires-Dist: toml; ((python_version == "3.11" or python_version == "3.12") and python_version != "3.8") and extra == \'example\'',
- 'Requires-Dist: wheel; (python_version == "3.11" or python_version == "3.12") and extra == \'example\'',
+ 'Requires-Dist: toml; ((python_version == "3.11" or python_version == "3.12") and python_version != "3.8") and extra == \'example\'', # noqa: E501
+ 'Requires-Dist: wheel; (python_version == "3.11" or python_version == "3.12") and extra == \'example\'', # noqa: E501
],
requires,
)
@@ -591,7 +578,9 @@ def test_requires_dist_depends_on_extras(self):
)
def test_requires_dist_depends_on_extras_file(self):
- filename = self._get_path("requires_dist_depends_on_extras_using_file-0.0.1-py3-none-any.whl")
+ filename = self._get_path(
+ "requires_dist_depends_on_extras_using_file-0.0.1-py3-none-any.whl"
+ )
with zipfile.ZipFile(filename) as zf:
self.assertAllEntriesHasReproducibleMetadata(zf)
@@ -616,9 +605,7 @@ def test_requires_dist_depends_on_extras_file(self):
)
def test_data_files_installed_in_folder(self):
- filename = self._get_path(
- "data_files_installed_in_folder-0.0.1-py3-none-any.whl"
- )
+ filename = self._get_path("data_files_installed_in_folder-0.0.1-py3-none-any.whl")
with zipfile.ZipFile(filename) as zf:
self.assertAllEntriesHasReproducibleMetadata(zf)
diff --git a/gazelle/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py b/gazelle/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py
index 6ce4c2db37..7be9decbbe 100644
--- a/gazelle/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py
+++ b/gazelle/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py
@@ -39,9 +39,7 @@ def testRunfileWithRlocationpath(self):
self.assertEqual(f.read().strip(), "Hello, example_bzlmod!")
def testRunfileInOtherModuleWithOurRepoMapping(self):
- data_path = runfiles.Create().Rlocation(
- "our_other_module/other_module/pkg/data/data.txt"
- )
+ data_path = runfiles.Create().Rlocation("our_other_module/other_module/pkg/data/data.txt")
with open(data_path, "rt", encoding="utf-8", newline="\n") as f:
self.assertEqual(f.read().strip(), "Hello, other_module!")
diff --git a/gazelle/manifest/copy_to_source.py b/gazelle/manifest/copy_to_source.py
index b897b1fcf3..e4d85111f1 100644
--- a/gazelle/manifest/copy_to_source.py
+++ b/gazelle/manifest/copy_to_source.py
@@ -6,7 +6,6 @@
import os
import shutil
-import stat
import sys
from pathlib import Path
@@ -14,9 +13,10 @@
def copy_to_source(generated_relative_path: Path, target_relative_path: Path) -> None:
"""Copy the generated file to the target file path.
- Expands the relative paths by looking at Bazel env vars to figure out which absolute paths to use.
+ Expands the relative paths by looking at Bazel env vars
+ to figure out which absolute paths to use.
"""
- # This script normally gets executed from the runfiles dir, so find the absolute path to the generated file based on that.
+ # This script normally gets executed from the runfiles dir, so find the absolute path to the generated file based on that. # noqa: E501
generated_absolute_path = Path.cwd() / generated_relative_path
# Similarly, the target is relative to the source directory.
diff --git a/gazelle/modules_mapping/generator.py b/gazelle/modules_mapping/generator.py
index 611910c669..a7316b8e5b 100644
--- a/gazelle/modules_mapping/generator.py
+++ b/gazelle/modules_mapping/generator.py
@@ -39,8 +39,7 @@ def dig_wheel(self, whl):
# Skip stubs and types wheels.
wheel_name = get_wheel_name(whl)
if self.include_stub_packages and (
- wheel_name.endswith(("_stubs", "_types"))
- or wheel_name.startswith(("types_", "stubs_"))
+ wheel_name.endswith(("_stubs", "_types")) or wheel_name.startswith(("types_", "stubs_"))
):
self.mapping[wheel_name.lower()] = wheel_name.lower()
return
@@ -129,9 +128,7 @@ def get_wheel_name(path):
pp = pathlib.PurePath(path)
if pp.suffix != ".whl":
raise RuntimeError(
- "{} is not a valid wheel file name: the wheel doesn't follow ".format(
- pp.name
- )
+ "{} is not a valid wheel file name: the wheel doesn't follow ".format(pp.name)
+ "https://www.python.org/dev/peps/pep-0427/#file-name-convention"
)
return pp.name[: pp.name.find("-")]
diff --git a/gazelle/modules_mapping/merger.py b/gazelle/modules_mapping/merger.py
index deb0cb2666..a37a9bbaaf 100644
--- a/gazelle/modules_mapping/merger.py
+++ b/gazelle/modules_mapping/merger.py
@@ -24,9 +24,7 @@ def merge_modules_mappings(input_files: list[Path], output_file: Path) -> None:
if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description="Merge multiple modules_mapping.json files"
- )
+ parser = argparse.ArgumentParser(description="Merge multiple modules_mapping.json files")
parser.add_argument(
"--output",
required=True,
diff --git a/gazelle/modules_mapping/test_merger.py b/gazelle/modules_mapping/test_merger.py
index 6260fdd6ff..87c35f4404 100644
--- a/gazelle/modules_mapping/test_merger.py
+++ b/gazelle/modules_mapping/test_merger.py
@@ -1,7 +1,7 @@
-import pathlib
-import unittest
import json
+import pathlib
import tempfile
+import unittest
from merger import merge_modules_mappings
diff --git a/python/bin/repl_stub.py b/python/bin/repl_stub.py
index f5b7c0aa4f..5a46ad7bb1 100644
--- a/python/bin/repl_stub.py
+++ b/python/bin/repl_stub.py
@@ -62,15 +62,9 @@ def complete(self, text, state):
elif "GNU readline" in readline.__doc__: # type: ignore
readline.parse_and_bind("tab: complete")
else:
- print(
- "Could not enable tab completion: "
- "unable to determine readline backend"
- )
+ print("Could not enable tab completion: unable to determine readline backend")
except ImportError:
- print(
- "Could not enable tab completion: "
- "readline module not available on this platform"
- )
+ print("Could not enable tab completion: readline module not available on this platform")
# We set the banner to an empty string because the repl_template.py file already prints the banner.
code.interact(local=console_locals, banner="", exitmsg=exitmsg)
diff --git a/python/private/get_local_runtime_info.py b/python/private/get_local_runtime_info.py
index a59e17a012..a59508e8a1 100644
--- a/python/private/get_local_runtime_info.py
+++ b/python/private/get_local_runtime_info.py
@@ -48,9 +48,7 @@ def _search_directories(get_config, base_executable) -> list[str]:
# On MacOS, the LDLIBRARY may be a relative path under /Library/Frameworks,
# such as "Python.framework/Versions/3.12/Python", not a file under the
# LIBDIR/LIBPL directory, so include PYTHONFRAMEWORKPREFIX.
- lib_dirs = [
- get_config(x) for x in ("PYTHONFRAMEWORKPREFIX", "LIBPL", "LIBDIR")
- ]
+ lib_dirs = [get_config(x) for x in ("PYTHONFRAMEWORKPREFIX", "LIBPL", "LIBDIR")]
# On Debian, with multiarch enabled, prior to Python 3.10, `LIBDIR` didn't
# tell the location of the libs, just the base directory. The `MULTIARCH`
@@ -67,8 +65,8 @@ def _search_directories(get_config, base_executable) -> list[str]:
if not _IS_DARWIN:
for exec_dir in (
- os.path.dirname(base_executable) if base_executable else None,
- get_config("BINDIR"),
+ os.path.dirname(base_executable) if base_executable else None,
+ get_config("BINDIR"),
):
if not exec_dir:
continue
@@ -122,7 +120,8 @@ def _search_library_names(get_config, version, abi_flags) -> list[str]:
#
# A typical LIBRARY is 'libpythonX.Y.a' on Linux.
lib_names = [
- get_config(x) for x in (
+ get_config(x)
+ for x in (
"LDLIBRARY",
"INSTSONAME",
"PY3LIBRARY",
@@ -167,8 +166,7 @@ def _get_python_library_info(base_executable) -> dict[str, Any]:
abi_flags = _get_abi_flags(config_vars.get)
search_directories = _search_directories(config_vars.get, base_executable)
- search_libnames = _search_library_names(config_vars.get, version,
- abi_flags)
+ search_libnames = _search_library_names(config_vars.get, version, abi_flags)
# Used to test whether the library is an abi3 library or a full api library.
abi3_libraries = _default_library_names(sys.version_info.major, abi_flags)
@@ -221,12 +219,8 @@ def _get_python_library_info(base_executable) -> dict[str, Any]:
# Additional DLLs are needed on Windows to link properly.
dlls = []
if _IS_WINDOWS:
- dlls.extend(
- glob.glob(os.path.join(os.path.dirname(base_executable), "*.dll")))
- dlls = [
- x for x in dlls
- if x not in dynamic_libraries and x not in abi_dynamic_libraries
- ]
+ dlls.extend(glob.glob(os.path.join(os.path.dirname(base_executable), "*.dll")))
+ dlls = [x for x in dlls if x not in dynamic_libraries and x not in abi_dynamic_libraries]
def _unique_basenames(inputs: dict[str, None]) -> list[str]:
"""Returns a list of paths, keeping only the first path for each basename."""
diff --git a/python/private/py_console_script_gen.py b/python/private/py_console_script_gen.py
index a1df2c2a06..2b712fbf0b 100644
--- a/python/private/py_console_script_gen.py
+++ b/python/private/py_console_script_gen.py
@@ -15,21 +15,25 @@
"""
console_script generator from entry_points.txt contents.
-For Python versions earlier than 3.11 and for earlier bazel versions than 7.0 we need to workaround the issue of
-sys.path[0] breaking out of the runfiles tree see the following for more context:
+For Python versions earlier than 3.11 and for earlier bazel versions than 7.0
+we need to workaround the issue of sys.path[0] breaking out of the runfiles tree
+see the following for more context:
* https://github.com/bazel-contrib/rules_python/issues/382
* https://github.com/bazelbuild/bazel/pull/15701
-In affected bazel and Python versions we see in programs such as `flake8`, `pylint` or `pytest` errors because the
-first `sys.path` element is outside the `runfiles` directory and if the `name` of the `py_binary` is the same as
-the program name, then the script (e.g. `flake8`) will start failing whilst trying to import its own internals from
-the bazel entrypoint script.
+In affected bazel and Python versions we see in programs such as `flake8`,
+`pylint` or `pytest` errors because the first `sys.path` element is outside the
+`runfiles` directory and if the `name` of the `py_binary` is the same as the
+program name, then the script (e.g. `flake8`) will start failing whilst trying
+to import its own internals from the bazel entrypoint script.
-The mitigation strategy is to remove the first entry in the `sys.path` if it does not have `.runfiles` and it seems
-to fix the behaviour of console_scripts under `bazel run`.
+The mitigation strategy is to remove the first entry in the `sys.path` if it
+does not have `.runfiles` and it seems to fix the behaviour of console_scripts
+under `bazel run`.
-This would not happen if we created a console_script binary in the root of an external repository, e.g.
-`@pypi_pylint//` because the path for the external repository is already in the runfiles directory.
+This would not happen if we created a console_script binary in the root of an
+external repository, e.g. `@pypi_pylint//` because the path for the external
+repository is already in the runfiles directory.
"""
from __future__ import annotations
@@ -37,9 +41,6 @@
import argparse
import configparser
import pathlib
-import re
-import sys
-import textwrap
_ENTRY_POINTS_TXT = "entry_points.txt"
@@ -75,7 +76,7 @@ class EntryPointsParser(configparser.ConfigParser):
optionxform = staticmethod(str)
-def _guess_entry_point(guess: str, console_scripts: dict[string, string]) -> str | None:
+def _guess_entry_point(guess: str, console_scripts: dict[str, str]) -> str | None:
for key, candidate in console_scripts.items():
if guess == key:
return candidate
@@ -95,8 +96,10 @@ def run(
entry_points: The entry_points.txt file to be parsed.
out: The output file.
console_script: The console_script entry in the entry_points.txt file.
- console_script_guess: The string used for guessing the console_script if it is not provided.
- shebang: The shebang to use for the entry point python file. Defaults to empty string (no shebang).
+ console_script_guess: The string used for guessing the console_script
+ if not provided.
+ shebang: The shebang for the entry point python file. Defaults to empty
+ string (no shebang).
"""
config = EntryPointsParser()
config.read(entry_points)
@@ -105,7 +108,7 @@ def run(
console_scripts = dict(config["console_scripts"])
except KeyError:
raise RuntimeError(
- f"The package does not provide any console_scripts in its {_ENTRY_POINTS_TXT}"
+ f"The package does not provide any console_scripts in its {_ENTRY_POINTS_TXT}" # noqa: E501
)
if console_script:
@@ -114,7 +117,7 @@ def run(
except KeyError:
available = ", ".join(sorted(console_scripts.keys()))
raise RuntimeError(
- f"The console_script '{console_script}' was not found, only the following are available: {available}"
+ f"The console_script '{console_script}' was not found, only the following are available: {available}" # noqa: E501
) from None
else:
# Get rid of the extension and the common prefix
diff --git a/python/private/py_wheel_dist.py b/python/private/py_wheel_dist.py
index 3af3345ef9..0569e823bc 100644
--- a/python/private/py_wheel_dist.py
+++ b/python/private/py_wheel_dist.py
@@ -9,9 +9,7 @@ def parse_args() -> argparse.Namespace:
"""Parse command line arguments."""
parser = argparse.ArgumentParser()
- parser.add_argument(
- "--wheel", type=Path, required=True, help="The path to a wheel."
- )
+ parser.add_argument("--wheel", type=Path, required=True, help="The path to a wheel.")
parser.add_argument(
"--name_file",
type=Path,
diff --git a/python/private/pypi/dependency_resolver/dependency_resolver.py b/python/private/pypi/dependency_resolver/dependency_resolver.py
index f3a339f929..7dd277af3e 100644
--- a/python/private/pypi/dependency_resolver/dependency_resolver.py
+++ b/python/private/pypi/dependency_resolver/dependency_resolver.py
@@ -170,9 +170,7 @@ def main(
os.environ["CUSTOM_COMPILE_COMMAND"] = update_command
os.environ["PIP_CONFIG_FILE"] = os.getenv("PIP_CONFIG_FILE") or os.devnull
- argv.append(
- f"--output-file={requirements_file_relative if UPDATE else requirements_out}"
- )
+ argv.append(f"--output-file={requirements_file_relative if UPDATE else requirements_out}")
argv.extend(
(src_relative if Path(src_relative).exists() else resolved_src)
for src_relative, resolved_src in zip(srcs_relative, resolved_srcs)
@@ -188,7 +186,7 @@ def main(
if UPDATE:
print("Updating " + requirements_file_relative)
- # Make sure the output file for pip_compile exists. It won't if we are on Windows and --enable_runfiles is not set.
+ # Make sure the output file for pip_compile exists. It won't if we are on Windows and --enable_runfiles is not set. # noqa: E501
if not os.path.exists(requirements_file_relative):
os.makedirs(os.path.dirname(requirements_file_relative), exist_ok=True)
shutil.copy(resolved_requirements_file, requirements_file_relative)
@@ -197,13 +195,11 @@ def main(
workspace = os.environ["BUILD_WORKSPACE_DIRECTORY"]
requirements_file_tree = os.path.join(workspace, requirements_file_relative)
absolute_output_file = Path(requirements_file_relative).absolute()
- # In most cases, requirements_file will be a symlink to the real file in the source tree.
- # If symlinks are not enabled (e.g. on Windows), then requirements_file will be a copy,
+ # In most cases, requirements_file will be a symlink to the real file in the source tree. # noqa: E501
+ # If symlinks are not enabled (e.g. on Windows), then requirements_file will be a copy, # noqa: E501
# and we should copy the updated requirements back to the source tree.
if not absolute_output_file.samefile(requirements_file_tree):
- atexit.register(
- lambda: shutil.copy(absolute_output_file, requirements_file_tree)
- )
+ atexit.register(lambda: shutil.copy(absolute_output_file, requirements_file_tree))
_run_pip_compile(verbose_command=f"{update_command} -- --verbose")
requirements_file_relative_path = Path(requirements_file_relative)
content = requirements_file_relative_path.read_text()
diff --git a/python/private/pypi/repack_whl.py b/python/private/pypi/repack_whl.py
index 92d052a81f..c8846ecd09 100644
--- a/python/private/pypi/repack_whl.py
+++ b/python/private/pypi/repack_whl.py
@@ -63,9 +63,7 @@ def _unidiff_output(expected, actual, record):
expected = expected.splitlines(1)
actual = actual.splitlines(1)
- diff = difflib.unified_diff(
- expected, actual, fromfile=f"a/{record}", tofile=f"b/{record}"
- )
+ diff = difflib.unified_diff(expected, actual, fromfile=f"a/{record}", tofile=f"b/{record}")
return "".join(diff)
@@ -174,7 +172,7 @@ def main(sys_argv):
rel_path = p.relative_to(patched_wheel_dir)
out.add_file(str(rel_path), p)
- logging.debug(f"Writing RECORD file")
+ logging.debug("Writing RECORD file")
got_record = out.add_recordfile()
if got_record == record_contents:
@@ -187,14 +185,10 @@ def main(sys_argv):
out.distinfo_path("RECORD"),
)
args.record_patch.write_text(record_diff)
- logging.warning(
- f"Please apply patch to the RECORD file ({args.record_patch}):\n{record_diff}"
- )
+ logging.warning(f"Please apply patch to the RECORD file ({args.record_patch}):\n{record_diff}")
if __name__ == "__main__":
- logging.basicConfig(
- format="%(module)s: %(levelname)s: %(message)s", level=logging.DEBUG
- )
+ logging.basicConfig(format="%(module)s: %(levelname)s: %(message)s", level=logging.DEBUG)
sys.exit(main(sys.argv[1:]))
diff --git a/python/private/pypi/requirements_parser/resolve_target_platforms.py b/python/private/pypi/requirements_parser/resolve_target_platforms.py
index accacf5bfa..96607ee240 100755
--- a/python/private/pypi/requirements_parser/resolve_target_platforms.py
+++ b/python/private/pypi/requirements_parser/resolve_target_platforms.py
@@ -21,7 +21,6 @@
import pathlib
from packaging.requirements import Requirement
-
from python.private.pypi.whl_installer.platform import Platform
INPUT_HELP = """\
diff --git a/python/private/pypi/whl_installer/arguments.py b/python/private/pypi/whl_installer/arguments.py
index 9122654a11..ee7da04ca7 100644
--- a/python/private/pypi/whl_installer/arguments.py
+++ b/python/private/pypi/whl_installer/arguments.py
@@ -14,7 +14,6 @@
import argparse
import json
-import pathlib
from typing import Any, Dict, Set
@@ -52,8 +51,8 @@ def parser(**kwargs: Any) -> argparse.ArgumentParser:
parser.add_argument(
"--download_only",
action="store_true",
- help="Use 'pip download' instead of 'pip wheel'. Disables building wheels from source, but allows use of "
- "--platform, --python-version, --implementation, and --abi in --extra_pip_args.",
+ help="Use 'pip download' instead of 'pip wheel'. Disables building wheels from source, but allows use of " # noqa: E501
+ "--platform, --python-version, --implementation, and --abi in --extra_pip_args.", # noqa: E501
)
return parser
diff --git a/python/private/pypi/whl_installer/wheel_installer.py b/python/private/pypi/whl_installer/wheel_installer.py
index 81dd3995db..f0e368700d 100644
--- a/python/private/pypi/whl_installer/wheel_installer.py
+++ b/python/private/pypi/whl_installer/wheel_installer.py
@@ -18,12 +18,10 @@
import glob
import json
import os
-import re
import subprocess
import sys
from pathlib import Path
from tempfile import NamedTemporaryFile
-from typing import Dict, List, Optional, Set, Tuple
from python.private.pypi.whl_installer import arguments
@@ -34,7 +32,7 @@ def _configure_reproducible_wheels() -> None:
patching in some configuration with environment variables.
"""
- # wheel, by default, enables debug symbols in GCC. This incidentally captures the build path in the .so file
+ # wheel, by default, enables debug symbols in GCC. This incidentally captures the build path in the .so file # noqa: E501
# We can override this behavior by disabling debug symbols entirely.
# https://github.com/pypa/pip/issues/6505
if "CFLAGS" in os.environ:
diff --git a/python/private/site_init_template.py b/python/private/site_init_template.py
index e4d501bfd5..868fda13ba 100644
--- a/python/private/site_init_template.py
+++ b/python/private/site_init_template.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""site initialization logic for Bazel-built py_binary targets."""
+
import os
import os.path
import sys
@@ -58,16 +59,14 @@ def _find_runfiles_root():
runfiles_dir = os.environ.get("RUNFILES_DIR", None)
if not runfiles_dir:
runfiles_manifest_file = os.environ.get("RUNFILES_MANIFEST_FILE", "")
- if runfiles_manifest_file.endswith(
- ".runfiles_manifest"
- ) or runfiles_manifest_file.endswith(".runfiles/MANIFEST"):
+ if runfiles_manifest_file.endswith(".runfiles_manifest") or runfiles_manifest_file.endswith(
+ ".runfiles/MANIFEST"
+ ):
runfiles_dir = runfiles_manifest_file[:-9]
# Be defensive: the runfiles dir should contain ourselves. If it doesn't,
# then it must not be our runfiles directory.
- if runfiles_dir and os.path.exists(
- os.path.join(runfiles_dir, _SELF_RUNFILES_RELATIVE_PATH)
- ):
+ if runfiles_dir and os.path.exists(os.path.join(runfiles_dir, _SELF_RUNFILES_RELATIVE_PATH)):
return runfiles_dir
num_dirs_to_runfiles_root = _SELF_RUNFILES_RELATIVE_PATH.count("/") + 1
@@ -159,9 +158,7 @@ def _maybe_add_path(path, reason):
_maybe_add_path(abs_path, "imports-strs")
if _IMPORT_ALL:
- repo_dirs = sorted(
- os.path.join(_RUNFILES_ROOT, d) for d in os.listdir(_RUNFILES_ROOT)
- )
+ repo_dirs = sorted(os.path.join(_RUNFILES_ROOT, d) for d in os.listdir(_RUNFILES_ROOT))
for d in repo_dirs:
if os.path.isdir(d):
_maybe_add_path(d, "import-all")
@@ -178,9 +175,7 @@ def _maybe_add_path(path, reason):
if cov_tool:
_print_verbose_coverage(f"Using toolchain coverage_tool {cov_tool}")
elif cov_tool := os.environ.get("PYTHON_COVERAGE"):
- _print_verbose_coverage(
- f"Using env var coverage: PYTHON_COVERAGE={cov_tool}"
- )
+ _print_verbose_coverage(f"Using env var coverage: PYTHON_COVERAGE={cov_tool}")
if cov_tool:
if os.path.isabs(cov_tool):
diff --git a/python/private/stage2_bootstrap_template.py b/python/private/stage2_bootstrap_template.py
index 2a7be67c13..59e13a8bd0 100644
--- a/python/private/stage2_bootstrap_template.py
+++ b/python/private/stage2_bootstrap_template.py
@@ -184,9 +184,9 @@ def find_runfiles_root(main_rel_path):
runfiles_dir = os.environ.get("RUNFILES_DIR", None)
if not runfiles_dir:
runfiles_manifest_file = os.environ.get("RUNFILES_MANIFEST_FILE", "")
- if runfiles_manifest_file.endswith(
- ".runfiles_manifest"
- ) or runfiles_manifest_file.endswith(".runfiles/MANIFEST"):
+ if runfiles_manifest_file.endswith(".runfiles_manifest") or runfiles_manifest_file.endswith(
+ ".runfiles/MANIFEST"
+ ):
runfiles_dir = runfiles_manifest_file[:-9]
# Be defensive: the runfiles dir should contain our main entry point. If
# it doesn't, then it must not be our runfiles directory.
@@ -356,8 +356,6 @@ def _maybe_collect_coverage(enable):
print_verbose_coverage("Instrumented Files:\n" + "\n".join(instrumented_files))
print_verbose_coverage("Sources:\n" + "\n".join(unique_dirs))
- import uuid
-
import coverage
coverage_dir = os.environ["COVERAGE_DIR"]
@@ -367,9 +365,7 @@ def _maybe_collect_coverage(enable):
# using an rc file.
rcfile_name = os.path.join(coverage_dir, ".coveragerc_{}".format(unique_id))
disable_warnings = (
- "disable_warnings = module-not-imported, no-data-collected"
- if COVERAGE_INSTRUMENTED
- else ""
+ "disable_warnings = module-not-imported, no-data-collected" if COVERAGE_INSTRUMENTED else ""
)
print_verbose_coverage("coveragerc file:", rcfile_name)
with open(rcfile_name, "w") as rcfile:
@@ -501,9 +497,7 @@ def main():
# prevent interference from the system.
# 3. If running without a venv configured. This occurs with the
# system_python bootstrap.
- print_verbose(
- f"sys.path missing expected site-packages: adding {site_packages}"
- )
+ print_verbose(f"sys.path missing expected site-packages: adding {site_packages}")
_add_site_packages(site_packages)
print_verbose("runfiles root:", runfiles_root)
@@ -532,17 +526,13 @@ def main():
# To replicate this behavior, we add main's directory within the runfiles
# when safe path isn't enabled.
if not getattr(sys.flags, "safe_path", False):
- prepend_path_entries = [
- os.path.join(runfiles_root, os.path.dirname(main_rel_path))
- ]
+ prepend_path_entries = [os.path.join(runfiles_root, os.path.dirname(main_rel_path))]
else:
prepend_path_entries = []
main_filename = os.path.join(runfiles_root, main_rel_path)
main_filename = get_windows_path_with_unc_prefix(main_filename)
- assert os.path.exists(main_filename), (
- "Cannot exec() %r: file not found." % main_filename
- )
+ assert os.path.exists(main_filename), "Cannot exec() %r: file not found." % main_filename
assert os.access(main_filename, os.R_OK), (
"Cannot exec() %r: file not readable." % main_filename
)
diff --git a/python/private/zipapp/zip_main_template.py b/python/private/zipapp/zip_main_template.py
index 06ed19f1a5..ce902e4c8e 100644
--- a/python/private/zipapp/zip_main_template.py
+++ b/python/private/zipapp/zip_main_template.py
@@ -143,9 +143,7 @@ def find_binary(runfiles_root, bin_name):
return None
if bin_name.startswith("//"):
# Case 1: Path is a label. Not supported yet.
- raise AssertionError(
- "Bazel does not support execution of Python interpreters via labels"
- )
+ raise AssertionError("Bazel does not support execution of Python interpreters via labels")
elif os.path.isabs(bin_name):
# Case 2: Absolute path.
return bin_name
@@ -298,9 +296,7 @@ def finish_venv_setup(runfiles_root):
# The venv bin/python3 interpreter should always be under runfiles, but
# double check. We don't want to accidentally create symlinks elsewhere
if not python_program.startswith(runfiles_root):
- raise AssertionError(
- "Program's venv binary not under runfiles: {python_program}"
- )
+ raise AssertionError(f"Program's venv binary not under runfiles: {python_program}")
symlink_to = find_binary(runfiles_root, _PYTHON_BINARY_ACTUAL)
os.makedirs(dirname(python_program), exist_ok=True)
if os.path.lexists(python_program):
@@ -357,21 +353,15 @@ def main():
main_filename = join(runfiles_root, main_rel_path)
main_filename = get_windows_path_with_unc_prefix(main_filename)
- assert os.path.exists(main_filename), (
- "Cannot exec() %r: file not found." % main_filename
- )
- assert os.access(main_filename, os.R_OK), (
- "Cannot exec() %r: file not readable." % main_filename
- )
+ assert os.path.exists(main_filename), "Cannot exec() %r: file not found." % main_filename
+ assert os.access(main_filename, os.R_OK), "Cannot exec() %r: file not readable." % main_filename
if _PYTHON_BINARY_VENV:
python_program = finish_venv_setup(runfiles_root)
else:
python_program = find_binary(runfiles_root, _PYTHON_BINARY_ACTUAL)
if python_program is None:
- raise AssertionError(
- "Could not find python binary: " + _PYTHON_BINARY_ACTUAL
- )
+ raise AssertionError("Could not find python binary: " + _PYTHON_BINARY_ACTUAL)
# Some older Python versions on macOS (namely Python 3.7) may unintentionally
# leave this environment variable set after starting the interpreter, which
diff --git a/python/runfiles/runfiles.py b/python/runfiles/runfiles.py
index 03ebdf6331..f9aa6d170a 100644
--- a/python/runfiles/runfiles.py
+++ b/python/runfiles/runfiles.py
@@ -22,14 +22,14 @@
dependency graphs under bzlmod.
:::
"""
-import collections.abc
+
import inspect
import os
import pathlib
import posixpath
import sys
from collections import defaultdict
-from typing import Dict, Generator, Iterable, List, Optional, Tuple, Union
+from typing import Dict, Generator, Optional, Tuple, Union
if sys.version_info >= (3, 11):
from typing import Self
@@ -67,9 +67,7 @@ def __init__(
prefix_source,
target_app,
), target_canonical in prefixed_mappings.items():
- self._grouped_prefixed_mappings[target_app].append(
- (prefix_source, target_canonical)
- )
+ self._grouped_prefixed_mappings[target_app].append((prefix_source, target_canonical))
@staticmethod
def create_from_file(repo_mapping_path: Optional[str]) -> "_RepositoryMapping":
@@ -132,9 +130,7 @@ def lookup(self, source_repo: Optional[str], target_apparent: str) -> Optional[s
# Try prefixed mapping if no exact match found
if target_apparent in self._grouped_prefixed_mappings:
- for prefix_source, target_canonical in self._grouped_prefixed_mappings[
- target_apparent
- ]:
+ for prefix_source, target_canonical in self._grouped_prefixed_mappings[target_apparent]:
if source_repo.startswith(prefix_source):
return target_canonical
@@ -147,9 +143,7 @@ def is_empty(self) -> bool:
Returns:
True if there are no mappings, False otherwise
"""
- return (
- len(self._exact_mappings) == 0 and len(self._grouped_prefixed_mappings) == 0
- )
+ return len(self._exact_mappings) == 0 and len(self._grouped_prefixed_mappings) == 0
class Path(pathlib.Path):
@@ -344,9 +338,7 @@ def read_bytes(self) -> bytes:
return self._as_path().read_bytes()
# override
- def read_text(
- self, encoding: Optional[str] = None, errors: Optional[str] = None
- ) -> str:
+ def read_text(self, encoding: Optional[str] = None, errors: Optional[str] = None) -> str:
return self._as_path().read_text(encoding=encoding, errors=errors)
# override
@@ -427,13 +419,11 @@ def _LoadRunfiles(path: str) -> Dict[str, str]:
for line in f:
line = line.rstrip("\n")
if line.startswith(" "):
- # In lines that start with a space, spaces, newlines, and backslashes are escaped as \s, \n, and \b in
+ # In lines that start with a space, spaces, newlines, and backslashes are escaped as \s, \n, and \b in # noqa: E501
# link and newlines and backslashes are escaped in target.
escaped_link, escaped_target = line[1:].split(" ", maxsplit=1)
link = (
- escaped_link.replace(r"\s", " ")
- .replace(r"\n", "\n")
- .replace(r"\b", "\\")
+ escaped_link.replace(r"\s", " ").replace(r"\n", "\n").replace(r"\b", "\\")
)
target = escaped_target.replace(r"\n", "\n").replace(r"\b", "\\")
else:
@@ -579,9 +569,9 @@ def Rlocation(self, path: str, source_repo: Optional[str] = None) -> Optional[st
# which also should not be mapped.
return self._strategy.RlocationChecked(path)
- assert (
- source_repo is not None
- ), "BUG: if the `source_repo` is None, we should never go past the `if` statement above"
+ assert source_repo is not None, (
+ "BUG: if the `source_repo` is None, we should never go past the `if` statement above"
+ )
# Look up the target repository using the repository mapping
if target_canonical is not None:
@@ -664,9 +654,7 @@ def CurrentRepository(self, frame: int = 1) -> str:
)
)
- caller_runfiles_directory = caller_runfiles_path[
- : caller_runfiles_path.find(os.path.sep)
- ]
+ caller_runfiles_directory = caller_runfiles_path[: caller_runfiles_path.find(os.path.sep)]
# With Bzlmod, the runfiles directory of the main repository is always
# named "_main". Without Bzlmod, the value returned by this function is
# never used, so we just assume Bzlmod is enabled.
diff --git a/python/uv/private/lock_copier.py b/python/uv/private/lock_copier.py
index bcc64c1661..8eec2d96c4 100644
--- a/python/uv/private/lock_copier.py
+++ b/python/uv/private/lock_copier.py
@@ -52,7 +52,8 @@ def main():
if "BUILD_WORKSPACE_DIRECTORY" not in environ:
raise RuntimeError(
- "This must be either run as `bazel test` via a `native_test` or similar or via `bazel run`"
+ "This must be either run as `bazel test` via a `native_test` or"
+ " similar or via `bazel run`"
)
print(f"cp /{src} /{dst}")
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 0000000000..5ea96ece9c
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,30 @@
+line-length = 100
+target-version = "py313"
+exclude = [
+ "gazelle/python/testdata",
+ "tests/pypi/whl_library/testdata",
+]
+
+[lint]
+select = ["I", "F", "E", "W"]
+
+[lint.isort]
+known-first-party = [
+ "rules_python",
+ "sphinxdocs",
+ "gazelle",
+ "tests",
+ "examples",
+]
+
+[lint.per-file-ignores]
+"gazelle/python/testdata/**" = ["E", "F", "W"]
+"tests/pypi/whl_library/testdata/**" = ["E", "F", "W"]
+"python/private/stage2_bootstrap_template.py" = ["E501", "F821"]
+"python/private/zipapp/zip_main_template.py" = ["E402", "E501"]
+"python/private/site_init_template.py" = ["F841"]
+"python/private/pypi/namespace_pkg_tmpl.py" = ["F821"]
+"python/bin/repl_stub.py" = ["E402"]
+"tests/bootstrap_impls/system_python_nodeps_test.py" = ["E402", "F401"]
+"tests/modules/other/venv_bin.py" = ["E402"]
+"python/runfiles/__init__.py" = ["F403"]
diff --git a/sphinxdocs/sphinxdocs/private/proto_to_markdown.py b/sphinxdocs/sphinxdocs/private/proto_to_markdown.py
index 58fb79393d..24fa750293 100644
--- a/sphinxdocs/sphinxdocs/private/proto_to_markdown.py
+++ b/sphinxdocs/sphinxdocs/private/proto_to_markdown.py
@@ -13,11 +13,9 @@
# limitations under the License.
import argparse
-import io
import itertools
import pathlib
import sys
-import textwrap
from typing import Callable, TextIO, TypeVar
from stardoc.proto import stardoc_output_pb2
@@ -133,10 +131,7 @@ def _render_module(self, module: stardoc_output_pb2.ModuleInfo):
(m.extension_name, m, self._render_module_extension)
for m in module.module_extension_info
),
- (
- (r.rule_name, r, self._render_repository_rule)
- for r in module.repository_rule_info
- ),
+ ((r.rule_name, r, self._render_repository_rule) for r in module.repository_rule_info),
)
# Sort by name, ignoring case. The `.TYPEDEF` string is removed so
# that the .TYPEDEF entries come before what is in the typedef.
@@ -154,9 +149,7 @@ def _render_module(self, module: stardoc_output_pb2.ModuleInfo):
def _process_object(self, name, obj, renderer):
# The trailing doc is added to prevent matching a common prefix
typedef_group = name.removesuffix(".TYPEDEF") + "."
- while self._typedef_stack and not typedef_group.startswith(
- self._typedef_stack[-1]
- ):
+ while self._typedef_stack and not typedef_group.startswith(self._typedef_stack[-1]):
self._typedef_stack.pop()
self._render_typedef_end()
renderer(obj)
@@ -456,9 +449,7 @@ def _render_attributes(self, attributes: list[stardoc_output_pb2.AttributeInfo])
if attr.provider_name_group:
self._write(" {required-providers}`")
- for _, outer_is_last, provider_group in _position_iter(
- attr.provider_name_group
- ):
+ for _, outer_is_last, provider_group in _position_iter(attr.provider_name_group):
pairs = list(
zip(
provider_group.origin_key,
diff --git a/sphinxdocs/sphinxdocs/private/sphinx_build.py b/sphinxdocs/sphinxdocs/private/sphinx_build.py
index b438c89fe1..e0c91f4db9 100644
--- a/sphinxdocs/sphinxdocs/private/sphinx_build.py
+++ b/sphinxdocs/sphinxdocs/private/sphinx_build.py
@@ -30,16 +30,13 @@ def __init__(self, message, exit_code):
class Worker:
-
- def __init__(
- self, instream: "typing.TextIO", outstream: "typing.TextIO", exec_root: str
- ):
+ def __init__(self, instream: "typing.TextIO", outstream: "typing.TextIO", exec_root: str):
# NOTE: Sphinx performs its own logging re-configuration, so any
# logging config we do isn't respected by Sphinx. Controlling where
# stdout and stderr goes are the main mechanisms. Recall that
# Bazel send worker stderr to the worker log file.
- # outputBase=$(bazel info output_base)
- # find $outputBase/bazel-workers/ -type f -printf '%T@ %p\n' | sort -n | tail -1 | awk '{print $2}'
+ # outputBase=$(bazel info output_base) # noqa: E501
+ # find $outputBase/bazel-workers/ -type f -printf '%T@ %p\n' | sort -n | tail -1 | awk '{print $2}' # noqa: E501
logging.basicConfig(level=logging.WARN)
logger.info("Initializing worker")
@@ -83,8 +80,11 @@ def run(self) -> None:
if response:
self._send_response(response)
except SphinxMainError as e:
- logger.error("Sphinx main returned failure: exit_code=%s request=%s",
- request, e.exit_code)
+ logger.error(
+ "Sphinx main returned failure: exit_code=%s request=%s",
+ request,
+ e.exit_code,
+ )
request_id = 0 if not request else request.get("requestId", 0)
self._send_response(
{
@@ -218,7 +218,6 @@ def _process_request(self, request: "WorkRequest") -> "WorkResponse | None":
)
raise SphinxMainError(message, exit_code)
-
# Copying is unfortunately necessary because Bazel doesn't know to
# implicily bring along what the symlinks point to.
shutil.copytree(worker_outdir, bazel_outdir, dirs_exist_ok=True)
diff --git a/sphinxdocs/sphinxdocs/private/sphinx_server.py b/sphinxdocs/sphinxdocs/private/sphinx_server.py
index 1bd6ee5550..def404e1e5 100644
--- a/sphinxdocs/sphinxdocs/private/sphinx_server.py
+++ b/sphinxdocs/sphinxdocs/private/sphinx_server.py
@@ -19,12 +19,12 @@ class DirectoryHandler(server.SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super().__init__(directory=serve_directory, *args, **kwargs)
- address = ("0.0.0.0", 8000)
- # with server.ThreadingHTTPServer(address, DirectoryHandler) as (ip, port, httpd):
+ # with server.ThreadingHTTPServer(("0.0.0.0", 8000), DirectoryHandler) as (ip, port, httpd):
+ _address = ("0.0.0.0", 8000)
with _start_server(DirectoryHandler, "0.0.0.0", 8000) as (ip, port, httpd):
def _print_server_info():
- print(f"Serving...")
+ print("Serving...")
print(f" Address: http://{ip}:{port}")
print(f" Serving directory: {serve_directory}")
print(f" url: file://{serve_directory}")
@@ -41,9 +41,7 @@ def _print_server_info():
httpd.serve_forever()
except KeyboardInterrupt:
_print_server_info()
- print(
- "*** KeyboardInterrupt received: CTRL+C again to terminate server ***"
- )
+ print("*** KeyboardInterrupt received: CTRL+C again to terminate server ***")
try:
time.sleep(1)
print("Restarting serving ...")
diff --git a/sphinxdocs/sphinxdocs/src/sphinx_bzl/bzl.py b/sphinxdocs/sphinxdocs/src/sphinx_bzl/bzl.py
index a1f47b3b1d..84d7fc3e15 100644
--- a/sphinxdocs/sphinxdocs/src/sphinx_bzl/bzl.py
+++ b/sphinxdocs/sphinxdocs/src/sphinx_bzl/bzl.py
@@ -24,14 +24,12 @@
from docutils import nodes as docutils_nodes
from docutils.parsers.rst import directives as docutils_directives
from docutils.parsers.rst import states
-from sphinx import addnodes, builders
+from sphinx import addnodes, builders, domains, environment, roles
from sphinx import directives as sphinx_directives
-from sphinx import domains, environment, roles
from sphinx.highlighting import lexer_classes
from sphinx.locale import _
-from sphinx.util import docfields
+from sphinx.util import docfields, inspect, logging
from sphinx.util import docutils as sphinx_docutils
-from sphinx.util import inspect, logging
from sphinx.util import nodes as sphinx_nodes
from sphinx.util import typing as sphinx_typing
from typing_extensions import TypeAlias, override
@@ -49,9 +47,7 @@
_GetObjectsTuple: TypeAlias = tuple[str, str, str, str, str, int]
# See SphinxRole.run definition; the docs for role classes are pretty sparse.
-_RoleRunResult: TypeAlias = tuple[
- list[docutils_nodes.Node], list[docutils_nodes.system_message]
-]
+_RoleRunResult: TypeAlias = tuple[list[docutils_nodes.Node], list[docutils_nodes.system_message]]
def _log_debug(message, *args):
@@ -118,7 +114,7 @@ def to_get_objects_tuple(self) -> _GetObjectsTuple:
)
def __repr__(self):
- return f"ObjectEntry({self.full_id=}, {self.object_type=}, {self.display_name=}, {self.index_entry.docname=})"
+ return f"ObjectEntry({self.full_id=}, {self.object_type=}, {self.display_name=}, {self.index_entry.docname=})" # noqa: E501
# A simple helper just to document what the index tuple nodes are.
@@ -171,9 +167,7 @@ def __init__(
raise InvalidValueError("label must start with //")
if not label.endswith(".bzl") and (symbol or namespace):
- raise InvalidValueError(
- "Symbol and namespace can only be specified for .bzl labels"
- )
+ raise InvalidValueError("Symbol and namespace can only be specified for .bzl labels")
self.repo = repo
self.label = label
@@ -367,9 +361,7 @@ def _make_xrefs_for_arg_attr(
bzl_file = env.ref_context["bzl:file"]
anchor_prefix = ".".join(env.ref_context["bzl:doc_id_stack"])
if not anchor_prefix:
- raise InvalidValueError(
- f"doc_id_stack empty when processing arg {arg_name}"
- )
+ raise InvalidValueError(f"doc_id_stack empty when processing arg {arg_name}")
index_description = f"{arg_name} ({self.name} in {bzl_file}%{anchor_prefix})"
anchor_id = f"{anchor_prefix}.{arg_name}"
full_id = _full_id_from_env(env, [arg_name])
@@ -417,9 +409,7 @@ def _make_xrefs_for_arg_attr(
index_node = addnodes.index(
entries=[
- _index_node_tuple(
- "single", f"{self.name}; {index_description}", anchor_id
- ),
+ _index_node_tuple("single", f"{self.name}; {index_description}", anchor_id),
_index_node_tuple("single", index_description, anchor_id),
]
)
@@ -455,7 +445,7 @@ def make_field(
field_text = item[1][0].astext()
parts = [p.strip() for p in field_text.split(",")]
field_body = docutils_nodes.field_body()
- for _, is_last, part in _position_iter(parts):
+ for _i, is_last, part in _position_iter(parts):
node = self.make_xref(
self.bodyrolename,
self._body_domain or domain,
@@ -608,8 +598,9 @@ def first_child_with_class_name(
root, class_name
) -> typing.Union[None, docutils_nodes.Element]:
matches = root.findall(
- lambda node: isinstance(node, docutils_nodes.Element)
- and class_name in node["classes"]
+ lambda node: (
+ isinstance(node, docutils_nodes.Element) and class_name in node["classes"]
+ )
)
found = next(matches, None)
return found
@@ -628,9 +619,7 @@ def match_arg_field_name(node):
arg_body_field = arg_name_field.next_node(descend=False, siblings=True)
# arg_type_node = first_child_with_class_name(arg_body_field, "arg-type-span")
arg_type_node = first_child_with_class_name(arg_body_field, "type-expr")
- arg_default_node = first_child_with_class_name(
- arg_body_field, "default-value-span"
- )
+ arg_default_node = first_child_with_class_name(arg_body_field, "default-value-span")
# Inserting into the body field itself causes the elements
# to be grouped into the paragraph node containing the arg
@@ -663,9 +652,7 @@ def after_content(self) -> None:
# docs on how to build signatures:
# https://www.sphinx-doc.org/en/master/extdev/nodes.html#sphinx.addnodes.desc_signature
@override
- def handle_signature(
- self, sig_text: str, sig_node: addnodes.desc_signature
- ) -> _BzlObjectId:
+ def handle_signature(self, sig_text: str, sig_node: addnodes.desc_signature) -> _BzlObjectId:
self._signature_add_object_type(sig_node)
relative_name, lparen, params_text = sig_text.partition("(")
@@ -767,7 +754,7 @@ def make_xref(name, title=None):
def _signature_add_object_type(self, sig_node: addnodes.desc_signature):
if sig_object_type := self._get_signature_object_type():
- sig_node += addnodes.desc_annotation("", self._get_signature_object_type())
+ sig_node += addnodes.desc_annotation("", sig_object_type)
sig_node += addnodes.desc_sig_space()
@override
@@ -835,9 +822,7 @@ def _get_additional_index_types(self):
return []
@override
- def _object_hierarchy_parts(
- self, sig_node: addnodes.desc_signature
- ) -> tuple[str, ...]:
+ def _object_hierarchy_parts(self, sig_node: addnodes.desc_signature) -> tuple[str, ...]:
return _parse_full_id(sig_node["bzl:object_id"])
@override
@@ -1523,9 +1508,7 @@ class _BzlDomain(domains.Domain):
"attr": domains.ObjType("attr", "attr", "obj"), # rule attribute
"function": domains.ObjType("function", "func", "obj"),
"method": domains.ObjType("method", "method", "obj"),
- "module-extension": domains.ObjType(
- "module extension", "module_extension", "obj"
- ),
+ "module-extension": domains.ObjType("module extension", "module_extension", "obj"),
# Providers are close enough to types that we include "type". This
# also makes :type: Foo work in directive options.
"provider": domains.ObjType("provider", "provider", "type", "obj"),
@@ -1599,9 +1582,7 @@ class _BzlDomain(domains.Domain):
}
@override
- def get_full_qualified_name(
- self, node: docutils_nodes.Element
- ) -> typing.Union[str, None]:
+ def get_full_qualified_name(self, node: docutils_nodes.Element) -> typing.Union[str, None]:
bzl_file = node.get("bzl:file")
symbol_name = node.get("bzl:symbol")
ref_target = node.get("reftarget")
@@ -1646,9 +1627,7 @@ def resolve_xref(
node: addnodes.pending_xref,
contnode: docutils_nodes.Element,
) -> typing.Union[docutils_nodes.Element, None]:
- _log_debug(
- "resolve_xref: fromdocname=%s, typ=%s, target=%s", fromdocname, typ, target
- )
+ _log_debug("resolve_xref: fromdocname=%s, typ=%s, target=%s", fromdocname, typ, target)
del env, node # Unused
entry = self._find_entry_for_xref(fromdocname, typ, target)
if not entry:
@@ -1695,9 +1674,7 @@ def _find_entry_for_xref(
# Give preference to shorter object ids. This is a work around
# to allow e.g. `FooInfo` to refer to the FooInfo type rather than
# the `FooInfo` constructor.
- entries = sorted(
- self.data["alt_names"][target].items(), key=lambda item: len(item[0])
- )
+ entries = sorted(self.data["alt_names"][target].items(), key=lambda item: len(item[0]))
for _, entry in entries:
if object_type in self.object_types[entry.object_type].roles:
return entry
@@ -1767,9 +1744,7 @@ def clear_doc(self, docname: str) -> None:
del self.data["alt_names"][alt_name]
del self.data["doc_names"][docname]
- def merge_domaindata(
- self, docnames: list[str], otherdata: dict[str, typing.Any]
- ) -> None:
+ def merge_domaindata(self, docnames: list[str], otherdata: dict[str, typing.Any]) -> None:
# Merge in simple dict[key, value] data
for top_key in ("objects",):
self.data[top_key].update(otherdata.get(top_key, {}))
diff --git a/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py b/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py
index c42bcf0b22..83eb5383d0 100644
--- a/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py
+++ b/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py
@@ -13,13 +13,13 @@
# limitations under the License.
import io
-import re
from absl.testing import absltest
from google.protobuf import text_format
-from sphinxdocs.private import proto_to_markdown
from stardoc.proto import stardoc_output_pb2
+from sphinxdocs.private import proto_to_markdown
+
_EVERYTHING_MODULE = """\
module_docstring: "MODULE_DOC_STRING"
file: "@repo//pkg:foo.bzl"
diff --git a/sphinxdocs/tests/sphinx_stardoc/sphinx_output_test.py b/sphinxdocs/tests/sphinx_stardoc/sphinx_output_test.py
index 4ed6d4df94..7a6a154610 100644
--- a/sphinxdocs/tests/sphinx_stardoc/sphinx_output_test.py
+++ b/sphinxdocs/tests/sphinx_stardoc/sphinx_output_test.py
@@ -1,9 +1,10 @@
import importlib.resources
from xml.etree import ElementTree
-import tests.sphinx_stardoc as sphinx_stardoc
from absl.testing import absltest, parameterized
+import tests.sphinx_stardoc as sphinx_stardoc
+
class SphinxOutputTest(parameterized.TestCase):
def setUp(self):
@@ -19,8 +20,7 @@ def assert_xref(self, doc, *, text, href):
self.assertEqual(
href,
actual,
- msg=f"Unexpected href for {text=}: "
- + ElementTree.tostring(match).decode("utf8"),
+ msg=f"Unexpected href for {text=}: " + ElementTree.tostring(match).decode("utf8"),
)
def _read_doc(self, doc):
@@ -56,14 +56,26 @@ def _doc_element(self, doc):
("full_norepo_aspect", "//lang:aspect.bzl%myaspect", "aspect.html#myaspect"),
("full_norepo_target", "//lang:relativetarget", "target.html#relativetarget"),
("full_repo_func", "@testrepo//lang:function.bzl%myfunc", "function.html#myfunc"),
- ("full_repo_func_arg", "@testrepo//lang:function.bzl%myfunc.arg1", "function.html#myfunc.arg1"),
+ (
+ "full_repo_func_arg",
+ "@testrepo//lang:function.bzl%myfunc.arg1",
+ "function.html#myfunc.arg1",
+ ),
("full_repo_rule", "@testrepo//lang:rule.bzl%my_rule", "rule.html#my_rule"),
("full_repo_rule_attr", "@testrepo//lang:rule.bzl%my_rule.ra1", "rule.html#my_rule.ra1"),
("full_repo_provider", "@testrepo//lang:provider.bzl%LangInfo", "provider.html#LangInfo"),
("full_repo_aspect", "@testrepo//lang:aspect.bzl%myaspect", "aspect.html#myaspect"),
("full_repo_target", "@testrepo//lang:relativetarget", "target.html#relativetarget"),
- ("tag_class_attr_using_attr_role", "myext.mytag.ta1", "module_extension.html#myext.mytag.ta1"),
- ("tag_class_attr_using_attr_role_just_attr_name", "ta1", "module_extension.html#myext.mytag.ta1"),
+ (
+ "tag_class_attr_using_attr_role",
+ "myext.mytag.ta1",
+ "module_extension.html#myext.mytag.ta1",
+ ),
+ (
+ "tag_class_attr_using_attr_role_just_attr_name",
+ "ta1",
+ "module_extension.html#myext.mytag.ta1",
+ ),
("file_without_repo", "//lang:rule.bzl", "rule.html"),
("file_with_repo", "@testrepo//lang:rule.bzl", "rule.html"),
("package_absolute", "//lang", "target.html"),
diff --git a/tests/bootstrap_impls/bazel_tools_importable_test.py b/tests/bootstrap_impls/bazel_tools_importable_test.py
index ad753bc03d..c374dd5dcf 100644
--- a/tests/bootstrap_impls/bazel_tools_importable_test.py
+++ b/tests/bootstrap_impls/bazel_tools_importable_test.py
@@ -7,7 +7,7 @@ def test_bazel_tools_importable(self):
try:
import bazel_tools
import bazel_tools.tools.python
- import bazel_tools.tools.python.runfiles
+ import bazel_tools.tools.python.runfiles # noqa: F401
except ImportError as exc:
raise AssertionError(
"Failed to import bazel_tools.python.runfiles\n"
diff --git a/tests/bootstrap_impls/bin.py b/tests/bootstrap_impls/bin.py
index 3d467dcf29..cfdf6d713e 100644
--- a/tests/bootstrap_impls/bin.py
+++ b/tests/bootstrap_impls/bin.py
@@ -16,9 +16,7 @@
import sys
print("Hello")
-print(
- "RULES_PYTHON_ZIP_DIR:{}".format(sys._xoptions.get("RULES_PYTHON_ZIP_DIR", "UNSET"))
-)
+print("RULES_PYTHON_ZIP_DIR:{}".format(sys._xoptions.get("RULES_PYTHON_ZIP_DIR", "UNSET")))
print("PYTHONSAFEPATH:", os.environ.get("PYTHONSAFEPATH", "UNSET") or "EMPTY")
print("sys.flags.safe_path:", sys.flags.safe_path)
print("file:", __file__)
diff --git a/tests/bootstrap_impls/bin_calls_bin/inner.py b/tests/bootstrap_impls/bin_calls_bin/inner.py
index 6fef455a84..74a70b5f0d 100644
--- a/tests/bootstrap_impls/bin_calls_bin/inner.py
+++ b/tests/bootstrap_impls/bin_calls_bin/inner.py
@@ -9,6 +9,7 @@
try:
import tests.bootstrap_impls.bin_calls_bin.inner_lib as inner_lib
+
print(f"inner: import_result='{inner_lib.confirm()}'")
except ImportError as e:
print(f"inner: import_result='{e}'")
diff --git a/tests/bootstrap_impls/bin_calls_bin/inner_lib.py b/tests/bootstrap_impls/bin_calls_bin/inner_lib.py
index 97efbb1565..5815b4f41b 100644
--- a/tests/bootstrap_impls/bin_calls_bin/inner_lib.py
+++ b/tests/bootstrap_impls/bin_calls_bin/inner_lib.py
@@ -1,3 +1,3 @@
# Rather than having a completely empty file...
def confirm():
- return "success"
\ No newline at end of file
+ return "success"
diff --git a/tests/bootstrap_impls/call_sys_exe.py b/tests/bootstrap_impls/call_sys_exe.py
index 0c6157048c..c431145386 100644
--- a/tests/bootstrap_impls/call_sys_exe.py
+++ b/tests/bootstrap_impls/call_sys_exe.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
import subprocess
import sys
diff --git a/tests/bootstrap_impls/sys_path_order_test.py b/tests/bootstrap_impls/sys_path_order_test.py
index a9018c39ce..a15272b4ae 100644
--- a/tests/bootstrap_impls/sys_path_order_test.py
+++ b/tests/bootstrap_impls/sys_path_order_test.py
@@ -64,8 +64,7 @@ def test_sys_path_order(self):
first_user = i
sys_path_str = "\n".join(
- f"{i}: ({category}) {value}"
- for i, (category, value) in enumerate(categorized_paths)
+ f"{i}: ({category}) {value}" for i, (category, value) in enumerate(categorized_paths)
)
if None in (last_stdlib, first_user, first_runtime_site):
self.fail(
diff --git a/tests/bootstrap_impls/system_python_nodeps_test.py b/tests/bootstrap_impls/system_python_nodeps_test.py
index d9b43e0f27..9580264dd5 100644
--- a/tests/bootstrap_impls/system_python_nodeps_test.py
+++ b/tests/bootstrap_impls/system_python_nodeps_test.py
@@ -8,4 +8,3 @@
# Verify a C-implemented module can be imported.
# Socket isn't implement in C, but requires `_socket`,
# which is implemented in C
-import socket
diff --git a/tests/build_data/build_data_test.py b/tests/build_data/build_data_test.py
index e4ff81a634..874453e8e4 100644
--- a/tests/build_data/build_data_test.py
+++ b/tests/build_data/build_data_test.py
@@ -4,7 +4,6 @@
class BuildDataTest(unittest.TestCase):
-
def test_target_build_data(self):
import bazel_binary_info
diff --git a/tests/cc/current_py_cc_headers/abi3_headers_linkage_test.py b/tests/cc/current_py_cc_headers/abi3_headers_linkage_test.py
index 6c337653b1..baae92dd2f 100644
--- a/tests/cc/current_py_cc_headers/abi3_headers_linkage_test.py
+++ b/tests/cc/current_py_cc_headers/abi3_headers_linkage_test.py
@@ -1,5 +1,3 @@
-import os.path
-import pathlib
import sys
import unittest
@@ -17,9 +15,7 @@ def test_linkage_windows(self):
if not hasattr(pe, "DIRECTORY_ENTRY_IMPORT"):
self.fail("No import directory found.")
- imported_dlls = [
- entry.dll.decode("utf-8").lower() for entry in pe.DIRECTORY_ENTRY_IMPORT
- ]
+ imported_dlls = [entry.dll.decode("utf-8").lower() for entry in pe.DIRECTORY_ENTRY_IMPORT]
python_dlls = [dll for dll in imported_dlls if dll.startswith("python3")]
self.assertEqual(python_dlls, ["python3.dll"])
diff --git a/tests/entry_points/py_console_script_gen_test.py b/tests/entry_points/py_console_script_gen_test.py
index 77ad1a5faa..d5e3ae98a8 100644
--- a/tests/entry_points/py_console_script_gen_test.py
+++ b/tests/entry_points/py_console_script_gen_test.py
@@ -81,7 +81,7 @@ def test_no_entry_point_selected_error(self):
)
self.assertEqual(
- "Tried to guess that you wanted 'bar-baz', but could not find it. Please select one of the following console scripts: foo",
+ "Tried to guess that you wanted 'bar-baz', but could not find it. Please select one of the following console scripts: foo", # noqa: E501
cm.exception.args[0],
)
@@ -194,8 +194,8 @@ def test_a_second_entry_point_class_method(self):
got = out.read_text()
- self.assertRegex(got, "from foo\.baz import Bar")
- self.assertRegex(got, "sys\.exit\(Bar\.baz\(\)\)")
+ self.assertRegex(got, r"from foo\.baz import Bar")
+ self.assertRegex(got, r"sys\.exit\(Bar\.baz\(\)\)")
def test_shebang_included(self):
with tempfile.TemporaryDirectory() as tmpdir:
diff --git a/tests/implicit_namespace_packages/namespace_packages_test.py b/tests/implicit_namespace_packages/namespace_packages_test.py
index ea47c08fd2..59f05e343b 100644
--- a/tests/implicit_namespace_packages/namespace_packages_test.py
+++ b/tests/implicit_namespace_packages/namespace_packages_test.py
@@ -2,7 +2,6 @@
class NamespacePackagesTest(unittest.TestCase):
-
def test_both_importable(self):
import nspkg
import nspkg.subpkg1
@@ -10,14 +9,10 @@ def test_both_importable(self):
import nspkg.subpkg2.subpkgmod
self.assertEqual("nspkg.subpkg1", nspkg.subpkg1.expected_name)
- self.assertEqual(
- "nspkg.subpkg1.subpkgmod", nspkg.subpkg1.subpkgmod.expected_name
- )
+ self.assertEqual("nspkg.subpkg1.subpkgmod", nspkg.subpkg1.subpkgmod.expected_name)
self.assertEqual("nspkg.subpkg2", nspkg.subpkg2.expected_name)
- self.assertEqual(
- "nspkg.subpkg2.subpkgmod", nspkg.subpkg2.subpkgmod.expected_name
- )
+ self.assertEqual("nspkg.subpkg2.subpkgmod", nspkg.subpkg2.subpkgmod.expected_name)
if __name__ == "__main__":
diff --git a/tests/integration/custom_commands_test.py b/tests/integration/custom_commands_test.py
index 288a4e7a91..336937ece0 100644
--- a/tests/integration/custom_commands_test.py
+++ b/tests/integration/custom_commands_test.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import logging
import unittest
from tests.integration import runner
@@ -21,7 +20,12 @@
class CustomCommandsTest(runner.TestCase):
# Regression test for https://github.com/bazel-contrib/rules_python/issues/1840
def test_run_build_python_zip_false(self):
- result = self.run_bazel("run", "--build_python_zip=false", "--@rules_python//python/config_settings:build_python_zip=false", "//:bin")
+ result = self.run_bazel(
+ "run",
+ "--build_python_zip=false",
+ "--@rules_python//python/config_settings:build_python_zip=false",
+ "//:bin",
+ )
self.assert_result_matches(result, "bazel-out")
diff --git a/tests/integration/local_toolchains/echo_test.py b/tests/integration/local_toolchains/echo_test.py
index 4cc31ff759..17121e0f17 100644
--- a/tests/integration/local_toolchains/echo_test.py
+++ b/tests/integration/local_toolchains/echo_test.py
@@ -4,6 +4,5 @@
class ExtensionTest(unittest.TestCase):
-
def test_echo_extension(self):
self.assertEqual(echo_ext.echo(42, "str"), tuple(42, "str"))
diff --git a/tests/integration/local_toolchains/local_runtime_test.py b/tests/integration/local_toolchains/local_runtime_test.py
index 220ceaead4..62767e089d 100644
--- a/tests/integration/local_toolchains/local_runtime_test.py
+++ b/tests/integration/local_toolchains/local_runtime_test.py
@@ -17,9 +17,7 @@ def test_python_from_path_used(self):
shell_path = shutil.which("python3")
if shell_path is None:
- self.fail(
- "which(python3) returned None.\n" + f"PATH={os.environ.get('PATH')}"
- )
+ self.fail("which(python3) returned None.\n" + f"PATH={os.environ.get('PATH')}")
# We call the interpreter and print its executable because of
# things like pyenv: they install a shim that re-execs python.
diff --git a/tests/integration/local_toolchains/repo_runtime_test.py b/tests/integration/local_toolchains/repo_runtime_test.py
index 4614407c4e..0eb2ef4248 100644
--- a/tests/integration/local_toolchains/repo_runtime_test.py
+++ b/tests/integration/local_toolchains/repo_runtime_test.py
@@ -1,8 +1,5 @@
import os.path
-import shutil
-import subprocess
import sys
-import tempfile
import unittest
diff --git a/tests/integration/pip_parse_isolated/test_isolated.py b/tests/integration/pip_parse_isolated/test_isolated.py
index f889f071fb..5620801955 100644
--- a/tests/integration/pip_parse_isolated/test_isolated.py
+++ b/tests/integration/pip_parse_isolated/test_isolated.py
@@ -3,9 +3,10 @@
See MODULE.bazel.
"""
-import six
import unittest
+import six
+
class TestIsolated(unittest.TestCase):
def test_import(self):
diff --git a/tests/integration/runner.py b/tests/integration/runner.py
index 2534ab2d90..5b597e1a94 100644
--- a/tests/integration/runner.py
+++ b/tests/integration/runner.py
@@ -49,8 +49,7 @@ def __init__(
def describe(self) -> str:
env_lines = [
- " " + shlex.quote(f"{key}={value}")
- for key, value in sorted(self.env.items())
+ " " + shlex.quote(f"{key}={value}") for key, value in sorted(self.env.items())
]
env = " \\\n".join(env_lines)
args = shlex.join(self.args)
diff --git a/tests/integration/toolchain_target_settings/main.py b/tests/integration/toolchain_target_settings/main.py
index 6ae68c3f51..52ec605a55 100644
--- a/tests/integration/toolchain_target_settings/main.py
+++ b/tests/integration/toolchain_target_settings/main.py
@@ -1,2 +1,3 @@
import sys
+
print(f"Python {sys.version}")
diff --git a/tests/py_zipapp/main.py b/tests/py_zipapp/main.py
index 5770170d2c..f3db986452 100644
--- a/tests/py_zipapp/main.py
+++ b/tests/py_zipapp/main.py
@@ -14,9 +14,7 @@ def main():
except ImportError as e:
import sys
- e.add_note(
- "Failed to import a dependency.\n" + "sys.path:\n" + "\n".join(sys.path)
- )
+ e.add_note("Failed to import a dependency.\n" + "sys.path:\n" + "\n".join(sys.path))
raise
diff --git a/tests/py_zipapp/system_python_zipapp_test.py b/tests/py_zipapp/system_python_zipapp_test.py
index 79cd142c4d..7c3e2deeaf 100644
--- a/tests/py_zipapp/system_python_zipapp_test.py
+++ b/tests/py_zipapp/system_python_zipapp_test.py
@@ -1,7 +1,6 @@
import os
import subprocess
import unittest
-import zipfile
class SystemPythonZipAppTest(unittest.TestCase):
diff --git a/tests/py_zipapp/venv_zipapp_test.py b/tests/py_zipapp/venv_zipapp_test.py
index bd26d533a3..fa413ffb63 100644
--- a/tests/py_zipapp/venv_zipapp_test.py
+++ b/tests/py_zipapp/venv_zipapp_test.py
@@ -18,9 +18,7 @@ def test_zipapp_runnable(self):
except subprocess.CalledProcessError as e:
self.fail(
(
- "exec failed: {}\n"
- + "exit code: {}\n"
- + "=== stdout/stderr start ===\n"
+ "exec failed: {}\n" + "exit code: {}\n" + "=== stdout/stderr start ===\n"
"{}\n" + "=== stdout/stderr end ==="
).format(zipapp_path, e.returncode, e.output.decode("utf-8"))
)
@@ -100,9 +98,7 @@ def test_zipapp_structure(self):
self.assertZipEntryIsSymlink(zf, python_bin)
# Verify _bazel_site_init.py exists in site-packages
- self.assertHasPathMatchingSuffix(
- namelist, "/site-packages/_bazel_site_init.py"
- )
+ self.assertHasPathMatchingSuffix(namelist, "/site-packages/_bazel_site_init.py")
if __name__ == "__main__":
diff --git a/tests/pypi/whl_library/whl_library_extras_test.py b/tests/pypi/whl_library/whl_library_extras_test.py
index 4fe344470a..43cd5aec3f 100644
--- a/tests/pypi/whl_library/whl_library_extras_test.py
+++ b/tests/pypi/whl_library/whl_library_extras_test.py
@@ -2,7 +2,6 @@
class NamespacePackagesTest(unittest.TestCase):
-
def test_extras_propagated(self):
import pkg
diff --git a/tests/repl/repl_test.py b/tests/repl/repl_test.py
index 319dab561a..5a67442294 100644
--- a/tests/repl/repl_test.py
+++ b/tests/repl/repl_test.py
@@ -1,6 +1,5 @@
import os
import subprocess
-import sys
import tempfile
import unittest
from pathlib import Path
@@ -49,9 +48,7 @@ def run_code_in_repl(self, lines: Iterable[str], *, env=None) -> str:
raise RuntimeError(f"Failed to run the REPL:\n{error.stdout}") from error
except Exception as exc:
if env:
- env_str = "\n".join(
- f"{key}={value!r}" for key, value in sorted(env.items())
- )
+ env_str = "\n".join(f"{key}={value!r}" for key, value in sorted(env.items()))
else:
env_str = ""
if isinstance(exc, subprocess.CalledProcessError):
@@ -89,11 +86,9 @@ def test_repl_version(self):
def test_cannot_import_test_module_directly(self):
"""Validates that we cannot import helper/test_module.py since it's not a direct dep."""
with self.assertRaises(ModuleNotFoundError):
- import test_module
+ pass
- @unittest.skipIf(
- not EXPECT_TEST_MODULE_IMPORTABLE, "test only works without repl_dep set"
- )
+ @unittest.skipIf(not EXPECT_TEST_MODULE_IMPORTABLE, "test only works without repl_dep set")
def test_import_test_module_success(self):
"""Validates that we can import helper/test_module.py when repl_dep is set."""
result = self.run_code_in_repl(
@@ -104,9 +99,7 @@ def test_import_test_module_success(self):
)
self.assertIn("Hello World", result)
- @unittest.skipIf(
- EXPECT_TEST_MODULE_IMPORTABLE, "test only works without repl_dep set"
- )
+ @unittest.skipIf(EXPECT_TEST_MODULE_IMPORTABLE, "test only works without repl_dep set")
def test_import_test_module_failure(self):
"""Validates that we cannot import helper/test_module.py when repl_dep isn't set."""
result = self.run_code_in_repl(
@@ -150,9 +143,7 @@ def test_pythonstartup_doesnt_leak(self):
for var_name in ("exitmsg", "sys", "code", "bazel_runfiles", "STUB_PATH"):
with self.subTest(var_name=var_name):
result = self.run_code_in_repl([f"print({var_name})"], env=env)
- self.assertIn(
- f"NameError: name '{var_name}' is not defined", result
- )
+ self.assertIn(f"NameError: name '{var_name}' is not defined", result)
if __name__ == "__main__":
diff --git a/tests/runfiles/pathlib_test.py b/tests/runfiles/pathlib_test.py
index a959138235..fa30735e72 100644
--- a/tests/runfiles/pathlib_test.py
+++ b/tests/runfiles/pathlib_test.py
@@ -146,8 +146,7 @@ def test_glob(self) -> None:
p = root / "my_repo"
glob_results = {
- pathlib.PurePath(c).relative_to(pathlib.PurePath(p)).as_posix()
- for c in p.glob("*.txt")
+ pathlib.PurePath(c).relative_to(pathlib.PurePath(p)).as_posix() for c in p.glob("*.txt")
}
self.assertEqual(glob_results, {"data.txt"})
diff --git a/tests/runfiles/runfiles_test.py b/tests/runfiles/runfiles_test.py
index 165ab8c8a9..c3700e2618 100644
--- a/tests/runfiles/runfiles_test.py
+++ b/tests/runfiles/runfiles_test.py
@@ -32,33 +32,15 @@ def testRlocationArgumentValidation(self) -> None:
self.assertRaises(ValueError, lambda: r.Rlocation(None)) # type: ignore
self.assertRaises(ValueError, lambda: r.Rlocation(""))
self.assertRaises(TypeError, lambda: r.Rlocation(1)) # type: ignore
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("../foo")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("foo/..")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("foo/../bar")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("./foo")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("foo/.")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("foo/./bar")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("//foobar")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("foo//")
- )
- self.assertRaisesRegex(
- ValueError, "is not normalized", lambda: r.Rlocation("foo//bar")
- )
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("../foo"))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("foo/.."))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("foo/../bar"))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("./foo"))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("foo/."))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("foo/./bar"))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("//foobar"))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("foo//"))
+ self.assertRaisesRegex(ValueError, "is not normalized", lambda: r.Rlocation("foo//bar"))
self.assertRaisesRegex(
ValueError,
"is absolute without a drive letter",
@@ -68,9 +50,7 @@ def testRlocationArgumentValidation(self) -> None:
def testRlocationWithData(self) -> None:
r = runfiles.Create()
assert r is not None # mypy doesn't understand the unittest api.
- settings_path = r.Rlocation(
- "rules_python/tests/support/current_build_settings.json"
- )
+ settings_path = r.Rlocation("rules_python/tests/support/current_build_settings.json")
assert settings_path is not None
settings = json.loads(pathlib.Path(settings_path).read_text())
self.assertIn("bootstrap_impl", settings)
@@ -118,12 +98,8 @@ def testManifestBasedRunfilesEnvVars(self) -> None:
r.EnvVars(),
{
"RUNFILES_MANIFEST_FILE": mf.Path(),
- "RUNFILES_DIR": (
- mf.Path()[: -len("foo.runfiles_manifest")] + "foo.runfiles"
- ),
- "JAVA_RUNFILES": (
- mf.Path()[: -len("foo.runfiles_manifest")] + "foo.runfiles"
- ),
+ "RUNFILES_DIR": (mf.Path()[: -len("foo.runfiles_manifest")] + "foo.runfiles"),
+ "JAVA_RUNFILES": (mf.Path()[: -len("foo.runfiles_manifest")] + "foo.runfiles"),
},
)
@@ -208,9 +184,7 @@ def testManifestBasedRlocation(self) -> None:
r = runfiles.CreateManifestBased(mf.Path())
self.assertEqual(r.Rlocation("Foo/runfile1"), "Foo/runfile1")
self.assertEqual(r.Rlocation("Foo/runfile2"), "C:/Actual Path\\runfile2")
- self.assertEqual(
- r.Rlocation("Foo/Bar/runfile3"), "D:\\the path\\run file 3.txt"
- )
+ self.assertEqual(r.Rlocation("Foo/Bar/runfile3"), "D:\\the path\\run file 3.txt")
self.assertEqual(
r.Rlocation("Foo/Bar/Dir/runfile4"),
"E:\\Actual Path\\Directory/runfile4",
@@ -231,24 +205,27 @@ def testManifestBasedRlocation(self) -> None:
self.assertEqual(r.Rlocation("/foo"), "/foo")
def testManifestBasedRlocationWithRepoMappingFromMain(self) -> None:
- with _MockFile(
- contents=[
- ",config.json,config.json~1.2.3",
- ",my_module,_main",
- ",my_protobuf,protobuf~3.19.2",
- ",my_workspace,_main",
- "protobuf~3.19.2,config.json,config.json~1.2.3",
- "protobuf~3.19.2,protobuf,protobuf~3.19.2",
- ]
- ) as rm, _MockFile(
- contents=[
- "_repo_mapping " + rm.Path(),
- "config.json /etc/config.json",
- "protobuf~3.19.2/foo/runfile C:/Actual Path\\protobuf\\runfile",
- "_main/bar/runfile /the/path/./to/other//other runfile.txt",
- "protobuf~3.19.2/bar/dir E:\\Actual Path\\Directory",
- ],
- ) as mf:
+ with (
+ _MockFile(
+ contents=[
+ ",config.json,config.json~1.2.3",
+ ",my_module,_main",
+ ",my_protobuf,protobuf~3.19.2",
+ ",my_workspace,_main",
+ "protobuf~3.19.2,config.json,config.json~1.2.3",
+ "protobuf~3.19.2,protobuf,protobuf~3.19.2",
+ ]
+ ) as rm,
+ _MockFile(
+ contents=[
+ "_repo_mapping " + rm.Path(),
+ "config.json /etc/config.json",
+ "protobuf~3.19.2/foo/runfile C:/Actual Path\\protobuf\\runfile",
+ "_main/bar/runfile /the/path/./to/other//other runfile.txt",
+ "protobuf~3.19.2/bar/dir E:\\Actual Path\\Directory",
+ ],
+ ) as mf,
+ ):
r = runfiles.CreateManifestBased(mf.Path())
self.assertEqual(
@@ -263,9 +240,7 @@ def testManifestBasedRlocationWithRepoMappingFromMain(self) -> None:
r.Rlocation("my_protobuf/foo/runfile", ""),
"C:/Actual Path\\protobuf\\runfile",
)
- self.assertEqual(
- r.Rlocation("my_protobuf/bar/dir", ""), "E:\\Actual Path\\Directory"
- )
+ self.assertEqual(r.Rlocation("my_protobuf/bar/dir", ""), "E:\\Actual Path\\Directory")
self.assertEqual(
r.Rlocation("my_protobuf/bar/dir/file", ""),
"E:\\Actual Path\\Directory/file",
@@ -306,24 +281,27 @@ def testManifestBasedRlocationWithRepoMappingFromMain(self) -> None:
self.assertIsNone(r.Rlocation("protobuf", ""))
def testManifestBasedRlocationWithRepoMappingFromOtherRepo(self) -> None:
- with _MockFile(
- contents=[
- ",config.json,config.json~1.2.3",
- ",my_module,_main",
- ",my_protobuf,protobuf~3.19.2",
- ",my_workspace,_main",
- "protobuf~3.19.2,config.json,config.json~1.2.3",
- "protobuf~3.19.2,protobuf,protobuf~3.19.2",
- ]
- ) as rm, _MockFile(
- contents=[
- "_repo_mapping " + rm.Path(),
- "config.json /etc/config.json",
- "protobuf~3.19.2/foo/runfile C:/Actual Path\\protobuf\\runfile",
- "_main/bar/runfile /the/path/./to/other//other runfile.txt",
- "protobuf~3.19.2/bar/dir E:\\Actual Path\\Directory",
- ],
- ) as mf:
+ with (
+ _MockFile(
+ contents=[
+ ",config.json,config.json~1.2.3",
+ ",my_module,_main",
+ ",my_protobuf,protobuf~3.19.2",
+ ",my_workspace,_main",
+ "protobuf~3.19.2,config.json,config.json~1.2.3",
+ "protobuf~3.19.2,protobuf,protobuf~3.19.2",
+ ]
+ ) as rm,
+ _MockFile(
+ contents=[
+ "_repo_mapping " + rm.Path(),
+ "config.json /etc/config.json",
+ "protobuf~3.19.2/foo/runfile C:/Actual Path\\protobuf\\runfile",
+ "_main/bar/runfile /the/path/./to/other//other runfile.txt",
+ "protobuf~3.19.2/bar/dir E:\\Actual Path\\Directory",
+ ],
+ ) as mf,
+ ):
r = runfiles.CreateManifestBased(mf.Path())
self.assertEqual(
@@ -339,22 +317,16 @@ def testManifestBasedRlocationWithRepoMappingFromOtherRepo(self) -> None:
"E:\\Actual Path\\Directory/file",
)
self.assertEqual(
- r.Rlocation(
- "protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"
- ),
+ r.Rlocation("protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"),
"E:\\Actual Path\\Directory/de eply/nes ted/fi~le",
)
self.assertIsNone(r.Rlocation("my_module/bar/runfile", "protobuf~3.19.2"))
self.assertIsNone(r.Rlocation("my_protobuf/foo/runfile", "protobuf~3.19.2"))
self.assertIsNone(r.Rlocation("my_protobuf/bar/dir", "protobuf~3.19.2"))
+ self.assertIsNone(r.Rlocation("my_protobuf/bar/dir/file", "protobuf~3.19.2"))
self.assertIsNone(
- r.Rlocation("my_protobuf/bar/dir/file", "protobuf~3.19.2")
- )
- self.assertIsNone(
- r.Rlocation(
- "my_protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"
- )
+ r.Rlocation("my_protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2")
)
self.assertEqual(
@@ -374,15 +346,11 @@ def testManifestBasedRlocationWithRepoMappingFromOtherRepo(self) -> None:
"E:\\Actual Path\\Directory/file",
)
self.assertEqual(
- r.Rlocation(
- "protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"
- ),
+ r.Rlocation("protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"),
"E:\\Actual Path\\Directory/de eply/nes ted/fi~le",
)
- self.assertEqual(
- r.Rlocation("config.json", "protobuf~3.19.2"), "/etc/config.json"
- )
+ self.assertEqual(r.Rlocation("config.json", "protobuf~3.19.2"), "/etc/config.json")
self.assertIsNone(r.Rlocation("_main", "protobuf~3.19.2"))
self.assertIsNone(r.Rlocation("my_module", "protobuf~3.19.2"))
self.assertIsNone(r.Rlocation("protobuf", "protobuf~3.19.2"))
@@ -414,9 +382,7 @@ def testDirectoryBasedRlocationWithRepoMappingFromMain(self) -> None:
dir = os.path.dirname(rm.Path())
r = runfiles.CreateDirectoryBased(dir)
- self.assertEqual(
- r.Rlocation("my_module/bar/runfile", ""), dir + "/_main/bar/runfile"
- )
+ self.assertEqual(r.Rlocation("my_module/bar/runfile", ""), dir + "/_main/bar/runfile")
self.assertEqual(
r.Rlocation("my_workspace/bar/runfile", ""), dir + "/_main/bar/runfile"
)
@@ -436,17 +402,13 @@ def testDirectoryBasedRlocationWithRepoMappingFromMain(self) -> None:
dir + "/protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le",
)
- self.assertEqual(
- r.Rlocation("protobuf/foo/runfile", ""), dir + "/protobuf/foo/runfile"
- )
+ self.assertEqual(r.Rlocation("protobuf/foo/runfile", ""), dir + "/protobuf/foo/runfile")
self.assertEqual(
r.Rlocation("protobuf/bar/dir/dir/de eply/nes ted/fi~le", ""),
dir + "/protobuf/bar/dir/dir/de eply/nes ted/fi~le",
)
- self.assertEqual(
- r.Rlocation("_main/bar/runfile", ""), dir + "/_main/bar/runfile"
- )
+ self.assertEqual(r.Rlocation("_main/bar/runfile", ""), dir + "/_main/bar/runfile")
self.assertEqual(
r.Rlocation("protobuf~3.19.2/foo/runfile", ""),
dir + "/protobuf~3.19.2/foo/runfile",
@@ -494,9 +456,7 @@ def testDirectoryBasedRlocationWithRepoMappingFromOtherRepo(self) -> None:
dir + "/protobuf~3.19.2/bar/dir/file",
)
self.assertEqual(
- r.Rlocation(
- "protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"
- ),
+ r.Rlocation("protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"),
dir + "/protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le",
)
@@ -505,9 +465,7 @@ def testDirectoryBasedRlocationWithRepoMappingFromOtherRepo(self) -> None:
dir + "/my_module/bar/runfile",
)
self.assertEqual(
- r.Rlocation(
- "my_protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"
- ),
+ r.Rlocation("my_protobuf/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"),
dir + "/my_protobuf/bar/dir/de eply/nes ted/fi~le",
)
@@ -528,15 +486,11 @@ def testDirectoryBasedRlocationWithRepoMappingFromOtherRepo(self) -> None:
dir + "/protobuf~3.19.2/bar/dir/file",
)
self.assertEqual(
- r.Rlocation(
- "protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"
- ),
+ r.Rlocation("protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le", "protobuf~3.19.2"),
dir + "/protobuf~3.19.2/bar/dir/de eply/nes ted/fi~le",
)
- self.assertEqual(
- r.Rlocation("config.json", "protobuf~3.19.2"), dir + "/config.json"
- )
+ self.assertEqual(r.Rlocation("config.json", "protobuf~3.19.2"), dir + "/config.json")
def testDirectoryBasedRlocationWithCompactRepoMappingFromMain(self) -> None:
"""Test repository mapping with prefix-based entries (compact format)."""
@@ -547,7 +501,7 @@ def testDirectoryBasedRlocationWithCompactRepoMappingFromMain(self) -> None:
"_,config.json,config.json~1.2.3",
",my_module,_main",
",my_workspace,_main",
- # Prefixed mappings (with asterisk) - these apply to any repo starting with the prefix
+ # Prefixed mappings (with asterisk) - these apply to any repo starting with the prefix # noqa: E501
"deps+*,external_dep,external_dep~1.0.0",
"test_deps+*,test_lib,test_lib~2.1.0",
],
@@ -556,9 +510,7 @@ def testDirectoryBasedRlocationWithCompactRepoMappingFromMain(self) -> None:
r = runfiles.CreateDirectoryBased(dir)
# Test exact mappings still work
- self.assertEqual(
- r.Rlocation("my_module/bar/runfile", ""), dir + "/_main/bar/runfile"
- )
+ self.assertEqual(r.Rlocation("my_module/bar/runfile", ""), dir + "/_main/bar/runfile")
self.assertEqual(
r.Rlocation("my_workspace/bar/runfile", ""), dir + "/_main/bar/runfile"
)
@@ -714,9 +666,7 @@ def IsWindows() -> bool:
class _MockFile:
- def __init__(
- self, name: Optional[str] = None, contents: Optional[List[Any]] = None
- ) -> None:
+ def __init__(self, name: Optional[str] = None, contents: Optional[List[Any]] = None) -> None:
self._contents = contents or []
self._name = name or "x"
self._path: Optional[str] = None
@@ -725,7 +675,7 @@ def __enter__(self) -> Any:
tmpdir = os.environ.get("TEST_TMPDIR")
self._path = os.path.join(tempfile.mkdtemp(dir=tmpdir), self._name)
with open(self._path, "wt", encoding="utf-8", newline="\n") as f:
- f.writelines(l + "\n" for l in self._contents)
+ f.writelines(line + "\n" for line in self._contents)
return self
def __exit__(
diff --git a/tests/runtime_env_toolchain/toolchain_runs_test.py b/tests/runtime_env_toolchain/toolchain_runs_test.py
index 13b5775ff0..503baa2963 100644
--- a/tests/runtime_env_toolchain/toolchain_runs_test.py
+++ b/tests/runtime_env_toolchain/toolchain_runs_test.py
@@ -10,15 +10,11 @@
class RunTest(unittest.TestCase):
def test_ran(self):
rf = runfiles.Create()
- settings_path = rf.Rlocation(
- "rules_python/tests/support/current_build_settings.json"
- )
+ settings_path = rf.Rlocation("rules_python/tests/support/current_build_settings.json")
settings = json.loads(pathlib.Path(settings_path).read_text())
if platform.system() == "Windows":
- self.assertEqual(
- "/_magic_pyruntime_sentinel_do_not_use", settings["interpreter_path"]
- )
+ self.assertEqual("/_magic_pyruntime_sentinel_do_not_use", settings["interpreter_path"])
else:
self.assertIn(
"runtime_env_toolchain_interpreter.sh",
diff --git a/tests/toolchains/custom_platform_toolchain_test.py b/tests/toolchains/custom_platform_toolchain_test.py
index fd28cf772e..284cbd8d3a 100644
--- a/tests/toolchains/custom_platform_toolchain_test.py
+++ b/tests/toolchains/custom_platform_toolchain_test.py
@@ -3,12 +3,11 @@
class VerifyCustomPlatformToolchainTest(unittest.TestCase):
-
def test_custom_platform_interpreter_used(self):
# For lack of a better option, check the version. Identifying the
self.assertEqual(
- "3.13.1",
- f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}")
+ "3.13.1", f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
+ )
if __name__ == "__main__":
diff --git a/tests/toolchains/python_toolchain_test.py b/tests/toolchains/python_toolchain_test.py
index ff45fc0863..4b4f302281 100644
--- a/tests/toolchains/python_toolchain_test.py
+++ b/tests/toolchains/python_toolchain_test.py
@@ -13,9 +13,7 @@ def test_expected_toolchain_matches(self):
expect_version = os.environ["EXPECT_PYTHON_VERSION"]
rf = runfiles.Create()
- settings_path = rf.Rlocation(
- "rules_python/tests/support/current_build_settings.json"
- )
+ settings_path = rf.Rlocation("rules_python/tests/support/current_build_settings.json")
settings = json.loads(pathlib.Path(settings_path).read_text())
expected = "python_{}".format(expect_version.replace(".", "_"))
diff --git a/tests/tools/private/release/release_test.py b/tests/tools/private/release/release_test.py
index 676a898440..f123e8a83e 100644
--- a/tests/tools/private/release/release_test.py
+++ b/tests/tools/private/release/release_test.py
@@ -1,4 +1,3 @@
-import datetime
import os
import pathlib
import shutil
@@ -88,12 +87,10 @@ def test_update_changelog(self):
# Assert
new_content = changelog_path.read_text()
+ self.assertIn(_UNRELEASED_TEMPLATE, new_content, msg=f"ACTUAL:\n\n{new_content}\n\n")
+ self.assertIn("## [1.23.4] - 2025-01-01", new_content)
self.assertIn(
- _UNRELEASED_TEMPLATE, new_content, msg=f"ACTUAL:\n\n{new_content}\n\n"
- )
- self.assertIn(f"## [1.23.4] - 2025-01-01", new_content)
- self.assertIn(
- f"[1.23.4]: https://github.com/bazel-contrib/rules_python/releases/tag/1.23.4",
+ "[1.23.4]: https://github.com/bazel-contrib/rules_python/releases/tag/1.23.4",
new_content,
)
self.assertIn("{#v1-23-4}", new_content)
@@ -234,9 +231,7 @@ def test_no_markers(self):
self.assertEqual(next_version, "1.2.4")
def test_only_patch(self):
- (self.tmpdir / "mock_file.bzl").write_text(
- ":::{versionchanged} VERSION_NEXT_PATCH"
- )
+ (self.tmpdir / "mock_file.bzl").write_text(":::{versionchanged} VERSION_NEXT_PATCH")
self.mock_get_latest_version.return_value = "1.2.3"
next_version = releaser.determine_next_version()
@@ -244,9 +239,7 @@ def test_only_patch(self):
self.assertEqual(next_version, "1.2.4")
def test_only_feature(self):
- (self.tmpdir / "mock_file.bzl").write_text(
- ":::{versionadded} VERSION_NEXT_FEATURE"
- )
+ (self.tmpdir / "mock_file.bzl").write_text(":::{versionadded} VERSION_NEXT_FEATURE")
self.mock_get_latest_version.return_value = "1.2.3"
next_version = releaser.determine_next_version()
@@ -254,12 +247,8 @@ def test_only_feature(self):
self.assertEqual(next_version, "1.3.0")
def test_both_markers(self):
- (self.tmpdir / "mock_file_patch.bzl").write_text(
- ":::{versionchanged} VERSION_NEXT_PATCH"
- )
- (self.tmpdir / "mock_file_feature.bzl").write_text(
- ":::{versionadded} VERSION_NEXT_FEATURE"
- )
+ (self.tmpdir / "mock_file_patch.bzl").write_text(":::{versionchanged} VERSION_NEXT_PATCH")
+ (self.tmpdir / "mock_file_feature.bzl").write_text(":::{versionadded} VERSION_NEXT_FEATURE")
self.mock_get_latest_version.return_value = "1.2.3"
next_version = releaser.determine_next_version()
diff --git a/tests/tools/wheelmaker_test.py b/tests/tools/wheelmaker_test.py
index 7c30981e83..ff79d35777 100644
--- a/tests/tools/wheelmaker_test.py
+++ b/tests/tools/wheelmaker_test.py
@@ -35,14 +35,10 @@ def test_quote_all_false_leaves_simple_filenames_unquoted(self) -> None:
def test_quote_all_quotes_filenames_with_commas(self) -> None:
"""Filenames with commas are always quoted, regardless of quote_all_filenames."""
whl = self._make_whl_file(quote_all=True)
- self.assertEqual(
- whl._quote_filename("foo,bar/baz.py"), '"foo,bar/baz.py"'
- )
+ self.assertEqual(whl._quote_filename("foo,bar/baz.py"), '"foo,bar/baz.py"')
whl = self._make_whl_file(quote_all=False)
- self.assertEqual(
- whl._quote_filename("foo,bar/baz.py"), '"foo,bar/baz.py"'
- )
+ self.assertEqual(whl._quote_filename("foo,bar/baz.py"), '"foo,bar/baz.py"')
@dataclass
@@ -145,9 +141,7 @@ def test_requirement(self):
self.assertEqual(result, "Requires-Dist: requests>=2.0")
def test_requirement_and_extra(self):
- result = wheelmaker.get_new_requirement_line(
- "requests>=2.0", "extra=='dev'"
- )
+ result = wheelmaker.get_new_requirement_line("requests>=2.0", "extra=='dev'")
self.assertEqual(result, "Requires-Dist: requests>=2.0; extra=='dev'")
def test_requirement_with_url(self):
@@ -160,12 +154,8 @@ def test_requirement_with_url(self):
)
def test_requirement_with_marker(self):
- result = wheelmaker.get_new_requirement_line(
- "requests>=2.0; python_version>='3.6'", ""
- )
- self.assertEqual(
- result, 'Requires-Dist: requests>=2.0; python_version >= "3.6"'
- )
+ result = wheelmaker.get_new_requirement_line("requests>=2.0; python_version>='3.6'", "")
+ self.assertEqual(result, 'Requires-Dist: requests>=2.0; python_version >= "3.6"')
def test_requirement_with_marker_and_extra(self):
result = wheelmaker.get_new_requirement_line(
diff --git a/tests/tools/zipapp/zip_main_maker_test.py b/tests/tools/zipapp/zip_main_maker_test.py
index afcaf294d1..dd8e8e8029 100644
--- a/tests/tools/zipapp/zip_main_maker_test.py
+++ b/tests/tools/zipapp/zip_main_maker_test.py
@@ -36,7 +36,7 @@ def test_creates_zip_main(self):
f.write(f"rf-file|0|file1.txt|{file1_path}\n")
f.write(f"rf-file|0|file2.txt|{file2_path}\n")
f.write(f"rf-symlink|1|symlink.txt|{symlink_path}\n")
- f.write(f"rf-empty|empty_file.txt\n")
+ f.write("rf-empty|empty_file.txt\n")
argv = [
"zip_main_maker.py",
@@ -58,7 +58,7 @@ def test_creates_zip_main(self):
line1 = f"rf-file|0|file1.txt|{file1_path}"
line2 = f"rf-file|0|file2.txt|{file2_path}"
line3 = f"rf-symlink|1|symlink.txt|{symlink_path}"
- line4 = f"rf-empty|empty_file.txt"
+ line4 = "rf-empty|empty_file.txt"
# Sort lines like the program does
lines = sorted([line1, line2, line3, line4])
diff --git a/tests/tools/zipapp/zipper_test.py b/tests/tools/zipapp/zipper_test.py
index 8b441c4456..8a10e693ad 100644
--- a/tests/tools/zipapp/zipper_test.py
+++ b/tests/tools/zipapp/zipper_test.py
@@ -2,7 +2,6 @@
import pathlib
import shutil
import tempfile
-import time
import unittest
import zipfile
@@ -36,9 +35,7 @@ def _create_zip(self, **kwargs):
defaults.update(kwargs)
zipper.create_zip(**defaults)
- def assertZipFileContent(
- self, zf, path, content=None, is_symlink=False, target=None
- ):
+ def assertZipFileContent(self, zf, path, content=None, is_symlink=False, target=None):
info = zf.getinfo(path)
if is_symlink:
self.assertTrue(
@@ -66,7 +63,7 @@ def test_create_zip_with_files_and_symlinks(self):
f"rf-file|0|foo/bar.txt|{file1_path}",
f"rf-symlink|1|link1|{symlink_path}", # Should read target 'target.txt'
f"rf-root-symlink|0|root_file|{file1_path}",
- f"rf-empty|empty_file",
+ "rf-empty|empty_file",
]
self.manifest_path.write_text("\n".join(manifest_content))
@@ -87,9 +84,7 @@ def test_create_zip_with_files_and_symlinks(self):
)
self.assertZipFileContent(zf, "file1.txt", content="content1")
- self.assertZipFileContent(
- zf, "runfiles/my_ws/foo/bar.txt", content="content1"
- )
+ self.assertZipFileContent(zf, "runfiles/my_ws/foo/bar.txt", content="content1")
self.assertZipFileContent(
zf, "runfiles/my_ws/link1", is_symlink=True, target="target.txt"
)
@@ -196,7 +191,7 @@ def test_runfiles_mapping_with_cross_repo_paths(self):
manifest_content = [
f"rf-file|0|../other_repo/foo.txt|{file1_path}",
- f"rf-empty|../other_repo/empty_file",
+ "rf-empty|../other_repo/empty_file",
]
self.manifest_path.write_text("\n".join(manifest_content))
@@ -211,9 +206,7 @@ def test_runfiles_mapping_with_cross_repo_paths(self):
"runfiles/other_repo/empty_file",
},
)
- self.assertZipFileContent(
- zf, "runfiles/other_repo/foo.txt", content="content1"
- )
+ self.assertZipFileContent(zf, "runfiles/other_repo/foo.txt", content="content1")
self.assertZipFileContent(zf, "runfiles/other_repo/empty_file", content="")
def test_runfiles_mapping_with_legacy_external_paths(self):
@@ -222,7 +215,7 @@ def test_runfiles_mapping_with_legacy_external_paths(self):
manifest_content = [
f"rf-file|0|external/other_repo/foo.txt|{file1_path}",
- f"rf-empty|external/other_repo/empty_file",
+ "rf-empty|external/other_repo/empty_file",
]
self.manifest_path.write_text("\n".join(manifest_content))
@@ -237,9 +230,7 @@ def test_runfiles_mapping_with_legacy_external_paths(self):
"runfiles/other_repo/empty_file",
},
)
- self.assertZipFileContent(
- zf, "runfiles/other_repo/foo.txt", content="content1"
- )
+ self.assertZipFileContent(zf, "runfiles/other_repo/foo.txt", content="content1")
self.assertZipFileContent(zf, "runfiles/other_repo/empty_file", content="")
def test_output_deterministic(self):
@@ -265,7 +256,7 @@ def test_output_deterministic(self):
f"rf-file|0|b_rf_file|{file2}", # -> runfiles/my_ws/b_rf_file
f"rf-root-symlink|0|a_root_link|{file3}", # -> runfiles/a_root_link
f"regular|0|a/regular|{file3}",
- f"rf-empty|d_rf_empty", # -> runfiles/my_ws/d_rf_empty
+ "rf-empty|d_rf_empty", # -> runfiles/my_ws/d_rf_empty
f"rf-symlink|0|c_rf_link|{file3}", # -> runfiles/my_ws/c_rf_link
]
@@ -323,14 +314,12 @@ def test_symlink_extraction(self):
link_path = extract_dir / "runfiles/my_ws/path/to/link"
self.assertTrue(link_path.is_symlink(), f"{link_path} should be a symlink")
- self.assertEqual(
- os.readlink(link_path), "../../target/path".replace("/", os.path.sep)
- )
+ self.assertEqual(os.readlink(link_path), "../../target/path".replace("/", os.path.sep))
self.assertEqual(link_path.read_text(), "target content")
link2_path = extract_dir / "runfiles/my_ws/same_dir_link"
self.assertTrue(link2_path.is_symlink(), f"{link2_path} should be a symlink")
- # Relative path from runfiles/my_ws/ to runfiles/my_ws/same_dir_target is just same_dir_target
+ # Relative path from runfiles/my_ws/ to runfiles/my_ws/same_dir_target is just same_dir_target # noqa: E501
self.assertEqual(os.readlink(link2_path), "same_dir_target")
self.assertEqual(link2_path.read_text(), "target content")
diff --git a/tests/uv/lock/lock_run_test.py b/tests/uv/lock/lock_run_test.py
index ef57f23d31..d4e564934d 100644
--- a/tests/uv/lock/lock_run_test.py
+++ b/tests/uv/lock/lock_run_test.py
@@ -1,5 +1,4 @@
import subprocess
-import sys
import tempfile
import unittest
from pathlib import Path
@@ -28,9 +27,7 @@ def test_requirements_updating_for_the_first_time(self):
workspace_dir = Path(dir)
want_path = workspace_dir / "tests" / "uv" / "lock" / "does_not_exist.txt"
- self.assertFalse(
- want_path.exists(), "The path should not exist after the test"
- )
+ self.assertFalse(want_path.exists(), "The path should not exist after the test")
output = subprocess.run(
copier_path,
capture_output=True,
@@ -57,14 +54,7 @@ def test_requirements_updating(self):
# When
with tempfile.TemporaryDirectory() as dir:
workspace_dir = Path(dir)
- want_path = (
- workspace_dir
- / "tests"
- / "uv"
- / "lock"
- / "testdata"
- / "requirements.txt"
- )
+ want_path = workspace_dir / "tests" / "uv" / "lock" / "testdata" / "requirements.txt"
want_path.parent.mkdir(parents=True)
want_path.write_text(
want_text + "\n\n"
@@ -98,9 +88,7 @@ def test_requirements_run_on_the_first_time(self):
# there already
want_path.parent.mkdir(parents=True)
- self.assertFalse(
- want_path.exists(), "The path should not exist after the test"
- )
+ self.assertFalse(want_path.exists(), "The path should not exist after the test")
output = subprocess.run(
copier_path,
capture_output=True,
@@ -128,14 +116,7 @@ def test_requirements_run(self):
# When
with tempfile.TemporaryDirectory() as dir:
workspace_dir = Path(dir)
- want_path = (
- workspace_dir
- / "tests"
- / "uv"
- / "lock"
- / "testdata"
- / "requirements.txt"
- )
+ want_path = workspace_dir / "tests" / "uv" / "lock" / "testdata" / "requirements.txt"
want_path.parent.mkdir(parents=True)
want_path.write_text(
diff --git a/tests/uv/toolchain/uv_help_test.py b/tests/uv/toolchain/uv_help_test.py
index be5e755d91..e0cebd5fc4 100755
--- a/tests/uv/toolchain/uv_help_test.py
+++ b/tests/uv/toolchain/uv_help_test.py
@@ -14,9 +14,7 @@ def test_uv_help(self):
data_rpath = os.environ["DATA"]
uv_help_path = rfiles.Rlocation(data_rpath)
- assert (
- uv_help_path is not None
- ), f"the rlocation path was not found: {data_rpath}"
+ assert uv_help_path is not None, f"the rlocation path was not found: {data_rpath}"
uv_help = Path(uv_help_path).read_text()
diff --git a/tests/venv_site_packages_libs/bin.py b/tests/venv_site_packages_libs/bin.py
index 368251e75b..0769a1444b 100644
--- a/tests/venv_site_packages_libs/bin.py
+++ b/tests/venv_site_packages_libs/bin.py
@@ -1,5 +1,4 @@
import importlib
-import os
import sys
import sysconfig
import unittest
@@ -26,7 +25,8 @@ def assert_venv_path_exists(self, rel_path):
path = self.venv / rel_path
self.assertTrue(
path.exists(),
- f"Expected {path} to exist. {path.parent.name} contents: {list(path.parent.iterdir()) if path.parent.exists() else 'N/A'}",
+ f"Expected {path} to exist. {path.parent.name} contents:" # noqa: E501
+ f" {list(path.parent.iterdir()) if path.parent.exists() else 'N/A'}",
)
def assert_imported_from_venv(self, module_name):
@@ -34,8 +34,7 @@ def assert_imported_from_venv(self, module_name):
self.assertEqual(module.__name__, module_name)
self.assertIsNotNone(
module.__file__,
- f"Expected module {module_name!r} to have"
- + f"__file__ set, but got None. {module=}",
+ f"Expected module {module_name!r} to have" + f"__file__ set, but got None. {module=}",
)
self.assertTrue(
module.__file__.startswith(str(self.venv)),
@@ -65,7 +64,7 @@ def test_imported_from_venv(self):
def test_data_is_included(self):
self.assert_imported_from_venv("simple")
- module = importlib.import_module("simple")
+ _ = importlib.import_module("simple")
# Ensure that packages from simple v1 are not present
files = [p.name for p in self.site_packages.glob("*")]
self.assertIn("simple_v1_extras", files)
@@ -80,7 +79,7 @@ def test_override_pkg(self):
def test_dirs_from_replaced_package_are_not_present(self):
self.assert_imported_from_venv("simple")
- module = importlib.import_module("simple")
+ importlib.import_module("simple")
dist_info_dirs = [p.name for p in self.site_packages.glob("simple*.dist-info")]
self.assertEqual(
["simple-1.0.0.dist-info"],
@@ -93,24 +92,20 @@ def test_dirs_from_replaced_package_are_not_present(self):
def test_data_from_another_pkg_is_included_via_copy_file(self):
self.assert_imported_from_venv("simple")
- module = importlib.import_module("simple")
+ importlib.import_module("simple")
# Ensure that packages from simple v1 are not present
d = self.site_packages / "external_data"
files = [p.name for p in d.glob("*")]
self.assertIn("another_module_data.txt", files)
def test_whl_with_data1_included(self):
- module = self.assert_imported_from_venv("whl_with_data1")
+ self.assert_imported_from_venv("whl_with_data1")
site_packages_rel = self.site_packages.relative_to(self.venv)
# purelib
self.assert_venv_path_exists(site_packages_rel / "whl_with_data1/data_file.txt")
# platlib
- self.assert_venv_path_exists(
- site_packages_rel / "whl_with_data1/platlib_file.txt"
- )
-
- venv_root = self.venv
+ self.assert_venv_path_exists(site_packages_rel / "whl_with_data1/platlib_file.txt")
# data
self.assert_venv_path_exists("whl_with_data1/data_data_file.txt")
@@ -119,12 +114,10 @@ def test_whl_with_data1_included(self):
self.assert_venv_path_exists(self.bin_dir_name / "whl_script.sh")
# headers
- self.assert_venv_path_exists(
- self.include_dir_name / "whl_with_data1/header_file.h"
- )
+ self.assert_venv_path_exists(self.include_dir_name / "whl_with_data1/header_file.h")
def test_whl_with_data2_included(self):
- module = self.assert_imported_from_venv("whl_with_data2")
+ self.assert_imported_from_venv("whl_with_data2")
site_packages_rel = self.site_packages.relative_to(self.venv)
self.assert_venv_path_exists(site_packages_rel / "whl_with_data2/data_file.txt")
@@ -135,9 +128,7 @@ def test_whl_with_data2_included(self):
# and then linked as `venv/whl_with_data1/data_data_file.txt`.
self.assert_venv_path_exists("whl_with_data2/data_data_file.txt")
- self.assert_venv_path_exists(
- self.include_dir_name / "whl_with_data2/header_file.h"
- )
+ self.assert_venv_path_exists(self.include_dir_name / "whl_with_data2/header_file.h")
def test_whl_with_data_overlap(self):
self.assert_venv_path_exists("overlap/both.txt")
diff --git a/tests/venv_site_packages_libs/importlib_metadata_test.py b/tests/venv_site_packages_libs/importlib_metadata_test.py
index 178ff14c50..1ab8f833f2 100644
--- a/tests/venv_site_packages_libs/importlib_metadata_test.py
+++ b/tests/venv_site_packages_libs/importlib_metadata_test.py
@@ -3,18 +3,15 @@
class ImportlibMetadataTest(unittest.TestCase):
-
def test_importlib_metadata_files(self):
files = importlib.metadata.files("whl-with-data1")
self.assertIsNotNone(files, "importlib.metadata.files returned None")
- self.assertGreater(
- len(files), 0, "importlib.metadata.files returned empty list"
- )
+ self.assertGreater(len(files), 0, "importlib.metadata.files returned empty list")
# Verify it contains some expected files.
# The RECORD file lists paths relative to the installation root (site-packages).
# whl_with_data1-1.0.data/purelib/data_overlap.py should be installed as data_overlap.py
- # whl_with_data1-1.0.data/platlib/whl_with_data1/platlib_file.txt should be whl_with_data1/platlib_file.txt
+ # whl_with_data1-1.0.data/platlib/whl_with_data1/platlib_file.txt should be whl_with_data1/platlib_file.txt # noqa: E501
file_names = [f.name for f in files]
self.assertIn("data_overlap.py", file_names)
diff --git a/tests/venv_site_packages_libs/shared_lib_loading_test.py b/tests/venv_site_packages_libs/shared_lib_loading_test.py
index a3f7bfcd5a..9988f34565 100644
--- a/tests/venv_site_packages_libs/shared_lib_loading_test.py
+++ b/tests/venv_site_packages_libs/shared_lib_loading_test.py
@@ -136,9 +136,7 @@ def _get_elf_info(self, path):
dynsym = elf.get_section_by_name(".dynsym")
if dynsym:
info["undefined_symbols"] = [
- s.name
- for s in dynsym.iter_symbols()
- if s.entry["st_shndx"] == "SHN_UNDEF"
+ s.name for s in dynsym.iter_symbols() if s.entry["st_shndx"] == "SHN_UNDEF"
]
return info
diff --git a/tests/venv_site_packages_libs/whl_scripts_runnable_test.py b/tests/venv_site_packages_libs/whl_scripts_runnable_test.py
index b62b5a5fce..e3958dfe95 100644
--- a/tests/venv_site_packages_libs/whl_scripts_runnable_test.py
+++ b/tests/venv_site_packages_libs/whl_scripts_runnable_test.py
@@ -76,8 +76,7 @@ def test_pythonw_script(self):
# invokes the interpreter.
self.assertIn("pythonw.exe", first_line)
self.assertTrue(
- first_line.startswith("@setlocal")
- or first_line.startswith("@echo off"),
+ first_line.startswith("@setlocal") or first_line.startswith("@echo off"),
f"Expected Windows batch wrapper, got {first_line}",
)
else:
@@ -92,7 +91,7 @@ def test_pythonw_script(self):
try:
os.close(temp_fd)
out_path = Path(temp_str)
- result = subprocess.run(
+ _ = subprocess.run(
[str(script_path), str(out_path)],
capture_output=True,
text=True,
diff --git a/tests/whl_filegroup/extract_wheel_files_test.py b/tests/whl_filegroup/extract_wheel_files_test.py
index 125d7f312c..03948be2d4 100644
--- a/tests/whl_filegroup/extract_wheel_files_test.py
+++ b/tests/whl_filegroup/extract_wheel_files_test.py
@@ -26,7 +26,7 @@ def test_get_wheel_record(self) -> None:
self.assertEqual(list(record), list(expected))
def test_get_files(self) -> None:
- pattern = "(examples/wheel/lib/.*\.txt$|.*main)"
+ pattern = r"(examples/wheel/lib/.*\.txt$|.*main)"
record = extract_wheel_files.get_record(_WHEEL)
files = extract_wheel_files.get_files(record, pattern)
expected = [
@@ -46,9 +46,7 @@ def test_extract(self) -> None:
outdir = Path(tmpdir)
extract_wheel_files.extract_files(_WHEEL, files, outdir)
extracted_files = {
- f.relative_to(outdir).as_posix()
- for f in outdir.glob("**/*")
- if f.is_file()
+ f.relative_to(outdir).as_posix() for f in outdir.glob("**/*") if f.is_file()
}
self.assertEqual(extracted_files, files)
diff --git a/tests/whl_with_build_files/verify_files_test.py b/tests/whl_with_build_files/verify_files_test.py
index cfbbaa3aff..547d33bb0c 100644
--- a/tests/whl_with_build_files/verify_files_test.py
+++ b/tests/whl_with_build_files/verify_files_test.py
@@ -2,15 +2,11 @@
class VerifyFilestest(unittest.TestCase):
-
def test_wheel_with_build_files_importable(self):
# If the BUILD files are present, then these imports should fail
# because globs won't pass package boundaries, and the necessary
# py files end up missing in runfiles.
- import somepkg
- import somepkg.a
- import somepkg.subpkg
- import somepkg.subpkg.b
+ pass
if __name__ == "__main__":
diff --git a/tools/precompiler/precompiler.py b/tools/precompiler/precompiler.py
index 0afc2be530..b8cbb58fab 100644
--- a/tools/precompiler/precompiler.py
+++ b/tools/precompiler/precompiler.py
@@ -20,6 +20,7 @@
import argparse
import py_compile
import sys
+import typing
def _create_parser() -> "argparse.Namespace":
@@ -40,18 +41,12 @@ def _create_parser() -> "argparse.Namespace":
def _compile(options: "argparse.Namespace") -> None:
try:
- invalidation_mode = py_compile.PycInvalidationMode[
- options.invalidation_mode.upper()
- ]
+ invalidation_mode = py_compile.PycInvalidationMode[options.invalidation_mode.upper()]
except KeyError as e:
- raise ValueError(
- f"Unknown PycInvalidationMode: {options.invalidation_mode}"
- ) from e
+ raise ValueError(f"Unknown PycInvalidationMode: {options.invalidation_mode}") from e
if not (len(options.srcs) == len(options.src_names) == len(options.pycs)):
- raise AssertionError(
- "Mismatched number of --src, --src_name, and/or --pyc args"
- )
+ raise AssertionError("Mismatched number of --src, --src_name, and/or --pyc args")
for src, src_name, pyc in zip(options.srcs, options.src_names, options.pycs):
py_compile.compile(
@@ -98,10 +93,7 @@ def run(self) -> None:
self._send_response(response)
except Exception:
_logger.exception("Unhandled error: request=%s", request)
- output = (
- f"Unhandled error:\nRequest: {request}\n"
- + traceback.format_exc()
- )
+ output = f"Unhandled error:\nRequest: {request}\n" + traceback.format_exc()
request_id = 0 if not request else request.get("requestId", 0)
self._send_response(
{
@@ -130,9 +122,7 @@ def _process_request(self, request: "JsonWorkRequest") -> "JsonWorkResponse | No
}
return response
- def _options_from_request(
- self, request: "JsonWorkResponse"
- ) -> "argparse.Namespace":
+ def _options_from_request(self, request: "JsonWorkResponse") -> "argparse.Namespace":
options = self._parser.parse_args(request["arguments"])
if request.get("sandboxDir"):
prefix = request["sandboxDir"]
@@ -180,9 +170,7 @@ async def run(self) -> None:
_logger.info("pending requests: %s", len(self._request_id_to_task))
request = await self._get_next_request()
request_id = request.get("requestId", 0)
- task = asyncio.create_task(
- self._process_request(request), name=f"request_{request_id}"
- )
+ task = asyncio.create_task(self._process_request(request), name=f"request_{request_id}")
self._request_id_to_task[request_id] = task
self._task_to_request_id[task] = request_id
task.add_done_callback(self._handle_task_done)
@@ -219,8 +207,7 @@ async def _process_request(self, request: "JsonWorkRequest") -> None:
self._send_response(
{
"exitCode": 3,
- "output": f"Unhandled error:\nRequest: {request}\n"
- + traceback.format_exc(),
+ "output": f"Unhandled error:\nRequest: {request}\n" + traceback.format_exc(),
"requestId": 0 if not request else request.get("requestId", 0),
}
)
diff --git a/tools/private/release/release.py b/tools/private/release/release.py
index 6fce0ff3b0..e1e9c7a248 100644
--- a/tools/private/release/release.py
+++ b/tools/private/release/release.py
@@ -30,8 +30,7 @@ def _iter_version_placeholder_files():
d
for d in dirs
if not any(
- fnmatch.fnmatch(os.path.join(root, d), pattern)
- for pattern in _EXCLUDE_PATTERNS
+ fnmatch.fnmatch(os.path.join(root, d), pattern) for pattern in _EXCLUDE_PATTERNS
)
]
@@ -156,9 +155,7 @@ def _semver_type(value):
def create_parser():
"""Creates the argument parser."""
- parser = argparse.ArgumentParser(
- description="Automate release steps for rules_python."
- )
+ parser = argparse.ArgumentParser(description="Automate release steps for rules_python.")
parser.add_argument(
"version",
nargs="?",
diff --git a/tools/private/update_deps/update_coverage_deps.py b/tools/private/update_deps/update_coverage_deps.py
index 81df6fc161..4528885ccd 100755
--- a/tools/private/update_deps/update_coverage_deps.py
+++ b/tools/private/update_deps/update_coverage_deps.py
@@ -20,11 +20,8 @@
# NOTE @aignas 2023-01-09: We should only depend on core Python 3 packages.
import argparse
-import difflib
import json
import os
-import pathlib
-import sys
import textwrap
from collections import defaultdict
from dataclasses import dataclass
@@ -183,7 +180,7 @@ def main():
if u["python_version"] not in args.py:
continue
- if f'_{u["python_version"]}m_' in u["filename"]:
+ if f"_{u['python_version']}m_" in u["filename"]:
continue
platforms = _get_platforms(
@@ -200,8 +197,14 @@ def main():
update_file(
path=args.update_file,
snippet=f"_coverage_deps = {repr(Deps(urls))}\n",
- start_marker="# START: maintained by 'bazel run //tools/private/update_deps:update_coverage_deps '",
- end_marker="# END: maintained by 'bazel run //tools/private/update_deps:update_coverage_deps '",
+ start_marker=(
+ "# START: maintained by 'bazel run"
+ " //tools/private/update_deps:update_coverage_deps '"
+ ),
+ end_marker=(
+ "# END: maintained by 'bazel run"
+ " //tools/private/update_deps:update_coverage_deps '"
+ ),
dry_run=args.dry_run,
)
diff --git a/tools/private/update_deps/update_file.py b/tools/private/update_deps/update_file.py
index ab3e8a817e..19a0719d28 100644
--- a/tools/private/update_deps/update_file.py
+++ b/tools/private/update_deps/update_file.py
@@ -17,10 +17,8 @@
This is reused in other files updating coverage deps and pip deps.
"""
-import argparse
import difflib
import pathlib
-import sys
def _writelines(path: pathlib.Path, out: str):
@@ -51,9 +49,9 @@ def replace_snippet(
path: pathlib.Path, the path to the file to be modified.
snippet: str, the snippet of code to insert between the markers.
start_marker: str, the text that marks the start of the region to be replaced.
- end_markr: str, the text that marks the end of the region to be replaced.
- dry_run: bool, if set to True, then the file will not be written and instead we are going to print a diff to
- stdout.
+ end_marker: str, the text that marks the end of the region to be replaced.
+ dry_run: bool, if set to True, then the file will not be written
+ and instead we are going to print a diff to stdout.
"""
lines = []
skip = False
@@ -92,9 +90,9 @@ def update_file(
path: pathlib.Path, the path to the file to be modified.
snippet: str, the snippet of code to insert between the markers.
start_marker: str, the text that marks the start of the region to be replaced.
- end_markr: str, the text that marks the end of the region to be replaced.
- dry_run: bool, if set to True, then the file will not be written and instead we are going to print a diff to
- stdout.
+ end_marker: str, the text that marks the end of the region to be replaced.
+ dry_run: bool, if set to True, then the file will not be written
+ and instead we are going to print a diff to stdout.
"""
current = path.read_text()
out = replace_snippet(current, snippet, start_marker, end_marker)
@@ -103,9 +101,7 @@ def update_file(
_writelines(path, out)
return
- relative = path.relative_to(
- pathlib.Path(__file__).resolve().parent.parent.parent.parent
- )
+ relative = path.relative_to(pathlib.Path(__file__).resolve().parent.parent.parent.parent)
name = f"{relative}"
diff = unified_diff(name, current, out)
if diff:
diff --git a/tools/private/update_deps/update_file_test.py b/tools/private/update_deps/update_file_test.py
index 01c6ec74b0..22c60c011e 100644
--- a/tools/private/update_deps/update_file_test.py
+++ b/tools/private/update_deps/update_file_test.py
@@ -30,7 +30,6 @@ def test_replace_simple(self):
After the snippet
"""
- snippet = "Replaced"
got = replace_snippet(
current=current,
snippet="Replaced",
diff --git a/tools/private/update_deps/update_pip_deps.py b/tools/private/update_deps/update_pip_deps.py
index 406697bc4d..9cbcfbc0a9 100755
--- a/tools/private/update_deps/update_pip_deps.py
+++ b/tools/private/update_deps/update_pip_deps.py
@@ -129,7 +129,7 @@ def main():
"--requirements-txt",
type=path_from_runfiles,
default=os.environ.get("REQUIREMENTS_TXT"),
- help="The requirements.txt path for the pypi tools, defaults to the value taken from REQUIREMENTS_TXT",
+ help="The requirements.txt path for the pypi tools, defaults to the value taken from REQUIREMENTS_TXT", # noqa: E501
)
parser.add_argument(
"--deps-bzl",
diff --git a/tools/private/zipapp/zipper.py b/tools/private/zipapp/zipper.py
index 870861bc07..7697dcfd7d 100644
--- a/tools/private/zipapp/zipper.py
+++ b/tools/private/zipapp/zipper.py
@@ -1,7 +1,6 @@
import argparse
import os
import shutil
-import stat
import sys
import zipfile
from os.path import dirname
@@ -15,9 +14,7 @@ def unix_join(*parts):
return "/".join(parts)
-def _get_zip_runfiles_path(
- path, workspace_name, legacy_external_runfiles, runfiles_dir
-):
+def _get_zip_runfiles_path(path, workspace_name, legacy_external_runfiles, runfiles_dir):
if legacy_external_runfiles and path.startswith("external/"):
path = path[len("external/") :]
elif path.startswith("../"):
@@ -75,9 +72,7 @@ def _parse_entry(
return type_, is_symlink_str, zip_path, content_path
-def read_manifest(
- manifest_path, workspace_name, legacy_external_runfiles, runfiles_dir
-):
+def read_manifest(manifest_path, workspace_name, legacy_external_runfiles, runfiles_dir):
with open(manifest_path, "r") as f:
entries = []
for line_idx, line in enumerate(f):
@@ -192,9 +187,7 @@ def create_zip(
compress_type = zipfile.ZIP_STORED if compress_level == 0 else zipfile.ZIP_DEFLATED
zf_level = compress_level if compress_level != 0 else None
- entries = read_manifest(
- manifest_path, workspace_name, legacy_external_runfiles, runfiles_dir
- )
+ entries = read_manifest(manifest_path, workspace_name, legacy_external_runfiles, runfiles_dir)
seen = set()
with zipfile.ZipFile(
@@ -260,9 +253,7 @@ def main():
choices=["0", "1"],
help="Whether to use legacy external runfiles behavior",
)
- parser.add_argument(
- "--runfiles-dir", default="runfiles", help="Name of the runfiles directory"
- )
+ parser.add_argument("--runfiles-dir", default="runfiles", help="Name of the runfiles directory")
parser.add_argument(
"--target-platform-pathsep", help="The path separator for the target platform"
)
diff --git a/tools/wheelmaker.py b/tools/wheelmaker.py
index ada525e9bf..d7f050af82 100644
--- a/tools/wheelmaker.py
+++ b/tools/wheelmaker.py
@@ -24,8 +24,8 @@
import stat
import sys
import zipfile
-from collections.abc import Iterable
from pathlib import Path
+from typing import List, Sequence
_ZIP_EPOCH = (1980, 1, 1, 0, 0, 0)
@@ -94,9 +94,7 @@ def normalize_pep440(version):
substituted = re.sub(r"\{\w+\}", "0", version)
delimiter = "." if "+" in substituted else "+"
try:
- return str(
- packaging.version.Version(f"{substituted}{delimiter}{sanitized}")
- )
+ return str(packaging.version.Version(f"{substituted}{delimiter}{sanitized}"))
except packaging.version.InvalidVersion:
return str(packaging.version.Version(f"0+{sanitized}"))
@@ -120,9 +118,7 @@ def arcname_from(
# Always use unix path separators.
normalized_arcname = name.replace(os.path.sep, "/")
# Don't manipulate names filenames in the .distinfo or .data directories.
- if distribution_prefix and normalized_arcname.startswith(
- distribution_prefix
- ):
+ if distribution_prefix and normalized_arcname.startswith(distribution_prefix):
return normalized_arcname
for prefix in strip_path_prefixes:
if normalized_arcname.startswith(prefix):
@@ -205,9 +201,7 @@ def add_string(self, filename, contents):
self.writestr(zinfo, contents)
hash = hashlib.sha256()
hash.update(contents)
- self._add_to_record(
- filename, self._serialize_digest(hash), len(contents)
- )
+ self._add_to_record(filename, self._serialize_digest(hash), len(contents))
def _serialize_digest(self, hash) -> str:
# https://www.python.org/dev/peps/pep-0376/#record
@@ -244,9 +238,7 @@ def _quote_filename(self, filename: str) -> str:
filename = filename.lstrip("/")
# Some RECORDs like torch have *all* filenames quoted and we must minimize diff.
# Otherwise, we quote only when necessary (e.g. for filenames with commas).
- quoting = (
- csv.QUOTE_ALL if self.quote_all_filenames else csv.QUOTE_MINIMAL
- )
+ quoting = csv.QUOTE_ALL if self.quote_all_filenames else csv.QUOTE_MINIMAL
with io.StringIO() as buf:
csv.writer(buf, quoting=quoting).writerow([filename])
return buf.getvalue().strip()
@@ -255,10 +247,7 @@ def add_recordfile(self) -> str:
"""Write RECORD file to the distribution."""
record_path = self.distinfo_path("RECORD")
entries = self._record + [(record_path, "", "")]
- entries = [
- (self._quote_filename(fname), digest, size)
- for fname, digest, size in entries
- ]
+ entries = [(self._quote_filename(fname), digest, size) for fname, digest, size in entries]
contents = "\n".join(",".join(entry) for entry in entries) + "\n"
self.add_string(record_path, contents)
return contents
@@ -288,13 +277,9 @@ def __init__(
self._strip_path_prefixes = strip_path_prefixes
self._add_path_prefix = add_path_prefix
self._compress = compress
- self._wheelname_fragment_distribution_name = (
- escape_filename_distribution_name(self._name)
- )
+ self._wheelname_fragment_distribution_name = escape_filename_distribution_name(self._name)
- self._distribution_prefix = (
- self._wheelname_fragment_distribution_name + "-" + self._version
- )
+ self._distribution_prefix = self._wheelname_fragment_distribution_name + "-" + self._version
self._whlfile = None
@@ -305,9 +290,7 @@ def __enter__(self):
distribution_prefix=self._distribution_prefix,
strip_path_prefixes=self._strip_path_prefixes,
add_path_prefix=self._add_path_prefix,
- compression=(
- zipfile.ZIP_DEFLATED if self._compress else zipfile.ZIP_STORED
- ),
+ compression=(zipfile.ZIP_DEFLATED if self._compress else zipfile.ZIP_STORED),
)
return self
@@ -350,9 +333,7 @@ def add_wheelfile(self):
Wheel-Version: 1.0
Generator: bazel-wheelmaker 1.0
Root-Is-Purelib: {}
-""".format(
- "true" if self._platform == "any" else "false"
- )
+""".format("true" if self._platform == "any" else "false")
for tag in self.disttags():
wheel_contents += "Tag: %s\n" % tag
self._whlfile.add_string(self.distinfo_path("WHEEL"), wheel_contents)
@@ -361,9 +342,7 @@ def add_metadata(self, metadata, name, description):
"""Write METADATA file to the distribution."""
# https://www.python.org/dev/peps/pep-0566/
# https://packaging.python.org/specifications/core-metadata/
- metadata = re.sub(
- "^Name: .*$", "Name: %s" % name, metadata, flags=re.MULTILINE
- )
+ metadata = re.sub("^Name: .*$", "Name: %s" % name, metadata, flags=re.MULTILINE)
metadata += "Version: %s\n\n" % self._version
# setuptools seems to insert UNKNOWN as description when none is
# provided.
@@ -442,12 +421,8 @@ def resolve_argument_stamp(
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Builds a python wheel")
- metadata_group = parser.add_argument_group(
- "Wheel name, version and platform"
- )
- metadata_group.add_argument(
- "--name", required=True, type=str, help="Name of the distribution"
- )
+ metadata_group = parser.add_argument_group("Wheel name, version and platform")
+ metadata_group.add_argument("--name", required=True, type=str, help="Name of the distribution")
metadata_group.add_argument(
"--version", required=True, type=str, help="Version of the distribution"
)
@@ -464,14 +439,10 @@ def parse_args() -> argparse.Namespace:
help="Python version, e.g. 'py2' or 'py3'",
)
metadata_group.add_argument("--abi", type=str, default="none")
- metadata_group.add_argument(
- "--platform", type=str, default="any", help="Target platform. "
- )
+ metadata_group.add_argument("--platform", type=str, default="any", help="Target platform. ")
output_group = parser.add_argument_group("Output file location")
- output_group.add_argument(
- "--out", type=str, default=None, help="Override name of ouptut file"
- )
+    output_group.add_argument("--out", type=str, default=None, help="Override name of output file")
output_group.add_argument(
"--no_compress",
action="store_true",
@@ -503,12 +474,9 @@ def parse_args() -> argparse.Namespace:
wheel_group.add_argument(
"--metadata_file",
type=Path,
- help="Contents of the METADATA file (before appending contents of "
- "--description_file)",
- )
- wheel_group.add_argument(
- "--description_file", help="Path to the file with package description"
+ help="Contents of the METADATA file (before appending contents of --description_file)",
)
+ wheel_group.add_argument("--description_file", help="Path to the file with package description")
wheel_group.add_argument(
"--description_content_type",
help="Content type of the package description",
@@ -529,8 +497,7 @@ def parse_args() -> argparse.Namespace:
contents_group.add_argument(
"--input_file_list",
action="append",
- help="A file that has all the input files defined as a list to avoid "
- "the long command",
+ help="A file that has all the input files defined as a list to avoid the long command",
)
contents_group.add_argument(
"--extra_distinfo_file",
@@ -622,9 +589,7 @@ def main() -> None:
description = None
if arguments.description_file:
- with open(
- arguments.description_file, "rt", encoding="utf-8"
- ) as description_file:
+ with open(arguments.description_file, "rt", encoding="utf-8") as description_file:
description = description_file.read()
metadata = arguments.metadata_file.read_text(encoding="utf-8")
@@ -632,7 +597,6 @@ def main() -> None:
# This is not imported at the top of the file due to the reliance
# on this file in the `whl_library` repository rule which does not
# provide `packaging` but does import symbols defined here.
- from packaging.requirements import Requirement
# Search for any `Requires-Dist` entries that refer to other files and
# expand them.
@@ -643,16 +607,12 @@ def main() -> None:
if not meta_line[len("Requires-Dist: ") :].startswith("@"):
# This is a normal requirement.
- package, _, extra = meta_line[
- len("Requires-Dist: ") :
- ].rpartition(";")
+ package, _, extra = meta_line[len("Requires-Dist: ") :].rpartition(";")
if not package:
# This is when the package requirement does not have markers.
continue
extra = extra.strip()
- metadata = metadata.replace(
- meta_line, get_new_requirement_line(package, extra)
- )
+ metadata = metadata.replace(meta_line, get_new_requirement_line(package, extra))
continue
# This is a requirement that refers to a file.
@@ -660,9 +620,7 @@ def main() -> None:
extra = extra.strip()
reqs = []
- for reqs_line in (
- Path(file).read_text(encoding="utf-8").splitlines()
- ):
+ for reqs_line in Path(file).read_text(encoding="utf-8").splitlines():
reqs_text = reqs_line.strip()
if not reqs_text or reqs_text.startswith(("#", "-")):
continue
@@ -677,9 +635,7 @@ def main() -> None:
# File is empty
# So replace the meta_line entirely, including removing newline chars
else:
- metadata = re.sub(
- re.escape(meta_line) + r"(?:\r?\n)?", "", metadata, count=1
- )
+ metadata = re.sub(re.escape(meta_line) + r"(?:\r?\n)?", "", metadata, count=1)
maker.add_metadata(
metadata=metadata,